| query (string, 7-9.55k chars) | document (string, 10-363k chars) | metadata (dict) | negatives (list, 0-101 items) | negative_scores (list, 0-101 items) | document_score (string, 3-10 chars) | document_rank (string, 102 classes) |
|---|---|---|---|---|---|---|
SQL for dropping a trigger from the database.
|
def drop_trigger_sql(table, name, opts=OPTS)
  "DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}"
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def drop_trigger(table_name, trigger_name, options = {})\n SchemaMonkey::Middleware::Migration::CreateTrigger.start(connection: self, table_name: table_name, trigger_name: trigger_name, options: options) do |env|\n table_name = env.table_name\n trigger_name = env.trigger_name\n options = env.options\n\n sql = \"DROP TRIGGER\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(trigger_name)} ON #{quote_table_name(table_name)}\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def drop_trigger(table, name, opts=OPTS)\n self << drop_trigger_sql(table, name, opts)\n end",
"def drop_replication_trigger(trigger_name, table_name)\n %w(insert update delete).each do |action|\n execute \"DROP TRIGGER `#{trigger_name}_#{action}`;\"\n end\n execute \"DROP PROCEDURE `#{trigger_name}`;\"\n end",
"def remove_trigger(table_name, proc_name, options = {})\n\n end",
"def drop_trigger(database, table)\n trigger_name = \"#{options(table)[:rep_prefix]}_#{table}\"\n session.send(database).drop_replication_trigger trigger_name, table\n end",
"def remove_trigger(table, name, options={})\n options[:name] = name\n execute \"DROP TRIGGER #{trigger_name(table, [], options).to_sql_name} ON #{table} #{cascade_or_restrict(options[:deep])};\"\n end",
"def remove_trigger(table, name, options={})\n options[:name] = name\n execute \"DROP TRIGGER #{trigger_name(table, [], options).to_sql_name} ON #{table} #{cascade_or_restrict(options[:deep])};\"\n end",
"def drop_event_trigger(name, options = {})\n sql = 'DROP EVENT TRIGGER '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << quote_generic(name)\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def to_drop_database_sql(db)\n db.send(:drop_database_sql, self.name, {})\n end",
"def to_drop_constraint_sql(db)\n if db.supports_external_drop_constraints?\n gen = ::Sequel::Schema::AlterTableGenerator.new(db)\n gen.drop_constraint(self.name)\n db.send(:alter_table_sql_list, relvar.namespace_qualified_name(db), gen.operations)[0]\n else\n \"\"\n end\n end",
"def down\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS add_animal;\nSQL\n\nexecute <<-SQL\n\tDROP TRIGGER IF EXISTS update_animal;\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS all_animals\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_species\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_name\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_tank\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_habitat\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_by_birthday\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS create_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS update_animal\nSQL\n\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS delete_animal\nSQL\nexecute <<-SQL\n DROP PROCEDURE IF EXISTS get_animal\nSQL\nexecute <<-SQL\n DROP FUNCTION IF EXISTS get_animal_count\nSQL\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def delete_queries\n [\n \"DROP TRIGGER tr_#{suffix} ON #{src_table};\",\n \"DROP FUNCTION fn_#{suffix}();\",\n \"DROP TRIGGER tr_#{suffix}_cleaner ON #{surveys_table};\",\n \"DROP FUNCTION fn_#{suffix}_cleaner();\"\n ]\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_sequence_sql(name)\n \"DROP SEQUENCE #{name}\"\n end",
"def drop_table_statement(repository, model)\n \"DROP TABLE #{quote_table_name(model.storage_name(repository.name))}\"\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_sql\n raise NotImplementedError, \"DatabaseSymbol should not be instanciated\"\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE #{quote_table_name(name)}\"\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def destroy!\n drop_ddl = tables.map(&:name).map do |t|\n \"drop table if exists #{t};\\n\"\n end.join\n ActiveRecord::Base.connection.execute(drop_ddl)\n end",
"def drop_sequence_statement(repository, property)\n \"DROP SEQUENCE IF EXISTS #{quote_column_name(sequence_name(repository, property))}\"\n end",
"def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end",
"def disable_trigger(trigger = 'ALL')\n connection.disable_trigger(table_name, trigger)\n end",
"def drop_database(name)\n single_db_command(name, :dropDatabase => 1)\n end",
"def remove_trigger(id)\n\t transmit(:remove_trigger, id)\n\t triggers.delete(id)\n\tend",
"def to_sql_drop_namespace(db)\n if db.supports_schemas?\n db.send(:drop_schema_sql, self.name, {})\n else\n \"\"\n end\n end",
"def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) # :nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_proc(name, columns=[], cascade=false)\n execute \"DROP FUNCTION #{name.to_sql_name}(#{columns.collect {|column| column}.join(\", \")}) #{cascade_or_restrict(cascade)};\"\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{name}\" \n end",
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def drop_table\n db.drop_table(table_name)\n end",
"def down\n \tdrop_table :solution_submissions\n\n # \texecute <<-SQL\n # \t\tDROP TYPE s_status;\n # \tSQL\n\n # \texecute <<-SQL\n # \t\tDROP TYPE lang;\n # \tSQL\n\n end",
"def drop_table(klass)\n # Remove leftover data from some join tabkes.\n klass.relations.each do |rel|\n if rel.class.to_s == \"Og::JoinsMany\" and rel.join_table\n target_class = rel.target_class\n exec \"DELETE FROM #{rel.join_table}\"\n end\n end\n exec \"DROP TABLE #{klass.table}\"\n end",
"def delete_trigger(trigger_id)\n http_delete \"/triggers/#{trigger_id}\"\n end",
"def destroy_db(options)\n info \"Dropped database '#{options[:name]}'\"\n end",
"def down\n execute <<-SQL\n DROP TABLE event_registrations;\n SQL\n\n execute <<-SQL\n DROP TABLE members;\n SQL\n\n execute <<-SQL\n DROP TABLE events;\n SQL\n\n execute <<-SQL\n DROP TABLE treatment_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE feeding_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE employees;\n SQL\n\n execute <<-SQL\n DROP TABLE animals;\n SQL\n\n execute <<-SQL\n DROP TABLE tanks;\n SQL\n\n execute <<-SQL\n DROP TABLE habitats;\n SQL\n end",
"def to_maql_drop\n maql = \"\"\n [ attributes, facts ].each do |obj|\n maql += obj.to_maql_drop\n end\n maql += \"DROP {#{self.identifier}};\\n\"\n end",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_tables!\n migrate(:down)\n end",
"def delete_database(connection_string, db_name)\n drop_sql = <<-SQL\n DROP DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{drop_sql}\\\"\"\nend",
"def drop_change_log(database)\n session.send(database).drop_table \"#{options[:rep_prefix]}_pending_changes\"\n end",
"def drop_database(db_name)\n check_return_code(PureHailDB.ib_database_drop(db_name))\n end",
"def drop_versioned_table\n self.connection.drop_table versioned_table_name\n end",
"def db_remove\n \"DELETE\" + from_table_where + sql_match_conditions\n end",
"def down\n \tdrop_table :problems\n\n \t#execute <<-SQL\n \t#\tDROP TYPE difficulty;\n \t#SQL\n end",
"def drop_event_log\n session.left.drop_table \"#{options[:rep_prefix]}_logged_events\"\n end",
"def drop_table\n self.connection.drop_table table_name\n end",
"def drop(db)\n\tif $table_exists\n\t\tdb.execute(\"DROP TABLE items;\")\n\t\t$table_exists = false\n\t\tputs \"\\nTable successfully deleted.\"\n\telse\n\t\tputs \"\\nTable successfully deleted.\"\n\tend\nend",
"def drop_sequence(name)\n execute drop_sequence_sql(name)\n end",
"def drop_view(name)\n execute(\"DROP VIEW #{name}\")\n end",
"def delete_buttons\n @db.execute(\"DROP TABLE Buttons\")\n end",
"def drop_schema(schema)\n execute \"DROP SCHEMA #{schema} RESTRICT\", 'Drop Schema'\n end",
"def drop_function(function_name, params, options = {})\n SchemaMonkey::Middleware::Migration::CreateFunction.start(connection: self, function_name: function_name, params: params, options: options) do |env|\n function_name = env.function_name\n params = env.params\n options = env.options\n function_type = (options[:function_type] || :function).to_s.upcase\n\n sql = \"DROP #{function_type}\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{function_name}(#{params})\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def drop_schema(name, opts=OPTS)\n self << drop_schema_sql(name, opts)\n end",
"def drop_schema(schema_name, options = {})\n execute \"DROP SCHEMA#{' IF EXISTS' if options[:if_exists]} #{quote_schema_name(schema_name)} CASCADE\"\n end",
"def drop_sequence(sequence_name, options = {})\n SchemaMonkey::Middleware::Migration::DropSequence.start(connection: self, sequence_name: sequence_name, sequence_options: options) do |env|\n sequence_name = env.sequence_name\n options = env.sequence_options\n sql = \"DROP SEQUENCE\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(sequence_name)}\"\n execute sql\n end\n end",
"def drop_sequence(name)\n self.execute(\"DROP TABLE IF EXISTS %s_sequence\" % name)\n end",
"def delete_traps\n @db.execute(\"DROP TABLE Traps\")\n end",
"def down\n execute <<-SQL\n DROP TABLE working_intervals;\n SQL\n end",
"def drop\n Statements::DropFunction.new(context: self)\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def dropUserTable\n @conn.exec(\"DROPE users\")\n end",
"def replace_statement(target, stage)\n <<-SQLREPLACE\n begin transaction;\n drop table #{target};\n alter table #{stage} rename to #{target};\n end transaction;\n SQLREPLACE\n end",
"def migrated_down(migration)\n column_family.delete({\n where: {\n version: migration.version.to_s,\n name: migration.name,\n },\n })\n end",
"def drop_function(name, opts=OPTS)\n self << drop_function_sql(name, opts)\n end",
"def drop_schema(schema_name)\n execute(\"DROP SCHEMA \\\"#{schema_name}\\\"\")\n end",
"def uninstall_on(db, options = {})\n buffer, sql = \"\", \"\"\n all_objects_in_order.reverse.each{|o| \n sql = o.to_clean_sql(db)\n (buffer << sql << \";\\n\") unless sql.nil? or sql.empty?\n }\n execute_ddl(db, buffer, options)\n db\n end",
"def drop_function(name, custom_drop_statement = nil)\n Scenic.database.drop_function(name, custom_drop_statement)\n end",
"def drop_foreign_key(from_table, from_column)\n execute [ \"alter table #{quote_table_name from_table}\",\n \"drop foreign key #{constraint_name from_table, from_column}\"\n ].join(' ')\n end",
"def clear_sequence_setup(rep_prefix, table_name)\n sequence_table_name = \"#{rep_prefix}_sequences\"\n if tables.include?(sequence_table_name)\n trigger_name = \"#{rep_prefix}_#{table_name}_sequence\"\n trigger_row = select_one(<<-end_sql)\n select * from information_schema.triggers\n where trigger_schema = database()\n and trigger_name = '#{trigger_name}'\n end_sql\n if trigger_row\n execute \"DROP TRIGGER `#{trigger_name}`\"\n execute \"delete from #{sequence_table_name} where name = '#{table_name}'\"\n unless select_one(\"select * from #{sequence_table_name}\")\n # no more sequences left --> delete sequence table\n drop_table sequence_table_name.to_sym\n end\n end\n end\n end",
"def down\n \t# Example\n \t# Easy to undo something you create, but this doesn't work\n \t# because it is hard to fix mistakes.\n \t# drop_table :pictures\n end",
"def drop_foreign_key(table, field)\n execute \"ALTER TABLE #{table} DROP FOREIGN KEY #{constraint_name(table, field)}\"\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def drop_audit_schema!\n @config[:drop_audit_schema] = true\n end",
"def drop_function(function_name, options = {})\n function_name = full_function_name(function_name, options)\n\n execute \"DROP FUNCTION #{function_name}\"\n end",
"def drop_rule(name, table)\n execute \"DROP RULE #{quote_rule(name)} ON #{quote_table_name(table)};\"\n end",
"def uninstall_on!(db, options = {})\n sql = \"\"\n all_objects_in_order.reverse.each{|o| \n begin\n sql = o.to_clean_sql(db)\n execute_ddl(db, sql, options)\n rescue Sequel::Error => ex\n puts \"Ignoring: #{ex.message}\" if options[:verbose]\n end\n }\n db\n end",
"def drop_sequence(*args)\n options = args.extract_options!\n args.flatten!\n\n sql = 'DROP SEQUENCE '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << Array.wrap(args).collect { |s| quote_sequence(s) }.join(', ')\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def down\n drop_table TABLE_NAME\n end",
"def dropUserTable(tableName)\n @conn.exec(\"DROP TABLE #{tableName}\")\n end",
"def drop_table(*names)\n names.each {|n| execute(drop_table_sql(n))}\n end",
"def drop\n do_callback(:before_drop)\n collection.drop\n do_callback(:after_drop)\n end",
"def drop_sequence(repository, property)\n without_notices { execute(drop_sequence_statement(repository, property)) }\n end",
"def drop_sequence(name)\n name = quote_name(name)\n sql = \"DROP SEQUENCE #{name}\"\n execute(sql)\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end"
] |
[
"0.8119375",
"0.7688167",
"0.76867425",
"0.7282038",
"0.7202269",
"0.71872044",
"0.71872044",
"0.7185238",
"0.67402",
"0.658491",
"0.65673536",
"0.6561976",
"0.649828",
"0.6490543",
"0.6459425",
"0.6437757",
"0.6430077",
"0.64237756",
"0.6383998",
"0.6349056",
"0.63020337",
"0.6292031",
"0.6235674",
"0.62007296",
"0.61573863",
"0.613586",
"0.6118984",
"0.60996497",
"0.6090913",
"0.6065018",
"0.60551786",
"0.60291785",
"0.60187536",
"0.60057086",
"0.59598947",
"0.5957141",
"0.5941946",
"0.594184",
"0.594184",
"0.5928505",
"0.5923734",
"0.59222305",
"0.5922161",
"0.590778",
"0.5896831",
"0.5870791",
"0.5866002",
"0.58538413",
"0.58484924",
"0.5837365",
"0.582853",
"0.58041203",
"0.5772808",
"0.5767459",
"0.5758272",
"0.57477546",
"0.5738258",
"0.5736133",
"0.5723941",
"0.57146025",
"0.5714075",
"0.570452",
"0.56928587",
"0.568188",
"0.56793845",
"0.56765914",
"0.5662016",
"0.56424034",
"0.56370974",
"0.5636975",
"0.5634473",
"0.5609198",
"0.5606019",
"0.5597308",
"0.55933094",
"0.5577078",
"0.5553793",
"0.5546639",
"0.55364674",
"0.55322295",
"0.55285287",
"0.5515623",
"0.55118686",
"0.5501693",
"0.5501313",
"0.5498955",
"0.5498955",
"0.5494968",
"0.5485235",
"0.5467654",
"0.54593015",
"0.5446886",
"0.5446415",
"0.5439809",
"0.54341143",
"0.5417775",
"0.54173297",
"0.5405858",
"0.5400819",
"0.5389644"
] |
0.82029164
|
0
|
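Each row pairs one positive document with up to 101 scored negatives, and the metadata marks the row for a (query, document, negatives) triplet objective. As a minimal sketch of how such a row is typically consumed in training (the encoder stand-ins, margin value, and helper names below are assumptions, not part of this dataset):

```python
import numpy as np

def cosine(a, b):
    return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))

def triplet_margin_loss(q_emb, doc_emb, neg_embs, margin=0.2):
    # Hinge loss: the positive document should beat every negative
    # by at least `margin` in query similarity.
    pos = cosine(q_emb, doc_emb)                 # analogous to document_score
    negs = [cosine(q_emb, n) for n in neg_embs]  # analogous to negative_scores
    return sum(max(0.0, margin - pos + n) for n in negs)

# Toy usage with random vectors standing in for encoder outputs:
rng = np.random.default_rng(0)
q, d = rng.normal(size=128), rng.normal(size=128)
negs = [rng.normal(size=128) for _ in range(5)]
print(triplet_margin_loss(q, d, negs))
```

The per-negative similarities here play the role of the negative_scores column, and the positive similarity corresponds to document_score; document_rank records where the positive falls among all candidates.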
SQL for dropping a view from the database.
|
def drop_view_sql(name, opts=OPTS)
  "DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}"
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def drop_view(name)\n execute(\"DROP VIEW #{name}\")\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::DropView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n options = env.options\n materialized = options[:materialized] ? 'MATERIALIZED' : ''\n sql = \"DROP #{materialized} VIEW\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def drop_view(name, **kwargs)\n kwargs[:sqlite3] = !!(adapter_name =~ /sqlite/i)\n\n execute build_drop_view_query(name, **kwargs)\n end",
"def drop_materialized_view(*args)\n options = args.extract_options!\n args.flatten!\n\n sql = 'DROP MATERIALIZED VIEW '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << Array.wrap(args).collect { |v| quote_view_name(v) }.join(', ')\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def drop_view(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS\n names.each do |n|\n execute_ddl(drop_view_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def drop_materialized_view(name, **kwargs)\n supports_materialized_view!\n\n execute build_drop_materialized_view_query(name, **kwargs)\n end",
"def drop_views name, defs=nil\n defs = defs.delete(:dependent_views) if defs.is_a?(Hash)\n defs.each do |dependent_view|\n execute \"DROP VIEW IF EXISTS #{dependent_view}\"\n end if defs\n \n execute \"DROP VIEW IF EXISTS #{name}\"\n\n end",
"def drop_view(name)\n unless @enduser\n raise Empire::MissingEnduserError.new\n end\n path = \"view/#{name}\"\n request path, :delete\n end",
"def recreate_view name\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n if view_structure\n execute \"DROP VIEW IF EXISTS #{name}\"\n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def drop_database_sql(name, opts = {})\n \"DROP DATABASE #{quote_identifier(name)}\"\n end",
"def drop_table_statement(repository, model)\n \"DROP TABLE #{quote_table_name(model.storage_name(repository.name))}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def to_drop_database_sql(db)\n db.send(:drop_database_sql, self.name, {})\n end",
"def create_view(name, source, options = OPTS)\n execute_ddl(create_view_sql(name, source, options))\n remove_cached_schema(name)\n nil\n end",
"def test_materialized_view_metadata_drop\n skip(\"Materialized views were introduced in Cassandra 3.0.0\") if CCM.cassandra_version < '3.0.0'\n\n @session.execute(\"CREATE TABLE simplex.test (pk int PRIMARY KEY, c int)\")\n @session.execute(\"CREATE MATERIALIZED VIEW simplex.mv1 AS SELECT c,pk FROM simplex.test WHERE c IS NOT NULL AND pk IS NOT NULL PRIMARY KEY (pk, c)\")\n\n @listener.wait_for_materialized_view('simplex', 'mv1')\n assert @cluster.keyspace('simplex').has_materialized_view?('mv1')\n\n @session.execute(\"DROP MATERIALIZED VIEW simplex.mv1\")\n @cluster.refresh_schema\n refute @cluster.keyspace('simplex').has_materialized_view?('mv1')\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_table\n db.drop_table(table_name)\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def alter_materialized_view_drop_column_default(name, column, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :drop_default => column\n }, options).to_sql\n end",
"def drop_versioned_table\n self.connection.drop_table versioned_table_name\n end",
"def drop_tables!\n migrate(:down)\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE #{quote_table_name(name)}\"\n end",
"def drop_index_sql(table, op)\n \"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))} ON #{quote_schema_table(table)}\"\n end",
"def drop\n @db.drop_collection(@name)\n end",
"def drop_database(name)\n single_db_command(name, :dropDatabase => 1)\n end",
"def drop_index_sql(table, op)\n \"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}\"\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{name}\" \n end",
"def drop_index_sql(table, op)\n \"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}\"\n end",
"def drop_index_sql(table, op)\n \"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}\"\n end",
"def to_drop_constraint_sql(db)\n if db.supports_external_drop_constraints?\n gen = ::Sequel::Schema::AlterTableGenerator.new(db)\n gen.drop_constraint(self.name)\n db.send(:alter_table_sql_list, relvar.namespace_qualified_name(db), gen.operations)[0]\n else\n \"\"\n end\n end",
"def drop_database(name)\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def destroy!\n drop_ddl = tables.map(&:name).map do |t|\n \"drop table if exists #{t};\\n\"\n end.join\n ActiveRecord::Base.connection.execute(drop_ddl)\n end",
"def drop_database(name) #:nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_database(name) # :nodoc:\n execute \"DROP DATABASE IF EXISTS #{quote_table_name(name)}\"\n end",
"def drop_sequence_sql(name)\n \"DROP SEQUENCE #{name}\"\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def update_view name, type, columns, options={}\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n raise ViewNotExistException(\"View #{name} does not exist in current db\") unless view_structure\n \n columns_str = columns.is_a?(Array) ? columns.join(',') : columns\n \n select_pattern = /select (.*) from/i\n select_str = view_structure[select_pattern,1]\n\n case type\n when :add\n view_structure.gsub!(select_pattern, \"SELECT #{select_str}, #{columns_str} FROM\")\n when :remove\n select_str.gsub!(\", #{columns_str}\", '')\n view_structure.gsub!(select_pattern, \"SELECT #{select_str} FROM\")\n when :replace\n view_structure.gsub!(select_pattern, \"SELECT #{columns_str} FROM\")\n end\n\n drop_views name, options[:dependent_views] \n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end",
"def drop_table\n self.connection.drop_table table_name\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_schema(name, opts=OPTS)\n self << drop_schema_sql(name, opts)\n end",
"def remove_cluster_from_materialized_view(name)\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :remove_cluster => true\n }).to_sql\n end",
"def drop\n Aptly::runcmd \"aptly mirror drop #{@name.quote}\"\n end",
"def drop_index\n call(ft_drop)\n end",
"def create_or_replace_view(name, source, options = OPTS)\n if supports_create_or_replace_view?\n options = options.merge(:replace=>true)\n else\n swallow_database_error{drop_view(name)}\n end\n\n create_view(name, source, options)\n nil\n end",
"def drop_trigger_sql(table, name, opts=OPTS)\n \"DROP TRIGGER#{' IF EXISTS' if opts[:if_exists]} #{name} ON #{quote_schema_table(table)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def drop_database(db_name)\n check_return_code(PureHailDB.ib_database_drop(db_name))\n end",
"def destroy_db(options)\n info \"Dropped database '#{options[:name]}'\"\n end",
"def down\n drop_table TABLE_NAME\n end",
"def drop_index_sql(table, op)\n sch, _ = schema_and_table(table)\n \"DROP INDEX#{' CONCURRENTLY' if op[:concurrently]}#{' IF EXISTS' if op[:if_exists]} #{\"#{quote_identifier(sch)}.\" if sch}#{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}#{' CASCADE' if op[:cascade]}\"\n end",
"def dropUserTable\n @conn.exec(\"DROPE users\")\n end",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def delete_consistency_group_snapshot_view(sys_id, cg_id, view_id)\n\t response = request(:delete, \"/devmgr/v2/storage-systems/#{sys_id}/consistency-groups/#{cg_id}/views/#{view_id}\")\n status(response, 204, 'Failed to remove consistency group snapshot view')\n end",
"def drop(db)\n\tif $table_exists\n\t\tdb.execute(\"DROP TABLE items;\")\n\t\t$table_exists = false\n\t\tputs \"\\nTable successfully deleted.\"\n\telse\n\t\tputs \"\\nTable successfully deleted.\"\n\tend\nend",
"def drop_table(name)\n not_exist(name)\n\n ORM::DBConnection.new(model_name(name)).remove_table(name)\n File.delete(\"app/services/db/tables/#{name}.json\")\n end",
"def drop_replication_trigger(trigger_name, table_name)\n %w(insert update delete).each do |action|\n execute \"DROP TRIGGER `#{trigger_name}_#{action}`;\"\n end\n execute \"DROP PROCEDURE `#{trigger_name}`;\"\n end",
"def drop_table(table)\n connection.drop_collection(database,table)\n end",
"def remove_view(listener)\n @views.remove listener\n end",
"def down\n \tdrop_table :solution_submissions\n\n # \texecute <<-SQL\n # \t\tDROP TYPE s_status;\n # \tSQL\n\n # \texecute <<-SQL\n # \t\tDROP TYPE lang;\n # \tSQL\n\n end",
"def drop_table(table_name = temporary_table_name)\n ::RailsRedshiftReplicator.connection.exec \"drop table if exists #{table_name}\"\n end",
"def drop_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"testdb\")\n c.exec \"DROP TABLE products;\"\n c.close\nend",
"def drop_sql\n raise NotImplementedError, \"DatabaseSymbol should not be instanciated\"\n end",
"def drop_database\n options = { database: Orientdb::ORM.connection_uri.database, user: Orientdb::ORM.connection_uri.user, password: Orientdb::ORM.connection_uri.password }\n Orientdb::ORM.with { |conn| conn.client.delete_database( options ) }\nend",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def drop\n ensure_service!\n service.drop_database instance_id, database_id\n true\n end",
"def create_view_prefix_sql(name, options)\n create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}VIEW #{quote_schema_table(name)}\", options[:columns])\n end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def create_view(view_name, definition, options={})\n SchemaMonkey::Middleware::Migration::CreateView.start(connection: self, view_name: view_name, definition: definition, options: options) do |env|\n definition = env.definition\n view_name = env.view_name\n options = env.options\n definition = definition.to_sql if definition.respond_to? :to_sql\n\n if options[:materialized] && options[:allow_replace]\n raise ArgumentError, 'allow_replace is not supported for materialized views'\n end\n\n if options[:force]\n drop_view(view_name, {if_exists: true}.merge(options.slice(:materialized)))\n end\n\n command = if options[:materialized]\n \"CREATE MATERIALIZED\"\n elsif options[:allow_replace]\n \"CREATE OR REPLACE\"\n else\n \"CREATE\"\n end\n\n execute \"#{command} VIEW #{quote_table_name(view_name)} AS #{definition}\"\n end\n end",
"def create_or_replace_view(name, source)\n source = source.sql if source.is_a?(Dataset)\n execute(\"CREATE OR REPLACE VIEW #{name} AS #{source}\")\n end",
"def dropUserTable(tableName)\n @conn.exec(\"DROP TABLE #{tableName}\")\n end",
"def tableView(aView, validateDrop:info, proposedRow:row, proposedDropOperation:op)\n NSDragOperationEvery\n end",
"def refresh_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::RefreshView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n sql = \"REFRESH MATERIALIZED VIEW #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def drop_function(name, opts=OPTS)\n self << drop_function_sql(name, opts)\n end",
"def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end",
"def alter_materialized_view_reset_options(name, *args)\n options = args.extract_options!\n\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :reset_options => args\n }, options).to_sql\n end",
"def destroy\n @view = View.find(params[:id])\n @view.destroy\n\n respond_to do |format|\n format.html { redirect_to views_url }\n format.json { head :no_content }\n end\n end",
"def down\n execute <<-SQL\n DROP TABLE event_registrations;\n SQL\n\n execute <<-SQL\n DROP TABLE members;\n SQL\n\n execute <<-SQL\n DROP TABLE events;\n SQL\n\n execute <<-SQL\n DROP TABLE treatment_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE feeding_logs;\n SQL\n\n execute <<-SQL\n DROP TABLE employees;\n SQL\n\n execute <<-SQL\n DROP TABLE animals;\n SQL\n\n execute <<-SQL\n DROP TABLE tanks;\n SQL\n\n execute <<-SQL\n DROP TABLE habitats;\n SQL\n end",
"def drop_schema(schema)\n execute \"DROP SCHEMA #{schema} RESTRICT\", 'Drop Schema'\n end",
"def drop_sequence(name)\n execute drop_sequence_sql(name)\n end",
"def drop_schema(schema_name)\n execute(\"DROP SCHEMA \\\"#{schema_name}\\\"\")\n end",
"def delete_database(connection_string, db_name)\n drop_sql = <<-SQL\n DROP DATABASE #{db_name};\n SQL\n\n run \"#{connection_string} --execute=\\\"#{drop_sql}\\\"\"\nend",
"def drop_table(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS \n names.each do |n|\n execute_ddl(drop_table_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def drop_table(*names)\n names.each {|n| execute(drop_table_sql(n))}\n end",
"def drop_movies_table\n c = connect\n c.exec \"DROP TABLE IF EXISTS movies;\" \n c.close\nend",
"def destroy\n @view = View.find(params[:id])\n @view.destroy\n\n respond_to do |format|\n format.html { redirect_to(views_url) }\n format.xml { head :ok }\n end\n end",
"def drop_trigger(table_name, trigger_name, options = {})\n SchemaMonkey::Middleware::Migration::CreateTrigger.start(connection: self, table_name: table_name, trigger_name: trigger_name, options: options) do |env|\n table_name = env.table_name\n trigger_name = env.trigger_name\n options = env.options\n\n sql = \"DROP TRIGGER\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(trigger_name)} ON #{quote_table_name(table_name)}\"\n sql += \" CASCADE\" if options[:cascade]\n\n execute sql\n end\n end",
"def down\n \tdrop_table :problems\n\n \t#execute <<-SQL\n \t#\tDROP TYPE difficulty;\n \t#SQL\n end",
"def to_sql_drop_namespace(db)\n if db.supports_schemas?\n db.send(:drop_schema_sql, self.name, {})\n else\n \"\"\n end\n end",
"def drop_translated_table\n self.connection.drop_table translation_table_name\n end",
"def drop_language_sql(name, opts=OPTS)\n \"DROP LANGUAGE#{' IF EXISTS' if opts[:if_exists]} #{name}#{' CASCADE' if opts[:cascade]}\"\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def drop_function(function_name, options = {})\n function_name = full_function_name(function_name, options)\n\n execute \"DROP FUNCTION #{function_name}\"\n end",
"def drop_index(index_name)\n keyspace_execute \"DROP INDEX #{index_name}\"\n end",
"def drop_index(index_name)\n keyspace_execute \"DROP INDEX #{index_name}\"\n end"
] |
[
"0.8725758",
"0.8525937",
"0.83034474",
"0.8208464",
"0.796217",
"0.7813755",
"0.779644",
"0.72723156",
"0.68422073",
"0.6783589",
"0.6363096",
"0.63456047",
"0.63331085",
"0.62820417",
"0.62791556",
"0.62631696",
"0.6211049",
"0.6098684",
"0.6076997",
"0.6065854",
"0.6051685",
"0.60293543",
"0.6002137",
"0.5999222",
"0.59982413",
"0.5974913",
"0.59576833",
"0.5946054",
"0.59419477",
"0.5898935",
"0.58970034",
"0.58970034",
"0.58932626",
"0.588105",
"0.5833792",
"0.58255166",
"0.5823136",
"0.5814322",
"0.5804941",
"0.5773337",
"0.5769135",
"0.57604456",
"0.5758782",
"0.5754288",
"0.57501227",
"0.57344896",
"0.5709338",
"0.5695138",
"0.56917983",
"0.56765825",
"0.56628084",
"0.5660969",
"0.5643471",
"0.5642322",
"0.56064403",
"0.5603057",
"0.56027085",
"0.55912936",
"0.55906385",
"0.55751055",
"0.55615616",
"0.5559232",
"0.5555951",
"0.5537215",
"0.55312",
"0.55302924",
"0.5516168",
"0.55096424",
"0.5507169",
"0.5507169",
"0.55049896",
"0.5503625",
"0.54650635",
"0.5450059",
"0.54485565",
"0.5444665",
"0.5431808",
"0.5424735",
"0.54240894",
"0.54185843",
"0.5415906",
"0.5408283",
"0.54013896",
"0.5399781",
"0.5394251",
"0.5388914",
"0.53791654",
"0.5373542",
"0.5359914",
"0.53595555",
"0.534206",
"0.5335634",
"0.5324355",
"0.5299122",
"0.5295132",
"0.52916306",
"0.52916306",
"0.5287478",
"0.528504",
"0.528504"
] |
0.86309105
|
1
|
If opts includes a :schema option, use it; otherwise, restrict the filter to only the currently visible schemas.
|
def filter_schema(ds, opts)
  expr = if schema = opts[:schema]
    schema.to_s
  else
    Sequel.function(:any, Sequel.function(:current_schemas, false))
  end
  ds.where{{pg_namespace[:nspname]=>expr}}
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def filter_shema(schema)\n schema.filter{|obj| include?(obj)}\n end",
"def schema_ds_filter(table_name, opts)\n if table_name\n [{:c__table_name=>table_name.to_s}]\n else\n [{:t__table_type=>'BASE TABLE'}]\n end\n end",
"def skip_schema_queries=(_arg0); end",
"def search_schemas_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug 'Calling API: DefaultApi.search_schemas ...'\n end\n if @api_client.config.client_side_validation && !opts[:'skip'].nil? && opts[:'skip'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"skip\"]\" when calling DefaultApi.search_schemas, must be greater than or equal to 0.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'limit'].nil? && opts[:'limit'] > 50\n fail ArgumentError, 'invalid value for \"opts[:\"limit\"]\" when calling DefaultApi.search_schemas, must be smaller than or equal to 50.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'limit'].nil? && opts[:'limit'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"limit\"]\" when calling DefaultApi.search_schemas, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = '/schemas'\n\n # query parameters\n query_params = {}\n query_params[:'searchString'] = opts[:'search_string'] if !opts[:'search_string'].nil?\n query_params[:'skip'] = opts[:'skip'] if !opts[:'skip'].nil?\n query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = ['UserSecurity']\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'Array<Schema>')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DefaultApi#search_schemas\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def filter_resources(resources, opts)\n if opts[:type] && opts[:name]\n resources.select { |r| r.type == opts[:type] && r.name == opts[:name] }\n elsif opts[:type]\n resources.select { |r| r.type == opts[:type] }\n elsif opts[:name]\n resources.select { |r| r.name == opts[:name] }\n else\n resources\n end\n end",
"def set_filter(opts)\n opts = check_params(opts,[:filters])\n super(opts)\n end",
"def search_schemas_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: DevelopersApi.search_schemas ...\"\n end\n if @api_client.config.client_side_validation && !opts[:'skip'].nil? && opts[:'skip'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"skip\"]\" when calling DevelopersApi.search_schemas, must be greater than or equal to 0.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'limit'].nil? && opts[:'limit'] > 50\n fail ArgumentError, 'invalid value for \"opts[:\"limit\"]\" when calling DevelopersApi.search_schemas, must be smaller than or equal to 50.'\n end\n\n if @api_client.config.client_side_validation && !opts[:'limit'].nil? && opts[:'limit'] < 0\n fail ArgumentError, 'invalid value for \"opts[:\"limit\"]\" when calling DevelopersApi.search_schemas, must be greater than or equal to 0.'\n end\n\n # resource path\n local_var_path = \"/schemas\"\n\n # query parameters\n query_params = {}\n query_params[:'searchString'] = opts[:'search_string'] if !opts[:'search_string'].nil?\n query_params[:'skip'] = opts[:'skip'] if !opts[:'skip'].nil?\n query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?\n\n # header parameters\n header_params = {}\n # HTTP header 'Accept' (if needed)\n header_params['Accept'] = @api_client.select_header_accept(['application/json'])\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = []\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'Array<Schema>')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: DevelopersApi#search_schemas\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def schema_find(opts = {})\n if Configuration.debugging\n Configuration.logger.debug \"Calling API: SchemaApi#schema_find ...\"\n end\n \n # resource path\n path = \"/schema\".sub('{format}','json')\n\n # query parameters\n query_params = {}\n query_params[:'model'] = opts[:'model'] if opts[:'model']\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n _header_accept = ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']\n _header_accept_result = @api_client.select_header_accept(_header_accept) and header_params['Accept'] = _header_accept_result\n\n # HTTP header 'Content-Type'\n _header_content_type = ['application/json', 'application/x-www-form-urlencoded']\n header_params['Content-Type'] = @api_client.select_header_content_type(_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n \n\n auth_names = []\n result = @api_client.call_api(:GET, path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'inline_response_200')\n if Configuration.debugging\n Configuration.logger.debug \"API called: SchemaApi#schema_find. Result: #{result.inspect}\"\n end\n return result\n end",
"def chooseSchema\n @metadata.chooseSchema\n end",
"def search_schemas(opts = {})\n data, _status_code, _headers = search_schemas_with_http_info(opts)\n data\n end",
"def skip_schema_queries; end",
"def search_schemas(opts = {})\n data, _status_code, _headers = search_schemas_with_http_info(opts)\n return data\n end",
"def dataset_to_select_tables(*tables)\n if @restricted_to_types && !@tables_restricting_type.values_at(*tables).any?\n super.filter(@qualified_type_column => @restricted_to_types)\n else\n super\n end\n end",
"def query_opts(q_opts = nil)\n return query_opts_hash if q_opts.nil?\n opts = @opts.dup\n [:snapshot, :max_scan, :show_disk_loc].each do |k|\n q_opts[k].nil? ? opts.delete(k) : opts.merge!(k => q_opts[k])\n end\n Scope.new(collection, selector, opts)\n end",
"def apply_narrowing_filters\n @filters[:narrowing].each do |filter|\n @query = @query.where(filter => @options[filter])\n end\n @query\n end",
"def set_path_if_required(schema)\n return if @connection.schema_search_path == schema\n @connection.schema_search_path = schema\n end",
"def schema_ds(table_name, opts)\n schema_ds_dataset.from(*schema_ds_from(table_name, opts)) \\\n .select(*schema_ds_select(table_name, opts)) \\\n .join(*schema_ds_join(table_name, opts)) \\\n .filter(*schema_ds_filter(table_name, opts))\n end",
"def api_filter(params, options = {})\n new_params = {}\n params.each{|param, value| new_params[param.to_s] = value} \n params = new_params\n\n api_validate params\n \n skip = Proc.new do |key|\n options[:only_filter] && !options[:only_filter].include?(key)\n end\n \n dataset = self\n dataset = dataset.api_include params unless skip.call(:include)\n dataset = dataset.api_sort params unless skip.call(:sort)\n dataset = dataset.api_limit params unless skip.call(:limit)\n dataset = dataset.api_offset params unless skip.call(:offset)\n\n # Prepare the records for serialization\n records = dataset.all\n\n if params['include'] && !skip.call(:include)\n records.each do |record|\n MF::API::IncludeNode.serialize_attrs(record, params['include'], options[:proc])\n end\n end\n \n records\n end",
"def filtered_tables(tables, table_opts=nil)\n table_opts ||= @cp_options[:tables]\n\n # Understands the following forms:\n # nil\n # :tables => :all\n # :tables => ['table1', 'table2', 'table3']\n # :tables => {:only => ['table1']}\n # :tables => {:only => 'table1'}\n # :tables => {:except => ['table2', 'table3']}\n case table_opts\n when nil, :all\n tables\n when Array\n table_opts & tables\n when Hash\n only = table_opts[:only]\n only = [only].flatten unless only.nil?\n except = table_opts[:except]\n except = [except].flatten unless except.nil?\n filtered = (only & tables) || tables.clone\n filtered -= except || []\n else\n raise ArgumentError.new(\"Invalid :tables option '#{table_opts}'\")\n end\n end",
"def inject_recent_app_dataset_filter\n @opts[:additional_visibility_filters] ||= {}\n @opts[:additional_visibility_filters][:apps] = proc do |dataset|\n AppsController.filter_dataset(dataset)\n end\n end",
"def check_filter_options() #:nodoc:\r\n table_name = @tables.first[1]\r\n model = @tables.first[0]\r\n session[table_name] ||= {}\r\n# process page\r\n session[table_name][:page] = params[:page] if params[:page]\r\n# new filter is applied\r\n if params[:filter]\r\n set_session_filter(table_name)\r\n session[table_name][:page] = 1\r\n end\r\n# if data model has field dc_site_id ensure that only documents which belong to the site are selected.\r\n site_id = dc_get_site._id if dc_get_site\r\n# dont't filter site if no dc_site_id field or user is ADMIN\r\n site_id = nil if !model.method_defined?('dc_site_id') or dc_user_can(DcPermission::CAN_ADMIN)\r\n# \r\n if @records = DcFilter.get_filter(session[table_name][:filter])\r\n @records = @records.and(dc_site_id: site_id) if site_id\r\n else\r\n @records = if site_id\r\n model.where(dc_site_id: site_id)\r\n else\r\n model\r\n end\r\n end\r\n=begin \r\n# TODO Use only fields requested. Higly experimental but necessary in some scenarios\r\n if (columns = @form['result_set']['columns'])\r\n cols = []\r\n columns.each { |k,v| cols << v['name'] }\r\n p '*',cols,'*'\r\n @records = @records.only(cols)\r\n end\r\n=end \r\n# pagination if required\r\n per_page = (@form['result_set']['per_page'] || 30).to_i\r\n if per_page > 0\r\n @records = @records.page(session[table_name][:page]).per(per_page)\r\n end\r\nend",
"def show_only_collections_deposited_by_current_user(solr_parameters)\n clauses = [query_for_my_collections]\n solr_parameters[:fq] ||= []\n solr_parameters[:fq] += [\"(#{clauses.join(' OR ')})\"]\n end",
"def show\n respond_with(@schema) do |format|\n format.json { render json: @schema.to_json }\n end\n end",
"def query_opts!(q_opts = nil)\n return self if q_opts.nil?\n [:snapshot, :max_scan, :show_disk_loc].each do |k|\n q_opts[k].nil? ? @opts.delete(k) : @opts.merge!(k => q_opts[k])\n end\n self\n end",
"def apply_dataset_options(type, request, ds)\n ds = apply_filter(type, request, ds)\n if order = order_for(type, request)\n ds = ds.order(*order)\n end\n if eager = eager_for(type, request)\n ds = ds.eager(eager)\n end\n if eager_graph = eager_graph_for(type, request)\n ds = ds.eager_graph(eager_graph)\n end\n ds\n end",
"def table_filter(schemaName, tblName, tblType)\n [\"information_schema\", \"pg_catalog\"].include?(schemaName) || tblType !~ /TABLE/i\n end",
"def set_wildmask(opts)\n opts = check_params(opts,[:wildmasks])\n super(opts)\n end",
"def _all_pred(opts)\n predicate = opts[:predicate]\n raise 'predicate must be provided' unless predicate\n\n visible = opts.fetch :visible, true\n %($.mainApp().getAllWithPredicate(\"#{predicate}\", #{visible});)\n end",
"def schema_get(opts = {})\n data, _status_code, _headers = schema_get_with_http_info(opts)\n return data\n end",
"def check_filter_options #:nodoc:\r\n table_name = CmsHelper.table_param(params).strip.split(';').first.underscore\r\n model = table_name.classify.constantize\r\n session[table_name] ||= {}\r\n # page is set\r\n session[table_name][:page] = params[:page] if params[:page]\r\n # if data model has field dc_site_id ensure that only documents which belong to the site are selected.\r\n site_id = dc_get_site._id if dc_get_site\r\n\r\n # don't filter site if no dc_site_id field or user is ADMIN\r\n site_id = nil if !model.method_defined?('dc_site_id') || dc_user_can(DcPermission::CAN_ADMIN)\r\n site_id = nil if session[table_name][:filter].to_s.match('dc_site_id')\r\n\r\n if @records = DcFilter.get_filter(session[table_name][:filter])\r\n @records = @records.and(dc_site_id: site_id) if site_id\r\n else\r\n @records = site_id ? model.where(dc_site_id: site_id) : model\r\n end\r\n process_select_and_deny_fields\r\n # pagination if required\r\n per_page = (@form['result_set']['per_page'] || 25).to_i\r\n @records = @records.page(session[table_name][:page]).per(per_page) if per_page > 0\r\nend",
"def apply_filter(rel)\n if filter.present?\n Response.do_search(rel, filter, :mission => mission)\n else\n rel\n end\n end",
"def filter_options options\n other_tables = options[:other_tables].dup || []\n temp_options = options.dup\n temp_options.keep_if do |k,v|\n ['name','page','search','order'].include? k.to_s\n end\n other_tables << temp_options\n end",
"def vulns(opts)\n ::ApplicationRecord.connection_pool.with_connection {\n # If we have the ID, there is no point in creating a complex query.\n if opts[:id] && !opts[:id].to_s.empty?\n return Array.wrap(Mdm::Vuln.find(opts[:id]))\n end\n\n wspace = Msf::Util::DBManager.process_opts_workspace(opts, framework)\n opts = opts.clone()\n opts.delete(:workspace)\n\n search_term = opts.delete(:search_term)\n if search_term && !search_term.empty?\n column_search_conditions = Msf::Util::DBManager.create_all_column_search_conditions(Mdm::Vuln, search_term)\n wspace.vulns.includes(:host).where(opts).where(column_search_conditions)\n else\n wspace.vulns.includes(:host).where(opts)\n end\n }\n end",
"def print_schema(schema)\n print_filtered_schema(schema, method(:is_defined_type))\n end",
"def set_schema\n s = Schema.find(params[:id]) if user_id = current_user\n if s.user_id == current_user.id\n schema = s\n else\n redirect_to schemas_url, notice: 'wrong url'\n end\n end",
"def adv_search\n @schema = params.fetch(:schema, \"\").downcase\n @username = params.fetch(:username, \"\").downcase\n @email = params.fetch(:email, \"\").downcase\n @oauth = params.fetch(:oauth, \"\").downcase\n\n if @schema != \"\"\n @schemata = Schema.where(\"title like '%#{@schema}%'\")\n @schemata = @schemata.page(params.fetch(:page, 0))\n if @schemata.count > 0\n render \"admin/schemata\"\n return\n end\n @alert = \"No results were found with the given criteria\"\n end\n if @username != \"\"\n @users = User\n .where('lower(username) LIKE ?', \"%#{@username}%\")\n .page(params.fetch(:page, 0))\n if @users.count > 0\n render 'admin/users'\n return\n end\n @alert = \"No results were found with the given criteria\"\n end\n if @email != \"\"\n @users = User\n .where('lower(email) LIKE ?', \"%#{@email}%\")\n .page(params.fetch(:page, 0))\n if @users.count > 0\n render 'admin/users'\n return\n end\n @alert = \"No results were found with the given criteria\"\n end\n if @oauth != \"\"\n @users = User\n .joins(:oauth_profiles)\n .where(oauth_profiles: {\"username\" => @oauth})\n .page(params.fetch(:page, 0))\n if @users.count > 0\n render 'admin/users'\n return\n end\n @alert = \"No results were found with the given criteria\"\n end\n end",
"def reporting_schema_via_param_or_feature_flag\n param_flag = ActiveRecord::Type::Boolean.new.deserialize(report_params[:v2])\n user_flag = current_admin.feature_enabled?(:reporting_schema_v2)\n return param_flag unless param_flag.nil?\n user_flag\n end",
"def scope(filters)\n scope = filters.first.is_a?(Proc) ? nil : filters.shift\n scope || doc\n end",
"def strict_filters=(_arg0); end",
"def query_builder_with_filter_from_hash(params)\n types = get_types_parameters\n\n validate_parameters(types, params)\n\n pattern = params[:q]\n\n only_liked = params[:only_liked] == 'true'\n only_shared = params[:only_shared] == 'true'\n only_subscriptions = params[:subscribed] == 'true'\n only_samples = params[:sample] == 'true'\n exclude_shared = params[:exclude_shared] == 'true'\n exclude_raster = params[:exclude_raster] == 'true'\n locked = params[:locked]\n shared = compose_shared(params[:shared], only_shared, exclude_shared)\n tags = params.fetch(:tags, '').split(',')\n tags = nil if tags.empty?\n bbox_parameter = params.fetch(:bbox,nil)\n privacy = params.fetch(:privacy,nil)\n only_with_display_name = params[:only_with_display_name] == 'true'\n with_dependent_visualizations = params[:with_dependent_visualizations].to_i\n only_published = params[:only_published] == 'true'\n\n vqb = VisualizationQueryBuilder.new\n .with_prefetch_user\n .with_prefetch_table\n .with_prefetch_permission\n .with_prefetch_synchronization\n .with_prefetch_external_source\n .with_types(types)\n .with_tags(tags)\n\n if !bbox_parameter.blank?\n vqb.with_bounding_box(Carto::BoundingBoxUtils.parse_bbox_parameters(bbox_parameter))\n end\n\n # FIXME Patch to exclude legacy visualization from data-library #5097\n if only_with_display_name\n vqb.with_display_name\n end\n\n vqb.with_published if only_published\n\n if current_user\n vqb.with_current_user_id(current_user.id)\n vqb.with_liked_by_user_id(current_user.id) if only_liked\n vqb.with_subscription if only_subscriptions\n vqb.with_sample if only_samples\n case shared\n when FILTER_SHARED_YES\n vqb.with_owned_by_or_shared_with_user_id(current_user.id)\n when FILTER_SHARED_NO\n vqb.with_user_id(current_user.id) if !only_liked\n when FILTER_SHARED_ONLY\n vqb.with_shared_with_user_id(current_user.id)\n .with_user_id_not(current_user.id)\n end\n\n vqb.without_raster if exclude_raster\n\n if locked == 'true'\n vqb.with_locked(true)\n elsif locked == 'false'\n vqb.with_locked(false)\n end\n\n if types.include? Carto::Visualization::TYPE_REMOTE\n vqb.without_synced_external_sources\n vqb.without_imported_remote_visualizations\n end\n\n vqb.with_privacy(privacy) unless privacy.nil?\n\n if with_dependent_visualizations.positive? && !current_user.has_feature_flag?('faster-dependencies')\n vqb.with_prefetch_dependent_visualizations\n end\n else\n user = Carto::User.where(username: CartoDB.extract_subdomain(request)).first\n raise Carto::ParamInvalidError.new(:username) unless user.present?\n vqb.with_user_id(user.id)\n .with_privacy(Carto::Visualization::PRIVACY_PUBLIC)\n end\n\n if pattern.present?\n vqb.with_partial_match(pattern)\n end\n\n vqb\n end",
"def filter(options={})\n raise NotImplementedError\n end",
"def whitelisted_filters_for(name, klass)\n requested_parameters = as_array(@params[name])\n if requested_parameters.empty?\n []\n else\n klass.where(slug: requested_parameters)\n end\n end",
"def find_all_resources options\n policy_scope(resource_class)\n end",
"def set_schema\n @schema = Schema.find(params[:id])\n end",
"def set_schema\n @schema = Schema.find(params[:id])\n end",
"def set_schema\n @schema = Schema.find(params[:id])\n end",
"def apply_permissive_visibility_filter(solr_params)\n return unless current_exhibit\n return if scope.respond_to?(:can?) && scope.can?(:curate, current_exhibit) && !blacklight_params[:public]\n\n solr_params.append_filter_query \"-#{blacklight_config.document_model.visibility_field(current_exhibit)}:false\"\n end",
"def set_schema\n @schema = Schema.find(params[:id])\n end",
"def schema\n absolutize(@schema)\n end",
"def schema\n absolutize(@schema)\n end",
"def set_search_scope(opts)\n opts = check_params(opts,[:search_scopes])\n super(opts)\n end",
"def filter!(options)\n @list = ObjectsFilter.new(all, options).objects\n index_by_fullname\n end",
"def only *filter_list\n filter.only filter_list\n self\n end",
"def extract_only!\n @parsed_query.gsub!(/(\\b)*only\\:(\\S*)(\\b)*/i, \"\")\n @only = [:episodes, :reviews, :podcasts, :tag, :user].detect{|o| o == $2.to_sym} unless $2.blank?\n end",
"def hide_flagged_records(solr_parameters)\n unless see_unpublished?\n solr_parameters[:fq] << '-flags_isim:[* TO *]'\n end\n end",
"def or_filters_provided?\n true\n end",
"def show_only_managed_collections_for_non_admins(solr_parameters)\n return if current_ability.admin?\n clauses = [\n '-' + ActiveFedora::SolrQueryBuilder.construct_query_for_rel(depositor: current_user_key),\n '-' + ActiveFedora::SolrQueryBuilder.construct_query_for_rel(has_model: Hyrax.config.admin_set_model, creator: current_user_key)\n ]\n solr_parameters[:fq] ||= []\n solr_parameters[:fq] += [\"(#{clauses.join(' OR ')})\"]\n end",
"def _filter r, options\n return r if params[:f].blank?\n return resource_class.filter r, params[:f], options if resource_class.respond_to? :filter\n r\n end",
"def enforce_show_permissions(_opts = {})\n permissions = current_ability.permissions_doc(solr_id)\n if (permissions['read_access_group_ssim'].present? && permissions['read_access_group_ssim'].include?('registered')) || can?(:discover, permissions)\n permissions\n else\n raise Blacklight::AccessControls::AccessDenied.new('You do not have sufficient access privileges to view this document, which has been marked private.', :discover, params[:id])\n end\n end",
"def set_filter_options\n @sort_modes = Organization.sort_modes\n @view_modes = Organization.view_modes\n\n @current_sort_mode = if @sort_modes.keys.include?(params[:sort_by])\n params[:sort_by]\n else\n @sort_modes.keys.first\n end\n\n @current_view_mode = if @view_modes.keys.include?(params[:view])\n params[:view]\n else\n @view_modes.keys.first\n end\n\n @query = params[:query]\n end",
"def filters_for_user(user)\n user.present? ? filters_with_external : public_filters\n end",
"def schema(table_name = nil, opts={})\n if opts[:reload] && @schemas\n if table_name\n @schemas.delete(table_name)\n else\n @schemas = nil\n end\n end\n\n if table_name\n return @schemas[table_name] if @schemas && @schemas[table_name]\n else\n return @schemas if @schemas\n end\n\n if table_name\n @schemas ||= {}\n @schemas[table_name] ||= schema_parse_table(table_name, opts)\n else\n @schemas = schema_parse_tables(opts)\n end\n end",
"def filter(sparql)\n raise \"Must be overridden\"\n end",
"def transformed_search opts\n\t\topts\n\tend",
"def filtered_dataset\n filter_args_from_query.inject(@dataset) do |filter, cond|\n filter.filter(cond)\n end\n end",
"def schema_params\n params.require(:schema).permit(:name)\n end",
"def get_filter_sql\n return @schema_filter.filter_sql\n end",
"def filters= options = {}\n prominent = [ :eid, :url ]\n narrowing = options.keys & Ydl::Videos.columns - prominent\n @filters = { prominent: prominent, narrowing: narrowing }\n end",
"def index\n # @schemas = Schema.where(default: false)\n\n ####CLEANUP FUNCTION - SUPER QUICK###\n orphans = Schema.where(disabled: false).where([ \"id NOT IN (?)\", Account.select(:schema_id).where(banned: false, created: true)])\n orphans.update_all(disabled: true)\n\n @schemas = Schema.ordered_by_use\n\n end",
"def list_filter(**opt)\n # May be overridden by the subclass.\n end",
"def set_schema( schema )\n @schema = schema if @schema == self\n schema\n end",
"def server_schema?(schema=nil)\n schema ||= jiak.bucket.schema\n jiak.client.schema(jiak.bucket).eql? schema\n end",
"def show\n @schema = Schema.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render json: @schema }\n end\n end",
"def show\n @schema = Schema.find(params[:id])\n\n respond_to do |format|\n format.html # show.html.erb\n format.json { render :json => @schema }\n end\n end",
"def prov_scope(opts)\n scope = []\n # Request date (created since X days ago)\n scope << [:created_recently, opts[:time_period].to_i] if opts[:time_period].present?\n # Select requester user across regions\n scope << [:with_requester, current_user.id] unless approver?\n scope << [:with_requester, opts[:user_choice]] if opts[:user_choice] && opts[:user_choice] != \"all\"\n\n scope << [:with_approval_state, opts[:applied_states]] if opts[:applied_states].present?\n scope << [:with_type, MiqRequest::MODEL_REQUEST_TYPES[model_request_type_from_layout].keys]\n scope << [:with_request_type, opts[:type_choice]] if opts[:type_choice] && opts[:type_choice] != \"all\"\n scope << [:with_reason_like, opts[:reason_text]] if opts[:reason_text].present?\n\n scope\n end",
"def user_filter_options(model) #:nodoc:\r\n table_name = @tables.first[1]\r\n if session[table_name]\r\n DcFilter.get_filter(session[table_name][:filter]) || model\r\n else\r\n model\r\n end\r\nend",
"def user_filter_options(model) #:nodoc:\r\n table_name = @tables.first[1]\r\n if session[table_name]\r\n DcFilter.get_filter(session[table_name][:filter]) || model\r\n else\r\n model\r\n end\r\nend",
"def find_resources_queried options\n case params[:f]\n when nil\n if parent?\n policy_scope(resource_class).tags_included?( params[:q].split(\" \") ).where( options )\n else\n policy_scope(resource_class).tags_included?( params[:q].split(\" \") )\n end\n else\n policy_scope(resource_class)\n end\n end",
"def filtered_primary_model(_ = nil)\n pm = primary_model\n a = filter_params_permitted[:app_type_id] if filter_params_permitted\n if a.present?\n # A filter was selected. Limit the results to just users that can access the specified app.\n a = a.to_i\n # Set the app_type_id param to nil, so the super method doesn't attempt to filter on it\n filter_params_permitted[:app_type_id] = nil\n ids = pm.all.select { |u| a.in?(u.accessible_app_type_ids) }.map(&:id)\n pm = pm.where(id: ids)\n end\n\n # Filter on everything (except the specified app_type_id, which has beem temporarily removed)\n res = super(pm)\n\n # Reset the filter params so that the buttons appear correctly\n filter_params_permitted[:app_type_id] = a.to_s if a.present?\n res\n end",
"def filter(options={})\n super\n end",
"def only(*args)\n clone.tap do |crit|\n if args.any?\n crit.options[:fields] = {:_type => 1}\n crit.field_list = args.flatten\n crit.field_list.each do |f|\n crit.options[:fields][f] = 1\n end\n end\n end\n end",
"def filter(filt = nil, *args, **opts)\n @filter = if filt.nil? && !defined?(@filter)\n Filters.default\n elsif filt\n Filters.resolve(filt, *args, **opts)\n else\n @filter\n end\n\n aggregator.filter = @filter\n\n @filter\n end",
"def autocomplete(opts={})\n type, request, assoc, query, exclude = opts.values_at(:type, :request, :association, :query, :exclude)\n if assoc\n if exclude && association_type(assoc) == :edit\n ref = model.association_reflection(assoc)\n block = lambda do |ds|\n ds.exclude(S.qualify(ref.associated_class.table_name, ref.right_primary_key)=>model.db.from(ref[:join_table]).where(ref[:left_key]=>exclude).select(ref[:right_key]))\n end\n end\n return associated_model_class(assoc).autocomplete(opts.merge(:type=>:association, :association=>nil), &block)\n end\n opts = autocomplete_options_for(type, request)\n callback_opts = {:type=>type, :request=>request, :query=>query}\n ds = all_dataset_for(type, request)\n ds = opts[:callback].call(ds, callback_opts) if opts[:callback]\n display = opts[:display] || S.qualify(model.table_name, :name)\n display = display.call(callback_opts) if display.respond_to?(:call)\n limit = opts[:limit] || 10\n limit = limit.call(callback_opts) if limit.respond_to?(:call)\n opts[:filter] ||= lambda{|ds1, _| ds1.where(S.ilike(display, \"%#{ds.escape_like(query)}%\"))}\n ds = opts[:filter].call(ds, callback_opts)\n ds = ds.select(S.join([S.qualify(model.table_name, model.primary_key), display], ' - ').as(:v)).\n limit(limit)\n ds = yield ds if block_given?\n ds.map(:v)\n end",
"def sync_filters\n super.presence || default_sync_filters\n end",
"def show_schema(api_or_ar)\n\t\t\tif api_or_ar == \"api\"\n\t\t\t\t\"#{@name} Api Model: #{@api_model.schema}\"\n\t\t\telsif api_or_ar == \"ar\"\n\t\t\t\t\"#{@name} AR Model: #{@ar_model.schema}\"\n\t\t\telse\n\t\t\t\tputs \"show_schema arg requires a String with a value of either 'api' or 'ar'\"\n\t\t\tend\n\t\tend",
"def filter\n do_authorize_class\n get_project_if_exists\n do_authorize_instance(:show, @project) unless @project.nil?\n\n filter_response, opts = Settings.api_response.response_advanced(\n api_filter_params,\n list_permissions,\n Site,\n Site.filter_settings\n )\n respond_filter(filter_response, opts)\n end",
"def filter_by_visibility(solr_parameters)\n # add a new solr facet query ('fq') parameter that limits results to those with a 'public_b' field of 1\n solr_parameters[:fq] ||= []\n fq = viewable_metadata_visibilities.map { |visibility| \"(visibility_ssi:\\\"#{visibility}\\\")\" }.join(\" OR \")\n solr_parameters[:fq] << \"(#{fq})\"\n end",
"def filter_rules?\n @filter_rules_predicate ||= schema_dsl.filter_rules?\n end",
"def index\n if @profile == current_profile\n @filters = @profile.filters\n else\n @filters = @profile.filters.opt_in\n end\n end",
"def show_only_public_files(solr_parameters, user_parameters)\n solr_parameters[:fq] ||= []\n solr_parameters[:fq] += [\"-#{Solrizer.solr_name('read_access_group', :symbol)}:#{Hydranorth::AccessControls::InstitutionalVisibility::UNIVERSITY_OF_ALBERTA}\"]\n end",
"def paginate_by_filter(opts={})\n paginate(filter_and_sort_options(opts))\n end",
"def types_filter\n types_filter = self[:types_filter] || []\n return types_filter if types_filter.any?\n options.fetch 'types_filter', []\n end",
"def apply_permissive_visibility_filter(solr_params)\n return unless current_exhibit\n return if !blacklight_params[:public] && scope&.context&.key?(:current_ability) && scope.context[:current_ability].can?(:curate, current_exhibit)\n\n solr_params.append_filter_query \"-#{blacklight_config.document_model.visibility_field(current_exhibit)}:false\"\n end",
"def apply_filters(query)\n query\n end",
"def apply_schema(name, type, options={})\n raise NotImplementedError\n end",
"def where_restrict_organisation(*args)\n args.map{|klass| \"#{klass.table_name}.organisation_id = #{Thread.current[:organisation_id]}\"}.join(\" AND \")\n end",
"def index\n # @offerings = Offering.all\n if params[:query].present?\n # sql_query = \" \\\n # offerings.name @@ :query \\\n # OR offerings.genre @@ :query \\\n # \"\n # @offerings = Offering.where(sql_query, query: \"%#{params[:query]}%\")\n @offerings = policy_scope(Offering.search_by_genre_and_name(\"%#{params[:query]}%\"))\n else\n @offerings = policy_scope(Offering)\n @offering = Offering.all\n end\n end",
"def autofilter\n true\n end",
"def schema_get_with_http_info(opts = {})\n if @api_client.config.debugging\n @api_client.config.logger.debug \"Calling API: SchemaApi.schema_get ...\"\n end\n # resource path\n local_var_path = \"/schema\".sub('{format}','json')\n\n # query parameters\n query_params = {}\n query_params[:'model'] = opts[:'model'] if !opts[:'model'].nil?\n\n # header parameters\n header_params = {}\n\n # HTTP header 'Accept' (if needed)\n local_header_accept = ['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript']\n local_header_accept_result = @api_client.select_header_accept(local_header_accept) and header_params['Accept'] = local_header_accept_result\n\n # HTTP header 'Content-Type'\n local_header_content_type = ['application/json', 'application/x-www-form-urlencoded']\n header_params['Content-Type'] = @api_client.select_header_content_type(local_header_content_type)\n\n # form parameters\n form_params = {}\n\n # http body (model)\n post_body = nil\n auth_names = []\n data, status_code, headers = @api_client.call_api(:GET, local_var_path,\n :header_params => header_params,\n :query_params => query_params,\n :form_params => form_params,\n :body => post_body,\n :auth_names => auth_names,\n :return_type => 'Object')\n if @api_client.config.debugging\n @api_client.config.logger.debug \"API called: SchemaApi#schema_get\\nData: #{data.inspect}\\nStatus code: #{status_code}\\nHeaders: #{headers}\"\n end\n return data, status_code, headers\n end",
"def table_filtered?(schema_name, table_type)\n %w[information_schema pg_catalog].include?(schema_name) || table_type !~ /TABLE/i\n end"
] |
[
"0.60258114",
"0.5587685",
"0.53232336",
"0.53136456",
"0.5252506",
"0.5210141",
"0.5191124",
"0.516309",
"0.51339316",
"0.5003855",
"0.49896494",
"0.49556172",
"0.49474233",
"0.4935256",
"0.49325764",
"0.49156702",
"0.48878336",
"0.4864185",
"0.4861228",
"0.48379704",
"0.4834989",
"0.48070335",
"0.48055282",
"0.48016536",
"0.4799341",
"0.47961822",
"0.47703597",
"0.47490007",
"0.47466198",
"0.47258058",
"0.47231382",
"0.47104433",
"0.4682598",
"0.46684104",
"0.46668574",
"0.46464917",
"0.46305126",
"0.46274662",
"0.46093437",
"0.46077543",
"0.46061686",
"0.4605032",
"0.46014053",
"0.4593634",
"0.4593634",
"0.45927858",
"0.45888233",
"0.457262",
"0.45636582",
"0.45636582",
"0.45623824",
"0.45609042",
"0.45594245",
"0.45387867",
"0.45380703",
"0.4531712",
"0.45183912",
"0.4515667",
"0.4513173",
"0.45091105",
"0.45075154",
"0.45002994",
"0.44978887",
"0.4489774",
"0.44861013",
"0.4480493",
"0.44705543",
"0.44691873",
"0.44662037",
"0.44656387",
"0.44634786",
"0.4461214",
"0.44591632",
"0.4456767",
"0.44567484",
"0.44562194",
"0.44562194",
"0.44519886",
"0.44455758",
"0.44455618",
"0.44448715",
"0.4442611",
"0.4428261",
"0.44216233",
"0.44178584",
"0.44166097",
"0.4415766",
"0.44155383",
"0.44122213",
"0.44078013",
"0.4387967",
"0.43839845",
"0.4375084",
"0.4375074",
"0.43747142",
"0.4373513",
"0.43700668",
"0.4369359",
"0.4368803",
"0.43678138"
] |
0.72058666
|
0
|
Set up data structures shared by all postgres adapters.
|
def initialize_postgres_adapter
  # Per-Database caches, keyed by table name.
  @primary_keys = {}
  @primary_key_sequences = {}
  @supported_types = {}
  # Duplicate the default OID-to-Ruby conversion procs so that
  # per-Database changes don't mutate the shared constant.
  procs = @conversion_procs = CONVERSION_PROCS.dup
  # 1184 (timestamptz) and 1114 (timestamp) are fixed pg_type OIDs;
  # route both through the timezone-aware timestamp converter.
  procs[1184] = procs[1114] = method(:to_application_timestamp)
end
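
For context, a minimal usage sketch (not part of the original record; the connection URL is hypothetical and a reachable PostgreSQL database is assumed): once OIDs 1184 and 1114 point at the timezone-aware converter, fetched timestamp values arrive as Time objects.

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical URL

# now() yields timestamptz (OID 1184), localtimestamp yields
# timestamp (OID 1114); both pass through to_application_timestamp.
row = DB.fetch("SELECT now() AS ts, localtimestamp AS lts").first
row[:ts].class  # => Time
row[:lts].class # => Time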
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def setup_database\n require \"sequel\"\n\n options = {\n :max_connections => 16,\n :pool_timeout => 240\n }\n\n database_config = YAML.load_file(\"#{$intrigue_basedir}/config/database.yml\")\n database_host = database_config[$intrigue_environment][\"host\"] || \"localhost\"\n database_port = database_config[$intrigue_environment][\"port\"] || 5432\n database_user = database_config[$intrigue_environment][\"user\"]\n database_pass = database_config[$intrigue_environment][\"password\"]\n database_name = database_config[$intrigue_environment][\"database\"]\n database_debug = database_config[$intrigue_environment][\"debug\"]\n\n if database_pass \n $db = Sequel.connect(\"postgres://#{database_user}:#{database_pass}@#{database_host}:#{database_port}/#{database_name}\", options)\n else\n $db = Sequel.connect(\"postgres://#{database_user}@#{database_host}:#{database_port}/#{database_name}\", options)\n end\n\n $db.loggers << Logger.new($stdout) if database_debug\n\n # Allow datasets to be paginated\n $db.extension :pagination\n Sequel.extension :pg_json_ops\n Sequel.extension :migration\nend",
"def setup_connection(conn)\n conn = super(conn)\n conn.extend(Sequel::Swift::Postgres::AdapterMethods)\n conn.db = self\n conn.apply_connection_settings\n conn\n end",
"def setup_for_postgres\n ActiveRecord::Base.establish_connection adapter: \"postgresql\", \n database: \"active_symbol_test\",\n :username=>:active_symbol_test, :password=>\"active_symbol_test\"\n setup_db\nend",
"def do_adapter_specific_setup; end",
"def adapter_initialize\n opts = @opts\n @mutex = Mutex.new\n @sqls = opts[:sqls] || []\n @shared_adapter = false\n\n case db_type = opts[:host] \n when String, Symbol\n db_type = db_type.to_sym\n unless mod = Sequel.synchronize{SHARED_ADAPTER_MAP[db_type]}\n begin\n require \"sequel/adapters/shared/#{db_type}\"\n rescue LoadError\n else\n mod = Sequel.synchronize{SHARED_ADAPTER_MAP[db_type]}\n end\n end\n\n if mod\n @shared_adapter = true\n extend(mod::DatabaseMethods)\n extend_datasets(mod::DatasetMethods)\n if mod.respond_to?(:mock_adapter_setup)\n mod.mock_adapter_setup(self)\n end\n end\n end\n\n unless @shared_adapter\n extend UnmodifiedIdentifiers::DatabaseMethods\n extend_datasets UnmodifiedIdentifiers::DatasetMethods\n end\n\n self.autoid = opts[:autoid]\n self.columns = opts[:columns]\n self.fetch = opts[:fetch]\n self.numrows = opts[:numrows]\n extend(opts[:extend]) if opts[:extend]\n sqls\n end",
"def setup_databases\n postgres_user = app_name\n postgres_pass = SecureRandom.urlsafe_base64\n postgres_port = find_open_port\n redis_port = find_open_port\n\n add_env \"REDIS_URL\",\n \"redis://localhost:#{redis_port}\"\n\n add_env \"DATABASE_URL\",\n \"postgres:///#{postgres_user}:#{postgres_pass}@localhost:#{postgres_port}\",\n skip_secrets: true\n\n template \"database.yml\",\n \"#{app_name}/config/database.yml\",\n force: true\n\n template \"docker-compose.yml\",\n \"#{app_name}/docker-compose.yml\",\n postgres_user: postgres_user,\n postgres_pass: postgres_pass,\n postgres_port: postgres_port,\n redis_port: redis_port\n end",
"def setup\n @dbcount = 1\n @dbids = Array.new\n (0..@dbcount).each do |i|\n @dbids[i] = (0...8).map{('a'..'z').to_a[rand(26)]}.join\n create_or_connect_db(@dbids[i])\n end\n end",
"def setup\n clear_db\n\n @url = Wgit::Url.new(Wgit::DatabaseDefaultData.url)\n @doc = Wgit::Document.new(Wgit::DatabaseDefaultData.doc)\n\n records = 3\n\n @urls = []\n records.times do\n @urls << Wgit::Url.new(Wgit::DatabaseDefaultData.url)\n end\n\n @docs = []\n records.times do\n @docs << Wgit::Document.new(Wgit::DatabaseDefaultData.doc)\n end\n end",
"def db_setup(options = {}, &block)\n # noinspection RubySimplifyBooleanInspection\n Sequel.single_threaded = (options.andand[:single_threaded] == true)\n\n db_connect.tap do |db|\n if options[:use_models]\n Sequel::Model.db = db\n\n if options[:use_prepares]\n Sequel::Model.plugin :prepared_statements\n Sequel::Model.plugin :prepared_statements_associations\n end\n\n require_relative '../models.rb'\n end\n\n if db.respond_to?(:stream_all_queries) && options[:stream_all_queries]\n db.stream_all_queries = options[:stream_all_queries]\n end\n\n block.call(db) if block_given?\n end\n end",
"def init_global_data(settings)\n Oj.load(File.read('assets/config/rune.rb.json')).each do |key, value|\n settings[key.upcase.to_sym] = value\n end\n settings[:STORAGE_TYPE] = settings[:STORAGE_TYPE].to_sym\n settings[:CONNECTION] = case settings[:STORAGE_TYPE]\n when :sqlite\n Sequel.sqlite('assets/sample.db3', pragma: :foreign_keys)\n when :postgres\n else Sequel.sqlite('assets/sample.db3')\n end\n settings[:PROTOCOL] = settings[:PROTOCOL].to_sym\n end",
"def initialize_datasets\n @datasets = gateways.each_with_object({}) do |(key, gateway), h|\n h[key] = gateway.schema if config.gateways[key][:infer_relations]\n end\n end",
"def init\n adapter.setup_connection_specification_name\n adapter.process_excluded_models\n end",
"def setup\n @factory = ::RGeo::Cartesian.preferred_factory(:srid => 3785)\n @geographic_factory = ::RGeo::Geographic.spherical_factory(:srid => 4326)\n cleanup_tables\n cleanup_caches\n end",
"def setup_data_dir\n changes = []\n\n case distro\n\n when RHEL\n unless pg_data_dir == pg_default_data_dir\n changes = rput( 'etc/sysconfig/pgsql/postgresql', user: :root )\n end\n\n sudo_if( \"[ ! -d '#{pg_data_dir}/base' ]\" ) do\n sudo <<-SH\n mkdir -p #{pg_data_dir}\n chown postgres:postgres #{pg_data_dir}\n chmod 700 #{pg_data_dir}\n SH\n pg_initdb\n end\n\n when Debian\n unless pg_data_dir == pg_default_data_dir\n sudo <<-SH\n if [ ! -d '#{pg_data_dir}/base' ]; then\n mkdir -p #{pg_data_dir}\n chown postgres:postgres #{pg_data_dir}\n chmod 700 #{pg_data_dir}\n mv #{pg_default_data_dir}/* #{pg_data_dir}/\n fi\n SH\n end\n else\n raise ContextError, \"Distro #{distro.class.name} not supported\"\n end\n\n changes\n end",
"def initialize\n local_db = ActiveRecord::Base.connection_config\n @raw_connection = PG.connect(dbname: local_db[:database], host: 'localhost', port: local_db[:port])\n\n @source_db_config = {\n dbname: ENV[\"DB_NAME\"] || 'dd_demo',\n host: ENV[\"DB_HOST\"] || 'localhost'\n }\n\n raise \"SOURCE_BASE_URL missing!\" unless ENV['SOURCE_BASE_URL']\n\n @source_base_url = ENV[\"SOURCE_BASE_URL\"]\n @uploads_path = ENV['UPLOADS_PATH']\n @uploader = ImportScripts::Uploader.new\n\n if ENV['SOURCE_CDN']\n @source_cdn = ENV['SOURCE_CDN']\n end\n\n local_version = @raw_connection.exec(\"select max(version) from schema_migrations\")\n local_version = local_version.first['max']\n source_version = source_raw_connection.exec(\"select max(version) from schema_migrations\")\n source_version = source_version.first['max']\n\n if local_version != source_version\n raise \"DB schema mismatch. Databases must be at the same migration version. Local is #{local_version}, other is #{source_version}\"\n end\n\n @encoder = PG::TextEncoder::CopyRow.new\n\n @merged_user_ids = []\n @tags = {}\n @tag_groups = {}\n @uploads = {}\n @post_actions = {}\n @notifications = {}\n @badge_groupings = {}\n @badges = {}\n @email_tokens = {}\n\n @auto_group_ids = Group::AUTO_GROUPS.values\n\n # add your authorized extensions here:\n SiteSetting.authorized_extensions = ['jpg', 'jpeg', 'png', 'gif'].join('|')\n\n @sequences = {}\n end",
"def common_bootstrap\n # postgresql package install starts postgresql, so stop it\n rsudo \"#{rubber_env.postgresql_ctl} stop\" rescue nil\n\n # After everything installed on machines, we need the source tree\n # on hosts in order to run rubber:config for bootstrapping the db\n rubber.update_code_for_bootstrap\n\n # Gen just the conf for the given postgresql role\n rubber.run_config(:file => \"role/(db|postgresql)/\", :force => true, :deploy_path => release_path)\n\n # reconfigure postgresql so that it sets up data dir in /mnt with correct files\n dirs = [rubber_env.postgresql_data_dir, rubber_env.postgresql_archive_dir]\n sudo_script 'reconfigure-postgresql', <<-ENDSCRIPT\n mkdir -p #{dirs.join(' ')}\n chown -R postgres:postgres #{dirs.join(' ')}\n chmod 700 #{rubber_env.postgresql_data_dir}\n ENDSCRIPT\n end",
"def initialize(opts = OPTS)\n @opts ||= opts\n @opts = connection_pool_default_options.merge(@opts)\n @loggers = Array(@opts[:logger]) + Array(@opts[:loggers])\n @opts[:servers] = {} if @opts[:servers].is_a?(String)\n @sharded = !!@opts[:servers]\n @opts[:adapter_class] = self.class\n @opts[:single_threaded] = @single_threaded = typecast_value_boolean(@opts.fetch(:single_threaded, Sequel.single_threaded))\n @default_string_column_size = @opts[:default_string_column_size] || DEFAULT_STRING_COLUMN_SIZE\n\n @schemas = {}\n @prepared_statements = {}\n @transactions = {}\n @symbol_literal_cache = {}\n\n @timezone = nil\n\n @dataset_class = dataset_class_default\n @cache_schema = typecast_value_boolean(@opts.fetch(:cache_schema, true))\n @dataset_modules = []\n @loaded_extensions = []\n @schema_type_classes = SCHEMA_TYPE_CLASSES.dup\n\n self.sql_log_level = @opts[:sql_log_level] ? @opts[:sql_log_level].to_sym : :info\n self.log_warn_duration = @opts[:log_warn_duration]\n self.log_connection_info = typecast_value_boolean(@opts[:log_connection_info])\n\n @pool = ConnectionPool.get_pool(self, @opts)\n\n reset_default_dataset\n adapter_initialize\n if typecast_value_boolean(@opts.fetch(:identifier_mangling, true))\n # SEQUEL5: Remove\n extension(:_deprecated_identifier_mangling)\n end\n\n unless typecast_value_boolean(@opts[:keep_reference]) == false\n Sequel.synchronize{::Sequel::DATABASES.push(self)}\n end\n Sequel::Database.run_after_initialize(self)\n if typecast_value_boolean(@opts[:preconnect]) && @pool.respond_to?(:preconnect, true)\n concurrent = typecast_value_string(@opts[:preconnect]) == \"concurrently\"\n @pool.send(:preconnect, concurrent)\n end\n end",
"def postgres\n @postgres ||= Connections::Postgres.new(options: @postgres_options)\n end",
"def setup_storage\n ActiveRecord::Base.establish_connection(\n adapter: 'sqlite3',\n database: 'meshchat.sqlite3',\n pool: 128\n )\n\n create_database\n end",
"def setup_shared_connection_pool\n handler = ActiveRecord::Base.connection_handler\n\n handler.connection_pool_names.each do |name|\n pool_manager = handler.send(:connection_name_to_pool_manager)[name]\n pool_manager.shard_names.each do |shard_name|\n writing_pool_config = pool_manager.get_pool_config(ActiveRecord.writing_role, shard_name)\n @saved_pool_configs[name][shard_name] ||= {}\n pool_manager.role_names.each do |role|\n next unless pool_config = pool_manager.get_pool_config(role, shard_name)\n next if pool_config == writing_pool_config\n\n @saved_pool_configs[name][shard_name][role] = pool_config\n pool_manager.set_pool_config(role, shard_name, writing_pool_config)\n end\n end\n end\n end",
"def setup_object_for_schema\n data_collection_names.each.with_index do |data_collection_name, i|\n # define getter\n self.define_singleton_method(var_names[i]) do\n @data_collections[i]\n end\n # define setter\n self.define_singleton_method((data_collection_names[i].to_s + \"=\").to_sym) do |val|\n @data_collections[i] = val\n end\n end\n \n data_object_names.each.with_index do |data_object_name, i|\n # getters\n self.define_singleton_method(data_object_name) do\n @data_objects[i]\n end\n # setters\n self.define_singleton_method((data_object_name.to_s + \"=\").to_sym) do |val|\n @data_objects[i] = val\n end\n end\n \n end",
"def setup!(scheduler = Scheduler)\n self.master_models ||= DEFAULT_MASTER_MODELS\n self.environment ||= (defined?(RAILS_ENV) ? RAILS_ENV : 'development')\n self.sticky_slave ||= false\n \n master = ActiveRecord::Base\n slaves = init_slaves\n raise \"No slaves databases defined for environment: #{self.environment}\" if slaves.empty?\n master.send :include, MultiDb::ActiveRecordExtensions\n ActiveRecord::Observer.send :include, MultiDb::ObserverExtensions\n master.connection_proxy = new(master, slaves, scheduler)\n master.logger.info(\"** multi_db with master and #{slaves.length} slave#{\"s\" if slaves.length > 1} loaded.\")\n end",
"def setup_tables(&block)\n return nil unless block_given?\n self.class_eval(&block)\n self.table_settings \n end",
"def setup\n clear_db\n\n @db = Wgit::Database.new\n end",
"def setup_database!\n mapper = Praxis::Application.instance.plugins[:praxis_mapper]\n Sequel.connect(mapper.config.repositories[\"default\"]['connection_settings'].dump) do |db|\n db.create_table! :people do\n primary_key :id\n string :name\n end\n end\nend",
"def setup_shared_connection_pool\n return unless ActiveRecord::TestFixtures.respond_to?(:setup_shared_connection_pool)\n @legacy_saved_pool_configs ||= Hash.new { |hash, key| hash[key] = {} }\n @saved_pool_configs ||= Hash.new { |hash, key| hash[key] = {} }\n\n ActiveRecord::TestFixtures.instance_method(:setup_shared_connection_pool).bind(self).call\n end",
"def setup_db\n @database.create_table :merchants do\n primary_key :id\n String :name\n end\n\n @database.create_table :cards do\n primary_key :id\n String :token, :unique => true, :null => false\n Integer :limit, :null => false\n Integer :balance, :null => false\n Integer :velocity_limit\n Integer :velocity_interval\n end\n\n @database.create_table :txns do\n primary_key :id\n Integer :card_id, :null => false\n Integer :merchant_id, :null => false\n Integer :amount, :null => false\n DateTime :created_at, :null => false\n end\n\n @database.create_table :locks do\n String :id, :unique => true, :null => false\n DateTime :created_at\n end\n\n return true\n end",
"def setup!\n configuration = RfmAdaptor::Configuration.new(:database)\n database = configuration.__send__(self.database_name)\n raise \"Database: #{self.database_name} configuration file not exists.\" if database.blank?\n raise \"Database: #{self.database_name} environment[#{self.env}] not exists.\" if database[self.env].blank?\n self.attributes = database[self.env]\n self.setup_state = true\n end",
"def dbs_init\n @dbs_hash = Dir[\"#{dbs_store}/*.db\"]\n .map do |dbfile|\n File.open(dbfile){|f| JSON.parse(f.read, symbolize_names: true)}\n end\n .inject({}) do |h, db|\n h.update({\n db[:name].to_sym => LaPack::const_get(db[:clazz]).new(self, db[:params])\n })\n end\n end",
"def setup!\n ActiveRecord::Base.configurations[configuration_name] = db_config\n ActiveRecord::Base.establish_connection(configuration_name).connection\n self\n end",
"def setup_env\n @databases = Influx::Config::DATABASES\n @influxdb = Influx::Config::CLIENT\nend",
"def setup_connection\n db = ENV['DB'].blank?? 'mysql' : ENV['DB']\n\n configurations = YAML.load_file(File.join(File.dirname(__FILE__), '..', '..', 'spec', 'database.yml'))\n raise \"no configuration for '#{db}'\" unless configurations.key? db\n configuration = configurations[db]\n\n ActiveRecord::Base.logger = Logger.new(STDOUT) if $0 == 'irb'\n puts \"using #{configuration['adapter']} adapter\" unless ENV['DB'].blank?\n\n gem 'sqlite3-ruby' if 'sqlite3' == db\n\n ActiveRecord::Base.establish_connection(configuration)\n ActiveRecord::Base.configurations = { db => configuration }\n #prepare ActiveRecord::Base.connection\n\n unless Object.const_defined?(:QUOTED_TYPE)\n Object.send :const_set, :QUOTED_TYPE, ActiveRecord::Base.connection.quote_column_name('type')\n end\nend",
"def initialize_drivers\n\t\tself.drivers = []\n\t\ttdrivers = %W{ postgresql mysql sqlite3 }\n\t\ttdrivers.each do |driver|\n\t\t\tbegin\n\t\t\t\tActiveRecord::Base.default_timezone = :utc\n\t\t\t\tActiveRecord::Base.establish_connection(:adapter => driver)\n\t\t\t\tif(self.respond_to?(\"driver_check_#{driver}\"))\n\t\t\t\t\tself.send(\"driver_check_#{driver}\")\n\t\t\t\tend\n\t\t\t\tActiveRecord::Base.remove_connection\n\t\t\t\tself.drivers << driver\n\t\t\trescue ::Exception\n\t\t\tend\n\t\tend\n\n\t\tif(not self.drivers.empty?)\n\t\t\tself.driver = self.drivers[0]\n\t\tend\n\n\t\t# Database drivers can reset our KCODE, do not let them\n\t\t$KCODE = 'NONE' if RUBY_VERSION =~ /^1\\.8\\./\n\tend",
"def initialize_db_schema\n @db.exec(\n 'create table if not exists nodes\n (\n id SERIAL PRIMARY KEY,\n host VARCHAR(256) UNIQUE,\n last_seen TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\n current_report INTEGER\n );\n\n create table if not exists collections\n (\n id SERIAL PRIMARY KEY,\n collection VARCHAR(256) NOT NULL,\n archived BOOL DEFAULT FALSE\n );\n\n create table if not exists reports\n (\n id SERIAL PRIMARY KEY,\n node_id INTEGER NOT NULL,\n file_handle INTEGER,\n status INTEGER NOT NULL,\n collection INTEGER NOT NULL,\n time TIMESTAMP NOT NULL,\n resources_changed INTEGER NOT NULL,\n resources_failed INTEGER NOT NULL,\n resources_total INTEGER NOT NULL,\n runtime REAL NOT NULL,\n new_report BOOL DEFAULT FALSE,\n FOREIGN KEY (node_id) REFERENCES nodes (id),\n FOREIGN KEY (collection) REFERENCES collections(id)\n );\n\n create table if not exists schemaversion\n (\n id SERIAL PRIMARY KEY,\n complete BOOL DEFAULT FALSE,\n comment VARCHAR(256) NOT NULL\n );\n create table if not exists reportdata\n (\n id SERIAL PRIMARY KEY,\n report bytea\n );\n '\n )\n end",
"def setup_database\n Hanami::Model.load!\n end",
"def setup\n executeSQL(CREATE_ASSERTED_STATEMENTS_TABLE % @internedId)\n executeSQL(CREATE_ASSERTED_TYPE_STATEMENTS_TABLE % @internedId)\n executeSQL(CREATE_QUOTED_STATEMENTS_TABLE % @internedId)\n executeSQL(CREATE_NS_BINDS_TABLE % @internedId)\n executeSQL(CREATE_LITERAL_STATEMENTS_TABLE % @internedId)\n \n # Create indicies\n {\n asserted_table => {\n \"#{@internedId}_A_termComb_index\" => %w(termComb),\n \"#{@internedId}_A_s_index\" => %w(subject),\n \"#{@internedId}_A_p_index\" => %w(predicate),\n \"#{@internedId}_A_o_index\" => %w(object),\n \"#{@internedId}_A_c_index\" => %w(context),\n },\n asserted_type_table => {\n \"#{@internedId}_T_termComb_index\" => %w(termComb),\n \"#{@internedId}_T_member_index\" => %w(member),\n \"#{@internedId}_T_klass_index\" => %w(klass),\n \"#{@internedId}_T_c_index\" => %w(context),\n },\n literal_table => {\n \"#{@internedId}_L_termComb_index\" => %w(termComb),\n \"#{@internedId}_L_s_index\" => %w(subject),\n \"#{@internedId}_L_p_index\" => %w(predicate),\n \"#{@internedId}_L_c_index\" => %w(context),\n },\n quoted_table => {\n \"#{@internedId}_Q_termComb_index\" => %w(termComb),\n \"#{@internedId}_Q_s_index\" => %w(subject),\n \"#{@internedId}_Q_p_index\" => %w(predicate),\n \"#{@internedId}_Q_o_index\" => %w(object),\n \"#{@internedId}_Q_c_index\" => %w(context),\n },\n namespace_binds => {\n \"#{@internedId}_uri_index\" => %w(uri),\n }\n }.each_pair do |tablename, indicies|\n indicies.each_pair do |index, columns|\n executeSQL(\"CREATE INDEX #{index} on #{tablename} ('#{columns.join(', ')}')\")\n end\n end\n end",
"def init_item_data(settings)\n case settings[:STORAGE_TYPE]\n when :sqlite\n settings[:ITEM_SPAWNS] = settings[:CONNECTION][:item_spawns]\n settings[:ITEM_DEFINITIONS] = settings[:CONNECTION][:item_definitions]\n settings[:ITEM_EQUIPMENT] = settings[:CONNECTION][:item_equipment]\n when :postgres\n else \"Invalid storage mode! #{settings[:RRB_STORAGE_TYPE]}\"\n end\n rescue StandardError => e\n settings[:LOG].error 'An error occurred while loading game database!'\n settings[:LOG].error e\n settings[:LOG].error e.backtrace&.join(\"\\n\")\n end",
"def pg_configure\n files = %w[ pg_hba.conf pg_ident.conf postgresql.conf ]\n files += %w[ environment pg_ctl.conf ] if distro.is_a?( Debian )\n files = files.map { |f| File.join( 'postgresql', f ) }\n rput( *files, pg_config_dir, user: 'postgres' )\n end",
"def install_initial_state\n db = kernel.resources.db\n db.default.submissions = []\n db.default.valid_submissions = []\n db.default.challengers = []\n db.default.people = (1..5).collect{|i| user_tuple(i, true) }\n db.default.challengers = (1..5).collect{|i| challenger_tuple(i) }\n end",
"def install_databases\n package 'sqlite3'\n package 'libsqlite3-dev'\n\n package 'mysql-client'\n package 'mysql-server'\n package 'libmysql-dev', 'libmysqlclient-dev', /^libmysqlclient\\d*-dev$/\n\n package 'postgresql-client'\n package 'libpq-dev'\n\n # TODO: NoSQL stores.\n end",
"def initialize(config)\n self.connection = ConnectionExtenders.db_connect config\n self.config = config\n self.manual_primary_keys = {}\n end",
"def parse_postgresql_overrides\n Gitlab['patroni']['postgresql'] ||= {}\n POSTGRESQL_DCS_PARAMETERS.each do |key|\n Gitlab['patroni']['postgresql'][key] ||= postgresql_setting(key)\n end\n end",
"def initialize\n # Create temporary data directory\n create_data_dir\n\n # Init connection to ADS\n init_ads\n\n # Initialize Data science toolkit\n init_dstk\n\n # Init connection to SQLite\n # init_sqlite3\n end",
"def setup\n t = Thread.new { build_directories_records }\n @adapter = initialize_adapter\n t.join\n end",
"def setup\n begin\n create_campaign_table_if_not_exist\n seed_data\n rescue Exception => e\n raise \"Database setup failed with error #{e}\"\n ensure\n @connection.close\n end\n end",
"def configure(conf)\n super\n @tables = Hash.new\n end",
"def configure_connection\n #if encoding = config[:encoding]\n # The client_encoding setting is set by the driver and should not be altered.\n # If the driver detects a change it will abort the connection.\n # see http://jdbc.postgresql.org/documentation/91/connect.html\n # self.set_client_encoding(encoding)\n #end\n self.client_min_messages = config[:min_messages] || 'warning'\n self.schema_search_path = config[:schema_search_path] || config[:schema_order]\n\n # Use standard-conforming strings if available so we don't have to do the E'...' dance.\n set_standard_conforming_strings\n\n # If using Active Record's time zone support configure the connection to return\n # TIMESTAMP WITH ZONE types in UTC.\n # (SET TIME ZONE does not use an equals sign like other SET variables)\n if ActiveRecord::Base.default_timezone == :utc\n execute(\"SET time zone 'UTC'\", 'SCHEMA')\n elsif tz = local_tz\n execute(\"SET time zone '#{tz}'\", 'SCHEMA')\n end unless redshift?\n\n # SET statements from :variables config hash\n # http://www.postgresql.org/docs/8.3/static/sql-set.html\n (config[:variables] || {}).map do |k, v|\n if v == ':default' || v == :default\n # Sets the value to the global or compile default\n execute(\"SET SESSION #{k} TO DEFAULT\", 'SCHEMA')\n elsif ! v.nil?\n execute(\"SET SESSION #{k} TO #{quote(v)}\", 'SCHEMA')\n end\n end\n end",
"def configure_connection\n #if encoding = config[:encoding]\n # The client_encoding setting is set by the driver and should not be altered.\n # If the driver detects a change it will abort the connection.\n # see http://jdbc.postgresql.org/documentation/91/connect.html\n # self.set_client_encoding(encoding)\n #end\n self.client_min_messages = config[:min_messages] || 'warning'\n self.schema_search_path = config[:schema_search_path] || config[:schema_order]\n\n # Use standard-conforming strings if available so we don't have to do the E'...' dance.\n set_standard_conforming_strings\n\n # If using Active Record's time zone support configure the connection to return\n # TIMESTAMP WITH ZONE types in UTC.\n # (SET TIME ZONE does not use an equals sign like other SET variables)\n if ActiveRecord::Base.default_timezone == :utc\n execute(\"SET time zone 'UTC'\", 'SCHEMA')\n elsif tz = local_tz\n execute(\"SET time zone '#{tz}'\", 'SCHEMA')\n end unless redshift?\n\n # SET statements from :variables config hash\n # http://www.postgresql.org/docs/8.3/static/sql-set.html\n (config[:variables] || {}).map do |k, v|\n if v == ':default' || v == :default\n # Sets the value to the global or compile default\n execute(\"SET SESSION #{k} TO DEFAULT\", 'SCHEMA')\n elsif ! v.nil?\n execute(\"SET SESSION #{k} TO #{quote(v)}\", 'SCHEMA')\n end\n end\n end",
"def create_structure\n # Create variables table\n @db.execute(\"create table if not exists variables(key varchar(255) PRIMARY KEY, value TEXT);\");\n\n # Create quotes table\n @db.execute(\"create table if not exists quotes (id INTEGER PRIMARY KEY, quote TEXT);\")\n\n # Remember NSFW links\n @db.execute(\"create table if not exists nsfwlink_status (id INTEGER PRIMARY KEY, user VARCHAR(15), nsfw TINYINT DEFAULT 0);\")\n\n # Allow polls\n @poll_db.execute(\"create table if not exists polls (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, user VARCHAR(30), question VARCHAR(255), date TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL)\")\n\n # Voting alternatives for polls\n @poll_db.execute(\"create table if not exists answers (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, poll_id INTEGER REFERENCES polls(id), number INTEGER, answer VARCHAR(255));\")\n\n # Votes for polls\n @poll_db.execute(\"create table if not exists votes (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, usermask VARCHAR(255), poll_id INTEGER REFERENCES polls(id), answer_id INTEGER REFERENCES answers(id));\")\n\n\n if @debug == true\n puts \"Should have created tables by now\"\n end\n end",
"def initialize_database\n config = YAML::load(IO.read(Antfarm::Helpers.defaults_file))\n # Database setup based on adapter specified\n if config && config[ANTFARM_ENV] && config[ANTFARM_ENV].has_key?('adapter')\n if config[ANTFARM_ENV]['adapter'] == 'sqlite3'\n config[ANTFARM_ENV]['database'] = Antfarm::Helpers.db_file(ANTFARM_ENV)\n elsif config[ANTFARM_ENV]['adapter'] == 'postgres'\n config[ANTFARM_ENV]['database'] = ANTFARM_ENV\n else\n # If adapter specified isn't one of sqlite3 or postgresql,\n # default to SQLite3 database configuration.\n config = nil\n end\n else\n # If the current environment configuration doesn't specify a\n # database adapter, default to SQLite3 database configuration.\n config = nil\n end\n # Default to SQLite3 database configuration\n config ||= { ANTFARM_ENV => { 'adapter' => 'sqlite3', 'database' => Antfarm::Helpers.db_file(ANTFARM_ENV) } }\n if config[ANTFARM_ENV]['adapter'] == 'postgres'\n DataMapper.setup(:default, \"postgres:///#{config[ANTFARM_ENV]['database']}\")\n else\n DataMapper.setup(:default, \"sqlite3://#{config[ANTFARM_ENV]['database']}\")\n end\n end",
"def setup\n @mike = users(:mike)\n @admin = users(:frank)\n @issue = issues(:one)\n @update = updates(:one)\n end",
"def initialize(info = {})\n super\n\n # Register the options that all Postgres exploits may make use of.\n register_options(\n [\n Opt::RHOST,\n Opt::RPORT(5432),\n OptString.new('DATABASE', [ true, 'The database to authenticate against', 'template1']),\n OptString.new('USERNAME', [ true, 'The username to authenticate as', 'postgres']),\n OptString.new('PASSWORD', [ false, 'The password for the specified username. Leave blank for a random password.', 'postgres']),\n OptBool.new('VERBOSE', [false, 'Enable verbose output', false]),\n OptString.new('SQL', [ false, 'The SQL query to execute', 'select version()']),\n OptBool.new('RETURN_ROWSET', [false, \"Set to true to see query result sets\", true])\n ], Msf::Exploit::Remote::Postgres)\n\n register_autofilter_ports([ 5432 ])\n register_autofilter_services(%W{ postgres })\n end",
"def init_setup(adapter)\n Namespace.register :skos, SKOS\n Namespace.register :aeria, AERIA\n load_namespaces(adapter)\n ObjectManager.construct_classes\n @model = adapter.model\n @prefixes = Adapter.get_prefixes(adapter)\n end",
"def initialize config\n connect config\n create_table_if_not_exists!\n end",
"def setup\r\n setup_wiki\r\n setup_host_map\r\n setup_host\r\n end",
"def init_mob_data(settings)\n case settings[:STORAGE_TYPE]\n when :sqlite\n settings[:MOB_SPAWNS] = settings[:CONNECTION][:mob_spawns]\n settings[:MOB_STATS] = settings[:CONNECTION][:mob_stats]\n settings[:MOB_ANIMATIONS] = settings[:CONNECTION][:mob_animations]\n settings[:MOB_DEFINITIONS] = settings[:CONNECTION][:mob_definitions]\n when :postgres\n else \"Invalid storage mode! #{settings[:RRB_STORAGE_TYPE]}\"\n end\n rescue StandardError => e\n settings[:LOG].error 'An error occurred while loading game database!'\n settings[:LOG].error e\n settings[:LOG].error e.backtrace&.join(\"\\n\")\n end",
"def setup\n @subject = Fog::Compute[:google].servers\n @factory = ServersFactory.new(namespaced_name)\n @servers = ServersFactory.new(namespaced_name)\n @disks = DisksFactory.new(namespaced_name)\n end",
"def prepare_replication\n exclude_rubyrep_tables\n\n puts \"Verifying RubyRep tables\"\n ensure_infrastructure\n\n call_after_infrastructure_setup_handler\n\n puts \"Checking for and removing rubyrep triggers from unconfigured tables\"\n restore_unconfigured_tables\n\n puts \"Verifying rubyrep triggers of configured tables\"\n unsynced_table_pairs = []\n table_pairs = session.sort_table_pairs(session.configured_table_pairs)\n table_pairs.each do |table_pair|\n table_options = options(table_pair[:left])\n ensure_sequence_setup table_pair,\n table_options[:sequence_increment],\n table_options[:left_sequence_offset],\n table_options[:right_sequence_offset]\n\n unsynced = false\n [:left, :right].each do |database|\n next if session.configuration.send(database)[:mode] == :slave\n if !trigger_exists? database, table_pair[database]\n create_trigger database, table_pair[database]\n unsynced = true\n elsif !sync_complete? database, table_pair[database]\n unsynced = true\n end\n end\n if unsynced and table_options[:initial_sync]\n unsynced_table_pairs << table_pair\n end\n end\n unsynced_table_specs = unsynced_table_pairs.map do |table_pair|\n \"#{table_pair[:left]}, #{table_pair[:right]}\"\n end\n\n # Initialize heartbeat file\n RR.heartbeat(session.configuration.options[:heartbeat_file])\n\n unless session.configuration.options[:no_sync] || unsynced_table_specs.empty?\n puts \"Executing initial table syncs\"\n runner = SyncRunner.new\n runner.session = session\n runner.options = {:table_specs => unsynced_table_specs, :heartbeat_file => session.configuration.options[:heartbeat_file]}\n runner.execute\n end\n\n puts \"Starting replication\"\n end",
"def init(init_exec)\n data = get_data_from_db(init_exec)\n data = prepare_data(data)\n yield(data)\n @params_list = get_params # todo : les params ont déjà été get avec les valeurs par défaut, regarder s'il ne serait pas possible de faire usage du tableau de structures plutot que d'en demander un autre\n end",
"def init_slaves\n [].tap do |slaves|\n ActiveRecord::Base.configurations.each do |name, values|\n if name.to_s =~ /#{self.environment}_(slave_database.*)/\n weight = if values['weight'].blank?\n 1\n else\n (v=values['weight'].to_i.abs).zero?? 1 : v\n end\n MultiDb.module_eval %Q{\n class #{$1.camelize} < ActiveRecord::Base\n self.abstract_class = true\n establish_connection :#{name}\n WEIGHT = #{weight} unless const_defined?('WEIGHT')\n end\n }, __FILE__, __LINE__\n slaves << \"MultiDb::#{$1.camelize}\".constantize\n end\n end\n end\n end",
"def create_tables\n create_mirrors\n create_users\n create_user_tokens\n create_products\n create_users_products\n create_versions\n create_dependencies\n create_access_keys\n end",
"def setup\n @admin = users(:admin)\n @user = users(:user)\n end",
"def cache_setup(host, user, app, name)\n @namespace = [host, user, app, name].compact.join(':')\n Table.create_table?\n @store = Table\n end",
"def configure_connection\n if @config[:encoding]\n @connection.set_client_encoding(@config[:encoding])\n end\n # self.client_min_messages = @config[:min_messages] || 'warning'\n self.schema_search_path = @config[:schema_search_path] || @config[:schema_order]\n\n # Use standard-conforming strings so we don't have to do the E'...' dance.\n # set_standard_conforming_strings\n\n # If using Active Record's time zone support configure the connection to return\n # TIMESTAMP WITH ZONE types in UTC.\n # (SET TIME ZONE does not use an equals sign like other SET variables)\n # if ActiveRecord::Base.default_timezone == :utc\n # execute(\"SET time zone 'UTC'\", 'SCHEMA')\n # elsif @local_tz\n # execute(\"SET time zone '#{@local_tz}'\", 'SCHEMA')\n # end\n\n # SET statements from :variables config hash\n # http://www.postgresql.org/docs/8.3/static/sql-set.html\n variables = @config[:variables] || {}\n variables.map do |k, v|\n if v == ':default' || v == :default\n # Sets the value to the global or compile default\n execute(\"SET SESSION #{k} TO DEFAULT\", 'SCHEMA')\n elsif !v.nil?\n execute(\"SET SESSION #{k} TO #{quote(v)}\", 'SCHEMA')\n end\n end\n end",
"def adapters\n @__adapters__ ||= {}\n end",
"def setup(_config={})\n config.merge!(_config)\n @backend = nil\n @raise_errors = nil\n end",
"def build_tables\n @db.exec(%q[\n CREATE TABLE IF NOT EXISTS breeds(\n id serial NOT NULL PRIMARY KEY,\n breed varchar(30),\n price integer\n )])\n\n @db.exec(%q[\n CREATE TABLE IF NOT EXISTS puppies(\n id serial NOT NULL PRIMARY KEY,\n breed varchar(30),\n name varchar(30),\n age integer,\n created_at timestamp NOT NULL DEFAULT current_timestamp\n )])\n\n @db.exec(%q[\n CREATE TABLE IF NOT EXISTS requests(\n id serial NOT NULL PRIMARY KEY,\n breed text,\n status text,\n created_at timestamp NOT NULL DEFAULT current_timestamp\n )])\n end",
"def setup\n DBHandler.establish_test_connection\n end",
"def adapters\n @adapters ||= {}\n end",
"def prepare; end",
"def prepare; end",
"def prepare; end",
"def initialize(opts = {}, &block)\n @opts = opts\n \n @quote_identifiers = opts[:quote_identifiers] || @@quote_identifiers\n @single_threaded = opts[:single_threaded] || @@single_threaded\n @schemas = nil\n @pool = (@single_threaded ? SingleThreadedPool : ConnectionPool).new(connection_pool_default_options.merge(opts), &block)\n @pool.connection_proc = proc {connect} unless block\n\n @loggers = Array(opts[:logger]) + Array(opts[:loggers])\n ::Sequel::DATABASES.push(self)\n end",
"def setup_connection_adapter\n ActiveRecord::Base.establish_connection(self.to_hash)\n end",
"def _schema_ds\n @_schema_ds ||= begin\n ds = metadata_dataset.select{[\n pg_attribute[:attname].as(:name),\n SQL::Cast.new(pg_attribute[:atttypid], :integer).as(:oid),\n SQL::Cast.new(basetype[:oid], :integer).as(:base_oid),\n SQL::Function.new(:format_type, basetype[:oid], pg_type[:typtypmod]).as(:db_base_type),\n SQL::Function.new(:format_type, pg_type[:oid], pg_attribute[:atttypmod]).as(:db_type),\n SQL::Function.new(:pg_get_expr, pg_attrdef[:adbin], pg_class[:oid]).as(:default),\n SQL::BooleanExpression.new(:NOT, pg_attribute[:attnotnull]).as(:allow_null),\n SQL::Function.new(:COALESCE, SQL::BooleanExpression.from_value_pairs(pg_attribute[:attnum] => SQL::Function.new(:ANY, pg_index[:indkey])), false).as(:primary_key),\n Sequel[:pg_type][:typtype],\n (~Sequel[Sequel[:elementtype][:oid]=>nil]).as(:is_array),\n ]}.\n from(:pg_class).\n join(:pg_attribute, :attrelid=>:oid).\n join(:pg_type, :oid=>:atttypid).\n left_outer_join(Sequel[:pg_type].as(:basetype), :oid=>:typbasetype).\n left_outer_join(Sequel[:pg_type].as(:elementtype), :typarray=>Sequel[:pg_type][:oid]).\n left_outer_join(:pg_attrdef, :adrelid=>Sequel[:pg_class][:oid], :adnum=>Sequel[:pg_attribute][:attnum]).\n left_outer_join(:pg_index, :indrelid=>Sequel[:pg_class][:oid], :indisprimary=>true).\n where{{pg_attribute[:attisdropped]=>false}}.\n where{pg_attribute[:attnum] > 0}.\n order{pg_attribute[:attnum]}\n\n # :nocov:\n if server_version > 100000\n # :nocov:\n ds = ds.select_append{pg_attribute[:attidentity]}\n\n # :nocov:\n if server_version > 120000\n # :nocov:\n ds = ds.select_append{Sequel.~(pg_attribute[:attgenerated]=>'').as(:generated)}\n end\n end\n\n ds\n end\n end",
"def setup\n @admin = users(:admin)\n @user = users(:user)\n @user2 = users(:user2)\n @user3 = users(:user3)\n @user4 = users(:user4)\n @instance_teaming = instances(:instance_teaming)\n @instance_in_progress = instances(:instance_in_progress)\n @instance_complete = instances(:instance_complete)\n end",
"def connect\n @connection = ::NB::DB::PooledDBConnection.new(@connection_parameters[0]) do\n conn = ::NB::DB::FiberedPostgresConnection.connect(*@connection_parameters[1..(@connection_parameters.length-1)])\n=begin\n ::NB::DB::FiberedPostgresConnection.translate_results = false if ::NB::DB::FiberedPostgresConnection.respond_to?(:translate_results=)\n # Ignore async_exec and async_query when using postgres-pr.\n @async = @config[:allow_concurrency] && @connection.respond_to?(:async_exec)\n # Use escape string syntax if available. We cannot do this lazily when encountering\n # the first string, because that could then break any transactions in progress.\n # See: http://www.postgresql.org/docs/current/static/runtime-config-compatible.html\n # If PostgreSQL doesn't know the standard_conforming_strings parameter then it doesn't\n # support escape string syntax. Don't override the inherited quoted_string_prefix.\n NB.neverblock(false) do\n if supports_standard_conforming_strings?\n self.class.instance_eval do\n define_method(:quoted_string_prefix) { 'E' }\n end\n end\n # Money type has a fixed precision of 10 in PostgreSQL 8.2 and below, and as of\n # PostgreSQL 8.3 it has a fixed precision of 19. PostgreSQLColumn.extract_precision\n # should know about this but can't detect it there, so deal with it here.\n money_precision = (postgresql_version >= 80300) ? 19 : 10\n ::ActiveRecord::ConnectionAdapters::PostgreSQLColumn.module_eval(<<-end_eval)\n def extract_precision(sql_type)\n if sql_type =~ /^money$/\n #{money_precision}\n else\n super\n end\n end\n end_eval\n #configure_connection\n end\n\t conn\n=end\n end\n end",
"def setup\n setup_connection\n\n unless tables.count == 0\n puts \"Aborting! Job already has tables attached. Setup requires a new job.\"\n return\n end\n source_tables = source_connection.tables\n source_tables = source_tables.reject { |table| \n exclude_table_names.include?(table) || table.starts_with?('tmp_') \n }\n\n source_tables.each do |table_name|\n copy_table_schema(table_name)\n end\n\n puts \"Done setup!\"\n puts \"Warning! Table settings have been guessed but you can do extra configuration such as setting the insert_only flag on tables to further increase speed of loading.\"\n end",
"def bootstrap\n sql = 'CREATE TABLE IF NOT EXISTS states(uuid VARCHAR(128) PRIMARY KEY,'\n sql << ' id INTEGER, name VARCHAR(128), deploy_id VARCHAR(128), timestamp INTEGER,'\n sql << ' missing INTEGER, state VARCHAR(128), hyperv VARCHAR(128))'\n\n @db.execute(sql)\n\n sql = 'CREATE TABLE IF NOT EXISTS settings(key VARCHAR(128) PRIMARY KEY,'\n sql << ' value VARCHAR(128))'\n\n @db.execute(sql)\n end",
"def prepare()\n end",
"def configure_postgres\n<<BASH\necho \"deb http://apt.postgresql.org/pub/repos/apt/ trusty-pgdg main\" > /etc/apt/sources.list.d/pgdg.list\nwget --quiet https://www.postgresql.org/media/keys/ACCC4CF8.asc\napt-key add ACCC4CF8.asc\napt-get update\napt-get install postgresql-9.2 -y\necho \"host all all #{CS_VM_ADDRESS}/32 md5\" >> /etc/postgresql/9.2/main/pg_hba.conf\necho \"listen_addresses='*'\" >> /etc/postgresql/9.2/main/postgresql.conf\nservice postgresql restart\nexport PATH=/usr/lib/postgresql/9.2/bin:$PATH\nsudo -u postgres psql -c \"CREATE USER bofh SUPERUSER ENCRYPTED PASSWORD 'i1uvd3v0ps';\"\nBASH\nend",
"def before_setup\n Account.connection.drop_table :accounts, if_exists: true\n Account.connection.exec_query(\"CREATE SEQUENCE accounts_id_seq\")\n Account.connection.exec_query(\"\n CREATE TABLE accounts (\n id BIGINT PRIMARY KEY DEFAULT nextval('accounts_id_seq'),\n firm_id bigint,\n firm_name character varying,\n credit_limit integer\n )\n \")\n\n Company.connection.drop_table :companies, if_exists: true\n Company.connection.exec_query(\"CREATE SEQUENCE companies_nonstd_seq\")\n Company.connection.exec_query(\"\n CREATE TABLE companies (\n id BIGINT PRIMARY KEY DEFAULT nextval('companies_nonstd_seq'),\n type character varying,\n firm_id bigint,\n firm_name character varying,\n name character varying,\n client_of bigint,\n rating bigint,\n account_id integer,\n description character varying\n )\n \")\n\n Course.connection.drop_table :courses, if_exists: true\n Course.connection.exec_query(\"CREATE SEQUENCE courses_id_seq\")\n Course.connection.exec_query(\"\n CREATE TABLE courses (\n id INT PRIMARY KEY DEFAULT nextval('courses_id_seq'),\n name character varying,\n college_id integer\n )\n \")\n\n self.class.fixtures :accounts\n self.class.fixtures :companies\n self.class.fixtures :courses\n end",
"def init # should eventually take configuration options (hash || block)\n if ActiveRecord::Base.connection.tables.include? 'aux_codes'\n aux_codes_yml = File.join 'config', 'aux_codes.yml'\n if File.file? aux_codes_yml\n load_file aux_codes_yml\n create_classes!\n end\n end\n end",
"def prepare_schema\n tables = [];ActiveRecord::Base.connection.execute(\"show tables\").each{|t| tables << t[0].strip}\n \n ActiveRecord::Schema.define do\n App.log.info(\"preparing schema\")\n \n unless tables.include?(\"services\")\n # a service entry\n begin\n create_table :services do |t|\n t.string :name\n t.string :status, :null => false, :default => \"active\"\n end\n add_index :services, :name\n rescue\n App.log_exception\n end\n end\n \n unless tables.include?(\"queries\")\n begin\n # queries\n create_table :queries do |t|\n t.string :query \n t.column :last_twid, :bigint, :null => false, :default => 0\n t.timestamp :last_run\n t.integer :last_result_count\n t.string :status, :default => 'active', :null=> false\n end\n add_index :queries, :query\n rescue\n App.log_exception\n end\n end\n \n unless tables.include?(\"tweets\")\n begin\n # cache of tweets\n create_table :tweets do |t|\n t.column :twid, :bigint, :null => false\n t.string :from_user\n t.string :to_user\n t.integer :from_user_id\n t.integer :to_user_id\n t.string :text\n t.string :profile_image_url\n t.timestamp :created_at\n end\n add_index :tweets, :twid\n rescue\n App.log_exception\n end\n end\n \n end # define schema\n end",
"def initialize options\n host = options['host'] || 'localhost'\n port = options['port'] || 5432\n database = options['database']\n username = options['username']\n\n # always reuse an existing connection if it matches on these connection options\n conn_options = {:host => host, :port => port, :database => database, :username => username}\n existing_connection = self.class.persisted_connection(conn_options)\n\n if existing_connection.nil?\n # create and persist a new connection\n @connection = PG::Connection.new(\n host,\n port,\n nil,\n nil,\n database,\n username,\n options['password'] || ''\n )\n @connection.set_notice_processor do |msg|\n if msg =~ /^ERROR:/\n LOG.error('psql') { msg.gsub(/\\n/,'; ') }\n else\n LOG.info('psql') { msg.gsub(/\\n/,'; ') }\n end\n end\n self.class.persist_connection(@connection, conn_options)\n else\n # reuse an existing connection\n @connection = existing_connection\n end\n\n # set up trackig if it isn't set up already\n set_up_tracking if !tracking_tables?\n end",
"def initialize_db\n create_indexes_for_all_models\n end",
"def do_setup \n config = self.config\n host = config['app']['host']\n port = config['app']['port']\n @url_base = \"http://#{host}:#{port}\"\n puts config.inspect\n @pcap_dir = config.fetch 'pcap_dir'\n @index_dir = config.fetch 'index_dir'\n\n # Extract test pcaps and indexes\n FileUtils.rm_rf '/tmp/pcapr_local_test'\n FileUtils.mkdir_p @pcap_dir\n FileUtils.mkdir_p @index_dir\n\n\n # Recreate test database.\n begin\n couch = config['couch']\n RestClient.delete \"#{couch['uri']}/#{couch['database']}\"\n rescue RestClient::ResourceNotFound\n end\n db = @db = PcaprLocal.get_db(config)\n end",
"def establish_connections\n klass = Struct.new(:ip,:port)\n if t_servers = BackgrounDRb::BDRB_CONFIG[:client]\n connections = t_servers.split(',')\n connections.each do |conn_string|\n ip = conn_string.split(':')[0]\n port = conn_string.split(':')[1].to_i\n @bdrb_servers << klass.new(ip,port)\n end\n end\n @bdrb_servers << klass.new(BackgrounDRb::BDRB_CONFIG[:backgroundrb][:ip],BackgrounDRb::BDRB_CONFIG[:backgroundrb][:port].to_i)\n @bdrb_servers.each_with_index do |connection_info,index|\n next if @backend_connections.detect { |x| x.server_info == \"#{connection_info.ip}:#{connection_info.port}\" }\n @backend_connections << Connection.new(connection_info.ip,connection_info.port,self)\n end\n end",
"def load_postgres_driver()\n require 'postgres'\nrescue LoadError\n require 'rubygems'\n require 'rbconfig'\n sad = Config::CONFIG['sitearchdir']\n postgres_dir = sad.chomp('-gnu')\n $:.unshift(postgres_dir)\n require 'postgres'\nrescue LoadError\n # On SUSE, FreeBSD the driver is called ruby-postgres and is a gem\n require 'ruby-postgres'\nend",
"def optimize_puppetdb_settings(resources, with_external_postgresql)\n output_minimum_system_requirements_error_and_exit unless meets_minimum_system_requirements?(resources)\n\n percent_cpu_threads = 75\n minimum_cpu_threads = 1\n maximum_cpu_threads = resources['cpu'] - 1\n percent_mb_puppetdb = with_external_postgresql ? 50 : 25\n percent_mb_buffers = with_external_postgresql ? 0 : 25\n minimum_mb_puppetdb = fit_to_memory(resources['ram'], 512, 1024, 2048)\n maximum_mb_puppetdb = 8192\n minimum_mb_buffers = fit_to_memory(resources['ram'], 2048, 3072, 4096)\n maximum_mb_buffers = 16384\n minimum_mb_os = reserved_memory_os\n # minimum_mb_g1gc = 2048\n\n minimum_mb_buffers = with_external_postgresql ? 0 : minimum_mb_buffers\n\n settings = {}\n totals = {}\n\n if with_external_postgresql\n mb_buffers = 0\n else\n available_mb_for_buffers = resources['ram'] - minimum_mb_os\n if available_mb_for_buffers < minimum_mb_buffers\n Puppet.debug(\"Error: available_mb_for_buffers: #{available_mb_for_buffers} < minimum_mb_buffers: #{minimum_mb_buffers}\")\n output_minimum_system_requirements_error_and_exit\n end\n mb_buffers = clamp_percent_of_resource(resources['ram'], percent_mb_buffers, minimum_mb_buffers, maximum_mb_buffers)\n settings['puppet_enterprise::profile::database::shared_buffers'] = \"#{mb_buffers}MB\"\n end\n\n command_processing_threads = clamp_percent_of_resource(resources['cpu'], percent_cpu_threads, minimum_cpu_threads, maximum_cpu_threads)\n settings['puppet_enterprise::puppetdb::command_processing_threads'] = command_processing_threads\n\n available_mb_for_puppetdb = resources['ram'] - minimum_mb_os - mb_buffers\n\n if available_mb_for_puppetdb < minimum_mb_puppetdb\n Puppet.debug(\"Error: available_mb_for_puppetdb: #{available_mb_for_puppetdb} < minimum_mb_puppetdb: #{minimum_mb_puppetdb}\")\n output_minimum_system_requirements_error_and_exit\n end\n\n mb_puppetdb = clamp_percent_of_resource(resources['ram'], percent_mb_puppetdb, minimum_mb_puppetdb, maximum_mb_puppetdb)\n java_args_for_puppetdb = { 'Xms' => \"#{mb_puppetdb}m\", 'Xmx' => \"#{mb_puppetdb}m\" }\n # java_args_for_puppetdb['XX:+UseG1GC'] = '' if (jruby_9k_enabled? == false) && (mb_puppetdb >= minimum_mb_g1gc)\n settings['puppet_enterprise::profile::puppetdb::java_args'] = java_args_for_puppetdb\n\n cpu_used = command_processing_threads\n ram_used = mb_buffers + mb_puppetdb\n totals['CPU'] = { 'total' => resources['cpu'], 'used' => cpu_used }\n totals['RAM'] = { 'total' => resources['ram'], 'used' => ram_used }\n\n [settings, totals]\n end",
"def tables\r\n conn_exec do |driver|\r\n if !@tables\r\n require \"#{File.dirname(__FILE__)}/drivers/#{@opts[:type]}/knjdb_#{@opts[:type]}_tables\" if (!@opts.key?(:require) or @opts[:require])\r\n @tables = Kernel.const_get(\"KnjDB_#{@opts[:type]}\".to_sym).const_get(:Tables).new(\r\n :db => self\r\n )\r\n end\r\n \r\n return @tables\r\n end\r\n end",
"def connect_local_admin\n ActiveRecord::Base.establish_connection(\n :adapter => 'postgresql',\n :schema_search_path => 'public',\n :encoding => 'unicode',\n :database => 'postgres',\n :username => ENV['DB_USERNAME'], # accessmon\n :password => ENV['DB_PASSWORD'],\n :pool => POOL_COUNT,\n :port => PORT_NUMBER,\n :host => 'localhost')\n end",
"def setup_adapters(phase:)\n each_adapter { |adapter| adapter.setup(phase: phase) }\n end",
"def setup(params)\n\n\t\t@selected = []\n\t\t@driver = []\n\t\t@errors = []\n\t\t# @output_tmp = Dir.mktmpdir\n\n\tend",
"def prepare_db_for_restore\n raise \"restore unimplemented for #{adapter}\" unless (adapter = @db_conf[:adapter]) == 'postgresql'\n query = \"SELECT table_name FROM information_schema.tables WHERE table_schema='public' AND table_type='BASE TABLE'\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n tables = `#{cmd}`\n\n query = \"DROP TABLE #{tables.map(&:chomp).map(&:strip).reject(&:empty?).join(\", \")} CASCADE\"\n cmd = \"psql #{@db_conf[:database]} -t -c \\\"#{query}\\\"\"\n puts \"Executing: '#{cmd}'\"\n `#{cmd}`\n end",
"def prepare!\n prepare_in_parallel!\n end",
"def adapters\n @adapters ||= {}\n end",
"def database_configuration; end",
"def database_configuration; end",
"def init_example_db\n # Create repos table\n database.create_table :repos do\n primary_key :id\n String :name\n foreign_id :token_id\n end\n\n # Add example repositories\n r = database[:repos]\n r.insert(:name => \"josefwaller/pycatan\")\n r.insert(:name => \"josefwaller/github_dash\")\n r.insert(:name => \"rails/rails\")\n\n # Create tokens table\n database.create_table :tokens do\n primary_key :id\n String :token\n String :name\n end\n\n # Add example token\n t = database[:tokens]\n ENV['GITHUB_DASH_TOKEN'] ||= \"ThisIsAnExampleGithubApiKey\"\n t.insert(:token => ENV['GITHUB_DASH_TOKEN'])\n end"
] |
[
"0.6303121",
"0.62652934",
"0.6196027",
"0.60925674",
"0.60923743",
"0.6072901",
"0.5956406",
"0.59346074",
"0.5915412",
"0.58782214",
"0.58357775",
"0.5783705",
"0.5774008",
"0.5756109",
"0.57473207",
"0.574559",
"0.5708784",
"0.5698038",
"0.56972337",
"0.5685741",
"0.5666665",
"0.5660667",
"0.5660393",
"0.5653067",
"0.5652492",
"0.5650826",
"0.56480765",
"0.56349784",
"0.5627398",
"0.5574496",
"0.5543246",
"0.55428797",
"0.5530555",
"0.54926777",
"0.54919076",
"0.54874104",
"0.54687583",
"0.5461001",
"0.5446989",
"0.54389775",
"0.5421502",
"0.54092807",
"0.5405505",
"0.5403875",
"0.5403026",
"0.53845745",
"0.5374683",
"0.5374683",
"0.53664935",
"0.5355367",
"0.5352382",
"0.53470534",
"0.53421855",
"0.53379893",
"0.5329725",
"0.53193516",
"0.5303155",
"0.5302635",
"0.5288221",
"0.5284085",
"0.52756995",
"0.5272858",
"0.525959",
"0.52580917",
"0.5245513",
"0.52407515",
"0.5238543",
"0.5229212",
"0.52210754",
"0.5217825",
"0.5217825",
"0.5217825",
"0.5214643",
"0.5213256",
"0.5213189",
"0.52109975",
"0.52038705",
"0.52031326",
"0.51937735",
"0.51807743",
"0.5175834",
"0.51719695",
"0.5164729",
"0.5156879",
"0.5154381",
"0.51526767",
"0.515062",
"0.51495665",
"0.51476103",
"0.5146756",
"0.51456285",
"0.5138391",
"0.51383144",
"0.5137749",
"0.51371396",
"0.51361644",
"0.5123758",
"0.51231724",
"0.51231724",
"0.5115042"
] |
0.68285835
|
0
|
Backbone of the tables and views support.
|
def pg_class_relname(type, opts)
ds = metadata_dataset.from(:pg_class).where(:relkind=>type).select(:relname).server(opts[:server]).join(:pg_namespace, :oid=>:relnamespace)
ds = filter_schema(ds, opts)
m = output_identifier_meth
if defined?(yield)
yield(ds)
elsif opts[:qualify]
ds.select_append{pg_namespace[:nspname]}.map{|r| Sequel.qualify(m.call(r[:nspname]).to_s, m.call(r[:relname]).to_s)}
else
ds.map{|r| m.call(r[:relname])}
end
end
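A minimal usage sketch, assuming the surrounding Sequel::Postgres adapter: the public tables and views methods are expected to delegate here with the matching pg_class.relkind code ('r' for ordinary tables, 'v' for views; 'm' would cover materialized views). The wiring below is illustrative, not a verbatim copy of the adapter.

def tables(opts=OPTS, &block)
  # 'r' = ordinary table in pg_class.relkind
  pg_class_relname('r', opts, &block)
end

def views(opts=OPTS, &block)
  # 'v' = view in pg_class.relkind
  pg_class_relname('v', opts, &block)
end

# Assumed usage against an illustrative DB handle:
# DB.tables                 # => [:items, :users]
# DB.tables(:qualify=>true) # => [Sequel.qualify('public', 'items'), ...]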
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def table\n end",
"def models\r\n\r\n end",
"def table; end",
"def table; end",
"def table; end",
"def table; end",
"def model; end",
"def model; end",
"def model; end",
"def model; end",
"def model; end",
"def model; end",
"def model; end",
"def model; end",
"def model; end",
"def model; end",
"def _table; @table end",
"def index\n @title = 'Tables'\n @tables = Table.all\n\n respond_to do |format|\n format.html {\n @tables_rabl = Rabl.render(@tables, 'tables/index', view_path: 'app/views', format: 'json')\n @guests_rabl = Rabl.render(Guest.attending, 'tables/guests', view_path: 'app/views', format: 'json')\n }\n format.json { render json: @tables }\n end\n end",
"def render_table(arel)\n return if arel.first.nil?\n partial :table, :locals => {:model => arel.first.class, :arel => arel}\n end",
"def table\n model_class.arel_table\n end",
"def model\n end",
"def table_for(collection, options = {}, *attr_list)\n actions = false\n classes = options[:classes] || \"\"\n model_class_name = options[:model_name] || collection.name\n table_id = options[:id] || model_class_name.tableize\n table_klazz = model_class_name.constantize\n table_headers = []\n\n attr_list.flatten.each do |attr_name|\n if attr_name.class == Hash && !attr_name[:actions].nil?\n actions = attr_name[:actions]\n else\n header_content = table_klazz.human_attribute_name(attr_name)\n header = content_tag(:th, header_content)\n table_headers << header\n end\n end\n\n if actions\n table_headers << content_tag(:th, t('actions'), class: 'table_actions')\n end\n\n thead = content_tag :thead, content_tag(:tr, table_headers.join(\" \").html_safe)\n table_content = \"\"\n if options[:partial].present?\n table_content = render partial: options[:partial], collection: collection\n else\n table_content = render collection\n end\n tbody = content_tag :tbody, table_content\n table = content_tag(:table, \"#{thead} #{tbody}\".html_safe, id: table_id, class: \"table table-hover #{classes}\")\n table.html_safe\n end",
"def index\n \n get_table_name #method for table_names and model_names \n\n get_table_column_name # method for table_name, column_names and model_name \n\n get_table_data # method for table data\n\n @model_data = []\n \n @model_data = @model_name.order(sort_column + \" \" + sort_direction).page(params[:page]).per(10)\n\n respond_to do |format|\n\t format.html\n\t format.json\n \tend\n\n end",
"def view_database\n table_html = '<a href=\"/admin\"> << back to index</a>'\n table_names = ActiveRecord::Base.connection.tables.delete_if { |x| x == 'schema_migrations' }\n table_names.sort.each do |name|\n table_name = \"#{name.titleize.gsub(' ', '').singularize}\"\n table_data = eval(\"#{table_name}.all\")\n\n table_html << \"<h2 style='border-bottom: solid;background-color: lightBlue;'>#{table_name}<a href='/admin/delete_db_view_data/#{table_name}' class='btn btn-large btn-primary' style='float:right'>Delete All</a></h2>\"\n table_data.each do |row|\n table_html << \"<ul style='border: 3px dotted'>\"\n row.attributes.each do |column_name, column_data|\n table_html << \"<li>#{column_name} : #{column_data}</li>\"\n end\n table_html << \"<li><a href='/admin/delete_db_view_record/#{table_name}_#{row.id}' class='btn btn-large btn-primary' style='font-size:20px;'>[Delete Record]</a></li></ul>\"\n end\n\n end\n render :text => table_html\n end",
"def table_of(things,opts={})\n kind = things.first.table_name\n # columns = things.first.visible_columns\n add_class_to_html_options(opts, kind)\n content_tag(\n :table,\n render(:partial => \"/#{kind}/table_row\", :collection => things),\n opts\n )\n end",
"def render\n @mark.table_begin(\"table-striped table-sm table-responsive table-hover\")\n generate_headers\n generate_rows\n @mark.table_end\n @mark.render\n end",
"def table options = {} \n render_partial :table, template_locals(:table_row, options)\n end",
"def build_table_helpers(resource)\n @module.module_eval <<-end_eval\n def #{resource.singular}_table(opts={}, &block)\n content = capture(&block)\n opts[:class] = ResourcefulViews.resourceful_classnames('#{resource.singular}_table', *(opts.delete(:class) || '').split)\n concat(content_tag(:table, content, opts))\n end\n def #{resource.singular}_row(*args, &block)\n opts = args.extract_options!\n opts[:class] = ResourcefulViews.resourceful_classnames('#{resource.singular}', *(opts.delete(:class) || '').split)\n opts[:id] = '#{resource.singular}_' + args.first.id.to_s unless args.empty?\n content = capture(&block)\n concat(content_tag(:tr, content, opts))\n end\n end_eval\n end",
"def materialized_views(name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; [] end",
"def model\n end",
"def model\n end",
"def model\n end",
"def orm; end",
"def table\n self.class.table\n end",
"def views\n Views.new(connection).all\n end",
"def tables\n [\n ]\n end",
"def create_extension_view_and_class\n self.const_get(\"Extended#{to_s}\")\n rescue\n clause = view_builder\n #this needs to be moved into the specific db adapter files\n connection.execute %{\n create or replace algorithm = merge SQL SECURITY DEFINER view #{extended_table_name} as select #{clause[:view_select]} from #{table_name} #{clause[:view_joins]}#{clause[:view_conditions]}\n }\n class_eval %{\n class Extended#{to_s} < #{to_s}\n set_table_name \"#{extended_table_name}\"\n def self.descends_from_active_record?\n true\n end\n end\n }\n true\n end",
"def index\n instance_variable_set(resource(:tableize), @model.all)\n end",
"def tables(opts=OPTS, &block)\n tables_or_views('TABLE', opts, &block)\n end",
"def views(opts=OPTS, &block)\n tables_or_views('VIEW', opts, &block)\n end",
"def overview(db)\n\toverview = db.execute(\"CREATE VIEW [Your top books!] AS SELECT book_name, author FROM books where rating > 3\")\n\treturn overview\nend",
"def show\n @table_header = @project.table_header\n @records = @project.get_table_class.all\n end",
"def _view; end",
"def set_up_table_view\n self.table_view\n _table_view = self.create_table_view_from_data(self.table_data)\n adjusted_frame = self.view.bounds\n adjusted_frame.size.width = app_delegate.panels.leftVisibleWidth\n _table_view.frame = adjusted_frame\n self.view = UIView.new\n self.view.addSubview(_table_view)\n end",
"def tables\r\n return @engine.tables\r\n end",
"def index\n @bs_tables = BsTable.all\n end",
"def table_list(objetos, show_all_actions = true, options = {})\n render :partial => '/admin/shared/table_list', :locals => { :objetos => objetos, :show_all_actions => show_all_actions, :options => options }\n end",
"def index\n @datatable = ComplainsDatatable.new view_context\n end",
"def render_table(rows, options = {})\n options = { :description => false }.merge options\n # Figure out the fields from the :model option\n if options[:model] && options[:fields].nil?\n options[:fields] = options[:model].default_field_order\n end\n # Figure out the fields from the first row\n if options[:fields].nil? && rows.first.class.respond_to?(:default_field_order)\n options[:fields] = rows.first.class.default_field_order\n end\n # Call to_row on all the rows\n rows = rows.map do |row|\n row.respond_to?(:to_row) ? row.to_row : row\n end\n # Call render_cell on all the cells\n rows.each do |row|\n # FIXME: default Api subclasses do not respond to #keys so specialising\n # #to_row is required to not break the following\n row.each_key do |k|\n row[k] = row[k].render_cell if row[k].respond_to? :render_cell\n end\n end\n if options[:s]\n # Simple output\n rows.each do |row|\n if options[:vertical]\n data options[:fields].map { |k| [k, row[k]].join(\"\\t\") }.join(\"\\n\")\n else\n data options[:fields].map { |k| row[k].is_a?(Array) ? row[k].join(\",\") : row[k] }.join(\"\\t\")\n end\n end\n elsif options[:vertical]\n # \"graphical\" table\n data ShowTable.render(rows, options)\n else\n data SimpleTable.render(rows, options)\n end\n end",
"def model\n self\n end",
"def table\n Airmodel.client.table base_config[:base_id], base_config[:table_name]\n end",
"def index_table(klass, objects)\n # get links from class' helper\n links = send(\"#{klass.table_name}_index_links\", objects).compact\n\n # if there are any batch links, insert the 'select all' link\n batch_ops = !links.reject{|l| !l.match(/class=\"batch_op_link\"/)}.empty?\n links.insert(0, select_all_link) if batch_ops\n \n # render, getting fields and checking if there are no objects at all\n render(\"layouts/index_table\",\n :klass => klass,\n :objects => objects,\n :paginated => objects.respond_to?(:total_entries),\n :links => links.flatten.join.html_safe,\n :fields => send(\"#{klass.table_name}_index_fields\"),\n :batch_ops => batch_ops\n )\n end",
"def view; end",
"def tableView(aTableView, willDisplayCell:cell, forTableColumn:aTableColumn, row:rowIndex)\n cell.setRepresentedObject(@videoCollection[rowIndex])\n end",
"def tables\n raise 'SevenZip#tables should never be called'\n end",
"def model_class; end",
"def tables\n sanity_check\n @handle.tables\n end",
"def index\n @product_tables = ProductTable.all\n end",
"def employee_tables\n @record.employees.each do |employee|\n h3 employee.full_name\n info_table(employee)\n contacts_table(employee)\n end\n end",
"def view_renderer; end",
"def view_renderer; end",
"def index\n @table_relations = TableRelation.all\n end",
"def index t,param\n subclass,id = param\n models = t.find_models subclass\n if models\n models.values[0].each do |model_name, model_attributes|\n if model_name.to_s ==\"sql_query\"\n #some magic is needed here ..parse the sql query?\n else\n m = Model.new model_name, models.keys[0].to_s\n row_values=m.get_row_by_id(id).first\n c1=Hash.new\n if row_values\n m.model.columns_hash.each_with_index do |column_name,i|\n c1[column_name[0]]=eval(\"row_values.#{column_name}\")\n end\n t.extract_id_line model_attributes, c1,row_values,m.get_datatypes\n t.make_triples(c1, models.keys[0].to_s , \"\", m.get_datatypes)\n end\n end\n end\n end\n #render :text => t.make_triples(c1, controller , \"\", t.dbd_types)\n \n end",
"def view(name)\n new_view = view_old(name)\n new_view.table_name = name\n new_view\n end",
"def get_table(object)\n raise NotImplementedError, \"Subclasses must implement private method get_table\"\n end",
"def get_tables\n\t\t@@tables\n\tend",
"def on_table?; @on_table; end",
"def index\n @table_defs = TableDef.all\n respond_to do |format|\n format.js\n format.html\n end\n end",
"def model_name; end",
"def model_name; end",
"def structure\n self.change_schema_to 'information_schema';\n @database_name, @table_name = request[:database_name], request[:name]\n @result = Hash.new success: true\n @result[:columns] = Column.where TABLE_SCHEMA: @database_name, TABLE_NAME: @table_name\n structure = Table.where(TABLE_SCHEMA: @database_name, TABLE_NAME: @table_name).first.attributes\n @result[:desc] = []\n structure.keys.length.times do |i|\n key = structure.keys[i]\n @result[:desc].push COLUMN_NAME: key, COLUMN_TYPE: structure[key]\n end\n \n @result[:type] = 'table'\n @result[:name] = @table_name\n \n render json: @result\n end",
"def working_items_table\n contract_info_tables = page.all(\"table.table-hover.table-bordered.table-striped\", visible: true)\n unless contract_info_tables[1].nil?\n return Helpers::OpsUiHelper::TableHelper.new(node: contract_info_tables[1])\n end\n Helpers::OpsUiHelper::TableHelper.new(node: contract_info_tables[0])\n end",
"def on_table(params = {})\n table = Yummi::Table::new params\n table.data = self\n return table\n end",
"def table\n\t@table ||=Table.new\nend",
"def all\n views_from_mysql.map(&method(:to_scenic_view))\n end",
"def get_model\n m = self.class.to_s.gsub(/Datatable$/,\"\").constantize\n AjaxDatatablesRails.configure do |config|\n config.db_adapter = m.connection_config[:adapter].gsub(/_/,\"\").to_sym\n end\n m\n end",
"def create_table objects, columns, title, date_param = nil, nosort = false\n\t\tcurr_user = current_user\n\t\n\t\tid_to_names = [\"trip_id\",\"destination_id\",\"bus_id\"]\n\t\ttimes = [\"depart_time\",\"arrive_time\",\"return_time\"]\n\t\tdates = [\"date\",\"start\",\"expiry\",\"offset_date\",\"start_date\",\"end_date\",\"created_at\",\"updated_at\"]\n\t\tno_management = [\"permissions\", \"roles\"]\n\t\tmanagement_headers = [\"updated_by\",\"created_at\",\"updated_at\"]\n\t\t\n\t\thtml = \"\"\n\t\thtml << '<h1>'.html_safe\n\t\thtml << title\n\t\t\n\t\thtml << '</h1>'.html_safe\n\t\t\n\t\thtml << '<table class=\"admin_table\">'.html_safe\n\t\t\n\t\thtml << '<tr class=\"tr_header\">'.html_safe\n\t\tcolumns.each do |col|\n\t\t\t\n\t\t\tcol_title = col\n\t\t\t\n\t\t\tif col.include? '_id' then col_title = col.split('_id')[0] end\n\t\t\t\n\t\t\tif management_headers.include? col\n\t\t\t\tif curr_user.has_permission? :management\n\t\t\t\t\thtml << '<th>'.html_safe\n\t\t\t\t\tif !nosort \n\t\t\t\t\t\thtml << sort_table(col, col_title.humanize, date_param).html_safe\n\t\t\t\t\telse\n\t\t\t\t\t\thtml << col_title.humanize\n\t\t\t\t\tend\n\t\t\t\t\thtml << '</th>'.html_safe\n\t\t\t\tend\n\t\t\telse\n\t\t\t\thtml << '<th>'.html_safe\n\t\t\t\tif !nosort \n\t\t\t\t\thtml << sort_table(col, col_title.humanize, date_param).html_safe\n\t\t\t\telse\n\t\t\t\t\thtml << col_title.humanize\n\t\t\t\tend\n\t\t\t\thtml << '</th>'.html_safe\n\t\t\tend\n\t\t\t\n\t\tend\n\t\t\n\t\t# Show Column\n\t\thtml << '<th></th>'.html_safe\n\t\t\n\t\t# Edit Column\n\t\tif (curr_user.has_permission? :admin) || (!(no_management.include? objects[0].class.name.tableize) && (curr_user.has_permission? :management))\n\t\t\thtml << '<th></th>'.html_safe\n\t\tend\n\t\t\n\t\t# Destroy Column\n\t\tif curr_user.has_permission? :admin\n\t\t\thtml << '<th></th>'.html_safe\n\t\tend\n\t\t\n\t\thtml << '</tr>'.html_safe\n\t\t\n\t\ti = 0\n\t\tobjects.each do |obj|\n\t\t\tif i.even?\n\t\t\t\thtml << '<tr class=\"tr_alt_1\">'.html_safe\n\t\t\telse\n\t\t\t\thtml << '<tr class=\"tr_alt_2\">'.html_safe\n\t\t\tend\n\t\t\t\tcolumns.each do |col|\n\t\t\t\t\t\n\t\t\t\t\tif id_to_names.include? col\n\t\t\t\t\t\thtml << '<td>'.html_safe\n\t\t\t\t\t\tcol = col.split('_id')[0]\n\t\t\t\t\t\thtml << (link_to obj.send(col).id.to_s + \": \" + obj.send(col).name, obj.send(col)).html_safe\n\t\t\t\t\telsif col == \"user_id\" || col == \"updated_by\"\n\t\t\t\t\t\thtml << '<td>'.html_safe\n\t\t\t\t\t\tcol = col.split('_id')[0]\n\t\t\t\t\t\tif obj.send(col)\n\t\t\t\t\t\t\thtml << (link_to obj.send(col).userid, obj.send(col)).html_safe\n\t\t\t\t\t\tend\n\t\t\t\t\telsif col == \"id\"\n\t\t\t\t\t\thtml << '<td class=\"td_links\">'.html_safe\n\t\t\t\t\t\thtml << obj.send(col).to_s\n\t\t\t\t\telsif col == \"weekday\"\n\t\t\t\t\t\thtml << '<td>'.html_safe\n\t\t\t\t\t\thtml << Date::DAYNAMES[obj.send(col)]\n\t\t\t\t\telsif times.include? col\n\t\t\t\t\t\thtml << '<td>'.html_safe\n\t\t\t\t\t\thtml << obj.send(col).strftime(\"%I:%M %p\")\n\t\t\t\t\telsif dates.include? col\n\t\t\t\t\t\thtml << '<td>'.html_safe\n\t\t\t\t\t\thtml << obj.send(col).strftime(\"%B %d, %Y\")\n\t\t\t\t\telsif col.include? 
\"_id\"\n\t\t\t\t\t\thtml << '<td>'.html_safe\n\t\t\t\t\t\tcol = col.split('_id')[0]\n\t\t\t\t\t\tif obj.send(col)\n\t\t\t\t\t\t\thtml << (link_to obj.send(col).id.to_s, obj.send(col)).html_safe\n\t\t\t\t\t\tend\n\t\t\t\t\telse\n\t\t\t\t\t\thtml << '<td>'.html_safe\n\t\t\t\t\t\thtml << obj.send(col).to_s\n\t\t\t\t\tend\n\t\t\t\t\t\n\t\t\t\t\thtml << '</td>'.html_safe\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\t# Show Column\n\t\t\t\thtml << '<td class=\"td_links\">'.html_safe\n\t\t\t\thtml << (link_to \"Show\", obj).html_safe\n\t\t\t\thtml << '</td>'.html_safe\n\t\t\t\t\n\t\t\t\t# Edit Column\n\t\t\t\tif (curr_user.has_permission? :admin) || (!(no_management.include? objects[0].class.name.tableize) && (curr_user.has_permission? :management))\n\t\t\t\t\thtml << '<td class=\"td_links\">'.html_safe\n\t\t\t\t\thtml << (link_to \"Edit\", \"/\" + obj.class.name.tableize + \"/\" + obj.id.to_s + \"/edit\").html_safe\n\t\t\t\t\thtml << '</td>'.html_safe\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\t# Destroy Column\n\t\t\t\tif curr_user.has_permission? :admin\n\t\t\t\t\thtml << '<td class=\"td_links\">'.html_safe\n\t\t\t\t\thtml << (link_to \"Destroy\", obj, :confirm => 'Are you sure?', :method => :delete).html_safe\n\t\t\t\t\thtml << '</td>'.html_safe\n\t\t\t\tend\n\t\t\n\t\t\thtml << '</tr>'.html_safe\n\t\t\ti = i + 1\n\t\tend\n\t\t\n\t\thtml << '</table>'.html_safe\n\t\t\n\t\thtml.html_safe\n\tend",
"def view_to_hash(view, fetch_data = false)\n root = {:head => [], :rows => []}\n\n has_checkbox = !@embedded && !@no_checkboxes\n\n # Show checkbox or placeholder column\n if has_checkbox\n root[:head] << {:is_narrow => true}\n end\n\n # Icon column, only for list with special icons\n root[:head] << {:is_narrow => true} if ::GtlFormatter::VIEW_WITH_CUSTOM_ICON.include?(view.db)\n\n view.headers.each_with_index do |h, i|\n col = view.col_order[i]\n next if view.column_is_hidden?(col, self)\n\n field = MiqExpression::Field.new(view.db_class, [], view.col_order[i])\n align = field.numeric? ? 'right' : 'left'\n\n root[:head] << {:text => h,\n :sort => 'str',\n :col_idx => i,\n :align => align}\n end\n\n if @row_button # Show a button as last col\n root[:head] << {:is_narrow => true}\n end\n\n # Add table elements\n table = view.sub_table || view.table\n view_context.instance_variable_set(:@explorer, @explorer)\n table.data.each do |row|\n target = @targets_hash[row.id] unless row['id'].nil?\n\n new_row = {\n :id => list_row_id(row),\n :long_id => row['id'].to_s,\n :cells => [],\n :clickable => params.fetch_path(:additional_options, :clickable)\n }\n\n if defined?(row.data) && defined?(params) && params[:active_tree] != \"reports_tree\"\n new_row[:parent_id] = \"rep-#{row.data['miq_report_id']}\" if row.data['miq_report_id']\n end\n new_row[:parent_id] = \"xx-#{CONTENT_TYPE_ID[target[:content_type]]}\" if target && target[:content_type]\n new_row[:tree_id] = TreeBuilder.build_node_id(target) if target\n if row.data[\"job.target_class\"] && row.data[\"job.target_id\"]\n controller = view_to_hash_controller_from_job_target_class(row.data[\"job.target_class\"])\n new_row[:parent_path] = (url_for_only_path(:controller => controller, :action => \"show\") rescue nil)\n new_row[:parent_id] = row.data[\"job.target_id\"].to_s if row.data[\"job.target_id\"]\n end\n root[:rows] << new_row\n\n if has_checkbox\n new_row[:cells] << {:is_checkbox => true}\n end\n\n options = {\n :clickable => params.fetch_path(:additional_options, :clickable),\n :row_button => @row_button\n }\n new_row[:cells].concat(::GtlFormatter.format_cols(view, row, self, options))\n end\n\n root\n end",
"def rows\n render 'rows.html'\n end",
"def tables_from(db=current_database)\n end",
"def _default_wrap_model; end",
"def index\n\t\t@tables = Table.all\n\tend",
"def relation_table(model)\n validate_model(model)\n model.arel_table\n end",
"def tableView(tableView, rowViewForRow:row)\n FBSidePanelTableRowView.new\n end",
"def collection; end",
"def collection; end",
"def table_or_view\n return unless Admin::MigrationGenerator.table_or_view_exists? table_name\n\n return :table if Admin::MigrationGenerator.table_exists? table_name\n\n :view\n end",
"def table(options = {}, &block)\n table_for(collection, options.merge(:class => resource_class), &block)\n end",
"def render_model\n model.render\n end",
"def index\n respond_to do |format|\n # format.html # index.html.erb\n # format.json { render json: @documents }\n # This will die if not asekd by our dataTables, because we're using params[:collection_id]\n format.json { render json: DocumentsMainDatatable.new(view_context, current_user)}\n end\n end",
"def make_and_model; end",
"def tables\n []\n end",
"def table config={}, &block\n #def tabular_widget config={}, &block\n require 'canis/core/widgets/table'\n events = [:PROPERTY_CHANGE, :LEAVE, :ENTER, :CHANGE, :ENTER_ROW, :PRESS ]\n block_event = nil\n # if no width given, expand to stack width\n #config.delete :title\n useform = nil\n\n w = Table.new useform, config # NO BLOCK GIVEN\n w.width ||= :expand \n w.height ||= :expand # TODO This has to come before other in stack next one will overwrite.\n _position(w)\n if block_given?\n #@current_object << w\n yield_or_eval &block\n #@current_object.pop\n end\n return w\n end",
"def table\n Response\n end",
"def grid\n\t\t@gridTitle = \"Subscriber Listings\"\n\t\t@searchPlaceholder = \"Search for Subscribers\"\n\t\tslim :grid, :layout => false\n\tend",
"def get_tables\n tables\n end",
"def index\n @tbl_layouts = TblLayout.all\n\n end",
"def index\n respond_to do |format|\n format.html\n format.json { render json: DataTable::Nodes.new(view_context) }\n end\n end",
"def initialize_table; end",
"def table_content\n # This makes a call to gift_rows and gets back an array of data that will \n # populate the columns and rows of a table I then included some styling to \n # include a header and make its text bold. I made the row background colors \n # alternate between grey and white Then I set the table column widths\n table gift_rows do\n row(0).font_style = :bold\n self.header = true\n self.row_colors = ['DDDDDD', 'FFFFFF']\n self.column_widths = [180, 180, 180]\n end\n end",
"def index\n @data_tables = DataTable.all\n\n # respond_to do |format|\n # format.html # index.html.erb\n # format.json { render json: @data_tables.as_json(only: [:title, :username, :name, :email, :hometown]) }\n # end\n end"
] |
[
"0.63070464",
"0.62012213",
"0.61893415",
"0.61893415",
"0.61893415",
"0.61893415",
"0.6097337",
"0.6097337",
"0.6097337",
"0.6097337",
"0.6097337",
"0.6097337",
"0.6097337",
"0.6097337",
"0.6097337",
"0.6097337",
"0.60805684",
"0.6072884",
"0.6008782",
"0.6000424",
"0.5969268",
"0.5962377",
"0.5922523",
"0.5918466",
"0.58429784",
"0.5840671",
"0.5837409",
"0.5794763",
"0.5781141",
"0.5774343",
"0.5774343",
"0.5774343",
"0.574653",
"0.57330227",
"0.5718744",
"0.570556",
"0.5695954",
"0.5684814",
"0.56825745",
"0.5661016",
"0.5653518",
"0.5652494",
"0.56328034",
"0.5605471",
"0.5590773",
"0.55883753",
"0.5574458",
"0.55626494",
"0.5553425",
"0.55486953",
"0.5524301",
"0.55211884",
"0.5518005",
"0.5511904",
"0.5492153",
"0.54670006",
"0.54529274",
"0.54397273",
"0.5438122",
"0.5437331",
"0.5437331",
"0.54261327",
"0.5419219",
"0.5411023",
"0.540331",
"0.5402252",
"0.54005235",
"0.53831553",
"0.53829944",
"0.53829944",
"0.5375514",
"0.5371078",
"0.5367343",
"0.5360624",
"0.53549063",
"0.53442913",
"0.5325626",
"0.53228414",
"0.5320857",
"0.5314705",
"0.53123033",
"0.5311587",
"0.5306848",
"0.5305089",
"0.5303823",
"0.5303823",
"0.53033656",
"0.5298583",
"0.52976704",
"0.5294539",
"0.52764815",
"0.52727187",
"0.5269159",
"0.52678716",
"0.52675194",
"0.52666116",
"0.52563155",
"0.5253419",
"0.52472407",
"0.5233862",
"0.5226303"
] |
0.0
|
-1
|
Return an expression for the oid of the table expr. Used by the metadata parsing code to disambiguate unqualified tables.
|
def regclass_oid(expr, opts=OPTS)
if expr.is_a?(String) && !expr.is_a?(LiteralString)
expr = Sequel.identifier(expr)
end
sch, table = schema_and_table(expr)
sch ||= opts[:schema]
if sch
expr = Sequel.qualify(sch, table)
end
expr = if ds = opts[:dataset]
ds.literal(expr)
else
literal(expr)
end
Sequel.cast(expr.to_s,:regclass).cast(:oid)
end
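To make the result concrete, a hedged sketch of a caller (table and schema names are illustrative): regclass_oid(:items, :schema=>:public) builds a double cast that literalizes to SQL resembling CAST(CAST('"public"."items"' AS regclass) AS oid), which catalog queries can then compare against oid columns such as pg_class.oid or pg_attribute.attrelid.

# Hypothetical metadata query restricted to one (possibly unqualified) table:
metadata_dataset.
  from(:pg_attribute).
  where(:attrelid=>regclass_oid(:items, :schema=>:public)).
  select(:attname)
# => ... WHERE (attrelid = CAST(CAST('"public"."items"' AS regclass) AS oid))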
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def table\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.expression\n else\n @table_expr\n end\n end",
"def table_alias\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.alias\n end\n end",
"def expanded_identifier\n case type\n when :@ident, :@const, :@gvar, :@cvar, :@ivar, :@kw, :@op\n self[1]\n when :var_ref, :var_field, :const_ref, :symbol\n self[1].expanded_identifier\n when :top_const_ref, :top_const_field\n \"::#{self[1].expanded_identifier}\"\n when :const_path_ref, :const_path_field\n lhs, rhs = children\n \"#{lhs.expanded_identifier}::#{rhs.expanded_identifier}\"\n end\n end",
"def db_table_column(expr, entry = nil)\n storage_map = storage_map(:db, expr.domain_name)\n\n db = fetch_storage_class(storage_map.storage_key)\n column = storage_map.storage_attr(expr.domain_attr)\n\n [db.table_name, column]\n end",
"def aliased_expression_sql(ae)\n \"#{literal(ae.expression)} AS #{quote_identifier(ae.aliaz)}\"\n end",
"def expr \n\n\t$cst.add_branch(\"Expr\")\n\t\n\tcase scout_token\n\twhen \"T_DIGIT\"\n\t\tintexpr\n\twhen \"T_QUOTE\"\n\t\tstringexpr\n\twhen \"T_LPAREN\", \"T_BOOLEAN\"\n\t\tboolexpr\n\twhen \"T_ID\"\n\t\tid\n\telse\n\t\traise FaultyTokenError.new(\"T_DIGIT, T_QUOTE, T_LPAREN, or T_ID\", $tokens[$index])\n\tend\n\t\n\t$cst.ascend\n\t\nend",
"def name2oid(name)\n\t\toid = @manager.mib.oid(name)\n\t\tt = [['OID', 'Symbolic Name'], [\"#{oid}\", \"#{name}\"]]\n\t\ttable = t.to_table(:first_row_is_head => true)\n\t\tputs table.to_s\n\tend",
"def oid\n id(get_oid())\n end",
"def sql_expr\n @sql_expr ||= (meta[:sql_expr] || to_sql_name)\n end",
"def oid(identifier)\n prefix, *suffix = case identifier\n when Array\n identifier.map(&:to_s)\n else\n identifier.split(\".\", 2).map(&:to_s)\n end\n\n return unless prefix\n\n # early exit if it's an OID already\n unless prefix.integer?\n load_defaults\n # load module if need be\n idx = prefix.index(\"::\")\n if idx\n mod = prefix[0..(idx - 1)]\n type = prefix[(idx + 2)..-1]\n return if mod && !module_loaded?(mod) && !load(mod)\n else\n type = prefix\n end\n\n return if type.nil? || type.empty?\n\n prefix = @object_identifiers[type] ||\n raise(Error, \"can't convert #{type} to OID\")\n\n end\n\n [prefix, *suffix].join(\".\")\n end",
"def qualified_identifier_sql(qcr)\n \"#{quote_identifier(qcr.table)}.#{quote_identifier(qcr.column)}\"\n end",
"def v(o)\n case o\n when Symbol\n t, column, aliaz = Sequel.split_symbol(o)\n if t\n o\n elsif aliaz\n SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(@table, SQL::Identifier.new(column)), aliaz)\n else\n SQL::QualifiedIdentifier.new(@table, o)\n end\n when SQL::Identifier\n SQL::QualifiedIdentifier.new(@table, o)\n when SQL::QualifiedIdentifier, SQL::JoinClause\n # Return these directly, so we don't accidentally qualify symbols in them.\n o\n else\n super\n end\n end",
"def v(o)\n case o\n when Symbol\n t, column, aliaz = Sequel.split_symbol(o)\n if t\n o\n elsif aliaz\n SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(@table, SQL::Identifier.new(column)), aliaz)\n else\n SQL::QualifiedIdentifier.new(@table, o)\n end\n when SQL::Identifier\n SQL::QualifiedIdentifier.new(@table, o)\n when SQL::QualifiedIdentifier, SQL::JoinClause\n # Return these directly, so we don't accidentally qualify symbols in them.\n o\n else\n super\n end\n end",
"def ident_key\n :ident\n end",
"def get_oid(name, id)\n OIDPrefix + '.' + OIDMap[name].to_s + \".#{id}\"\n end",
"def safe_column_expr leaf, table_prefix = ''\n column_name = safe_column_name(leaf)\n type_name, = leaf.data_type(data_type_context)\n type_num = MM::DataType.intrinsic_type(type_name)\n Expression.new(table_prefix+column_name, type_num, leaf.is_mandatory)\n end",
"def identifier(name)\n SQL::Identifier.new(name)\n end",
"def unqualified(table_name)\n table_name.split('.').last\n end",
"def compiled_identifier\n identifier\n end",
"def compute_table_name\n \"#{full_table_name_prefix}#{undecorated_table_name(name)}#{table_name_suffix}\"\n end",
"def variable_id(variable)\n if internal?(variable)\n return INTERNAL_TABLE_IDS[variable.variable.name]\n end\n if variable.kind_of?(ScopedVariable)\n scope_id = variable_id(variable.scope)\n else\n scope_id = TOPLEVEL_SCOPE_ID\n end\n id = @internal_evaluator.select(\n [\"variable_id\"],\n \"variables\",\n [\"scope_id\", \"variable_name\"],\n [scope_id, variable.name]\n )\n raise DataError, \"More than one table id for table #{variable.to_s}.\" if id.length > 1\n raise DataError, \"No table id for table #{variable.to_s}.\" if id.length == 0\n id[0][0]\n end",
"def table_symbol_from x\n x.to_s.tableize.to_sym\n end",
"def identifier_for identifier\n \"#{name.gsub(/^.*::/,'').downcase}s.#{identifier}\"\n end",
"def unaExp_Handler(expr)\n\ttypeExpr = expression_Handler(expr.elem)\n\tcase expr.op\n\twhen /^[$']/\n\t\tif typeExpr == :CANVAS\n\t\t\treturn :CANVAS\n\t\telse\n\t\t\treturn nil\n\t\tend\n\twhen /\\^/\n\t\tif typeExpr == :BOOLEAN\n\t\t\treturn :BOOLEAN\n\t\telse\n\t\t\treturn nil\n\t\tend\n\twhen /-/\n\t\tif typeExpr == :NUMBER\n\t\t\treturn :NUMBER\n\t\telse\n\t\t\treturn nil\n\t\tend\n\tend\nend",
"def unaExp_Handler(expr)\n\ttypeExpr = expression_Handler(expr.elem)\n\tcase expr.op\n\twhen /^[$']/\n\t\tif typeExpr == :CANVAS\n\t\t\treturn :CANVAS\n\t\telse\n\t\t\treturn nil\n\t\tend\n\twhen /\\^/\n\t\tif typeExpr == :BOOLEAN\n\t\t\treturn :BOOLEAN\n\t\telse\n\t\t\treturn nil\n\t\tend\n\twhen /-/\n\t\tif typeExpr == :NUMBER\n\t\t\treturn :NUMBER\n\t\telse\n\t\t\treturn nil\n\t\tend\n\tend\nend",
"def table_alias_for(table_name)\n table_name.gsub(/\\./, '_')\n end",
"def to_table_reference(table_alias=nil)\n \"(#{sql})#{\" #{quote_identifier(table_alias)}\" if table_alias}\"\n end",
"def sql_of( expr, dataset)\n dataset.send :literal_expression_append, rv=String.new, expr\n rv\n end",
"def identifier_string\n name\n end",
"def qualifier_from_alias_symbol(aliaz, identifier)\n case identifier\n when SQL::QualifiedIdentifier\n if identifier.column.is_a?(String)\n Sequel.identifier(aliaz)\n else\n aliaz\n end\n when SQL::Identifier\n Sequel.identifier(aliaz)\n else\n aliaz\n end\n end",
"def table_id\n\n end",
"def oid\n @oid ||= OID.from_pointer(@struct.name, @struct.name_length)\n end",
"def as_sql(expression, aliaz)\n \"#{expression} #{quote_identifier(aliaz)}\"\n end",
"def on_ident(node, compiled_grammar)\n return node.children[0]\n end",
"def identifier\n @identifier ||= \"#{self.type_prefix}.#{Model::to_id @schema.title}.#{Model::to_id name}\"\n end",
"def expr()\n #This is a stub, used for indexing\n end",
"def expression_index_name(name)\n column_name, operator_name = split_column_name(name)\n\n result_name = if column_name =~ FUNCTIONAL_INDEX_REGEXP\n \"#{$1.downcase}_#{$3}\"\n else\n column_name\n end\n\n result_name += \"_\" + operator_name.parameterize.underscore if operator_name\n\n result_name\n end",
"def identifier\n @identifier ||= \"#{ATTRIBUTE_PREFIX}.#{Model::to_id @schema_ref}.#{Model::to_id @reference}\"\n end",
"def table_name_for(str, metadata)\n str = str.to_s.gsub(/::/, '_')\n if metadata.stores_lower_case_identifiers\n str.downcase\n elsif metadata.stores_upper_case_identifiers\n str.upcase\n else\n str\n end\n end",
"def identifier_name\n return nil if (code = @codes[@pos]).nil?\n\n pos0 = @pos\n chars = []\n if code == 0x5c and ucode = unicode_escape? and identifier_start?(ucode)\n chars.push(ucode)\n @pos += 6\n elsif identifier_start?(code)\n chars.push(code)\n @pos += 1\n else\n return nil\n end\n\n while true\n code = @codes[@pos]\n if code == 0x5c and ucode = unicode_escape? and identifier_part?(ucode)\n chars.push(ucode)\n @pos += 6\n elsif identifier_part?(code)\n chars.push(code)\n @pos += 1\n else\n name = chars.pack(\"U*\").to_sym\n return ECMA262::IdentifierName.get(name)\n end\n end\n end",
"def table_name_by_id(table_id)\n data_dictionary.table_by_id(table_id).fetch(\"NAME\", nil)\n end",
"def oid\n self.elements[:object_i_d]\n end",
"def identifier_for_filename(filename)\n if filename =~ /index\\.[^\\/]+$/\n regex = ((@config && @config[:allow_periods_in_identifiers]) ? /index\\.[^\\/\\.]+$/ : /index\\.[^\\/]+$/)\n else\n regex = ((@config && @config[:allow_periods_in_identifiers]) ? /\\.[^\\/\\.]+$/ : /\\.[^\\/]+$/)\n end\n filename.sub(regex, '').cleaned_identifier\n end",
"def dom_id(record, prefix = T.unsafe(nil)); end",
"def identifier\n @identifier ||= \"#{self.type_prefix}.#{Model::to_id(name)}\"\n end",
"def quote_table_name(str)\n str.to_s\n end",
"def simple_id\n \"_#{@table.id}_#{@id}\"\n end",
"def identifier\n @identifiers[0]\n end",
"def arel_table\n @arel_table ||= begin\n t= Arel::Table.new(table)\n t.table_alias = alias_name if alias_name != table\n t\n end\n end",
"def identifier\n num\n end",
"def current_ident\n \"#{current_path}_#{current_db_column}\"\n end",
"def query_id\n if prefix = entity_namespace.presence\n \"#{prefix}::#{@query_id}\"\n else\n @query_id\n end\n end",
"def column_name\n Name.new(\"#{name}_id\")\n end",
"def oid(item)\n return if item.nil?\n org_id = ->(rec) { rec[:org_id] || rec.try(:org_id) }\n # noinspection RailsParamDefResolve\n org =\n case item\n when Org then item\n when User then item\n when Integer then item\n when Symbol then orgs(item)\n when Hash then item.values_at(:org_id, :org).first\n when Model then org_id.(item) || item.try(:org)\n end\n org ||= find_user(uid(item))\n # noinspection RubyMismatchedReturnType\n case org\n when Org then org.id\n when Model then org_id.(org)\n when Integer then positive(org)\n end\n end",
"def value_to_ident(value)\n return nil if value.nil?\n if value.nil?\n return nil\n elsif value.kind_of?(Fixnum)\n the_id = value\n elsif value.respond_to?(:id)\n the_id = value.id\n else\n Kernel.raise \"Cannot search for invalid value #{value.inspect}\"\n end\n return index(the_id)\n end",
"def intexpr \n\n\t$cst.add_branch(\"IntExpr\")\n\t\n\tif $tokens[$index + 1].type == \"T_PLUS\"\n\t\tdigit\n\t\tintop\n\t\texpr\n\telse\n\t\tdigit\n\tend\n\t\n\t$cst.ascend\n\nend",
"def ident(args)\n args[ident_key()].to_s\n end",
"def oid2name(oid)\n\t\tname = @manager.mib.name(oid)\n\t\tt = [['OID', 'Symbolic Name'], [\"#{oid}\", \"#{name}\"]]\n\t\ttable = t.to_table(:first_row_is_head => true)\n\t\tputs table.to_s\n\tend",
"def evalIterInd(expr) \n\tsymbol = expr.get_symbol\n\tvalues = expr.get_values\n\ttype_expr = evalExpression(values[0])\n\tputs type_expr[1]\n\t# Los identificadores unicamente pueden ser booleanos o numeros\n\tif !$error\n\t\twhile type_expr[1] do\n\t\t\tevalInstr(values[1])\n\t\t\ttype_expr = evalExpression(values[0])\n\t\tend\n\tend\nend",
"def table\n @table ||= FACT_COLUMN_PREFIX + Model::to_id(name)\n end",
"def evalExpression(expr)\n\texprs = expr.get_expr\n\tif expr.class == EXPR_VALUE\n\t\tsymbol = exprs.get_symbol\n\t\tidentif = exprs.get_value\n\t\tif symbol == :IDENTIFIER\n\t\t\tsymbol = $tables.lookup(identif)\n\t\t\tif symbol[1] != nil\n\t\t\t\treturn symbol\n\t\t\telse\n\t\t\t\tputs \"ERROR: variable `#{identif}` no inicializada\"\n\t\t\t\t$error = true\n\t\t\t\treturn [:UNKNOW,nil]\n\t\t\tend\n\t\tend\n\t\treturn [symbol,identif] \n\n\t# Caso que sea una expresion binaria\n\telsif expr.class == EXPR_BIN\n\t\tarit = expr.get_arit\n\t\tsymbol1 = evalExpression(exprs[0])\n\t\tsymbol2 = evalExpression(exprs[1])\n\t\t# Chequea todos los tipos de expresiones binarias aritmeticas\n\t\t# Para cada caso, asegura que sean correctas\n\t\tif !$error\n\t\t\tcase arit\n\t\t\twhen :PLUS , :MINUS, :DIVISION, :MULTIPLY, :PERCENT\n\t\t\t\texpr_eval = expr.get_eval(arit,symbol1[1],symbol2[1])\n\t\t\t\tif !$error\t\n\t\t\t\t\tif expr_eval > 2147483647 || expr_eval < -2147483647\n\t\t\t\t\t\tputs \"ERROR: overflow numero de 32 bits excedido\"\n\t\t\t\t\t\t$error = true\n\t\t\t\t\t\treturn [:UNKNOW,nil]\n\t\t\t\t\telse\n\t\t\t\t\t\treturn [symbol1[0],expr_eval]\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\twhen :AND, :OR\n\t\t\t\texpr_eval = expr.get_eval(arit,symbol1[1],symbol2[1])\n\t\t\t\treturn [symbol1[0],expr_eval]\n\t\t\twhen :AMPERSAND, :VIRGUILE \n\t\t\t\texpr_eval = expr.get_eval(arit,symbol1[1],symbol2[1])\n\t\t\t\tif expr_eval == nil\n\t\t\t\t\tif arit == :AMPERSAND \n\t\t\t\t\t\tputs \"ERROR: concatenacion vertical incorrecta tamano incompatible\"\n\t\t\t\t\telse\n\t\t\t\t\t\tputs \"ERROR: concatenacion horizontal incorrecta tamano incompatible\"\n\t\t\t\t\tend\n\t\t\t\t\t$error = true\n\t\t\t\t\treturn [:UNKNOW,nil]\n\t\t\t\tend\n\t\t\t\treturn [symbol1[0],expr_eval]\n\t\t\twhen :LESS, :LESS_EQUAL, :MORE, :MORE_EQUAL\n\t\t\t\texpr_eval = expr.get_eval(arit,symbol1[1],symbol2[1])\n\t\t\t\treturn [:BOOLEAN,expr_eval]\n\t\t\twhen :EQUAL, :INEQUAL\n\t\t\t\texpr_eval = expr.get_eval(arit,symbol1[1],symbol2[1])\n\t\t\t\treturn [:BOOLEAN,expr_eval]\n\t\t\tend\n\t\tend\n\t\t# Caso que sea una expresion unaria\n\telsif expr.class == EXPR_UNARIA\n\t\tarit = expr.get_arit\n\t\tsymbol1 = evalExpression(exprs)\n\t\t# evalua las expresiones unarias aritmeticas\n\t\t# Para cada caso, chequea que sean correctas\n\t\tif !$error \n\t\t\tcase arit\n\t\t\twhen :MINUS_UNARY\n\t\t\t\texpr_eval = expr.get_eval(arit,symbol1[1])\n\t\t\t\treturn [symbol1[0],expr_eval]\n\t\t\twhen :DOLLAR, :APOSTROPHE \n\t\t\t\texpr_eval = expr.get_eval(arit,symbol1[1])\n\t\t\t\treturn [symbol1[0],expr_eval]\n\t\t\twhen :NOT\n\t\t\t\texpr_eval = expr.get_eval(arit,symbol1[1])\n\t\t\t\treturn [symbol1[0],expr_eval]\n\t\t\tend\n\t\tend\n\t# En caso de conseguir expresiones parentizadas, evalua la expresion\n\telsif expr.class == EXPR_PARENTHESIS\n\t\treturn evalExpression(exprs)\n\tend\nend",
"def compiled_identifier\n ids = [base.identifier]\n ids << Well.config.element_separator << identifier\n ids.join\n end",
"def fetch_expression (exp)\n\n fetch(exp)[0]\n end",
"def table_ref(t)\n case t\n when Dataset\n t.to_table_reference\n when Hash\n t.map {|k, v| \"#{table_ref(k)} #{table_ref(v)}\"}.join(COMMA_SEPARATOR)\n when Symbol\n symbol_to_column_ref(t)\n when String\n quote_identifier(t)\n else\n literal(t)\n end\n end",
"def domain_id_sql(object_name = nil)\n sql = MiqAeNamespace.select(\"COALESCE(miq_ae_namespaces.domain_id, miq_ae_namespaces.id)\")\n if object_name\n sql.joins(:ae_classes).where(\"#{object_name}.class_id = miq_ae_classes.id\").to_sql\n else\n sql.where(\"miq_ae_namespaces.id = miq_ae_classes.namespace_id\").to_sql\n end\n end",
"def get_relational(expr)\n\t\t[\"==\", \"!=\", \">\", \"<\", \">=\", \"<=\"].each do |ope|\n\t\t\tif expr.scan(ope).length == 1\n\t\t\t\treturn ope\n\t\t\tend\n\t\tend\n\tend",
"def id_column\n IdMethods::ID_COLUMN\n end",
"def expression_from(node)\n case node\n when Parser::AST::Node\n children_expression = node.children.map(&method(:expression_from)).join(' ')\n \"(#{node.type}#{' ' + children_expression if node.children.any?})\"\n when nil, 'nil'\n 'nil'\n when Symbol, String, Numeric\n '_'\n end\n end",
"def next_for(table)\n table = table.respond_to?(:table_name) ? table.table_name : table.to_s\n next_id :table => table\n end",
"def id\n `return #{@element}.id ? #{self}.Y(#{@element}.id) : #{nil};`\n end",
"def table(omim_id)\n @table[omim_id]\n end",
"def primary_key(table_name)\n 'id' # table.primary_key || 'id'\n end",
"def expr_eval_to_s\n if @last_eval_state.nil?\n return \"<unevaluated>\"\n end\n \n str = ''\n if @offset\n str << \"#{@offset < 0 ? '-' : ''}0x#{@offset.abs.to_s(16)}\"\n end\n \n if @last_eval_state[:special] || @last_eval_state[:register]\n if @offset\n str << '+'\n end\n if @last_eval_state[:special]\n str << @last_eval_state[:special_tok]\n elsif @last_eval_state[:register]\n str << @last_eval_state[:reg_tok]\n end \n end\n \n str\n end",
"def identifier_for_filename(filename)\n # Item is a directory with an index file\n if filename =~ /index(\\.[a-z]{2})?\\.[^\\/]+$/\n regex = ((@config && @config[:allow_periods_in_identifiers]) ? /index(\\.[a-z]{2})?(\\.[^\\/\\.]+)$/ : /index(\\.[a-z]{2})?(\\.[^\\/]+)$/)\n # Item is a directory with a named file\n elsif basename_of(filename).split(/\\//)[-1] == basename_of(filename).split(/\\//)[-2]\n regex = ((@config && @config[:allow_periods_in_identifiers]) ? /(\\/[^\\/]+)?(\\.[a-z]{2})?(\\.[^\\/\\.]+)$/ : /(\\/[^\\/]+)?(\\.[a-z]{2})?(\\.[^\\/]+)$/)\n # Item is a simple file\n else\n regex = ((@config && @config[:allow_periods_in_identifiers]) ? /(\\.[a-z]{2})?(\\.[^\\/\\.]+)$/ : /(\\.[a-z]{2})?(\\.[^\\/]+)$/)\n end\n\n filename.sub(regex, '').cleaned_identifier\n end",
"def unique_id\n \"#{@table.name}-#{@id}\"\n end",
"def get_expid_from_env(o = {})\n #puts \"[get_expid_from_env] #{$method}\"\n code = [(o[:col] || $col) , (o[:exp] || $exp) , (o[:method] || $method) , (o[:remark] || $remark)].join(SEP_1)\n o = $o if o.size == 0\n if o.size > 0\n code += (SEP_1 + o.find_all{|k,v|v}.map{|e|[e[0],((e[1].class==Symbol)? \":\" : \"\")+e[1].to_s.to_fname].join(SEP_3)}.join(SEP_2)) \n else\n code += SEP_1\n end\nend",
"def quote_identifier_append(sql, name)\n name = (table_mappings[name.to_sym] || name) if name.respond_to?(:to_sym)\n super(sql, name)\n end",
"def column_aliases\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.columns\n end\n end",
"def ident\n ensure_valid\n @ident\n end",
"def convert_identifier(identifier)\n case identifier\n when SQL::Identifier\n identifier.value.to_s\n else\n identifier\n end\n end",
"def table_name\n \"#{Dynamoid::Config.namespace}_index_#{prefix}_#{name.collect(&:to_s).collect(&:pluralize).join('_and_')}\"\n end",
"def object_row_id(object)\n \"#{object.class.to_s.underscore}_#{object.id}\" if object.is_a?(ActiveRecord::Base)\n end",
"def quoted_table_name\n @quoted_table_name ||= \"`#{table_name}`\"\n end",
"def to_identifier\n \"#{self.document_template.identifier}::#{self.identifier}\"\n end",
"def table_name\n \"#{Dynamoid::Config.namespace}_index_\" + source.table_name.sub(\"#{Dynamoid::Config.namespace}_\", '').singularize + \"_#{name.collect(&:to_s).collect(&:pluralize).join('_and_')}\"\n end",
"def with_identifier(value)\n value = [value] if value.class == String\n t = Identifier.arel_table\n a = t[:cached].eq_any(value)\n self.joins(:identifiers).where(a.to_sql).references(:identifiers)\n end",
"def evalIterDet(expr) \n\tsymbol = expr.get_symbol\n\tvalues = expr.get_values\n\tsymbol2 = evalExpression(values[1])\n\tif !$error\n\t\tsymbol3 = evalExpression(values[2])\n\t\tif !$error\n\t\t\tif values[0] != nil\n\t\t\t\tidentif = values[0].get_value\n\t\t\t\t$tables.addscope\n\t\t\t\t$tables.insert(:INTEGER,identif,symbol2[1])\n\t\t\t\t$ftables << [$tables.get_actual,$alcance]\n\n\t\t\t\ti = $tables.lookup(identif)[1]\n\t\t\t\tmax = [symbol3[1]-symbol2[1]+1,0].max\n\t\t\t\twhile i < max do\n\t\t\t\t\tevalInstr(values[3])\n\t\t\t\t\t$tables.update(:INTEGER,identif,i+1)\n\t\t\t\t\ti = $tables.lookup(identif)[1]\n\t\t\t\tend\n\t\t\t\t$tables.endscope\n\t\t\telse\n\t\t\t\ti = symbol2[1]\n\t\t\t\tmax = [symbol3[1]-symbol2[1]+1,0].max\n\t\t\t\tfor j in i..max \n\t\t\t\t\tevalInstr(values[3])\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend\nend",
"def to_identifier(identifier)\n case identifier\n when String, Symbol then identifier.to_sym\n when Util::Identifier then identifier.send(identifier_method) # if module is included\n end\n end",
"def parse_literal(expr)\n val = expr[1][1][1]\n val = val.to_sym if expr[0] == :symbol_literal ||\n expr[0] == :assoc_new\n val\n end",
"def expr_to_s\n buf = ''\n each_term do |term|\n if term[:operator]\n buf << term[:operator]\n end\n buf << term[:token]\n end\n buf\n end",
"def underlying_table\n @underlying_table ||= db.handler[relvar_def.namespace_qualified_name(db.handler)]\n end",
"def identity_path\n \"#{OID}/#{ERB::Util.url_encode(edipi_with_aaid.to_s)}\"\n end",
"def argot_rollup_id(spec)\n lambda do |rec, acc|\n extractor = MarcExtractor.cached(spec, separator: nil)\n oclc_num = extractor.extract(rec).collect! do |o|\n Marc21Semantics.oclcnum_extract(o)\n end.compact\n acc << \"OCLC#{oclc_num.first}\"\n end\n end",
"def identifier_for_filename(meta_filename)\n # Split into components\n components = meta_filename.gsub(%r{(^/|/$)}, '').split('/')\n components[-1].sub!(/(\\.[a-z]+)+$/, '')\n\n if components[-1] == 'index'\n components[0..-2].join('/').cleaned_identifier\n else\n components.join('/').cleaned_identifier\n end\n end",
"def __ident__() @__grammar__.ident end",
"def table_name\n if qualified? && meta[:qualified].is_a?(Symbol)\n meta[:qualified]\n else\n source.dataset\n end\n end",
"def expression\r\n # -> uncomment the next line to manually enable rule tracing\r\n # trace_in( __method__, 1 )\r\n result = nil\r\n a = nil\r\n b = nil\r\n\r\n begin\r\n # at line 9:4: a= NUMBER '+' b= NUMBER\r\n a = match( NUMBER, TOKENS_FOLLOWING_NUMBER_IN_expression_28 )\r\n match( T__6, TOKENS_FOLLOWING_T__6_IN_expression_30 )\r\n b = match( NUMBER, TOKENS_FOLLOWING_NUMBER_IN_expression_34 )\r\n # --> action\r\n result = a.text.to_i + b.text.to_i \r\n # <-- action\r\n\r\n rescue ANTLR3::Error::RecognitionError => re\r\n report_error(re)\r\n recover(re)\r\n\r\n ensure\r\n # -> uncomment the next line to manually enable rule tracing\r\n # trace_out( __method__, 1 )\r\n\r\n end\r\n \r\n return result\r\n end",
"def object_identifier\n [\"#{self.class.name}\", (id.nil? ? nil : \"##{id}\"), \":0x#{self.object_id.to_s(16)}\"].join\n end",
"def parse_expression(expr); end",
"def table_alias_name(value)\n data.table_alias_name = value\n end"
] |
[
"0.6339249",
"0.6206004",
"0.5620424",
"0.53660464",
"0.5346105",
"0.5278037",
"0.52770036",
"0.52121466",
"0.50888646",
"0.5088609",
"0.50797004",
"0.50403136",
"0.50403136",
"0.502814",
"0.5012339",
"0.49936682",
"0.49752772",
"0.49023044",
"0.48680702",
"0.4855873",
"0.4853828",
"0.48344135",
"0.48085976",
"0.47998422",
"0.47998422",
"0.47992152",
"0.4779546",
"0.47768596",
"0.47760367",
"0.4759756",
"0.474007",
"0.47268072",
"0.47251844",
"0.47153467",
"0.4714606",
"0.47106597",
"0.47072068",
"0.4701768",
"0.4698401",
"0.46972042",
"0.46893042",
"0.46704146",
"0.46591237",
"0.46591038",
"0.46355775",
"0.46227986",
"0.45909",
"0.457966",
"0.45776173",
"0.45597386",
"0.45523444",
"0.4551751",
"0.4542263",
"0.4539303",
"0.4536763",
"0.4530157",
"0.45296335",
"0.45270422",
"0.4505203",
"0.45020345",
"0.44894388",
"0.44862086",
"0.4482671",
"0.4476523",
"0.44760814",
"0.44706568",
"0.44665217",
"0.44618338",
"0.4461773",
"0.44607526",
"0.44596344",
"0.44567105",
"0.4456709",
"0.44500402",
"0.44479454",
"0.44466105",
"0.44410333",
"0.44387588",
"0.4435807",
"0.44355825",
"0.44352114",
"0.44251904",
"0.44242683",
"0.44185573",
"0.44167385",
"0.44144574",
"0.440597",
"0.43985352",
"0.43936566",
"0.43902218",
"0.43867323",
"0.43802118",
"0.43757012",
"0.43753603",
"0.43676397",
"0.4367258",
"0.43501034",
"0.43493178",
"0.43487537",
"0.43421787"
] |
0.6270272
|
1
|
Remove the cached entries for primary keys and sequences when a table is changed.
|
def remove_cached_schema(table)
tab = quote_schema_table(table)
Sequel.synchronize do
@primary_keys.delete(tab)
@primary_key_sequences.delete(tab)
end
super
end
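A short usage note, hedged: schema-modifying statements are expected to funnel through this hook so that the next primary_key or primary_key_sequence lookup re-queries the catalog instead of serving a stale entry. The override below sketches that wiring (one of the retrieved negatives shows the same shape); it is an illustration, not the adapter's exact code.

def alter_table(name, *)
  super
  remove_cached_schema(name) # drop stale pk/sequence cache entries
  nil
end

# DB.alter_table(:items){ add_primary_key :id } # invalidates cached entries
# DB.primary_key(:items)                        # re-queries the catalog, re-caches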
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def remove_cached_schema(table)\n tab = quote_schema_table(table)\n Sequel.synchronize do\n @primary_keys.delete(tab)\n end\n super\n end",
"def clean\n disable_referential_integrity do\n tables_cache.keys.reverse_each do |table|\n ActiveRecord::Base.connection.execute %(\n DELETE FROM #{table}\n )\n end\n end\n end",
"def clear_table_cache!(table_name)\n @columns.delete table_name\n @columns_hash.delete table_name\n @primary_keys.delete table_name\n @tables.delete table_name\n end",
"def forget\n @id_table.values.each(&:forget)\n end",
"def remove_cached_schema(table)\n k = quote_schema_table(table)\n Sequel.synchronize{@indexes.delete(k)}\n super\n end",
"def remove_previous_entries\n Entry.delete_all\n Entry.connection.execute('ALTER TABLE entries AUTO_INCREMENT = 0')\n end",
"def reset_primary_key_sequence(table)\n return unless seq = primary_key_sequence(table)\n pk = SQL::Identifier.new(primary_key(table))\n db = self\n s, t = schema_and_table(table)\n table = Sequel.qualify(s, t) if s\n\n if server_version >= 100000\n seq_ds = metadata_dataset.from(:pg_sequence).where(:seqrelid=>regclass_oid(LiteralString.new(seq)))\n increment_by = :seqincrement\n min_value = :seqmin\n # :nocov:\n else\n seq_ds = metadata_dataset.from(LiteralString.new(seq))\n increment_by = :increment_by\n min_value = :min_value\n # :nocov:\n end\n\n get{setval(seq, db[table].select(coalesce(max(pk)+seq_ds.select(increment_by), seq_ds.select(min_value))), false)}\n end",
"def clear_changes\n versioned_columns.each do |key|\n changed_attributes.delete(key)\n end\n end",
"def clear!\n transaction do\n Schema::TABLES.each do |table|\n execute(\"DELETE FROM #{table}\")\n execute(\"DELETE FROM sqlite_sequence WHERE name = '#{table}'\") # reset the auto-increment sequence\n end\n end\n self\n end",
"def flush_db\n [ 'active_sources', 'data_records', 'semantic_properties', 'semantic_relations', 'workflows'].reverse.each { |f| ActiveRecord::Base.connection.execute \"DELETE FROM #{f}\" }\n # Also remove the \"unsaved cache\" for the wrappers (may be important during testing)\n TaliaCore::SemanticCollectionWrapper.instance_variable_set(:'@unsaved_source_cache', {})\n end",
"def delete_all(table)\n\t\t\t\ttable.clear\n\t\t\tend",
"def clear_caches\n ar_stubs.each(&:reset_column_information)\n ActiveRecord::Base.connection.schema_cache.clear!\n end",
"def clear_sequence_setup(database, table)\n table_options = options(table)\n if table_options[:adjust_sequences]\n session.send(database).clear_sequence_setup(\n table_options[:rep_prefix], table\n )\n end\n end",
"def reset\n synchronize do\n @table = nil\n @instances = nil\n @codes = nil\n end\n end",
"def unlink_from(table)\n invalidate_cache\n remove_layers_from(table)\n end",
"def remove_all\n synchronized { @hash = {} }\n end",
"def rebuild(table); end",
"def alter_table(name, *)\n super\n remove_cached_schema(name)\n nil\n end",
"def _save_update_all_columns_hash\n v = @values.dup\n Array(primary_key).each{|x| v.delete(x) unless changed_columns.include?(x)}\n v.delete(model.lock_column)\n v\n end",
"def clear!\n @columns.clear\n @columns_hash.clear\n @primary_keys.clear\n @tables.clear\n @version = nil\n end",
"def rebuild\n raise \"Too many entries.\" if @rebuilds >= MAX_REBUILDS old = @table\n @slots = PRIMES[@rebuilds]\n self.size = 0\n fill_table @slots\n old.each do |e|\n upsert e.key, e.value if e\n end\n @rebuilds += 1\nend",
"def sweep\n @accessed_keys.each { |k| @values.delete k }\n @accessed_keys = []\n end",
"def reload\n clear_memoizations!\n remove = data.keys.find_all do |k|\n ![:id, :name].include?(k.to_sym)\n end\n remove.each do |k|\n data.delete(k)\n end\n super\n end",
"def reset\n @entry_cache.clear\n end",
"def reset_sequence_numbers\n result = Database.connection.exec(\"SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';\")\n table_names = result.map { |row| row.values_at('table_name')[0] }\n\n table_names_with_id_column = table_names.select do |table_name|\n result = Database.connection.exec(\"SELECT column_name FROM information_schema.columns WHERE table_name = '#{table_name}';\")\n column_names = result.map { |row| row.values_at('column_name')[0] }\n column_names.include?('id')\n end\n\n table_names_with_id_column.each do |table_name|\n result = Database.connection.exec(\"SELECT pg_get_serial_sequence('#{table_name}', 'id');\")\n sequence_name = result.getvalue(0, 0)\n Database.connection.exec(\"SELECT setval('#{sequence_name}', (select MAX(id) from #{table_name}));\")\n end\n end",
"def unflag_row_deltas\n tables.each(&:unflag_row_deltas)\n end",
"def reset_indecies\n puts 'Resetting AR indecies.'\n ApplicationRecord.connection.tables.each { |t| ApplicationRecord.connection.reset_pk_sequence!(t) }\n puts 'Restore complete'\n end",
"def clear_changes_information\n @mutations_before_last_save = nil\n forget_attribute_assignments\n @mutations_from_database = nil\n end",
"def clear_sequence_setup(rep_prefix, table_name)\n sequence_table_name = \"#{rep_prefix}_sequences\"\n if tables.include?(sequence_table_name)\n trigger_name = \"#{rep_prefix}_#{table_name}_sequence\"\n trigger_row = select_one(<<-end_sql)\n select * from information_schema.triggers\n where trigger_schema = database()\n and trigger_name = '#{trigger_name}'\n end_sql\n if trigger_row\n execute \"DROP TRIGGER `#{trigger_name}`\"\n execute \"delete from #{sequence_table_name} where name = '#{table_name}'\"\n unless select_one(\"select * from #{sequence_table_name}\")\n # no more sequences left --> delete sequence table\n drop_table sequence_table_name.to_sym\n end\n end\n end\n end",
"def clear\n\n set_id_to_cache_key_map\n\n @id_to_cache_key_map.each do |_, keys|\n Memcache.delete(keys[:kit])\n Memcache.delete_from_all_instances(keys[:saas])\n end\n\n nil\n\n end",
"def reset_db_peak_sequence\n ActiveRecord::Base.connection.tables.each do |t|\n ActiveRecord::Base.connection.reset_pk_sequence!(t)\n end\nend",
"def reset_db_peak_sequence\n ActiveRecord::Base.connection.tables.each do |t|\n ActiveRecord::Base.connection.reset_pk_sequence!(t)\n end\nend",
"def primary_keys_cleanup\n primary_keys = []\n\n # Removes the ADD CONSTRAINT statements for primary keys and stores the info of which statements have been removed.\n dump.gsub!(/^-- Name: [\\w\\s]+?(?<name>\\w+_pkey); Type: CONSTRAINT[\\s-]+ALTER TABLE ONLY (?<table>[\\w.]+)\\s+ADD CONSTRAINT \\k<name> PRIMARY KEY \\((?<column>[^,\\)]+)\\);$/) do\n primary_keys.push([$LAST_MATCH_INFO[:table], $LAST_MATCH_INFO[:column]])\n\n ''\n end\n\n # Adds the PRIMARY KEY property to each column for which it's statement has just been removed.\n primary_keys.each do |table, column|\n dump.gsub!(/^(?<statement>CREATE TABLE #{table} \\(.*?\\s+#{column}\\s+[^,\\n]+)/m) do\n \"#{$LAST_MATCH_INFO[:statement].remove(/ NOT NULL\\z/)} PRIMARY KEY\"\n end\n end\n end",
"def cleanup\n keys = redis.keys(raw_data_key('*')) + redis.keys(data_key('*'))\n multi do\n keys.each{|key| redis.del(key)}\n end\n super\n end",
"def delete_all_rows\n scan.each do |row|\n delete_row(row['key'])\n end\n end",
"def clear_garbage\n self.tmp_garbage.each do |relation, record|\n if record.is_a? Array\n record.each { |r| r.destroy }\n else\n record.destroy\n end\n end if self.tmp_garbage.present?\n self.tmp_garbage = {}\n end",
"def delete table\n table = table.to_sym\n @lookup = @lookup.reject { |k, v| k == table }\n @schema = @schema.reject { |k, v| k == table }\n nil\n end",
"def purge_cache\n return unless self.respond_to? :tracked_cache_keys\n\n self.delete_from_cache(*self.tracked_cache_keys, after_commit: true)\n end",
"def delete_all\n super\n Rails.cache.delete_matched(/#{self.configure.cache.namespace}/)\n end",
"def clear_cache!\n super\n # Make sure all query plans are *really* gone\n @connection.execute 'DEALLOCATE ALL' if active?\n end",
"def clear_columns_cache\n @cache.delete(:_columns)\n end",
"def cache_db_seq_entries(root)\n dbseq_lst = root.xpath('//DBSequence')\n dbseq_lst.each do |dnode|\n id = dnode[\"id\"].to_sym\n acc_id = dnode[\"accession\"]\n @db_seq_h[id] = acc_id.to_sym\n end\n end",
"def cache_delete\n model.cache_delete_pk(pk)\n end",
"def cache_db_seq_entries(root)\n dbseq_lst = root.xpath('//DBSequence')\n dbseq_lst.each do |dnode|\n id = dnode[\"id\"]\n acc_id = dnode[\"accession\"]\n @db_seq_h[id] = acc_id\n end\n end",
"def before_commit\n cache.delete(id)\n end",
"def clear_caches\n self.log.debug \"Clearing entry and values caches.\"\n\t\t@entry = nil\n\t\t@values.clear\n\tend",
"def flush_cache\n Rails.cache.delete([self.class.name, symbol_was.to_s])\n Rails.cache.delete([self.class.name, id])\n end",
"def squash_pk_changes(record)\n pk_def = record.class.primary_key\n if pk_def.is_a?(Array) # composite\n # Since we didn't actually fetch the record from the DB, CPK \"mishandles\" our update;\n # it builds a 'where' clause with null values, so it fails to locate the persisted record.\n # Non-CPK records don't have this \"problem\".\n # Workaround: fool dirty-checking into thinking the PK cols haven't changed.\n\n col_val_map = Hash[ pk_def.zip(record.id) ]\n # Caution: the workaround is all or nothing. It could be dangerous to partially set the PK.\n if col_val_map.values.all?\n col_val_map.each do |col, val|\n dangerously_reset_attribute(record, col, val)\n end\n end\n end\n end",
"def purge\n @db.execute( \"DELETE FROM #{TABLE_NAME};\" )\n end",
"def clear\n lib.tcidbvanish( @db )\n end",
"def map_cleanup\n if !identity_count.zero?\n nowi = Time.now.to_i\n # delete_if is atomic\n # contents should not mutate during this call\n identity_map.delete_if do |identity, compo|\n if (flag = compo.eviction_timeout <= nowi)\n evict_flush(compo.codec)\n end\n flag\n end\n end\n current_size_and_limit\n end",
"def _save_update_all_columns_hash\n v = Hash[@values]\n Array(primary_key).each{|x| v.delete(x) unless changed_columns.include?(x)}\n v\n end",
"def _save_update_all_columns_hash\n v = Hash[@values]\n Array(primary_key).each{|x| v.delete(x) unless changed_columns.include?(x)}\n v\n end",
"def purge!\n @map = {}\n end",
"def clear_with_new_table(table)\n @db_table = table\n init_parameters\n @filename = nil\n end",
"def clear_cache\n @all = nil\n end",
"def test_resets_to_min_pk_with_specified_pk_and_sequence\n @instances.each do |instance|\n model = instance.class\n model.delete_all\n model.connection.reset_pk_sequence!(model.table_name, model.primary_key, model.sequence_name)\n\n instance.save!\n assert_equal 1, instance.id, \"Sequence reset for #{model.table_name} failed.\"\n end\n end",
"def truncate_all\n Content::Version.all.map(&:destroy)\n ContentKey::Version.all.map(&:destroy)\n Content.all.map(&:destroy)\n ContentKey.all.map(&:destroy)\n end",
"def update_cache\r\n Rails.cache.delete(\"All#{self.class.name.to_s}\")\r\n end",
"def clear_primary_key\n @attributes[self.primary_key_attribute] = nil\n end",
"def clear\n @table.clear\n end",
"def reset\n tables = MODELS + [ENV['SCHEMA_TABLE']]\n tables.each { |t|\n DB << \"DROP TABLE IF EXISTS #{t.inspect};\"\n }\nend",
"def delete_all\n self.store.delete_keys(find_keys)\n end",
"def test_resets_to_min_pk_with_default_pk_and_sequence\n @instances.each do |instance|\n model = instance.class\n model.delete_all\n model.connection.reset_pk_sequence!(model.table_name)\n\n instance.save!\n assert_equal 1, instance.id, \"Sequence reset for #{model.table_name} failed.\"\n end\n end",
"def remove_pre_mongified_ids\n self.copy_tables.each do |t|\n Mongify::Status.publish('remove_pre_mongified', :size => 1, :name => \"Removing pre_mongified_id #{t.name}\", :action => 'add')\n no_sql_connection.remove_pre_mongified_ids(t.name)\n Mongify::Status.publish('remove_pre_mongified', :action => 'finish')\n end\n end",
"def clean_up_keyspace!(conn)\n conn.schema.cf_defs.each do |cf|\n conn.send(:each_key, cf.name) do |k|\n conn.remove(cf.name, k)\n end\n end\n end",
"def clear_prior_rows(key: nil, existing_entries: nil)\n # TODO: store a reference to those, and delete only after the new ones were imported successfully\n # And support multiple per run\n raise \"No data provided\" if key.nil? && existing_entries.nil?\n existing_entries ||= raw_data.where(key: key)\n\n if existing_entries.count > 0\n puts \"Using database #{ENV['DATABASE_URL'][0...30]}...\"\n puts \"Already #{existing_entries.count} entries for #{key}, are you sure you want to replace all of those entries? (y/n)\"\n raise \"user cancelled\" unless gets.strip == 'y'\n existing_entries.delete\n puts \"Deleted...\"\n end\n end",
"def teardown\n cleanup_tables\n cleanup_caches\n end",
"def update_table_cache\n return unless connected?\n new_table_names = connection.list_tables.table_names\n @table_names = new_table_names\n end",
"def reset_persistence_values\n self.id = nil\n\n if respond_to?(:updated_at=)\n self.updated_at = nil\n self.updated_at_will_change!\n end\n\n if respond_to?(:created_at=)\n self.created_at = nil\n self.created_at_will_change!\n end\n\n # mark all other attributes is changing\n (attributes.keys - changes.keys).each{ |key| self.send(:\"#{key}_will_change!\") }\n end",
"def remove_expired_keys\n self.user_key.each do |user_key|\n if user_key.expired?\n self.remove_user_key(user_key)\n user_key.delete\n end\n end\n end",
"def delete_all_caching_without_touching_additives\n\t\t\tself.delete_category_menu_fragments\n \t\tself.delete_cache\n \t\tself.delete_shared_item_items\n\t\t\tself.delete_category_browser_fragments\nend",
"def reset_id_seq *tables\n tables.each do |table|\n sql \"SELECT setval('#{table}_id_seq',max(id)) FROM #{table}\"\n end\n end",
"def remove_all\n @sequence.remove_all\n end",
"def down\n Widget.update_all data_table_id: nil\n\n [DataTableDataset, DataRow, DataTable].each do |clazz|\n clazz.delete_all\n end\n end",
"def clear_cache\n @lit_cache = {}\n @lit_nextpos = {}\n end",
"def delete_all_records_from_all_tables\n if Rails.env.production?\n raise \"deleting all records in production is not alllowed\"\n else\n Rake::Task[\"db:schema:load\"].invoke\n end\nend",
"def delete_all_records_from_all_tables\n if Rails.env.production?\n raise \"deleting all records in production is not alllowed\"\n else\n Rake::Task[\"db:schema:load\"].invoke\n end\nend",
"def remove_from_cache\n redis.hdel 'identifiers', self.typed_id\n redis.srem 'identifier:' + item.typed_id, self.typed_id\n end",
"def clear\n @cache.clear\n entries.clear\n self\n end",
"def clear\n @lock.synchronize do\n @values.clear\n @references_to_keys_map.clear\n end\n end",
"def reset_fast_pk_lookup_sql\n @fast_pk_lookup_sql = if @simple_table && @simple_pk\n \"SELECT * FROM #@simple_table WHERE #@simple_pk = \".freeze\n end\n @fast_instance_delete_sql = if @simple_table && @simple_pk\n \"DELETE FROM #@simple_table WHERE #@simple_pk = \".freeze\n end\n end",
"def refill\n now = Time.now.to_i\n cache_set.each {|rec|\n ttl = rec[:end_ts].to_i - now\n cache_write( rec[:key], rec[:value], ttl ) if ttl > 0 \\\n if rec[:value] and rec[:value].length > MAX_OBJ_SIZE and ttl > 0\n }\n end",
"def reset_fast_pk_lookup_sql\n @fast_pk_lookup_sql = if @simple_table && @simple_pk\n \"SELECT * FROM #{@simple_table} WHERE #{@simple_pk} = \".freeze\n end\n @fast_instance_delete_sql = if @simple_table && @simple_pk\n \"DELETE FROM #{@simple_table} WHERE #{@simple_pk} = \".freeze\n end\n end",
"def _delete_records\n @ids_to_delete.each do |table, ids|\n delete_from_table(table, ids)\n end\n end",
"def db_clear\n [Project, Milestone, Category, Version, LoaderRelease].each(&:delete_all)\n end",
"def clear_cache\n @cache = {}\n end",
"def clean!\n if @keys.size > Cache.max_size\n @keys = @keys[(Cache.max_size/2)...@keys.size]\n reject! { |key, value| !@keys.include?(key) }\n end\n end",
"def clear\n @metadata = {}\n clear_cache\n end",
"def test_reset_empty_table_with_custom_pk_sequence\n @connection.exec_query(\"CREATE SEQUENCE widgets_seq\")\n @connection.exec_query(\"\n CREATE TABLE widgets (\n widgetid INT PRIMARY KEY DEFAULT nextval('widgets_seq'),\n name string\n )\n \")\n assert_equal 1, Widget.create(name: \"weather\").id\n end",
"def test_reset_empty_table_with_custom_pk_sequence\n @connection.exec_query(\"CREATE SEQUENCE widgets_seq\")\n @connection.exec_query(\"\n CREATE TABLE widgets (\n widgetid INT PRIMARY KEY DEFAULT nextval('widgets_seq'),\n name string\n )\n \")\n assert_equal 1, Widget.create(name: \"weather\").id\n end",
"def clear\n\t\t@data_base = {}\n\t\tupdate_database\n\tend",
"def destroy\n [METADATA_TABLE_NAME, RUN_HISTORY_TABLE_NAME,\n DISABLED_MONITOR_TABLE_NAME, MONITOR_INFO_TABLE_NAME].each do |table|\n @db.execute(\"DROP TABLE IF EXISTS #{table}\")\n end\n\n create()\n end",
"def clear_timestamp_attributes\n all_timestamp_attributes_in_model.each do |attribute_name|\n self[attribute_name] = nil\n clear_attribute_change(attribute_name)\n end\n end",
"def reset_sequence!(table, column, sequence = nil)\n # Do nothing by default. Implement for PostgreSQL, Oracle, ...\n end",
"def clear\n ts = @_st_tableSize\n if ts > 20\n ts = 19\n end\n self.__clear(ts)\n end",
"def roomer_reset\n roomer_set_table_name_prefix\n roomer_reset_table_name\n reset_column_information\n reset_associations\n end",
"def clear\n @cache = {}\n end",
"def clear_cache() @cache = {}; end",
"def clear\n if namespace\n keys.each do |key|\n delete(key)\n end\n delete(KEYS_ADDRESS)\n else\n database.clear\n end\n end"
] |
[
"0.71739537",
"0.6630591",
"0.65746415",
"0.6339876",
"0.63392925",
"0.6294856",
"0.62028897",
"0.6157955",
"0.61444277",
"0.60552055",
"0.60023355",
"0.60008526",
"0.5963297",
"0.5955787",
"0.59497863",
"0.5938426",
"0.59146667",
"0.59047925",
"0.5871068",
"0.5853251",
"0.5849962",
"0.58445394",
"0.5811742",
"0.5791136",
"0.57824033",
"0.57764447",
"0.57721335",
"0.5768787",
"0.57603323",
"0.57510096",
"0.5742216",
"0.5742216",
"0.5726616",
"0.57093394",
"0.56947696",
"0.5693148",
"0.56905895",
"0.56824505",
"0.5670372",
"0.56652135",
"0.56591284",
"0.5649512",
"0.5642774",
"0.56390625",
"0.5636642",
"0.5628233",
"0.5613529",
"0.56062007",
"0.5602115",
"0.5593847",
"0.55763316",
"0.55723774",
"0.55723774",
"0.5569677",
"0.5557179",
"0.5553524",
"0.55526245",
"0.5541993",
"0.55257595",
"0.5518239",
"0.5514914",
"0.5508087",
"0.549849",
"0.5496768",
"0.54865116",
"0.5483368",
"0.5480114",
"0.5473163",
"0.5469522",
"0.54599285",
"0.5458272",
"0.54578525",
"0.54548746",
"0.5451482",
"0.54467267",
"0.54429704",
"0.5440649",
"0.5440649",
"0.5436693",
"0.5432599",
"0.5423654",
"0.5418884",
"0.5416628",
"0.54155433",
"0.54147774",
"0.5406657",
"0.54053104",
"0.5396433",
"0.5393619",
"0.5386071",
"0.5386071",
"0.5376261",
"0.5374541",
"0.537377",
"0.53696513",
"0.5368719",
"0.53586227",
"0.53571504",
"0.5355103",
"0.53535044"
] |
0.7296072
|
0
|
SQL DDL statement for renaming a table. PostgreSQL doesn't allow you to change a table's schema in a rename table operation, so specifying a new schema in new_name will not have an effect.
|
def rename_table_sql(name, new_name)
"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_identifier(schema_and_table(new_name).last)}"
end
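
A brief usage sketch may help here. This is illustrative only, assuming the surrounding Sequel-style adapter where quote_schema_table and quote_identifier are defined and a double-underscore symbol (e.g. :public__items) denotes schema__table:

# Illustrative calls and the SQL they would emit (not verbatim adapter output):
rename_table_sql(:items, :products)
# => ALTER TABLE "items" RENAME TO "products"
rename_table_sql(:public__items, :archive__products)
# => ALTER TABLE "public"."items" RENAME TO "products"
# (the :archive schema in new_name is dropped, per the caveat above)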
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}\"\n end",
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}\"\n end",
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}\"\n end",
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_identifier(name)} RENAME TO #{quote_identifier(new_name)}\"\n end",
"def rename_table(name, new_name)\n execute_ddl(rename_table_sql(name, new_name))\n remove_cached_schema(name)\n nil\n end",
"def rename_table(table_name, new_name, **options)\n validate_table_length!(new_name) unless options[:_uses_legacy_table_name]\n schema_cache.clear_data_source_cache!(table_name.to_s)\n schema_cache.clear_data_source_cache!(new_name.to_s)\n execute \"RENAME TABLE #{quote_table_name(table_name)} TO #{quote_table_name(new_name)}\"\n rename_table_indexes(table_name, new_name)\n end",
"def rename_table(table_name, new_name)\n schema_cache.clear_data_source_cache!(table_name.to_s)\n schema_cache.clear_data_source_cache!(new_name.to_s)\n exec_query \"ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}\"\n rename_table_indexes(table_name, new_name)\n end",
"def rename_table(name, new_name)\n @logger.unknown(\"ODBCAdapter#rename_table>\") if @trace\n @logger.unknown(\"args=[#{name}|#{new_name}]\") if @trace\n execute \"RENAME TABLE #{name} TO #{new_name}\"\n rescue Exception => e\n @logger.unknown(\"exception=#{e}\") if @trace\n raise ActiveRecord::ActiveRecordError, e.message\n end",
"def rename_table(*args)\n execute(rename_table_sql(*args))\n end",
"def rename_table(table_name, new_name)\n clear_cache!\n execute \"ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}\"\n end",
"def rename_table(name, new_name) #:nodoc:\r\n execute \"EXECUTE PROCEDURE sp_RenameDDObject(#{quote_table_name(name)} , #{quote_table_name(new_name)}, 1 /* ADS_DD_TABLE_OBJECT */, 0 /* Rename File */)\"\r\n end",
"def rename_tablespace(old_name, new_name)\n execute(\"ALTER TABLESPACE #{quote_tablespace(old_name)} RENAME TO #{quote_tablespace(new_name)};\")\n end",
"def rename_rule(old_name, table, new_name)\n execute \"ALTER RULE #{quote_rule(old_name)} ON #{quote_table_name(table)} RENAME TO #{quote_rule(new_name)};\"\n end",
"def rename_table(old_tablename, new_tablename)\r\n raise \"Cannot rename table running in client mode!\" if client?\r\n raise \"Table does not exist!\" unless table_exists?(old_tablename)\r\n raise(ArgumentError, 'Existing table name must be a symbol!') \\\r\n unless old_tablename.is_a?(Symbol)\r\n raise(ArgumentError, 'New table name must be a symbol!') unless \\\r\n new_tablename.is_a?(Symbol)\r\n raise \"Table already exists!\" if table_exists?(new_tablename)\r\n\r\n @table_hash.delete(old_tablename)\r\n @engine.rename_table(old_tablename, new_tablename)\r\n get_table(new_tablename)\r\n end",
"def clone_table_schema(table_name, new_table_name, preserve_splits = true)\n @admin.cloneTableSchema(TableName.valueOf(table_name),\n TableName.valueOf(new_table_name),\n preserve_splits)\n end",
"def alter_sequence_schema(name, schema, options = {})\n execute(\"ALTER SEQUENCE #{quote_sequence(name)} SET SCHEMA #{quote_schema(schema)};\")\n end",
"def rename(old_name, new_name); end",
"def rename(old_name, new_name); end",
"def create_schema schema_name\n execute \"CREATE SCHEMA #{quote_schema_name(schema_name)}\"\n end",
"def update_schema\n create_table unless table_exists?\n (schema_columns - column_names).each { |column| ActiveRecord::Migration.add_column(table_name, column, :string) }\n (column_names - protected_columns - schema_columns).each { |column| ActiveRecord::Migration.remove_column(table_name, column) }\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def create_schema_sql(name, opts = {})\n \"CREATE SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def rename_columns(table_name, columns_old_new_create)\n temporary_table_name = \"#{table_name}_temp\"\n\n DataMapper::Transaction.new(adapter).commit do\n adapter.execute(\n \"ALTER TABLE #{quote table_name} \"\\\n \"RENAME TO #{quote temporary_table_name}\"\n )\n\n create_table table_name do\n columns_old_new_create.each do |_old, new, *options|\n column new, *options\n end\n end\n\n columns_old = columns_old_new_create.map { |c| quote c[0] }\n columns_new = columns_old_new_create.map { |c| quote c[1] }\n\n adapter.execute(\n \"INSERT INTO #{quote table_name}\" +\n \"(#{columns_new.join(',')}) \" +\n \"SELECT #{columns_old.join(',')} \" +\n \"FROM #{quote temporary_table_name}\")\n\n drop_table temporary_table_name\n end\n end",
"def schema_name=schema_name\n self.table_name_prefix = \"#{schema_name}.\" if schema_name && !schema_name.blank?\n self.table_name = \"#{self.table_name_prefix}#{self.table_name}\" unless self.abstract_class?\n end",
"def up\n rename_column TABLE_NAME, OLD_COLUMN_NAME, NEW_COLUMN_NAME\n end",
"def up\n rename_column TABLE_NAME, OLD_COLUMN_NAME, NEW_COLUMN_NAME\n end",
"def replace_statement(target, stage)\n <<-SQLREPLACE\n begin transaction;\n drop table #{target};\n alter table #{stage} rename to #{target};\n end transaction;\n SQLREPLACE\n end",
"def new_table_name=(name)\n @new_table_name = \"layer_table_#{name}\"\n end",
"def drop_and_create_schema_migrations_table\n sql = [\n \"USE #{@database}\",\n 'DROP TABLE IF EXISTS schema_migrations',\n 'CREATE TABLE schema_migrations ( version varchar(255) COLLATE utf8_unicode_ci NOT NULL, UNIQUE KEY unique_schema_migrations (version)) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci'\n ]\n\n run_commands(sql)\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def drop_schema(name, opts = {})\n execute_ddl(drop_schema_sql(name, opts))\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)} CASCADE\"\n end",
"def drop_schema schema_name\n execute \"DROP SCHEMA #{schema_name} CASCADE\"\n end",
"def alter_table(name, &block)\n g = Schema::AlterTableGenerator.new(self, &block)\n alter_table_sql_list(name, g.operations).each {|sql| execute(sql)}\n end",
"def alter_table(name, generator=nil, options=nil, &block)\n if Hash === options\n generator ||= Schema::AlterTableGenerator.new(self, &block)\n\t\t alter_table_sql_list(name, generator.operations, options).\n\t\t flatten.each {|sql| execute_ddl(sql)}\n\t\t remove_cached_schema(name)\n\t\t nil\n else\n\t super(name, generator, &block)\n end\n end",
"def copy_table(old_table_name, new_table_name)\n transaction do\n execute \"CREATE TABLE #{new_table_name} LIKE #{old_table_name}\"\n execute \"INSERT INTO #{new_table_name} SELECT * FROM #{old_table_name}\"\n end\n end",
"def rename_column(table_name, old_name, new_name)\n column_functional(table_name) do\n rename_table_column(old_name, new_name)\n end\n end",
"def copy_table(old_table_name, new_table_name)\r\n execute add_select_into_table(new_table_name, \"SELECT * FROM #{old_table_name}\")\r\n end",
"def rename(name, new_name)\n @driver.renameRule([name], [new_name])\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_identifier(name)}\"\n end",
"def copy_table(old_table_name, new_table_name)\n transaction do\n execute \"CREATE TABLE #{new_table_name} LIKE #{old_table_name}\"\n execute \"INSERT INTO #{new_table_name} SELECT * FROM #{old_table_name}\"\n end\n end",
"def copy_table(old_table_name, new_table_name)\n execute add_select_into_table(new_table_name, \"SELECT * FROM #{old_table_name}\")\n end",
"def create_schema(name)\n sql = %{CREATE SCHEMA \"#{name}\"}\n ActiveRecord::Base.connection.execute sql\n end",
"def drop_schema_sql(name, opts=OPTS)\n \"DROP SCHEMA#{' IF EXISTS' if opts[:if_exists]} #{quote_identifier(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def rename_column(table_name, column_name, new_column_name)\n execute \"ALTER TABLE #{quote_table_name(table_name)} RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}\"\n end",
"def alter_table(name, *)\n super\n remove_cached_schema(name)\n nil\n end",
"def drop_schema(schema_name)\n execute(\"DROP SCHEMA \\\"#{schema_name}\\\"\")\n end",
"def quote_table_name(name)\n schema, name_part = extract_vertica_identifier_from_name(name.to_s)\n\n unless name_part\n quote_column_name(schema)\n else\n table_name, name_part = extract_vertica_identifier_from_name(name_part)\n \"#{quote_column_name(schema)}.#{quote_column_name(table_name)}\"\n end\n end",
"def drop_schema(schema_name, options = {})\n execute \"DROP SCHEMA#{' IF EXISTS' if options[:if_exists]} #{quote_schema_name(schema_name)} CASCADE\"\n end",
"def create_table_with_storing_name(table_name, options = {}, &block)\n @@table_name = table_name\n create_table_without_storing_name table_name, options, &block\n AirBlade::Migrations::SchemaDefinitions.foreign_keys = []\n end",
"def create_schema(schema)\n execute \"CREATE SCHEMA #{schema}\", 'Create Schema'\n end",
"def rename_column(table_name, column_name, new_column_name)\n execute \"ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}\"\n end",
"def set_table_name(name) # rubocop:disable Naming/AccessorMethodName\n @table_name = name\n end",
"def rename new_name\n @name = new_name.to_sym\n end",
"def rename new_name\n @name = new_name.to_sym\n end",
"def down\n rename_column TABLE_NAME, NEW_COLUMN_NAME, OLD_COLUMN_NAME\n end",
"def down\n rename_column TABLE_NAME, NEW_COLUMN_NAME, OLD_COLUMN_NAME\n end",
"def TableRenameClicked\n unless getDBConn\n msgbox(_(\"Warning\"), _(\"Please open a database before trying to rename a table.\"), \"warning\")\n return false\n end\n\n # Getting the marked table and run some possible error-handeling.\n tables = treeview_getSelection(@tv_tables)\n if count(tables) <= 0\n msgbox(_(\"Warning\"), _(\"Please select the table, that you would like to rename.\"), \"warning\")\n return false\n end\n\n tables.each do |table|\n # Getting the table.new-name from the user.\n tablename = knj_input(_(\"New table name\"), _(\"Please enter the table.new-name:\"), table[0])\n break if tablename == \"cancel\"\n\n # If he has enteret the same name.\n if strtolower(tablename) == strtolower(table[0])\n msgbox(_(\"Warning\"), _(\"The entered name was the same as the current table-name.\"), \"warning\")\n break\n end\n\n # Checking if the table.new-name if valid.\n unless preg_match(\"/^[a-zA-Z][a-zA-Z0-9_]+/\", tablename, match)\n msgbox(_(\"Warning\"), _(\"The enteret name was not a valid table-name.\"), \"warning\")\n break\n end\n\n # Renaming table and refreshing treeviews.\n begin\n getDBConn.getTable(table[0]).rename(tablename)\n rescue => e\n knj_msgbox.error_exc(e)\n end\n end\n\n @dbpage.TablesUpdate()\n end",
"def rename(new_name)\n raise 'to be implemented in subclass'\n end",
"def rename_event_trigger(name, new_name)\n execute \"ALTER EVENT TRIGGER #{quote_generic(name)} RENAME TO #{quote_generic(new_name)};\"\n end",
"def rename(old_name, new_name)\n move(old_name, new_name)\n end",
"def drop_schema(name, opts=OPTS)\n self << drop_schema_sql(name, opts)\n end",
"def drop_new_tables(new_table_names)\n indent do\n new_table_names.reverse.each {|table_name| add_line \"drop_table #{table_name.inspect}\" }\n end\n end",
"def alter_table_sql(table, op)\n case op[:op]\n when :rename_column\n unless sch = op[:schema]\n raise(Error, \"can't find existing schema entry for #{op[:name]}\") unless sch = op[:schema] || schema(table).find{|c| c.first == op[:name]}\n sch = sch.last\n end\n [\n alter_table_sql(table, :op=>:add_column, :name=>op[:new_name], :default=>sch[:ruby_default], :type=>sch[:db_type], :null=>sch[:allow_null]),\n from(table).update_sql(op[:new_name]=>op[:name]),\n alter_table_sql(table, :op=>:drop_column, :name=>op[:name])\n ]\n when :set_column_null, :set_column_default\n raise(Error, \"can't find existing schema entry for #{op[:name]}\") unless sch = op[:schema] || schema(table).find{|c| c.first == op[:name]}\n sch = sch.last\n\n sch = if op[:op] == :set_column_null\n sch.merge(:allow_null=>op[:null])\n else\n sch.merge(:ruby_default=>op[:default])\n end\n\n [\n alter_table_sql(table, :op=>:rename_column, :name=>op[:name], :new_name=>:sequel_access_backup_column, :schema=>sch),\n alter_table_sql(table, :op=>:rename_column, :new_name=>op[:name], :name=>:sequel_access_backup_column, :schema=>sch)\n ]\n else\n super\n end\n end",
"def alter_table_sql(table, op)\n case op[:op]\n when :add_column\n \"ALTER TABLE #{quote_schema_table(table)} ADD #{column_definition_sql(op)}\"\n when :drop_column\n \"ALTER TABLE #{quote_schema_table(table)} DROP #{column_definition_sql(op)}\"\n when :rename_column\n \"ALTER TABLE #{quote_schema_table(table)} ALTER #{quote_identifier(op[:name])} TO #{quote_identifier(op[:new_name])}\"\n when :set_column_type\n \"ALTER TABLE #{quote_schema_table(table)} ALTER #{quote_identifier(op[:name])} TYPE #{type_literal(op)}\"\n else\n super(table, op)\n end\n end",
"def chrono_rename_temporal_indexes(name, new_name)\n on_temporal_schema do\n temporal_indexes = indexes(new_name)\n temporal_indexes.map(&:name).each do |old_idx_name|\n if old_idx_name =~ /^index_#{name}_on_(?<columns>.+)/\n new_idx_name = \"index_#{new_name}_on_#{$~['columns']}\"\n execute \"ALTER INDEX #{old_idx_name} RENAME TO #{new_idx_name}\"\n end\n end\n end\n end",
"def pgt_mangled_table_name(table)\n quote_schema_table(table).gsub('\"', '').gsub(/[^A-Za-z0-9]/, '_').gsub(/_+/, '_')\n end",
"def change_name(new_name)\n Dropio::Resource.client.change_drop_name(self,new_name)\n end",
"def rename_column(table_name, column_name, new_column_name, options = {}) #:nodoc:\n column_info = select_one(\"SHOW FULL FIELDS FROM #{table_name} LIKE '#{column_name}'\")\n current_type = column_info[\"Type\"]\n options[:comment] ||= column_info[\"Comment\"]\n sql = \"ALTER TABLE #{table_name} CHANGE #{column_name} #{new_column_name} #{current_type}\"\n sql << \" COMMENT #{quote(options[:comment])}\" unless options[:comment].blank?\n execute sql\n end",
"def create_schema_sql(name, opts=OPTS)\n \"CREATE SCHEMA #{'IF NOT EXISTS ' if opts[:if_not_exists]}#{quote_identifier(name)}#{\" AUTHORIZATION #{literal(opts[:owner])}\" if opts[:owner]}\"\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def undo_cleanup_concurrent_column_rename(table, old_column, new_column, type: nil, batch_column_name: :id)\n Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode!\n\n setup_renamed_column(__callee__, table, new_column, old_column, type, batch_column_name)\n\n with_lock_retries do\n install_bidirectional_triggers(table, old_column, new_column)\n end\n end",
"def create_table_prefix_sql(name, options)\n \"CREATE #{temporary_table_sql if options[:temp]}TABLE#{' IF NOT EXISTS' if options[:if_not_exists]} #{options[:temp] ? quote_identifier(name) : quote_schema_table(name)}\"\n end",
"def alter_materialized_view_schema(name, schema, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_schema => schema\n }, options).to_sql\n end",
"def create_schema(name, opts=OPTS)\n self << create_schema_sql(name, opts)\n end",
"def add_schema(export_type = nil)\n mig_text = schema_generator_script(db_migration_schema, 'create')\n write_db_migration mig_text, \"#{db_migration_schema}_schema\", export_type: export_type\n end",
"def create_schema(schema)\n ActiveRecord::Base.connection.execute(\"CREATE SCHEMA #{schema}\")\n end",
"def change_name(field, new_name)\n CONNECTION.execute(\"UPDATE #{table_name} SET #{field} = '#{new_name}' WHERE id = #{@id};\")\n end",
"def renamenx(old_name, new_name); end",
"def renamenx(old_name, new_name); end",
"def quote_table_name(name)\n name\n end",
"def add_select_into_table(new_table_name, sql_query)\n \"CREATE TABLE #{new_table_name} \" + sql_query\n end",
"def quote_table_or_view(name, options)\n schema = options[:schema]\n if schema\n \"\\\"#{schema}\\\".\\\"#{name}\\\"\"\n else\n \"\\\"#{name}\\\"\"\n end\n end",
"def drop_table(name)\n not_exist(name)\n\n ORM::DBConnection.new(model_name(name)).remove_table(name)\n File.delete(\"app/services/db/tables/#{name}.json\")\n end",
"def create_database(name, options = {})\n execute(\n \"CREATE SCHEMA #{quote_table_name(name)}\",\n SCHEMA_LOG_NAME\n )\n end",
"def drop_table(table_name = temporary_table_name)\n ::RailsRedshiftReplicator.connection.exec \"drop table if exists #{table_name}\"\n end",
"def drop_table(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS \n names.each do |n|\n execute_ddl(drop_table_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def rename_column_concurrently(table, old_column, new_column, type: nil, batch_column_name: :id)\n Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode!\n\n setup_renamed_column(__callee__, table, old_column, new_column, type, batch_column_name)\n\n with_lock_retries do\n install_bidirectional_triggers(table, old_column, new_column)\n end\n end",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS #{sql}\"\n end",
"def rename_column(old_col_name, new_col_name)\r\n raise \"Do not execute this method in client/server mode!\" if \\\r\n @db.client?\r\n\r\n raise \"Cannot rename recno column!\" if old_col_name == :recno\r\n raise \"Cannot give column name of recno!\" if new_col_name == :recno\r\n\r\n raise 'Invalid column name to rename: ' % old_col_name unless \\\r\n @field_names.include?(old_col_name)\r\n \r\n raise 'New column name already exists: ' % new_col_name if \\\r\n @field_names.include?(new_col_name)\r\n\r\n @db.engine.rename_column(self, old_col_name, new_col_name)\r\n\r\n # Need to reinitialize the table instance and associated indexes.\r\n @db.engine.remove_recno_index(@name)\r\n @db.engine.remove_indexes(@name)\r\n\r\n update_header_vars\r\n create_indexes\r\n create_table_class unless @db.server?\r\n end",
"def create_schema(schema_name, pg_username = nil)\n if pg_username.nil? # AR 4.0 compatibility - accepts only single argument\n execute \"CREATE SCHEMA #{schema_name}\"\n else\n execute(\"CREATE SCHEMA \\\"#{schema_name}\\\" AUTHORIZATION \\\"#{pg_username}\\\"\")\n end\n end",
"def rename(old_name, new_name)\n send_command([:rename, old_name, new_name])\n end",
"def table_name=(value)\n @original_table_name = @table_name if defined?(@table_name)\n @table_name = value && value.to_s\n @quoted_table_name = nil\n end",
"def fdw_adjusted_table_name(name)\n name[0...PG_MAX_TABLE_NAME_LENGTH]\n end",
"def pg_table_name\n table_name\n end",
"def rename_column(table_name, column_name, new_column_name)\n execute \"exec sp_rename '#{table_name}.#{column_name}', '#{new_column_name}'\"\n end",
"def test_rename_table\n user = User.create! :login => 'looser'\n begin\n ActiveRecord::Base.connection.rename_table 'users', 'loosers'\n loosers = Class.new(ActiveRecord::Base)\n loosers.table_name = 'loosers'\n assert_kind_of ActiveRecord::Base, loosers.find(user.id)\n ensure\n disable_logger do\n CreateUsers.up rescue nil\n ActiveRecord::Base.connection.drop_table(\"loosers\") rescue nil\n end\n end\n end",
"def alter_table_sql(table, op)\n quoted_name = quote_identifier(op[:name]) if op[:name]\n alter_table_op = case op[:op]\n when :add_column\n \"ADD COLUMN #{column_definition_sql(op)}\"\n when :drop_column\n \"DROP COLUMN #{quoted_name}\"\n when :rename_column\n \"RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}\"\n when :set_column_type\n \"ALTER COLUMN #{quoted_name} TYPE #{op[:type]}\"\n when :set_column_default\n \"ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}\"\n when :set_column_null\n \"ALTER COLUMN #{quoted_name} #{op[:null] ? 'DROP' : 'SET'} NOT NULL\"\n when :add_index\n return index_definition_sql(table, op)\n when :drop_index\n return drop_index_sql(table, op)\n when :add_constraint\n \"ADD #{constraint_definition_sql(op)}\"\n when :drop_constraint\n \"DROP CONSTRAINT #{quoted_name}\"\n else\n raise Error, \"Unsupported ALTER TABLE operation\"\n end\n \"ALTER TABLE #{quote_schema_table(table)} #{alter_table_op}\"\n end"
] |
[
"0.83021253",
"0.82999986",
"0.82999986",
"0.7922324",
"0.78168976",
"0.75273216",
"0.7434364",
"0.7324648",
"0.7227108",
"0.71809715",
"0.7058117",
"0.69426525",
"0.675414",
"0.6689098",
"0.657591",
"0.6376583",
"0.6037233",
"0.6037233",
"0.60329074",
"0.60227203",
"0.598793",
"0.5984311",
"0.59518933",
"0.5940387",
"0.59096956",
"0.58863467",
"0.5882885",
"0.5882885",
"0.58795005",
"0.5840825",
"0.5826575",
"0.5782594",
"0.57710975",
"0.5758377",
"0.573262",
"0.5707243",
"0.56982774",
"0.5675693",
"0.5664636",
"0.5651276",
"0.5648084",
"0.56277055",
"0.5627427",
"0.5627009",
"0.5616566",
"0.5614745",
"0.55600625",
"0.5552814",
"0.553264",
"0.5513623",
"0.5486823",
"0.54762334",
"0.547015",
"0.5469151",
"0.5464891",
"0.5463858",
"0.5463858",
"0.5460138",
"0.5460138",
"0.54562676",
"0.5455438",
"0.5429323",
"0.5419674",
"0.5413077",
"0.5403063",
"0.5402258",
"0.53974503",
"0.5372922",
"0.53641826",
"0.5356594",
"0.5350963",
"0.53493905",
"0.5342065",
"0.53269655",
"0.53187495",
"0.53150356",
"0.5306411",
"0.5298861",
"0.5293332",
"0.52929425",
"0.5283906",
"0.5283906",
"0.52730393",
"0.52477854",
"0.52332276",
"0.5231067",
"0.52292824",
"0.5226885",
"0.5225879",
"0.5218788",
"0.5214523",
"0.52126837",
"0.5210889",
"0.5206152",
"0.52000886",
"0.5194596",
"0.51925224",
"0.51912177",
"0.5189558",
"0.51845205"
] |
0.82057774
|
3
|
Handle interval and citext types.
|
def schema_column_type(db_type)
case db_type
when /\Ainterval\z/io
:interval
when /\Acitext\z/io
:string
else
super
end
end
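
A hedged illustration of the mapping above; schema_column_type is ordinarily a private hook consulted during schema parsing, so the direct calls below are for exposition only:

# Assumed behavior of the case branches:
schema_column_type("interval") # => :interval
schema_column_type("citext")   # => :string
schema_column_type("integer")  # falls through to super (typically :integer)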
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def getIntervalType\n\t\t# get the interval type from settings\n\t\t# currently not implemented. It will useful in future\n\t\tintervalType = 'W'\n\t\tintervalType\n\tend",
"def nonregular_type; end",
"def handle_ts\n handle_at\n handle_in\n @time_special = @tokens[@index].get_tag(TimeSpecial).type\n @index += 1\n @precision = :time_special\n end",
"def conv_itype(itype)\n case itype\n when 'concept' : itype\n when 'query_doctrack' : 'query'\n else 'document'\n end\nend",
"def valid_type_for_conditional_formatting\n {\n 'cell' => 'cellIs',\n 'date' => 'date',\n 'time' => 'time',\n 'average' => 'aboveAverage',\n 'duplicate' => 'duplicateValues',\n 'unique' => 'uniqueValues',\n 'top' => 'top10',\n 'bottom' => 'top10',\n 'text' => 'text',\n 'time_period' => 'timePeriod',\n 'blanks' => 'containsBlanks',\n 'no_blanks' => 'notContainsBlanks',\n 'errors' => 'containsErrors',\n 'no_errors' => 'notContainsErrors',\n '2_color_scale' => '2_color_scale',\n '3_color_scale' => '3_color_scale',\n 'data_bar' => 'dataBar',\n 'formula' => 'expression',\n 'icon_set' => 'iconSet'\n }\n end",
"def valid_type_for_conditional_formatting\n {\n 'cell' => 'cellIs',\n 'date' => 'date',\n 'time' => 'time',\n 'average' => 'aboveAverage',\n 'duplicate' => 'duplicateValues',\n 'unique' => 'uniqueValues',\n 'top' => 'top10',\n 'bottom' => 'top10',\n 'text' => 'text',\n 'time_period' => 'timePeriod',\n 'blanks' => 'containsBlanks',\n 'no_blanks' => 'notContainsBlanks',\n 'errors' => 'containsErrors',\n 'no_errors' => 'notContainsErrors',\n '2_color_scale' => '2_color_scale',\n '3_color_scale' => '3_color_scale',\n 'data_bar' => 'dataBar',\n 'formula' => 'expression'\n }\n end",
"def type_of_type(value)\n if value.is_a?(Range) || value.is_a?(Array) || value.is_a?(Hash)\n :complex\n else\n :simple\n end\nend",
"def type(type); end",
"def interval=(_arg0); end",
"def getIntervalValue(intervalType)\n\t\tintervalVal = 1\n\t\tif intervalType == 'W'\n\t\t\tintervalVal = 7\n\t\tend\n\t\tintervalVal\n\tend",
"def handle_ds\n @day_special = @tokens[@index].get_tag(DaySpecial).type\n @index += 1\n @precision = :day_special\n end",
"def type() end",
"def value_types; end",
"def typecast(value)\n if value.kind_of?(Range) then Range.new(typecast(value.first), typecast(value.last))\n elsif value.kind_of?(Array) then value.map{|v| typecast(v)}\n elsif primitive == BigDecimal then super(value).to_f\n elsif primitive == DateTime then Time.parse(super(value).to_s).to_i\n elsif primitive == Date then Time.parse(super(value).to_s).to_i\n elsif primitive == Time then super(value).to_i\n else\n super(value) # Good luck\n end\n end",
"def interval=(val)\n attribute_set(:interval,val.kind_of?(Numeric) ? val : INTERVALS_MAP[val.to_s])\n end",
"def get_value(literal)\n if literal.typed?\n literal.humanize\n else\n # Hack to fix incorrect dattime\n case literal.to_s\n when RDF::Literal::Duration::GRAMMAR\n get_value(RDF::Literal::Duration.new(literal))\n when RDF::Literal::Date::GRAMMAR\n get_value(RDF::Literal::Date.new(literal))\n when RDF::Literal::Time::GRAMMAR\n get_value(RDF::Literal::Time.new(literal))\n when RDF::Literal::DateTime::GRAMMAR\n get_value(RDF::Literal::DateTime.new(literal))\n when %r(\\A-?\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2})?(\\.\\d+)?(([\\+\\-]\\d{2}:\\d{2})|UTC|Z)?\\Z)\n # Hack to fix incorrect DateTimes in examples:\n get_value(RDF::Literal::DateTime.new(literal))\n else\n literal.humanize\n end\n end\n end",
"def cell_type_from_value(v)\n if v.is_a?(Date)\n :date\n elsif v.is_a?(Time)\n :time\n elsif v.is_a?(TrueClass) || v.is_a?(FalseClass)\n :boolean\n elsif v.respond_to?(:to_i) && Axlsx::NUMERIC_REGEX.match?(v.to_s)\n :integer\n elsif v.respond_to?(:to_f) && (Axlsx::SAFE_FLOAT_REGEX.match?(v.to_s) || ((matchdata = MAYBE_FLOAT_REGEX.match(v.to_s)) && matchdata[:exp].to_i.between?(Float::MIN_10_EXP, Float::MAX_10_EXP)))\n :float\n elsif Axlsx::ISO_8601_REGEX.match?(v.to_s)\n :iso_8601\n elsif v.is_a? RichText\n :richtext\n else\n :string\n end\n end",
"def convert_numeric_strings\n @interval = @interval.to_i if @interval\n @timeout = @timeout.to_i if @timeout\n @maximum_reconnects = @maximum_reconnects.to_i if @maximum_reconnects\n end",
"def types(types); end",
"def types; end",
"def types; end",
"def types; end",
"def types; end",
"def types; end",
"def get_value(literal)\n if literal.typed?\n literal.humanize\n else\n # Hack to fix incorrect dattime\n case literal.to_s\n when RDF::Literal::Duration::GRAMMAR\n get_value(RDF::Literal::Duration.new(literal))\n when RDF::Literal::Date::GRAMMAR\n get_value(RDF::Literal::Date.new(literal))\n when RDF::Literal::Time::GRAMMAR\n get_value(RDF::Literal::Time.new(literal))\n when RDF::Literal::DateTime::GRAMMAR\n get_value(RDF::Literal::DateTime.new(literal))\n when %r(\\A-?\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}(:\\d{2})?(\\.\\d+)?(([\\+\\-]\\d{2}:\\d{2})|UTC|Z)?\\Z)\n # Hack to fix incorrect DateTimes in examples:\n get_value(RDF::Literal::DateTime.new(literal))\n else\n literal.to_s\n end\n end\n end",
"def value_type\n 'PERIOD'\n end",
"def get_interval(interval)\n case interval\n when :custom\n return {}\n when :hourly\n @start = (@run_time - 1.hour).beginning_of_hour\n @end = (@run_time - 1.hour).end_of_hour\n return {@field => @start..@end}\n when :daily\n @start = @run_time.yesterday.beginning_of_day\n @end = @run_time.yesterday.end_of_day\n return {@field => @start..@end}\n when :weekly\n @start = @run_time.yesterday.beginning_of_week\n @end = @run_time.yesterday.end_of_week\n return {@field => @start..@end}\n when :monthly\n @start = @run_time.yesterday.beginning_of_month\n @end = @run_time.yesterday.end_of_month\n return {@field => @start..@end}\n when :yearly\n @start = @run_time.yesterday.beginning_of_year\n @end = @run_time.yesterday.end_of_year\n return {@field => @start..@end}\n end\n end",
"def initialize(value, datatype: nil, lexical: nil, **options)\n @datatype = RDF::URI(datatype || self.class.const_get(:DATATYPE))\n @string = lexical || (value if value.is_a?(String))\n @object = case\n when value.class == ::Date\n @zone = nil\n # Use midnight as midpoint of the interval\n ::DateTime.parse(value.strftime('%FT00:00:00'))\n when value.respond_to?(:to_datetime)\n dt = value.to_datetime\n @zone = dt.zone\n dt\n else\n md = value.to_s.match(GRAMMAR)\n _, dt, tz = Array(md)\n if tz\n @zone = tz == 'Z' ? '+00:00' : tz\n else\n @zone = nil # No timezone\n end\n # Use midnight as midpoint of the interval\n ::DateTime.parse(\"#{dt}T00:00:00#{@zone}\")\n end rescue ::DateTime.new\n end",
"def calculate_type\n case @age\n when 0..12 then \"Child\"\n when 13..19 then \"Teenager\"\n else \"Adult\"\n end\n end",
"def type_conversion(value_data, type) \n case type\n when 'boolean'\n return to_boolean(value_data) \n when 'int'\n val = Integer(value_data)\n if (val < -2147483648) || (val > 2147483647)\n raise \"Integer out of range: #{val}\" \n end\n return val\n when 'long'\n val = Integer(value_data)\n if (val < -9223372036854775808) || (val > 9223372036854775807)\n raise \"Long out of range: #{val}\" \n end\n return val\n when 'double'\n return Float(value_data)\n when 'string'\n return value_data\n else\n raise \"Unsupported type: #{type}\" \n end \n end",
"def parse_type_and_options(type)\n case type\n when /(string|text|binary|integer)\\{(\\d+)\\}/\n return $1, limit: $2.to_i\n when /decimal\\{(\\d+)[,.-](\\d+)\\}/\n return :decimal, precision: $1.to_i, scale: $2.to_i\n when /(references|belongs_to)\\{(.+)\\}/\n type = $1\n provided_options = $2.split(/[,.-]/)\n options = Hash[provided_options.map { |opt| [opt.to_sym, true] }]\n return type, options\n else\n return type, {}\n end\n end",
"def range_marker_type_label\n raise NotImplementedError.new(\"Method must be overridden in a subclass of this abstract base class.\")\n end",
"def handle(*types); end",
"def column_type(type_indicator)\n case type_indicator\n when :eval; :text\n when :text; :text\n when :string; :string\n when :sec; :double\n when :msec; :double\n when :duration; :double\n when :float; :double\n when :double; :double\n when :integer; :integer\n when :int; :int\n when :timestamp; :datetime\n when :datetime; :datetime\n when :date; :date\n else :string\n end\n end",
"def difftype() [1, \"s\"] end",
"def my_type(type)\n case type\n when /bond/i\n \"Bonds\"\n when /stock/i\n \"Stocks\"\n when /alternative/i\n \"Alternatives\"\n else\n \"Unclassified\"\n end\n end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end"
] |
[
"0.5744355",
"0.57287997",
"0.56326085",
"0.5621758",
"0.5467443",
"0.54393935",
"0.5346401",
"0.5315598",
"0.53015447",
"0.52042335",
"0.51975113",
"0.51377654",
"0.5134359",
"0.51323223",
"0.50979125",
"0.50302005",
"0.50287396",
"0.5001532",
"0.49825865",
"0.49769452",
"0.49769452",
"0.49769452",
"0.49769452",
"0.49769452",
"0.49674293",
"0.49533668",
"0.4950265",
"0.49404472",
"0.49402043",
"0.4935591",
"0.49313757",
"0.49263492",
"0.4896382",
"0.48748448",
"0.48681992",
"0.4863842",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273",
"0.4861273"
] |
0.60351527
|
0
|
The schema :type entry to use for array types.
|
def schema_array_type(db_type)
:array
end
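
For exposition, a sketch of how an adapter might route array db_types into this hook; the "[]" suffix check and the dispatch below are assumptions for illustration, not code from the source:

# Hypothetical caller inside schema parsing:
def schema_column_type(db_type)
  db_type.end_with?("[]") ? schema_array_type(db_type) : super
end
schema_array_type("integer[]") # => :array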
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def array_type?\n type = self.sexp_type\n @@array_types.include? type\n end",
"def specimen_array_type=(type)\n setSpecimenArrayType(type)\n unless type.nil? then\n copy_container_type_capacity\n end\n type\n end",
"def datatypes\n value.document.fetch(\"#{value.key}_type\", [])\n end",
"def Array(type)\n Strict(::Array).of(type)\n end",
"def types\n @types ||= []\n end",
"def test_array_subtype_shortform\n s = Schema.new do\n type :array, :integer\n end\n assert_nothing_raised { s.validate! [5] }\n assert_verr { s.validate! [nil] }\n assert_verr { s.validate! ['a'] }\n assert_verr { s.validate! [5, 'a'] }\n assert_verr { s.validate! [5, nil] }\n end",
"def get_array_type(array)\r\n\t\t\r\n\t\tif @l_identificadores != nil\r\n\t\t\tarray << @tipo1\r\n\t\t\t@l_identificadores.get_array_type(array)\r\n\t\telse\r\n\t\t\tarray << @tipo2\r\n\t\t\tarray << @tipo1\t\r\n\t\t\treturn array\r\n\t\tend\r\n\r\n\tend",
"def array(sym, options)\n size = options.fetch(:size, 0)\n default = options.fetch(:default) { ->(_, _) { Array.new(size) } }\n type = Array[options.fetch(:type)]\n field sym, options.merge(type: type, default: default)\n end",
"def type_schema\n steps.type_schema\n end",
"def paramstype\n \"Array\"\n end",
"def is_array_type?(type)\n return type =~ /^\\w+\\[\\w+\\]$/ ? true : false\n end",
"def array_of(type)\n new_class = Class.new(TypedArray)\n new_class.set_element_type(type)\n new_class\n end",
"def set_data_type_and_array\n column_schema = SearchFacet.get_table_schema(column_name: self.big_query_id_column)\n detected_type = column_schema[:data_type]\n self.is_array_based = detected_type.include?('ARRAY')\n item_type = BQ_DATA_TYPES.detect {|d| detected_type.match(d).present?}\n self.data_type = BQ_TO_FACET_TYPES[item_type]\n end",
"def type\n self['type']['type']\n end",
"def data_type\n case query.class\n when ActiveRecord::Relation then\n 'ActiveRecord'\n when Array then\n 'Array'\n end\n end",
"def set_data_type_and_array\n column_schema = SearchFacet.get_table_schema(column_name: big_query_id_column)\n detected_type = column_schema[:data_type]\n self.is_array_based = detected_type.include?('ARRAY')\n item_type = BQ_DATA_TYPES.detect { |d| detected_type.match(d).present? }\n self.data_type = BQ_TO_FACET_TYPES[item_type]\n end",
"def array?\n GtaScm::Types::ARRAY_TYPES.include?(self.arg_type_id)\n end",
"def create_array_tc(length, element_type)\n return CORBA::TypeCode::Array.new(element_type, length)\n end",
"def type\n @types ||= strip(:type)\n end",
"def schema_data_type\n case type\n when \"N\", \"F\"\n decimal > 0 ? \":float\" : \":integer\"\n when \"I\"\n \":integer\"\n when \"D\"\n \":date\"\n when \"T\"\n \":datetime\"\n when \"L\"\n \":boolean\"\n when \"M\"\n \":text\"\n else\n \":string, :limit => #{length}\"\n end\n end",
"def type\n self['type']\n end",
"def split_array_type(type)\n if type =~ /^(\\w+)\\[(\\w+)\\]$/\n element_type = $1\n length = $2\n unless length =~ /^\\d+$/\n length = railgun.const(length)\n end\n\n return element_type.to_sym, length.to_i\n else\n raise \"Can not split non-array type #{type}\"\n end\n end",
"def type\n @data['type']\n end",
"def type\n @ar_column_info.type\n end",
"def add_column_types obj\n FbArray.new(obj)\n end",
"def type\n @data['type']\n end",
"def data_type\n ns_definition(:query_spec_datatypes)\n end",
"def types\n FIELD_TYPES\n end",
"def type_a\n @type_a ||= type[2]\n end",
"def test_array_shortform_invalid\n assert_raises Exceptions::InvalidSchemaError do\n Schema.new do\n type(:array, %i[array integer], min: 2)\n end\n end\n end",
"def type\n field[:type]\n end",
"def type\n data['type'].to_sym\n end",
"def types\n @types ||= inject({}) { |memo, schema| memo.merge(schema.types) }\n end",
"def create_array(element_type, length = 0, size: nil, typename: nil, **options)\n element_type = validate_type_argument(element_type)\n typename ||= \"#{element_type.name}[#{length}]\"\n if !size && element_type.size\n size = element_type.size * length\n end\n ModelKit::Types.validate_typename(typename)\n array_t = ArrayType.new_submodel(deference: element_type, typename: typename, registry: self,\n length: length, size: size, **options)\n register(array_t)\n end",
"def code\n\t\tself.AsArray[0].type_code\n\tend",
"def type\n data.type\n end",
"def types\n load_schema! unless schema_loaded?\n @types\n end",
"def type\n raw_data['type']\n end",
"def types\n configuration[:types]\n end",
"def array(name, option={})\n option[:is_a] = :array\n register_attribute_member(name, option)\n define_attribute_accessor(name, option)\n define_validations(name, option)\n end",
"def types\n @data.keys & TYPES\n end",
"def type\n TYPES[@type_id]\n end",
"def types; end",
"def types; end",
"def types; end",
"def types; end",
"def types; end",
"def encode_array_as=(pg_type)\n\t\tcase pg_type\n\t\t\twhen :array\n\t\t\twhen :json\n\t\t\twhen :record\n\t\t\twhen /\\A_/\n\t\t\telse\n\t\t\t\traise ArgumentError, \"invalid pg_type #{pg_type.inspect}\"\n\t\tend\n\n\t\t@encode_array_as = pg_type\n\n\t\tinit_encoders\n\tend",
"def csharp_type_for_schema_definition( schema )\n types = {\n ::Respect::ArraySchema => \"List<string>\",\n ::Respect::BooleanSchema => \"bool#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::FloatSchema => \"float#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::HashSchema => \"Dictionary<string, object>\",\n ::Respect::IntegerSchema => \"long#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::StringSchema => \"string\",\n ::Respect::AnySchema => \"object\",\n ::Respect::GeoPointSchema => \"global::#{ base_api_namespace() }.Fields.GeoPoint\",\n ::Respect::AttachmentSchema => \"global::#{ base_api_namespace() }.Fields.Attachment\"\n }\n\n types = if options[:use_noda_time] == \"true\" || options[:use_noda_time] == \"1\"\n types.merge({\n ::Respect::DatetimeSchema => \"NodaTime.Instant#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::InstantSchema => \"NodaTime.Instant#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::DateSchema => \"NodaTime.LocalDate#{ schema.allow_nil? ? \"?\" : \"\" }\"\n })\n else\n types.merge({\n ::Respect::DatetimeSchema => \"DateTime#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::InstantSchema => \"DateTime#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::DateSchema => \"DateTime#{ schema.allow_nil? ? \"?\" : \"\" }\"\n })\n end\n\n type = types[ schema.class ]\n\n custom = nil\n collection = false\n\n schema_item = schema.instance_variable_get(:@item)\n if type == \"List<string>\" && schema_item && schema_item.is_a?(::Respect::AnySchema)\n type = \"List<object>\"\n end\n\n if schema.is_a?(Respect::HasOneSchema)\n type = sanitize_model_namespace( schema.of ) + \".\" + sanitize_model_class( schema.of )\n custom = type\n collection = false\n end\n\n if schema.is_a?(Respect::EnumSchema)\n type = sanitize_model_namespace( schema.values_module ) + \".\" + sanitize_model_class( schema.values_module )\n custom = type\n collection = false\n\n emit_enum!( schema.values_module )\n end\n\n if schema.is_a?(Respect::HasManySchema)\n type = \"List<\" + sanitize_model_namespace( schema.of ) + \".\" + sanitize_model_class( schema.of ) + \">\"\n custom = type\n collection = true\n end\n\n return type, custom, collection\n end",
"def type(member)\n\n\n\n # 332:7: type_name[member] ( array_brackets[member] )*\n type_name(member)\n\n # 332:25: ( array_brackets[member] )*\n while true\n alt44 = 2\n # ()* loopback of 332:25: ( array_brackets[member] )*\n look_ahead44_0 = look_ahead(1)\n if look_ahead44_0 == :LEFT_SQUARE_BRACKET \n alt44 = 1\n end\n case alt44\n when 1\n # 332:26: array_brackets[member]\n array_brackets(member)\n\n else\n break\n end\n end\n\n\n\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n\t\t\t@data[\"type\"]\n\t\tend",
"def puppet_type_to_ruby(type)\n array = false\n\n type = $1 if type =~ /Optional\\[(.+)/\n\n if type =~ /Array\\[(.+)/\n type = $1\n array = true\n end\n\n return [Numeric, array] if type =~ /Integer/\n return [Numeric, array] if type =~ /Float/\n return [Hash, array] if type =~ /Hash/\n return [:boolean, array] if type =~ /Boolean/\n\n [String, array]\n end",
"def create_types\n\t\t[]\n\tend",
"def create_types\n\t\t[]\n\tend",
"def describe_types\n [@options[:type]].flatten.join('/')\n end",
"def type\n @type ||= @collection.nil? ? nil : @collection.label.to_sym\n end",
"def is_struct_type?(type)\n return type.kind_of? Array\n end",
"def type\n self[:type]\n end",
"def type\n @type\n end",
"def type\n @attributes[:type]\n end",
"def type\n @attributes[:type]\n end",
"def type\n @attributes[:type]\n end",
"def type\n read_attr :type, :to_sym\n end",
"def types=(types)\n @types = Array(types) if types\n end",
"def schema\n OpenGraph::TYPES.each_pair do |schema, types|\n return schema if types.include?(type)\n end\n nil\n end",
"def type\n self['TYPE'].to_i\n end",
"def type\n end",
"def type\n compile! unless @compiled\n [@type].flatten\n end",
"def itemtype\n types = resource_type || []\n ResourceTypesService.microdata_type(types.first)\n end",
"def normalize_type(data, field)\n type_name = field[:type]\n result = case data\n when Array\n data.map {|item| normalize_item(type_name, item)}\n else\n normalize_item(type_name, data)\n end\n # If field signature allows an array, forcing array structure even for one\n # item.\n if !field[:min_occurs].nil? and\n (field[:max_occurs] == :unbounded ||\n (!field[:max_occurs].nil? and field[:max_occurs] > 1))\n result = arrayize(result)\n end\n return result\n end",
"def collection_type\n components[0].to_sym rescue nil\n end",
"def array(type, key, default=nil)\n meth = :\"_convert_array_#{type}\"\n raise ProgrammerError, \"no typecast_params type registered for #{type.inspect}\" unless respond_to?(meth, true)\n process_arg(meth, key, default, send(:\"_max_input_bytesize_for_#{type}\")) if require_hash!\n end",
"def type ; metadata[:type] ; end",
"def type\n result = [self.ext]\n result += Types.constants.select { |c| Types.const_get(c).include?(self.ext) }.map {|c| c.downcase}\n returning(Querable.new(result)) { |q| q.list = result }\n end",
"def type\n types.first\n end",
"def type\n self[:type]\n end",
"def type\n @json['type']\n end",
"def type\n types.first\n end",
"def isArrayType(obj)\n return((obj.kind_of? FFI::Struct::InlineArray) or (obj.kind_of? ::Array))\n end",
"def type\n @type\n end",
"def _type\n special_attribute('@type'.freeze)\n end",
"def entry_type\n @entry_type ||= ENTRY_TYPES[int_type]\n end",
"def type\n @type.name\n end",
"def schema\n []\n end",
"def types(array)\n yield(array)\nend",
"def type\n attr_val('./@typeCode')\n end",
"def type\n @attributes[:type]\n end",
"def create_types\n\t[]\nend",
"def create_types\n\t[]\nend",
"def type\n @type.to_s\n end",
"def type_id\n\t\ttypes_id = [self.type1, self.type2]\n\t\treturn types_id\n\tend",
"def type\n declaration.type if declaration.respond_to? :type\n end",
"def types\n get_metadata unless @types\n return @types\n end",
"def get_type_index(item)\n (TYPE_INDICES[item && item.attrs['fields']['issuetype']['name']] || 0)\n end",
"def type\n @type ||= IDS_TYPES[type_id]\n end",
"def validate_array(_record, attribute, value)\n errors.add(attribute, 'not an array.') unless value.is_a? Array\n end"
] |
[
"0.6450744",
"0.6379572",
"0.63508356",
"0.625358",
"0.6250903",
"0.6245001",
"0.6201754",
"0.61653715",
"0.61405534",
"0.6108816",
"0.61036503",
"0.60928565",
"0.60765946",
"0.60398626",
"0.6015898",
"0.5971627",
"0.59705085",
"0.5964192",
"0.5859733",
"0.5846391",
"0.58404374",
"0.58145833",
"0.579724",
"0.5786061",
"0.577471",
"0.57714266",
"0.5732894",
"0.5704175",
"0.56995803",
"0.56946397",
"0.56940305",
"0.56925875",
"0.5680905",
"0.5673457",
"0.56672657",
"0.5655661",
"0.56210864",
"0.5618799",
"0.561329",
"0.56106603",
"0.560631",
"0.5593813",
"0.5585671",
"0.5585671",
"0.5585671",
"0.5585671",
"0.5585671",
"0.55806637",
"0.55774415",
"0.5561124",
"0.55526257",
"0.55526185",
"0.55526185",
"0.55526185",
"0.5541936",
"0.5537477",
"0.5530198",
"0.5530198",
"0.55289227",
"0.5524187",
"0.5512624",
"0.5494022",
"0.54845566",
"0.546895",
"0.546895",
"0.546895",
"0.54671806",
"0.5466112",
"0.5462948",
"0.54523134",
"0.5450562",
"0.5448066",
"0.5438388",
"0.54308397",
"0.5428889",
"0.54204273",
"0.5419555",
"0.54143125",
"0.54129213",
"0.5401528",
"0.53977185",
"0.5393194",
"0.5390214",
"0.5379116",
"0.5376868",
"0.5371537",
"0.53652036",
"0.5350549",
"0.53475565",
"0.53442705",
"0.5341616",
"0.53349555",
"0.53349555",
"0.53251916",
"0.5321871",
"0.53180856",
"0.53099924",
"0.5301702",
"0.5301141",
"0.5300036"
] |
0.81926167
|
0
|
The schema :type entry to use for row/composite types.
|
def schema_composite_type(db_type)
:composite
end
|
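A quick way to see where this hook matters: extensions such as Sequel's pg_row override it so registered row types map to their own schema symbols instead of the generic :composite (the first negative below shows exactly that shape, falling back to super). The sketch that follows is a self-contained, hypothetical mapper illustrating the pattern, not Sequel's actual class layout.

# Hypothetical stand-in for the adapter hook: registered row types get a
# dedicated symbol; everything else falls back to the generic :composite.
class CompositeTypeMapper
  def initialize
    @row_schema_types = {}
  end

  # Mimics what a pg_row-style extension records per registered row type.
  def register_row_schema_type(db_type, sym)
    @row_schema_types[db_type] = sym
  end

  def schema_composite_type(db_type)
    @row_schema_types[db_type] || :composite
  end
end

mapper = CompositeTypeMapper.new
mapper.register_row_schema_type('address', :pg_row_address)
mapper.schema_composite_type('address') # => :pg_row_address
mapper.schema_composite_type('other')   # => :composite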
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def schema_composite_type(db_type)\n @row_schema_types[db_type] || super\n end",
"def inheritance_column\n \"type\"\n end",
"def type\n self['type']\n end",
"def type\n self['type']['type']\n end",
"def type ; metadata[:type] ; end",
"def type_schema\n steps.type_schema\n end",
"def type\n self[:type]\n end",
"def entry_type\n @entry_type ||= ENTRY_TYPES[int_type]\n end",
"def type\n @type.name\n end",
"def type\n TYPES[@type_id]\n end",
"def type\n read_attr :type, :to_sym\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n self[:type]\n end",
"def type()\n\t\t@db.hget('sgt-structure:' + @id, 'type')\n\tend",
"def type_name\n self['type_name']\n end",
"def type\n field[:type]\n end",
"def type_column\n arel_table[polymorphic_type.to_sym] if polymorphic_type\n end",
"def type\n @type\n end",
"def schema_data_type\n case type\n when \"N\", \"F\"\n decimal > 0 ? \":float\" : \":integer\"\n when \"I\"\n \":integer\"\n when \"D\"\n \":date\"\n when \"T\"\n \":datetime\"\n when \"L\"\n \":boolean\"\n when \"M\"\n \":text\"\n else\n \":string, :limit => #{length}\"\n end\n end",
"def type\n @type.to_s\n end",
"def type\n @attributes[:type]\n end",
"def type\n @attributes[:type]\n end",
"def type\n @attributes[:type]\n end",
"def type\n data['type'].to_sym\n end",
"def type\n end",
"def schema_column_type(db_type)\n case db_type\n when /\\Ainterval\\z/io\n :interval\n when /\\Acitext\\z/io\n :string\n else\n super\n end\n end",
"def type\n @type ||= begin\n base_type = case\n when is_many?, is_many_ints?\n :multi\n when @associations.values.flatten.length > 1\n :string\n else\n translated_type_from_database\n end\n \n if base_type == :string && @crc\n base_type = :integer\n else\n @crc = false unless base_type == :multi && is_many_strings? && @crc\n end\n \n base_type\n end\n end",
"def add_type(type)\n\n # does nothing, types are differentiated by the 'typ' column\n end",
"def column_type\r\n @type\r\n end",
"def type\n declaration.type if declaration.respond_to? :type\n end",
"def add_type(type)\n\n # does nothing, types are differentiated by the 'typ' column\n end",
"def name\n @type_name\n end",
"def type\n @type\n end",
"def schema_type_class(type)\n @schema_type_classes[type]\n end",
"def type\n raw_data['type']\n end",
"def type\n @ar_column_info.type\n end",
"def type\n @data['type']\n end",
"def type\n @data['type']\n end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_c\n @type_c ||= type[1]\n end",
"def type\n self.attributes[:type]\n end",
"def type\n _type\n end",
"def type_name\n @type_name ||= determine_type_name(descriptor)\n end",
"def type(key)\n column(key).type\n end",
"def type(column); self[column]['type']; end",
"def type\n self['TYPE'].to_i\n end",
"def type\n @props[:type]\n end",
"def type\n @types ||= strip(:type)\n end",
"def type\n self.class.type\n end",
"def type\n self.class.type\n end",
"def type\n self.class.type\n end",
"def java_type\n Jrodb::Type.java_type_for(type)\n end",
"def type\n result_hash['typ']\n end",
"def java_type\n Jrodb::Model.type_map[self]\n end",
"def register_row_type(db_type, opts=OPTS)\n procs = @conversion_procs\n rel_oid = nil\n array_oid = nil\n parser_opts = {}\n\n # Try to handle schema-qualified types.\n type_schema, type_name = schema_and_table(db_type)\n schema_type_string = type_name.to_s\n\n # Get basic oid information for the composite type.\n ds = from(:pg_type).\n select{[pg_type[:oid], :typrelid, :typarray]}.\n where([[:typtype, 'c'], [:typname, type_name.to_s]])\n if type_schema\n ds = ds.join(:pg_namespace, [[:oid, :typnamespace], [:nspname, type_schema.to_s]])\n schema_type_symbol = :\"pg_row_#{type_schema}__#{type_name}\" \n else\n schema_type_symbol = :\"pg_row_#{type_name}\"\n end\n unless row = ds.first\n raise Error, \"row type #{db_type.inspect} not found in database\"\n end\n # Manually cast to integer using to_i, because adapter may not cast oid type\n # correctly (e.g. swift)\n parser_opts[:oid], rel_oid, array_oid = row.values_at(:oid, :typrelid, :typarray).map(&:to_i)\n\n # Get column names and oids for each of the members of the composite type.\n res = from(:pg_attribute).\n join(:pg_type, :oid=>:atttypid).\n where(:attrelid=>rel_oid).\n where{attnum > 0}.\n exclude(:attisdropped).\n order(:attnum).\n select_map{[:attname, Sequel.case({0=>:atttypid}, pg_type[:typbasetype], pg_type[:typbasetype]).as(:atttypid)]}\n if res.empty?\n raise Error, \"no columns for row type #{db_type.inspect} in database\"\n end\n parser_opts[:columns] = res.map{|r| r[0].to_sym}\n parser_opts[:column_oids] = res.map{|r| r[1].to_i}\n\n # Using the conversion_procs, lookup converters for each member of the composite type\n parser_opts[:column_converters] = parser_opts[:column_oids].map do |oid|\n procs[oid]\n end\n\n # Setup the converter and typecaster\n parser_opts[:converter] = opts.fetch(:converter){HashRow.subclass(db_type, parser_opts[:columns])}\n parser_opts[:typecaster] = opts.fetch(:typecaster, parser_opts[:converter])\n\n parser = Parser.new(parser_opts)\n add_conversion_proc(parser.oid, parser)\n\n if respond_to?(:register_array_type) && array_oid && array_oid > 0\n array_type_name = if type_schema\n \"#{type_schema}.#{type_name}\"\n else\n type_name\n end\n register_array_type(array_type_name, :oid=>array_oid, :converter=>parser, :scalar_typecast=>schema_type_symbol)\n end\n\n @row_types[literal(db_type)] = opts.merge(:parser=>parser, :type=>db_type)\n @row_schema_types[schema_type_string] = schema_type_symbol \n @schema_type_classes[schema_type_symbol] = ROW_TYPE_CLASSES\n @row_type_method_module.class_eval do\n meth = :\"typecast_value_#{schema_type_symbol}\"\n define_method(meth) do |v|\n row_type(db_type, v)\n end\n private meth\n alias_method(meth, meth)\n end\n\n nil\n end",
"def type\n Type.new(type_param).yard_type_string\n end",
"def type\n @attributes[:type]\n end",
"def type\n @type ||= IDS_TYPES[type_id]\n end",
"def type\n TYPES[self[:type_flags] & 0x3]\n end",
"def type\n return @type\n end",
"def type\n return @type\n end",
"def type\n return @type\n end",
"def type\n return @type\n end",
"def type\n return @type\n end",
"def type\n return @type\n end",
"def type\n return @type\n end",
"def type\n return @type\n end",
"def type\n self.class.type\n end",
"def type\n self.class.type\n end",
"def type\n self.class.type\n end",
"def type\n types.first\n end",
"def _type\n special_attribute('@type'.freeze)\n end",
"def column_type(type)\n case type\n when :integer then Integer\n when :float, :decimal then Float\n when :string, :text, :uuid then String\n when :datetime, :timestamp, :time then DateTime\n when :date then Date\n when :boolean then Virtus::Attribute::Boolean # Boolean is not a standard Ruby class\n else\n raise \"Could not match column type '#{type}' for #{model_name}\"\n end\n end",
"def type\n types.first\n end",
"def type\n self.class::TYPE\n end",
"def types\n @types ||= inject({}) { |memo, schema| memo.merge(schema.types) }\n end",
"def type\n attr_val('./@typeCode')\n end",
"def scaffold_table_column_type(c)\n column = self.properties[c]\n if column then\n if column.type == DataMapper::Property::Text\n :text\n else\n column.class.to_s.split(\"::\").last.downcase.intern\n end\n else\n nil\n end\n end",
"def schema_type_class(column)\n if (sch = db_schema[column]) && (type = sch[:type])\n db.schema_type_class(type)\n end\n end",
"def schema_type_class(column)\n if (sch = db_schema[column]) && (type = sch[:type])\n db.schema_type_class(type)\n end\n end",
"def content_type\n @record.send( :\"#{@column}_content_type\" ) rescue ''\n end",
"def csharp_type_for_schema_definition( schema )\n types = {\n ::Respect::ArraySchema => \"List<string>\",\n ::Respect::BooleanSchema => \"bool#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::FloatSchema => \"float#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::HashSchema => \"Dictionary<string, object>\",\n ::Respect::IntegerSchema => \"long#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::StringSchema => \"string\",\n ::Respect::AnySchema => \"object\",\n ::Respect::GeoPointSchema => \"global::#{ base_api_namespace() }.Fields.GeoPoint\",\n ::Respect::AttachmentSchema => \"global::#{ base_api_namespace() }.Fields.Attachment\"\n }\n\n types = if options[:use_noda_time] == \"true\" || options[:use_noda_time] == \"1\"\n types.merge({\n ::Respect::DatetimeSchema => \"NodaTime.Instant#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::InstantSchema => \"NodaTime.Instant#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::DateSchema => \"NodaTime.LocalDate#{ schema.allow_nil? ? \"?\" : \"\" }\"\n })\n else\n types.merge({\n ::Respect::DatetimeSchema => \"DateTime#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::InstantSchema => \"DateTime#{ schema.allow_nil? ? \"?\" : \"\" }\",\n ::Respect::DateSchema => \"DateTime#{ schema.allow_nil? ? \"?\" : \"\" }\"\n })\n end\n\n type = types[ schema.class ]\n\n custom = nil\n collection = false\n\n schema_item = schema.instance_variable_get(:@item)\n if type == \"List<string>\" && schema_item && schema_item.is_a?(::Respect::AnySchema)\n type = \"List<object>\"\n end\n\n if schema.is_a?(Respect::HasOneSchema)\n type = sanitize_model_namespace( schema.of ) + \".\" + sanitize_model_class( schema.of )\n custom = type\n collection = false\n end\n\n if schema.is_a?(Respect::EnumSchema)\n type = sanitize_model_namespace( schema.values_module ) + \".\" + sanitize_model_class( schema.values_module )\n custom = type\n collection = false\n\n emit_enum!( schema.values_module )\n end\n\n if schema.is_a?(Respect::HasManySchema)\n type = \"List<\" + sanitize_model_namespace( schema.of ) + \".\" + sanitize_model_class( schema.of ) + \">\"\n custom = type\n collection = true\n end\n\n return type, custom, collection\n end",
"def schema_column_type(db_type)\n case db_type\n when /\\A(int(eger)?|bigint|smallint)\\z/\n :integer\n when /\\A(character( varying)?|varchar|text)\\z/\n :string\n when /\\Adate\\z/\n :date\n when /\\A(datetime|timestamp( with(out)? time zone)?)\\z/\n :datetime\n when /\\Atime( with(out)? time zone)?\\z/\n :time\n when /\\A(boolean|tinyint)\\z/\n :boolean\n when /\\A(real|float|double( precision)?)\\z/\n :float\n when /\\A(numeric|decimal|money)\\z/\n :decimal\n end\n end"
] |
[
"0.7407996",
"0.6968629",
"0.6907057",
"0.67898774",
"0.6775752",
"0.6767818",
"0.6758813",
"0.6750373",
"0.6736719",
"0.67302775",
"0.6687589",
"0.6660528",
"0.66603494",
"0.66603494",
"0.66603494",
"0.6624262",
"0.6578886",
"0.6575831",
"0.65535015",
"0.6541125",
"0.6524185",
"0.6521637",
"0.65143126",
"0.65030086",
"0.65030086",
"0.65030086",
"0.6494197",
"0.64857394",
"0.6471474",
"0.64544195",
"0.6449058",
"0.64342034",
"0.6431676",
"0.6428188",
"0.64221495",
"0.64138156",
"0.6410253",
"0.6398813",
"0.6385996",
"0.63853776",
"0.6377612",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6372644",
"0.6371852",
"0.6361708",
"0.6351204",
"0.6348069",
"0.63439065",
"0.63400155",
"0.63391215",
"0.6323981",
"0.6316354",
"0.63148874",
"0.63148874",
"0.63148874",
"0.6305942",
"0.630244",
"0.6297273",
"0.6297189",
"0.62877166",
"0.62856126",
"0.6281953",
"0.62740934",
"0.6269339",
"0.6269339",
"0.6269339",
"0.6269339",
"0.6269339",
"0.6269339",
"0.6269339",
"0.6251514",
"0.62458396",
"0.62458396",
"0.62458396",
"0.6242482",
"0.6238626",
"0.62243086",
"0.6222188",
"0.6216952",
"0.6203399",
"0.6201619",
"0.6199876",
"0.618706",
"0.618706",
"0.61869234",
"0.61840326",
"0.6168226"
] |
0.7014892
|
1
|
The schema :type entry to use for enum types.
|
def schema_enum_type(db_type)
:enum
end
|
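As with the composite hook, this method only tags the column in Database#schema output; downstream code decides what :enum means. A minimal example of consuming that tag, where the row shape is an assumption for the demo rather than a live query result:

# Rows shaped like Database#schema output ([column_name, info_hash]);
# the exact keys here are illustrative.
schema_rows = [
  [:id,     { db_type: 'integer',      type: :integer }],
  [:status, { db_type: 'order_status', type: :enum    }]
]

enum_columns = schema_rows.select { |_name, info| info[:type] == :enum }
enum_columns.map(&:first) # => [:status]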
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def schema_type(column)\n column.type == :enum_set ? :enum : super\n end",
"def type\n self.class.type_enum_map.key(@cred_struct[:type])\n end",
"def enum_types\n @enum_types ||= schemas.map do |namespace, schema|\n schema.enum_types.map do |name, enum_type|\n [ \"#{namespace}.#{name}\", enum_type ]\n end.to_h\n end.reduce({}, :merge)\n end",
"def enum_types\n @enum_types ||= schemas.map do |namespace, schema|\n schema.enum_types.map do |name, enum_type|\n [ \"#{namespace}.#{name}\", enum_type ]\n end.to_h\n end.reduce({}, :merge)\n end",
"def type\n read_attr :type, :to_sym\n end",
"def type\n @type.name\n end",
"def type\n TYPES[self[:type_flags] & 0x3]\n end",
"def type\n attr_val('./@typeCode')\n end",
"def type\n TYPES[@type_id]\n end",
"def type\n self['type']['type']\n end",
"def human_type\n Core::TYPES_DESC[type]\n end",
"def name\n @type_name\n end",
"def entry_type\n @entry_type ||= ENTRY_TYPES[int_type]\n end",
"def type\n self['type']\n end",
"def type\n @type.to_s\n end",
"def visit_enum(binding_type)\n if not input.is_a? Enum\n # TODO: Verify if the value is instance of actual binding class\n report_error('vapi.bindings.typeconverter.unexpected.ruby.type',\n Enum, input.class)\n end\n self.result = binding_type.definition.new_value(input)\n end",
"def type\n @attributes[:type]\n end",
"def type\n @attributes[:type]\n end",
"def type\n @attributes[:type]\n end",
"def type\n self[:type]\n end",
"def type\n @type ||= IDS_TYPES[type_id]\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', ['active', 'notActive', 'unknown'])\n unless validator.valid?(type)\n fail ArgumentError, %Q'invalid value for \"type\", must be one of #{validator.allowable_values}.'\n end\n @type = type\n end",
"def translate_type(uri, schema)\n case\n when values = schema[\"enum\"]\n unless values.is_a?(Array)\n raise ValidationError.new(message: \"Invalid JSON Schema: enum: #{values}\")\n end\n\n types = values.map { |literal| literal_type(literal) }\n Types::Union.new(types: types, location: nil)\n when const = schema[\"const\"]\n literal_type(const)\n when schema[\"type\"] == \"array\" || schema.key?(\"items\")\n case\n when schema[\"items\"].is_a?(Array)\n # tuple\n types = schema[\"items\"].map { |definition| translate_type(uri, definition) }\n Types::Tuple.new(types: types, location: nil)\n when schema[\"items\"].is_a?(Hash)\n # array\n elem_type = translate_type(uri, schema[\"items\"])\n BuiltinNames::Array.instance_type(elem_type)\n else\n BuiltinNames::Array.instance_type(untyped_type)\n end\n when schema[\"type\"] == \"object\" || schema.key?(\"properties\") || schema.key?(\"additionalProperties\")\n case\n when properties = schema[\"properties\"]\n fields = properties.each.with_object({}) do |pair, hash|\n key, value = pair\n\n unless stringify_keys\n key = key.to_sym\n end\n\n hash[key] = translate_type(uri, value)\n end\n\n Types::Record.new(fields: fields, location: nil)\n when prop = schema[\"additionalProperties\"]\n BuiltinNames::Hash.instance_type(\n BuiltinNames::String.instance_type,\n translate_type(uri, prop)\n )\n else\n BuiltinNames::Hash.instance_type(\n BuiltinNames::String.instance_type,\n untyped_type\n )\n end\n when one_of = schema[\"oneOf\"]\n Types::Union.new(\n types: one_of.map { |defn| translate_type(uri, defn) },\n location: nil\n )\n when all_of = schema[\"allOf\"]\n Types::Intersection.new(\n types: all_of.map { |defn| translate_type(uri, defn) },\n location: nil\n )\n when ty = schema[\"type\"]\n case ty\n when \"integer\"\n BuiltinNames::Integer.instance_type\n when \"number\"\n BuiltinNames::Numeric.instance_type\n when \"string\"\n BuiltinNames::String.instance_type\n when \"boolean\"\n Types::Bases::Bool.new(location: nil)\n when \"null\"\n Types::Bases::Nil.new(location: nil)\n else\n raise ValidationError.new(message: \"Invalid JSON Schema: type: #{ty}\")\n end\n when ref = schema[\"$ref\"]\n ref_uri =\n begin\n # Parse URI of `$ref`\n URI.parse(schema[\"$ref\"])\n rescue URI::InvalidURIError => _\n raise ValidationError.new(message: \"Invalid URI encountered in: $ref = #{ref}\")\n end\n\n resolved_uri = resolve_uri(uri, ref_uri) # Resolve `$ref` URI with respect to current URI\n # Generate AST::Declarations::Alias\n generate_rbs(resolved_uri, read_from_uri(resolved_uri))\n\n # Assign alias type with appropriate namespace\n Types::Alias.new(\n name: generate_type_name_for_uri(resolved_uri, namespace: resolved_uri.path != uri.path),\n location: nil\n )\n else\n raise ValidationError.new(message: \"Invalid JSON Schema: #{schema.keys.join(\", \")}\")\n end\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', [\"random\", \"hash\", \"client\"])\n unless validator.valid?(type)\n fail ArgumentError, \"invalid value for \\\"type\\\", must be one of #{validator.allowable_values}.\"\n end\n @type = type\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', [\"Paragraph\", \"Character\", \"Table\", \"List\"])\n if type.to_i == 0\n unless validator.valid?(type)\n raise ArgumentError, \"invalid value for 'type', must be one of #{validator.allowable_values}.\"\n end\n @type = type\n else\n @type = validator.allowable_values[type.to_i]\n end\n end",
"def type\n field[:type]\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', ['Once', 'Hourly', 'Daily', 'Weekly', 'Monthly', 'Yearly'])\n unless validator.valid?(type)\n fail ArgumentError, 'invalid value for \"type\", must be one of #{validator.allowable_values}.'\n end\n @type = type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def enum_class\n self\n end",
"def type\n data['type'].to_sym\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', [\"Agreement\", \"CreditMemo\", \"DownPayment\", \"Miscellaneous\", \"Progress\", \"Standard\"])\n unless validator.valid?(type)\n fail ArgumentError, \"invalid value for 'type', must be one of #{validator.allowable_values}.\"\n end\n @type = type\n end",
"def type_name\n self['type_name']\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', ['String', 'Number', 'Date'])\n unless validator.valid?(type)\n fail ArgumentError, 'invalid value for \"type\", must be one of #{validator.allowable_values}.'\n end\n @type = type\n end",
"def visit_enum(binding_type)\n #VAPI.log.debug \"VERBOSE: visit_enum called - #{self.input}\"\n enum_string = self.input.value\n if binding_type.binding_class\n self.result = binding_type.binding_class.from_string(enum_string)\n else\n #TODO: port this to ruby\n #self.result = Enum.new(enum_string)\n self.result = enum_string\n end\n end",
"def type=(value)\n @type = value.to_sym\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', [\"Weekly\", \"BiWeekly\", \"SemiMonthly\", \"Monthly\"])\n unless validator.valid?(type)\n fail ArgumentError, \"invalid value for 'type', must be one of #{validator.allowable_values}.\"\n end\n @type = type\n end",
"def map_type type\n type\n end",
"def map_type type\n type\n end",
"def type=(val)\n @type = val.to_sym\n end",
"def human_type\n self[:type].to_human\n end",
"def type_name\n @type_name ||= determine_type_name(descriptor)\n end",
"def type\n @attributes[:type]\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', [\"fc-initiator\", \"fc-nvme-initiator\", \"fc-nvme-target\", \"fc-target\"])\n unless validator.valid?(type)\n fail ArgumentError, \"invalid value for \\\"type\\\", must be one of #{validator.allowable_values}.\"\n end\n @type = type\n end",
"def entry_type= e\n self.update_attribute(:int_type, ENTRY_TYPES.index(e)) if ENTRY_TYPES.include?(e)\n end",
"def type\n self[:type]\n end",
"def _type\n special_attribute('@type'.freeze)\n end",
"def type\n @type\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', [\"ITEM\", \"CATEGORY\", \"ITEM_VARIATION\", \"TAX\", \"DISCOUNT\", \"MODIFIER_LIST\", \"MODIFIER\"])\n unless validator.valid?(type)\n fail ArgumentError, \"invalid value for 'type', must be one of #{validator.allowable_values}.\"\n end\n @type = type\n end",
"def type\n @gapi[\"type\"]\n end",
"def type\n self.attributes[:type]\n end",
"def type\n response[\"type\"]\n end",
"def type\n response[\"type\"]\n end",
"def type_name\n @values['typeName']\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def fbe_type\n TYPE\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', ['string', 'number', 'boolean', 'date', 'address', 'country', 'email', 'url', 'image', 'signature', 'barcode', 'combined'])\n unless validator.valid?(type)\n fail ArgumentError, 'invalid value for \"type\", must be one of #{validator.allowable_values}.'\n end\n @type = type\n end",
"def type\n @types ||= strip(:type)\n end",
"def _enum_class_template\n File.read(SCHEMA_ENUM_PATH).strip\n end",
"def name\n @enum_name\n end",
"def type_code\n type.try(:code)\n end",
"def type\n @type\n end",
"def cached_enum type_name\n cached_obj @prior_enums, type_name\n end",
"def query_type_enum\n return {\n 0=>'empty',\n 1=>'iso',\n 2=>'multi'\n }\n end",
"def type\n @node[\"type\"]\n end",
"def type\n self['TYPE'].to_i\n end",
"def type_schema\n steps.type_schema\n end",
"def type_name\n @type_name ||= StringHelpers.underscore(StringHelpers.demodulize(@value.class.name)).to_sym\n end",
"def type\n _type.split(\"::\").last.downcase\n end",
"def type\n end",
"def type=(type)\n validator = EnumAttributeValidator.new('String', ['digital', 'physical'])\n unless validator.valid?(type)\n fail ArgumentError, 'invalid value for \"type\", must be one of #{validator.allowable_values}.'\n end\n @type = type\n end",
"def type_str\n Types.type_str(type)\n end",
"def value_type\n @type.value_type\n end",
"def type\n types.first\n end",
"def type=(value)\n @type = value\n end",
"def type=(value)\n @type = value\n end",
"def type=(value)\n @type = value\n end",
"def type=(value)\n @type = value\n end",
"def type=(value)\n @type = value\n end",
"def type=(value)\n @type = value\n end",
"def type=(value)\n @type = value\n end",
"def type\n types.first\n end",
"def type\n query_root_node(\"rdf:type/@rdf:resource\", @@NAMESPACES).to_s\n end",
"def type\n t = get(TYPE)\n\n raise \"binding codes not contain a type\" if t.nil?\n\n t\n end"
] |
[
"0.70020103",
"0.67391855",
"0.6676959",
"0.6676959",
"0.65402603",
"0.65355814",
"0.6528048",
"0.6519687",
"0.6457913",
"0.63885754",
"0.6387319",
"0.6386674",
"0.63202965",
"0.6282332",
"0.6236421",
"0.6234482",
"0.62296706",
"0.62296706",
"0.62296706",
"0.61989784",
"0.6192092",
"0.6186986",
"0.6176338",
"0.617194",
"0.6168546",
"0.61685",
"0.6157621",
"0.6140223",
"0.6140223",
"0.6140223",
"0.61397886",
"0.6136007",
"0.61269236",
"0.6124048",
"0.6113529",
"0.6079147",
"0.60783076",
"0.6069421",
"0.6068539",
"0.60508525",
"0.60508525",
"0.60423696",
"0.6034892",
"0.6016727",
"0.6015646",
"0.6012701",
"0.59945637",
"0.5979118",
"0.5973764",
"0.5960842",
"0.59585035",
"0.59555197",
"0.59537745",
"0.5952161",
"0.5952161",
"0.5948685",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5945563",
"0.5936124",
"0.59340835",
"0.5923586",
"0.59235823",
"0.5917275",
"0.59152",
"0.5913118",
"0.59111166",
"0.5908501",
"0.5905209",
"0.5902257",
"0.5889708",
"0.58770907",
"0.5868106",
"0.5862146",
"0.5860807",
"0.5860436",
"0.5857399",
"0.585293",
"0.585293",
"0.585293",
"0.585293",
"0.585293",
"0.585293",
"0.585293",
"0.58507514",
"0.5842668",
"0.58371145"
] |
0.8054382
|
0
|
The schema :type entry to use for range types.
|
def schema_range_type(db_type)
:range
end
|
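The :range tag likewise only labels the column; turning a range literal into a Ruby Range is the pg_range extension's job. As a toy illustration of what such a value carries, here is a parser limited to the canonical [lo,hi) form PostgreSQL emits for int4range; anything fancier is out of scope for the sketch.

# Toy parser: only the canonical inclusive-start/exclusive-end integer
# form, e.g. "[1,10)". Real range literals (unbounded ends, other base
# types) need the pg_range extension's parser.
def parse_int4range(literal)
  m = literal.match(/\A\[(\d+),(\d+)\)\z/) or
    raise ArgumentError, "expected canonical [lo,hi) form: #{literal.inspect}"
  Integer(m[1])...Integer(m[2]) # exclusive-end Ruby Range
end

parse_int4range('[1,10)') # => 1...10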
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def schema_multirange_type(db_type)\n :multirange\n end",
"def index\n @range_types = RangeType.all\n end",
"def schema_column_type(db_type)\n case db_type\n when /\\Ainterval\\z/io\n :interval\n when /\\Acitext\\z/io\n :string\n else\n super\n end\n end",
"def set_range_type\n @range_type = RangeType.find(params[:id])\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n field[:type]\n end",
"def map_type type\n type\n end",
"def map_type type\n type\n end",
"def type\n self['type']\n end",
"def type\n self['type']['type']\n end",
"def type\n end",
"def range_type_params\n params.require(:range_type).permit(:name, :description)\n end",
"def type\n @type.name\n end",
"def type\n self[:type]\n end",
"def data_type\n ns_definition(:query_spec_datatypes)\n end",
"def type\n @type\n end",
"def type\n @type.to_s\n end",
"def type\n @type\n end",
"def type(type); end",
"def range(range)\n assert_range range\n schema do |s|\n s.type range.begin.is_a?(Integer) ? 'integer' : 'number'\n s.minimum range.begin\n s.maximum range.end, exclusive: range.exclude_end? unless range.end.nil?\n end\n end",
"def schema_enum_type(db_type)\n :enum\n end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type\n _type\n end",
"def schema_data_type\n case type\n when \"N\", \"F\"\n decimal > 0 ? \":float\" : \":integer\"\n when \"I\"\n \":integer\"\n when \"D\"\n \":date\"\n when \"T\"\n \":datetime\"\n when \"L\"\n \":boolean\"\n when \"M\"\n \":text\"\n else\n \":string, :limit => #{length}\"\n end\n end",
"def tt_range\n ARange.new(ttstart_at, ttend_at)\n end",
"def create_range_data( rng, minrng=0, type=0, direction=2 )\n return Handlers::Range.createRange( rng, minrng, type, direction )\n end",
"def cell_type(row, col)\n @types[row * @col_count + col]\n end",
"def type\n @types ||= strip(:type)\n end",
"def type\n end",
"def type\n Type.new(type_param).yard_type_string\n end",
"def type() end",
"def entry_type\n @entry_type ||= ENTRY_TYPES[int_type]\n end",
"def range_marker_type_label\n raise NotImplementedError.new(\"Method must be overridden in a subclass of this abstract base class.\")\n end",
"def type_schema\n steps.type_schema\n end"
] |
[
"0.71249956",
"0.63931143",
"0.63107294",
"0.62629265",
"0.60207546",
"0.60207546",
"0.60207546",
"0.60200715",
"0.601815",
"0.59934384",
"0.59934384",
"0.5989703",
"0.5982694",
"0.5975312",
"0.5952823",
"0.59515506",
"0.59414446",
"0.5932852",
"0.59324676",
"0.5932421",
"0.59178543",
"0.59166586",
"0.5906963",
"0.5892234",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58838886",
"0.58795947",
"0.58790797",
"0.58782595",
"0.58618486",
"0.58536154",
"0.58502185",
"0.58419603",
"0.58386165",
"0.5825027",
"0.5817282",
"0.57949185",
"0.5779895"
] |
0.8037973
|
0
|
The schema :type entry to use for multirange types.
|
def schema_multirange_type(db_type)
:multirange
end
|
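Multiranges (PostgreSQL 14+) are just sets of ranges, so a companion toy to the int4range sketch above splits the outer braces and parses each member; the canonical-formatting assumption carries over.

# Demo-only: assumes PostgreSQL's canonical multirange formatting,
# e.g. "{[1,3),[5,9)}". Builds on parse_int4range from the earlier sketch.
def parse_int4multirange(literal)
  literal.delete_prefix('{').delete_suffix('}')
         .scan(/\[\d+,\d+\)/)
         .map { |member| parse_int4range(member) }
end

parse_int4multirange('{[1,3),[5,9)}') # => [1...3, 5...9]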
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def schema_range_type(db_type)\n :range\n end",
"def schema_column_type(db_type)\n case db_type\n when /\\Ainterval\\z/io\n :interval\n when /\\Acitext\\z/io\n :string\n else\n super\n end\n end",
"def type\n self['type']['type']\n end",
"def types; end",
"def types; end",
"def types; end",
"def types; end",
"def types; end",
"def datatypes\n value.document.fetch(\"#{value.key}_type\", [])\n end",
"def type\n end",
"def type\n self['type']\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n @type\n end",
"def type\n self[:type]\n end",
"def type\n @type\n end",
"def type\n @types ||= strip(:type)\n end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type; end",
"def type\n @type.name\n end",
"def map_type type\n type\n end",
"def map_type type\n type\n end",
"def type\n @type\n end",
"def type\n field[:type]\n end",
"def type(type); end",
"def type\n end",
"def type\n self[:type]\n end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end",
"def type_name; end"
] |
[
"0.6891105",
"0.6131667",
"0.6009287",
"0.5958005",
"0.5958005",
"0.5958005",
"0.5958005",
"0.5958005",
"0.5939438",
"0.5932011",
"0.59073764",
"0.58985317",
"0.58985317",
"0.58985317",
"0.5897736",
"0.58881843",
"0.583995",
"0.5836035",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.58355254",
"0.5829381",
"0.5809851",
"0.5809851",
"0.5791446",
"0.5787638",
"0.57862246",
"0.57772917",
"0.5775473",
"0.5767447",
"0.5767447",
"0.5767447",
"0.5767447",
"0.5767447",
"0.5767447",
"0.5767447",
"0.5767447",
"0.5767447",
"0.5767447"
] |
0.808547
|
0
|
The dataset used for parsing table schemas, using the pg_ system catalogs.
|
def schema_parse_table(table_name, opts)
m = output_identifier_meth(opts[:dataset])
_schema_ds.where_all(Sequel[:pg_class][:oid]=>regclass_oid(table_name, opts)).map do |row|
row[:default] = nil if blank_object?(row[:default])
if row[:base_oid]
row[:domain_oid] = row[:oid]
row[:oid] = row.delete(:base_oid)
row[:db_domain_type] = row[:db_type]
row[:db_type] = row.delete(:db_base_type)
else
row.delete(:base_oid)
row.delete(:db_base_type)
end
db_type = row[:db_type]
row[:type] = if row.delete(:is_array)
schema_array_type(db_type)
else
send(TYPTYPE_METHOD_MAP[row.delete(:typtype)], db_type)
end
identity = row.delete(:attidentity)
if row[:primary_key]
row[:auto_increment] = !!(row[:default] =~ /\A(?:nextval)/i) || identity == 'a' || identity == 'd'
end
# :nocov:
if server_version >= 90600
# :nocov:
case row[:oid]
when 1082
row[:min_value] = MIN_DATE
row[:max_value] = MAX_DATE
when 1184, 1114
if Sequel.datetime_class == Time
row[:min_value] = MIN_TIMESTAMP
row[:max_value] = MAX_TIMESTAMP
end
end
end
[m.call(row.delete(:name)), row]
end
end
|
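This is where the four hooks above converge: each row carries pg_type.typtype (a one-character code), and the send(TYPTYPE_METHOD_MAP[...]) line dispatches on it. The map below reconstructs that dispatch table from the codes PostgreSQL documents; treat its exact contents as an approximation of the adapter's internal constant rather than a verbatim copy.

# Approximate reconstruction of the dispatch table used above; 'b' (base)
# and 'd' (domain, already resolved to its base type by the query) fall
# through to the ordinary column-type lookup.
TYPTYPE_METHOD_MAP = {
  'b' => :schema_column_type,
  'c' => :schema_composite_type,
  'd' => :schema_column_type,
  'e' => :schema_enum_type,
  'r' => :schema_range_type,
  'm' => :schema_multirange_type # PostgreSQL 14+
}.freeze

TYPTYPE_METHOD_MAP.fetch('e') # => :schema_enum_type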
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def _schema_ds\n @_schema_ds ||= begin\n ds = metadata_dataset.select{[\n pg_attribute[:attname].as(:name),\n SQL::Cast.new(pg_attribute[:atttypid], :integer).as(:oid),\n SQL::Cast.new(basetype[:oid], :integer).as(:base_oid),\n SQL::Function.new(:format_type, basetype[:oid], pg_type[:typtypmod]).as(:db_base_type),\n SQL::Function.new(:format_type, pg_type[:oid], pg_attribute[:atttypmod]).as(:db_type),\n SQL::Function.new(:pg_get_expr, pg_attrdef[:adbin], pg_class[:oid]).as(:default),\n SQL::BooleanExpression.new(:NOT, pg_attribute[:attnotnull]).as(:allow_null),\n SQL::Function.new(:COALESCE, SQL::BooleanExpression.from_value_pairs(pg_attribute[:attnum] => SQL::Function.new(:ANY, pg_index[:indkey])), false).as(:primary_key),\n Sequel[:pg_type][:typtype],\n (~Sequel[Sequel[:elementtype][:oid]=>nil]).as(:is_array),\n ]}.\n from(:pg_class).\n join(:pg_attribute, :attrelid=>:oid).\n join(:pg_type, :oid=>:atttypid).\n left_outer_join(Sequel[:pg_type].as(:basetype), :oid=>:typbasetype).\n left_outer_join(Sequel[:pg_type].as(:elementtype), :typarray=>Sequel[:pg_type][:oid]).\n left_outer_join(:pg_attrdef, :adrelid=>Sequel[:pg_class][:oid], :adnum=>Sequel[:pg_attribute][:attnum]).\n left_outer_join(:pg_index, :indrelid=>Sequel[:pg_class][:oid], :indisprimary=>true).\n where{{pg_attribute[:attisdropped]=>false}}.\n where{pg_attribute[:attnum] > 0}.\n order{pg_attribute[:attnum]}\n\n # :nocov:\n if server_version > 100000\n # :nocov:\n ds = ds.select_append{pg_attribute[:attidentity]}\n\n # :nocov:\n if server_version > 120000\n # :nocov:\n ds = ds.select_append{Sequel.~(pg_attribute[:attgenerated]=>'').as(:generated)}\n end\n end\n\n ds\n end\n end",
"def schema_ds_dataset\n schema_utility_dataset\n end",
"def current_schemata\n extension :pg_array\n metadata_dataset.select(Sequel::function(:current_schemas, false).\n cast('varchar[]')).single_value.map(&:to_sym)\n end",
"def schema_ds_from(table_name, opts)\n [:information_schema__tables___t]\n end",
"def dataset\n database[table_name]\n end",
"def tables(opts={})\n m = output_identifier_meth\n metadata_dataset.from(:information_schema__tables___t).\n select(:table_name).\n filter(:table_type=>'BASE TABLE', :table_schema=>(opts[:schema]||default_schema||'dbo').to_s).\n map{|x| m.call(x[:table_name])}\n end",
"def schema\n execute(<<-eosql).collect { |row| row[0] }.collect { |t| table_schema(t) }\nSELECT rdb$relation_name FROM rdb$relations WHERE rdb$system_flag != 1\neosql\n end",
"def schema_parse_table(table, opts={})\n ds = dataset\n ds.identifier_output_method = :downcase\n schema_and_table = \"#{\"#{quote_identifier(opts[:schema])}.\" if opts[:schema]}#{quote_identifier(table)}\"\n table_schema = []\n metadata = transaction(opts){|conn| conn.describe_table(schema_and_table)}\n metadata.columns.each do |column|\n table_schema << [\n column.name.downcase.to_sym,\n {\n :type => column.data_type,\n :db_type => column.type_string.split(' ')[0],\n :type_string => column.type_string,\n :charset_form => column.charset_form,\n :char_used => column.char_used?,\n :char_size => column.char_size,\n :data_size => column.data_size,\n :precision => column.precision,\n :scale => column.scale,\n :fsprecision => column.fsprecision,\n :lfprecision => column.lfprecision,\n :allow_null => column.nullable?\n }\n ]\n end\n table_schema.instance_variable_set :@features, {\n :owner => :\"#{metadata.obj_schema.downcase}\",\n :clustered => (metadata.clustered? rescue nil),\n :temporary => (metadata.is_temporary? rescue nil),\n :partitioning => (metadata.partitioned? rescue nil),\n :typed => (metadata.is_typed? rescue nil),\n :index_only => (metadata.index_only? rescue nil)\n }\n table_schema\n end",
"def dataset\n @dataset ||= begin\n create_table!\n database[:data_stores]\n end\n end",
"def schema_ds(table_name, opts)\n schema_ds_dataset.from(*schema_ds_from(table_name, opts)) \\\n .select(*schema_ds_select(table_name, opts)) \\\n .join(*schema_ds_join(table_name, opts)) \\\n .filter(*schema_ds_filter(table_name, opts))\n end",
"def schema_utility_dataset\n @schema_utility_dataset ||= dataset\n end",
"def schema_utility_dataset\n @schema_utility_dataset ||= dataset\n end",
"def schema_tables\n @schema_tables ||= %i[\n projects\n amazon_clouds\n data_centers\n jmeter_plans\n load_agents\n target_hosts\n clusters\n execution_cycles\n client_stats\n page_stats\n target_stats\n jtl_files\n ]\n end",
"def schema\n schema = Schema.new\n\n for name in db.tables\n next if IGNORED_TABLES.include? name\n table = schema.add_table( name )\n import_table( table )\n end\n\n schema\n end",
"def schema_parse_table(table_name, opts)\n m = output_identifier_meth(opts[:dataset])\n\n table = if opts[:schema]\n Sequel.qualify(opts[:schema], table_name)\n else\n Sequel.identifier(table_name)\n end\n\n describe(table, opts).map do |row|\n row[:db_type] = row[:type]\n row[:type] = schema_column_type(row[:db_type])\n row[:default] = nil\n row[:primary_key] = false\n [m.call(row.delete(:name)), row]\n end\n end",
"def schema_and_table(table_name)\n schema_utility_dataset.schema_and_table(table_name)\n end",
"def schema_parse_tables(opts)\n schemas = {}\n schema_ds(nil, opts).each do |row|\n (schemas[row.delete(:table_name).to_sym] ||= []) << row\n end\n schemas.each do |table, rows|\n schemas[table] = schema_parse_rows(rows)\n end\n schemas\n end",
"def schema_utility_dataset\n @default_dataset\n end",
"def table_name\n if qualified? && meta[:qualified].is_a?(Symbol)\n meta[:qualified]\n else\n source.dataset\n end\n end",
"def generate_dataset\n return nil if query.nil?\n # execute the query\n data = Array ActiveRecord::Base.connection.execute(query)\n return [] if data.empty?\n # Convert the query into an easy to read format\n @dataset = [data.first.keys]+data.map{|dd|dd.values}\n return @dataset\n end",
"def schema(table, opts={})\n hero = execute(\"DESCRIBE #{table}\")\n hero.map do |h|\n [\n h[:col_name].strip.to_sym,\n { :db_type => h[:data_type].strip , :comment => h[:comment].strip }\n ]\n end\n end",
"def load_tables\n tables = @table_ast.keys\n tables.each {|table|\n columns = Hash.new\n column_names = @table_ast[table].keys\n rows = @dbh.execute(\"SELECT * FROM #{table}\")\n @mem_db_row[table] = rows\n column_names.each {|col|\n col_data = Array.new\n rows.each {|data|\n col_data.push(data[col])\n }\n columns[col] = col_data\n }\n @mem_db_col[table] = columns\n }\n return @mem_db_col, @mem_db_row\n end",
"def data\n @data ||= Dir['data/*'].reduce({}) do |data, file|\n raw_data = File.read(file)\n filename = parse_filename(file)\n schema = schemas[filename]\n\n data[filename] = raw_data.split(\"\\n\").map do |row|\n schema.reduce({}) do |object, column|\n # slice from the beginning of the row the relevant number of\n # characters based on the column's attribute length\n raw_value = row.slice!(0, column[:length].to_i)\n\n parsed_value = parse_attribute(raw_value, column[:type])\n object[column[:name]] = parsed_value\n object\n end\n end\n end\n end",
"def schema\n @schema ||= (default_schema || ETL::Schema::Table.new)\n end",
"def schema_meta_structure\n CreateVersionsTableQuery.new.to_cql\n end",
"def schema_parse_table(table_name, opts)\n m = output_identifier_meth\n m2 = input_identifier_meth\n ds = metadata_dataset.from(:information_schema__tables___t).\n join(:information_schema__columns___c, :table_catalog=>:table_catalog,\n :table_schema => :table_schema, :table_name => :table_name).\n select(:column_name___column, :data_type___db_type, :character_maximum_length___max_chars, :column_default___default, :is_nullable___allow_null).\n filter(:c__table_name=>m2.call(table_name.to_s))\n if schema = opts[:schema] || default_schema\n ds.filter!(:table_schema=>schema)\n end\n ds.map do |row|\n row[:allow_null] = row[:allow_null] == 'YES' ? true : false\n row[:default] = nil if blank_object?(row[:default])\n row[:type] = schema_column_type(row[:db_type])\n [m.call(row.delete(:column)), row]\n end\n end",
"def tables\n []\n end",
"def schema_names\n select_value(<<-SQL, 'SCHEMA')\n SELECT nspname\n FROM pg_namespace\n WHERE nspname !~ '^pg_.*'\n AND nspname NOT IN ('information_schema')\n ORDER by nspname;\n SQL\n end",
"def schema\n []\n end",
"def get_db_schema(reload = reload_db_schema?)\n set_columns(nil)\n return nil unless @dataset\n schema_hash = {}\n ds_opts = dataset.opts\n get_columns = proc{check_non_connection_error{columns} || []}\n schema_array = check_non_connection_error(false){db.schema(dataset, :reload=>reload)} if db.supports_schema_parsing?\n if schema_array\n schema_array.each{|k,v| schema_hash[k] = v}\n\n # Set the primary key(s) based on the schema information,\n # if the schema information includes primary key information\n if schema_array.all?{|k,v| v.has_key?(:primary_key)}\n pks = schema_array.map{|k,v| k if v[:primary_key]}.compact\n pks.length > 0 ? set_primary_key(pks) : no_primary_key\n end\n\n if (select = ds_opts[:select]) && !(select.length == 1 && select.first.is_a?(SQL::ColumnAll))\n # We don't remove the columns from the schema_hash,\n # as it's possible they will be used for typecasting\n # even if they are not selected.\n cols = get_columns.call\n cols.each{|c| schema_hash[c] ||= {}}\n def_column_accessor(*schema_hash.keys)\n else\n # Dataset is for a single table with all columns,\n # so set the columns based on the order they were\n # returned by the schema.\n cols = schema_array.map{|k,v| k}\n set_columns(cols)\n # Also set the columns for the dataset, so the dataset\n # doesn't have to do a query to get them.\n dataset.send(:columns=, cols)\n end\n else\n # If the dataset uses multiple tables or custom sql or getting\n # the schema raised an error, just get the columns and\n # create an empty schema hash for it.\n get_columns.call.each{|c| schema_hash[c] = {}}\n end\n schema_hash\n end",
"def get_db_schema(reload = reload_db_schema?)\n set_columns(nil)\n return nil unless @dataset\n schema_hash = {}\n ds_opts = dataset.opts\n get_columns = proc{check_non_connection_error{columns} || []}\n schema_array = check_non_connection_error(false){db.schema(dataset, :reload=>reload)} if db.supports_schema_parsing?\n if schema_array\n schema_array.each{|k,v| schema_hash[k] = v}\n\n # Set the primary key(s) based on the schema information,\n # if the schema information includes primary key information\n if schema_array.all?{|k,v| v.has_key?(:primary_key)}\n pks = schema_array.map{|k,v| k if v[:primary_key]}.compact\n pks.length > 0 ? set_primary_key(pks) : no_primary_key\n end\n\n if (select = ds_opts[:select]) && !(select.length == 1 && select.first.is_a?(SQL::ColumnAll))\n # We don't remove the columns from the schema_hash,\n # as it's possible they will be used for typecasting\n # even if they are not selected.\n cols = get_columns.call\n cols.each{|c| schema_hash[c] ||= {}}\n def_column_accessor(*schema_hash.keys)\n else\n # Dataset is for a single table with all columns,\n # so set the columns based on the order they were\n # returned by the schema.\n cols = schema_array.map{|k,v| k}\n set_columns(cols)\n # Also set the columns for the dataset, so the dataset\n # doesn't have to do a query to get them.\n dataset.send(:columns=, cols)\n end\n else\n # If the dataset uses multiple tables or custom sql or getting\n # the schema raised an error, just get the columns and\n # create an empty schema hash for it.\n get_columns.call.each{|c| schema_hash[c] = {}}\n end\n schema_hash\n end",
"def schemas\n sql = \"SELECT nspname FROM pg_namespace WHERE nspname !~ '^pg_.*'\"\n ActiveRecord::Base.connection.query(sql).flatten\n end",
"def schema\n adapter.schema\n end",
"def tables_from(db=current_database)\n end",
"def schema_names\n select_values(\n \"SELECT nspname FROM pg_namespace\" <<\n \" WHERE nspname !~ '^pg_.*' AND nspname NOT IN ('information_schema')\" <<\n \" ORDER by nspname;\",\n 'SCHEMA')\n end",
"def schema\n raise NotImplementedError\n end",
"def schema_meta_data\n schema = []\n # not sure if there needs to be a header element with the number of columns in it\n # would look sort of like this\n # header = SchemaElement.new\n # header.name = @csv.rows.first.first\n # header.num_children = num_rows\n # schema << header\n @csv.headers.each do |header|\n schema << Configurator.schema_element_with_defaults({\n name: header\n })\n end\n\n schema\n end",
"def initialize_db_schema\n @db.exec(\n 'create table if not exists nodes\n (\n id SERIAL PRIMARY KEY,\n host VARCHAR(256) UNIQUE,\n last_seen TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,\n current_report INTEGER\n );\n\n create table if not exists collections\n (\n id SERIAL PRIMARY KEY,\n collection VARCHAR(256) NOT NULL,\n archived BOOL DEFAULT FALSE\n );\n\n create table if not exists reports\n (\n id SERIAL PRIMARY KEY,\n node_id INTEGER NOT NULL,\n file_handle INTEGER,\n status INTEGER NOT NULL,\n collection INTEGER NOT NULL,\n time TIMESTAMP NOT NULL,\n resources_changed INTEGER NOT NULL,\n resources_failed INTEGER NOT NULL,\n resources_total INTEGER NOT NULL,\n runtime REAL NOT NULL,\n new_report BOOL DEFAULT FALSE,\n FOREIGN KEY (node_id) REFERENCES nodes (id),\n FOREIGN KEY (collection) REFERENCES collections(id)\n );\n\n create table if not exists schemaversion\n (\n id SERIAL PRIMARY KEY,\n complete BOOL DEFAULT FALSE,\n comment VARCHAR(256) NOT NULL\n );\n create table if not exists reportdata\n (\n id SERIAL PRIMARY KEY,\n report bytea\n );\n '\n )\n end",
"def schema(table, opts={})\n hero = execute(\"DESCRIBE #{table}\")\n hero.map do |h|\n [ h[:col_name].to_sym, { :db_type => h[:data_type] , :comment => h[:comment] } ]\n end\n end",
"def dataset\n res = Database.connection.exec(\"SELECT dataset from application_seeds LIMIT 1;\")\n res.getvalue(0, 0)\n rescue PG::Error => e\n e.message =~ /relation \"application_seeds\" does not exist/ ? nil : raise\n end",
"def parse_schema(fio)\n data_system.parse(fio)\n end",
"def tables\n [\n ]\n end",
"def get_schema_struct(table_name)\n dbres = do_sql_command(\"DESC #{table_name};\")\n\n dbstruct = []\n\n if(dbres) then\n dbres.each_hash do | row |\n dbstruct_hash = {}\n row.each {|key, val|\n dbstruct_hash[key.downcase.to_sym] = val \n }\n dbstruct << dbstruct_hash\n end \n end\n\n dbstruct\nend",
"def schema\n @schema ||= []\n end",
"def get(table)\n if internal?(table)\n INTERNAL_SCHEMATA[table.variable.name]\n else\n table_id = @table_manager.variable_id(table)\n Schema.new(\n @internal_evaluator.select(\n [\"column_name\", \"index\", \"type_id\", \"short_default\", \"boolean_default\", \"string_default\", \"double_default\", \"integer_default\"],\n \"schemata\",\n [\"table_id\"],\n [table_id]\n ).sort_by { |tuple| tuple[1] }.map do |tuple|\n name = tuple[0]\n type = StorageType.by_id(tuple[2])\n default = case type \n when StorageType::SHORT then tuple[3]\n when StorageType::BOOLEAN then tuple[4]\n when StorageType::STRING then tuple[5]\n when StorageType::DOUBLE then tuple[6]\n when StorageType::INTEGER then tuple[7]\n else\n raise\n end\n if default.nil?\n Column.new(name, type)\n else\n Column.new(name, type, default)\n end\n end\n )\n end\n end",
"def initialize_datasets\n @datasets = gateways.each_with_object({}) do |(key, gateway), h|\n h[key] = gateway.schema if config.gateways[key][:infer_relations]\n end\n end",
"def tables\n sanity_check\n @handle.tables\n end",
"def ctype\n select_value(\n \"SELECT pg_database.datctype FROM pg_database WHERE pg_database.datname LIKE '#{current_database}'\",\n 'SCHEMA')\n end",
"def schema\n jiak.data.schema\n end",
"def dataset(opts=nil)\n Sequel::JDBC::AS400::Dataset.new(self, opts)\n end",
"def dataset(opts=nil)\n Sequel::JDBC::AS400::Dataset.new(self, opts)\n end",
"def schema\n connection.schema\n end",
"def data\r\n tables_ret = []\r\n tables.list.each do |name, table|\r\n tables_ret << table.data\r\n end\r\n \r\n return {\r\n \"tables\" => tables_ret\r\n }\r\n end",
"def table_schema(tbl)\n column_sql = <<-eosql\nSELECT rf.rdb$field_name AS \"name\",\n field.rdb$field_type AS \"type_code\",\n field.rdb$field_sub_type AS \"subtype_code\",\n-- -- -- field.rdb$field_length AS \"length\", -- -- --\n field.rdb$field_precision AS \"precision\",\n field.rdb$field_scale AS \"scale\",\n CASE\n WHEN rf.rdb$null_flag > 0\n THEN 'NO'\n ELSE 'YES'\n END AS \"nullable\",\n CASE\n WHEN iseg.rdb$index_name IS NOT NULL\n THEN 'YES'\n ELSE 'NO'\n END AS \"primary_key\"\nFROM rdb$relation_fields rf\nJOIN rdb$fields field ON rf.rdb$field_source = field.rdb$field_name\nLEFT JOIN rdb$relation_constraints c\n ON c.rdb$relation_name = rf.rdb$relation_name\n AND\n c.rdb$constraint_type = 'PRIMARY KEY'\nLEFT JOIN rdb$index_segments iseg\n ON iseg.rdb$index_name = c.rdb$index_name\n AND\n iseg.rdb$field_name = rf.rdb$field_name\nWHERE rf.rdb$relation_name = ?\nORDER BY rf.rdb$field_position, rf.rdb$field_name\neosql\n\n info = RDBI::Schema.new([], [])\n res = execute(column_sql, tbl.to_s.upcase)\n res.as(:Struct)\n while row = res.fetch[0]\n type = RDBI::Driver::Rubyfb::Types::field_type_to_rubyfb(row[:type_code], row[:subtype_code])\n info.columns << RDBI::Column.new(\n row[:name].to_sym,\n type,\n RDBI::Driver::Rubyfb::Types::rubyfb_to_rdbi(type, row[:scale]),\n row[:precision],\n row[:scale],\n row[:nullable] == 'YES',\n #nil, # metadata\n #nil, # default\n #nil, # table\n )\n (info.columns[-1].primary_key = row[:primary_key] == 'YES') rescue nil # pk > rdbi 0.9.1\n end\n return unless info.columns.length > 0\n info.tables << tbl\n info\n end",
"def all\n partitions.group_by { |row| row['table_name'] }.map(&method(:to_tablature_table))\n end",
"def tables(opts=OPTS, &block)\n pg_class_relname(['r', 'p'], opts, &block)\n end",
"def tables\n execute(\"SHOW TABLES FROM #{@opts[:database]}\")\n .map{ |i| i.values }\n .reduce(:+)\n .map{ |i| i.to_sym }\n end",
"def schema\n self.class.schema\n end",
"def tables\n @tables ||= {}.tap do |tables|\n parsed_data['tables'].map do |table|\n parsed_table_rows = parse_choices(table['rows'])\n tables[table['id']] = Table.new(table['name'], table['parameters'], parsed_table_rows)\n end\n end\n end",
"def dataset(opts=nil)\n Sequel::Swift::Postgres::Dataset.new(self, opts)\n end",
"def dataset\n ds = Sequel::Dataset.new(self)\n end",
"def dataset\n ds = Sequel::Dataset.new(self)\n end",
"def import_schemata!\n output 'Dropping and re-creating table definitions'\n result = mysql_root_cmd \"source #{Jetpants.export_location}/create_tables_#{@port}.sql\", terminator: '', schema: true\n output result\n end",
"def import_schemata!\n output 'Dropping and re-creating table definitions'\n result = mysql_root_cmd \"source #{Jetpants.export_location}/create_tables_#{@port}.sql\", terminator: '', schema: true\n output result\n end",
"def tables\r\n return @engine.tables\r\n end",
"def get_tables\n get_schemas.keys\n end",
"def __create_meta_data_table_for schema\n ActiveRecord::Base.establish_connection(self.connection_data) unless schema.connected?\n\n # Clears the table cache for the schema (remove TableDoesNotExists if a table actually exists)\n schema.clear_cache!\n\n unless schema.table_exists?\n ActiveRecord::Schema.define do\n create_table schema.table_name do |t|\n t.column :version, :float\n end\n end\n end\n end",
"def tables\n db_connection.select_values(\"show tables\")\n end",
"def schema_contents\n File.read(schema_path)\n end",
"def schema_source\n column_families\n end",
"def tables\r\n conn_exec do |driver|\r\n if !@tables\r\n require \"#{File.dirname(__FILE__)}/drivers/#{@opts[:type]}/knjdb_#{@opts[:type]}_tables\" if (!@opts.key?(:require) or @opts[:require])\r\n @tables = Kernel.const_get(\"KnjDB_#{@opts[:type]}\".to_sym).const_get(:Tables).new(\r\n :db => self\r\n )\r\n end\r\n \r\n return @tables\r\n end\r\n end",
"def schema_and_table_name\n if qualified_table_name.include? '.'\n schema_name, table_name = qualified_table_name.split('.', 2)\n else\n table_name = qualified_table_name\n schema_name = self.class.default_schema_name\n end\n [schema_name, table_name]\n end",
"def available_schemas\n load_schemas.transform_values(&:description).to_a\n end",
"def schema(table_name = nil, opts={})\n table_name = table_name.to_sym if table_name\n if opts[:reload] && @schemas\n if table_name\n @schemas.delete(table_name)\n else\n @schemas = nil\n end\n end\n\n if @schemas\n if table_name\n return @schemas[table_name] if @schemas[table_name]\n else\n return @schemas\n end\n end\n\n if table_name\n @schemas ||= {}\n if respond_to?(:schema_parse_table, true)\n @schemas[table_name] ||= schema_parse_table(table_name, opts)\n else\n raise Error, 'schema parsing is not implemented on this database'\n end\n else\n if respond_to?(:schema_parse_tables, true)\n @schemas = schema_parse_tables(opts)\n elsif respond_to?(:schema_parse_table, true) and respond_to?(:tables, true)\n tables.each{|t| schema(t, opts)}\n @schemas\n else\n raise Error, 'schema parsing is not implemented on this database'\n end\n end\n end",
"def tables\n execute('select table_name from information_schema.tables where table_schema = ?', [schema]).map(&:first)\n end",
"def schema\n\t\tunless @schema\n\t\t\tschemahash = self.conn.schema\n\t\t\t@schema = Treequel::Schema.new( schemahash )\n\t\tend\n\n\t\treturn @schema\n\tend",
"def load_table_schema(conn, builder, table)\n builder.relvar(table){\n primary_key_columns = load_table_heading(conn, builder, table)\n load_table_constraints(conn, builder, table, primary_key_columns)\n }\n end",
"def all_schemas\n query('SELECT schema_name FROM information_schema.schemata').flatten\n end",
"def tables(name = nil)\n query(<<-SQL, 'SCHEMA').map { |row| row[0] }\n SELECT tablename\n FROM pg_tables\n WHERE schemaname = ANY (ARRAY['public'])\n SQL\n end",
"def tables(name = nil, schema = nil, table = nil)\n schema = schema ? \"'#{quote_string(schema)}'\" : 'CURRENT_SCHEMA'\n select_rows(\n \"SELECT table_name \"+\n \"FROM information_schema.tables \"+\n \"WHERE table_type = 'TABLE' \"+\n \" AND table_schema = #{schema} \"+\n (table ? \"AND table_name = '#{quote_string(table)}'\" : \"\"),\n SCHEMA_LOG_NAME\n ).map { |row|\n row[0]\n }\n end",
"def chooseSchema\n @metadata.chooseSchema\n end",
"def schema\n @schema ||= metadata.ancestors('Schema').first\n end",
"def schema_data\n dev_schema\n rescue\n VetsJsonSchema::SCHEMAS[@schema_name]\n end",
"def schema_parse_rows(rows)\n schema = []\n rows.each do |row| \n row[:allow_null] = row[:allow_null] == 'YES' ? true : false\n row[:default] = nil if row[:default].blank?\n row[:type] = schema_column_type(row[:db_type])\n schema << [row.delete(:column).to_sym, row]\n end\n schema\n end",
"def generate_query\n unless databases.nil?\n databases.each do |db|\n create_query[db] = [\"create table #{tablename} (\"]\n end\n csv_column_datatypes.each do |header, datatype|\n append_to_query = build_query_for_datatype(header, datatype)\n append_to_query.each do |key, value|\n create_query[key].push(value)\n end\n end\n prepare_sql_statements\n prepare_import_csv\n # Pass the prepared statements to options varaible.\n # Which gets passed on to print_metadata_analysis\n options[:create_query] = create_query\n options[:import_query] = import_query\n end\n print_metadata_analysis\n end",
"def dataset(opts=nil)\n Sequel::JDBC::Oracle::Dataset.new(self, opts)\n end",
"def list_tables\n data.keys\n end",
"def tables\n raise 'SevenZip#tables should never be called'\n end",
"def prepare_schema\n tables = [];ActiveRecord::Base.connection.execute(\"show tables\").each{|t| tables << t[0].strip}\n \n ActiveRecord::Schema.define do\n App.log.info(\"preparing schema\")\n \n unless tables.include?(\"services\")\n # a service entry\n begin\n create_table :services do |t|\n t.string :name\n t.string :status, :null => false, :default => \"active\"\n end\n add_index :services, :name\n rescue\n App.log_exception\n end\n end\n \n unless tables.include?(\"queries\")\n begin\n # queries\n create_table :queries do |t|\n t.string :query \n t.column :last_twid, :bigint, :null => false, :default => 0\n t.timestamp :last_run\n t.integer :last_result_count\n t.string :status, :default => 'active', :null=> false\n end\n add_index :queries, :query\n rescue\n App.log_exception\n end\n end\n \n unless tables.include?(\"tweets\")\n begin\n # cache of tweets\n create_table :tweets do |t|\n t.column :twid, :bigint, :null => false\n t.string :from_user\n t.string :to_user\n t.integer :from_user_id\n t.integer :to_user_id\n t.string :text\n t.string :profile_image_url\n t.timestamp :created_at\n end\n add_index :tweets, :twid\n rescue\n App.log_exception\n end\n end\n \n end # define schema\n end",
"def schema(path = nil)\n s = \"ActiveRecord::Schema.define do\\n\"\n s << \" create_table \\\"#{File.basename(@data.path, \".*\")}\\\" do |t|\\n\"\n columns.each do |column|\n s << \" t.column #{column.schema_definition}\"\n end\n s << \" end\\nend\"\n \n if path\n File.open(path, 'w') {|f| f.puts(s)}\n end\n \n s\n end",
"def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rclass = Sequel[:cl2]\n end\n\n if server_version >= 90500\n cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])}\n rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])}\n # :nocov:\n else\n range = 0...32\n cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])}\n rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])}\n # :nocov:\n end\n\n ds = metadata_dataset.\n from{pg_constraint.as(:co)}.\n join(Sequel[:pg_class].as(cclass), :oid=>:conrelid).\n join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]).\n join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])).\n join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]).\n order{[co[:conname], cpos]}.\n where{{\n cl[:relkind]=>%w'r p',\n co[:contype]=>'f',\n cpos=>rpos\n }}.\n select{[\n co[:conname].as(:name),\n ctable[:attname].as(:column),\n co[:confupdtype].as(:on_update),\n co[:confdeltype].as(:on_delete),\n cl2[:relname].as(:table),\n rtable[:attname].as(:refcolumn),\n SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable),\n nsp[:nspname].as(:schema)\n ]}\n\n if reverse\n ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname])\n end\n\n ds\n end",
"def tenant_names\n connection.execute('select datname from pg_database;').collect { |row| row['datname'] }\n end",
"def data_attributes\n @schema.schema.select {|k,_| k.to_s.start_with?('data_') or k.to_s.start_with?('data-')}.inject({}) {|col,(k,v)| col[k[5..-1].to_sym]=v;col}\n end",
"def table(args={})\n args[:connection_name] = @connection_name unless args.has_key?(:connection_name)\n req(:required => [:table_name, :db_name],\n :args_object => args)\n dbh = Mysqladmin::Exec.new(:connection_name => args[:connection_name])\n dbh.use(args[:db_name])\n dbh.query(:sql => \"SHOW TABLE STATUS LIKE '#{args[:table_name]}'\")\n if dbh.rows > 0\n dbh.fetch_hash do |table_data|\n return {\n :table_name => table_data[\"Name\"],\n :engine => table_data[\"Engine\"].downcase,\n :data_length => table_data[\"Data_length\"].to_i,\n :index_length => table_data[\"Index_length\"].to_i,\n :total_length => (table_data[\"Data_length\"].to_i + table_data[\"Index_length\"].to_i),\n :collation => table_data[\"Collation\"].downcase,\n :rows => table_data[\"Rows\"].to_i,\n :avg_row_length => table_data[\"Avg_row_length\"].to_i,\n :max_data_length => table_data[\"Max_data_length\"].to_i,\n :row_format => table_data[\"Row_format\"].downcase\n }\n end\n end\n \n # :connection_name => The named connection to use for database statistics,\n # :db_name => The database to gather statistics on\n def database(args={})\n args[:connection_name] = @connection_name unless args.has_key?(:connection_name)\n req(:required => [:db_name],\n :args_object => args)\n data = {}\n dbh = Mysqladmin::Exec.new(:connection_name => args[:connection_name])\n dbh.use(args[:db_name])\n dbh.list_tables.each do |table_name|\n data[table_name] = table(:table_name => args[:table_name], :db_name => args[:db_name], :connection_name => args[:connection_name])\n end\n return data\n end\n \n end",
"def db_table\n name = (\"evaldata_\" + term.title + \"_\" + title).strip\n name = ActiveSupport::Inflector.transliterate(name).downcase\n name.gsub(/[^a-z0-9_]+/, \"_\")\n end",
"def _select_serial_sequence_ds\n @_serial_sequence_ds ||= metadata_dataset.\n from{[\n pg_class.as(:seq),\n pg_attribute.as(:attr),\n pg_depend.as(:dep),\n pg_namespace.as(:name),\n pg_constraint.as(:cons),\n pg_class.as(:t)\n ]}.\n where{[\n [seq[:oid], dep[:objid]],\n [seq[:relnamespace], name[:oid]],\n [seq[:relkind], 'S'],\n [attr[:attrelid], dep[:refobjid]],\n [attr[:attnum], dep[:refobjsubid]],\n [attr[:attrelid], cons[:conrelid]],\n [attr[:attnum], cons[:conkey].sql_subscript(1)],\n [attr[:attrelid], t[:oid]],\n [cons[:contype], 'p']\n ]}.\n select{[\n name[:nspname].as(:schema),\n seq[:relname].as(:sequence)\n ]}\n end",
"def get_dataset(table)\n #puts \"converting to a dataset\"\n to_dataset(@datafiles[table].content)\n end",
"def tables\n table_names.map { |tn| table(tn) }\n end",
"def table_structure(table_name)\n execute('select * from information_schema.columns where table_schema = ?' \\\n 'AND table_name = ?', [schema, table_name])\n end",
"def dataset(opts=nil)\n Sequel::JDBC::MSSQL::Dataset.new(self, opts)\n end"
] |
[
"0.7500667",
"0.6867395",
"0.66640764",
"0.6582821",
"0.6533815",
"0.6410895",
"0.632692",
"0.6290313",
"0.6165314",
"0.6144493",
"0.610729",
"0.610729",
"0.60320973",
"0.59904265",
"0.59676796",
"0.595397",
"0.5949822",
"0.5930337",
"0.5926021",
"0.59037954",
"0.58810246",
"0.587168",
"0.58701175",
"0.5869719",
"0.5868299",
"0.58578223",
"0.58311427",
"0.580213",
"0.5796813",
"0.57952183",
"0.57952183",
"0.57928544",
"0.5790282",
"0.5766281",
"0.5763276",
"0.5747641",
"0.57357633",
"0.57344025",
"0.57255757",
"0.5722004",
"0.57166845",
"0.5715954",
"0.5705927",
"0.57036465",
"0.57000184",
"0.5690152",
"0.5665588",
"0.56595814",
"0.5656399",
"0.56298333",
"0.56298333",
"0.5623373",
"0.5620091",
"0.55754817",
"0.5570235",
"0.55643964",
"0.55625546",
"0.55595386",
"0.5555602",
"0.5555377",
"0.55393577",
"0.55393577",
"0.55372727",
"0.55372727",
"0.5533905",
"0.55280906",
"0.5518571",
"0.55010945",
"0.5500873",
"0.54973763",
"0.54895395",
"0.5466671",
"0.54637384",
"0.54539794",
"0.5452421",
"0.54521275",
"0.54485905",
"0.54472315",
"0.544617",
"0.5445771",
"0.54446816",
"0.5443919",
"0.5442612",
"0.54393756",
"0.5430404",
"0.5430091",
"0.54236287",
"0.5408561",
"0.5399222",
"0.53947407",
"0.539289",
"0.5390311",
"0.5387387",
"0.5360807",
"0.53605413",
"0.535216",
"0.53501195",
"0.5342986",
"0.53417903",
"0.53415215"
] |
0.6407045
|
6
|
Set the transaction isolation level on the given connection
|
def set_transaction_isolation(conn, opts)
  # Use an explicitly requested isolation level, falling back to the
  # database's default transaction_isolation_level.
  level = opts.fetch(:isolation, transaction_isolation_level)
  read_only = opts[:read_only]
  deferrable = opts[:deferrable]
  # Only issue SET TRANSACTION when at least one characteristic was given.
  if level || !read_only.nil? || !deferrable.nil?
    sql = String.new
    sql << "SET TRANSACTION"
    sql << " ISOLATION LEVEL #{Sequel::Database::TRANSACTION_ISOLATION_LEVELS[level]}" if level
    sql << " READ #{read_only ? 'ONLY' : 'WRITE'}" unless read_only.nil?
    sql << " #{'NOT ' unless deferrable}DEFERRABLE" unless deferrable.nil?
    log_connection_execute(conn, sql)
  end
end
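
A minimal usage sketch, not part of the source: on PostgreSQL, Sequel's Database#transaction accepts :isolation, :read_only, and :deferrable options, so a call like the one below would route through set_transaction_isolation and (given the standard TRANSACTION_ISOLATION_LEVELS mapping) emit SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE.

# DB is a hypothetical Sequel::Database handle connected to PostgreSQL.
DB.transaction(isolation: :serializable, read_only: true, deferrable: true) do
  DB[:accounts].sum(:balance)
end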
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def set_transaction_isolation(conn, opts)\n level = opts.fetch(:isolation, transaction_isolation_level)\n if (jdbc_level = JDBC_TRANSACTION_ISOLATION_LEVELS[level]) &&\n conn.getMetaData.supportsTransactionIsolationLevel(jdbc_level)\n _trans(conn)[:original_jdbc_isolation_level] = conn.getTransactionIsolation\n log_connection_yield(\"Transaction.isolation_level = #{level}\", conn){conn.setTransactionIsolation(jdbc_level)}\n end\n end",
"def begin_isolated_db_transaction(isolation)\n name = isolation.to_s.upcase; name.sub!('_', ' ')\n log(\"/* BEGIN */; SET TRANSACTION ISOLATION LEVEL #{name}\") do\n @connection.begin(isolation)\n end\n end",
"def begin_isolated_db_transaction(isolation)\n @connection.transaction transaction_isolation_levels.fetch(isolation, isolation)\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end",
"def begin_transaction(conn, opts={})\n set_transaction_isolation(conn, opts)\n super\n end",
"def transaction_isolation_levels\n {\n read_committed: 'READ COMMITTED',\n repeatable_read: 'REPEATABLE READ',\n serializable: 'SERIALIZABLE'\n }\n end",
"def begin_isolated_db_transaction(isolation)\n log(\"BEGIN ISOLATED - #{isolation}\", 'TRANSACTION') { @connection.begin(isolation) }\n end",
"def supports_transaction_isolation_levels?\n true\n end",
"def supports_transaction_isolation_levels?\n true\n end",
"def supports_transaction_isolation_levels?\n synchronize{|conn| conn.getMetaData.supportsTransactionIsolationLevel(JavaSQL::Connection::TRANSACTION_SERIALIZABLE)}\n end",
"def supports_transaction_isolation_level?(level)\n @connection.supports_transaction_isolation?(level)\n end",
"def test_supports_transaction_isolation\n assert ActiveRecord::Base.connection.supports_transaction_isolation?\n\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:read_uncommitted)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:read_committed)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:repeatable_read)\n assert ActiveRecord::Base.connection.supports_transaction_isolation_level?(:serializable)\n end",
"def supports_transaction_isolation_levels?\n false\n end",
"def supports_transaction_isolation?\n true\n end",
"def begin_transaction(conn, opts=OPTS)\n log_connection_yield('Transaction.begin', conn){conn.autocommit = false}\n set_transaction_isolation(conn, opts)\n end",
"def supports_transaction_isolation?\n false\n end",
"def supports_transaction_isolation?\n false\n end",
"def begin_db_transaction\n @transaction = @connection.transaction('READ COMMITTED')\n end",
"def remove_transaction(conn, committed)\n if jdbc_level = _trans(conn)[:original_jdbc_isolation_level]\n log_connection_yield(\"Transaction.restore_isolation_level\", conn){conn.setTransactionIsolation(jdbc_level)}\n end\n unless in_savepoint?(conn)\n conn.setAutoCommit(true)\n end\n ensure\n super\n end",
"def with_transaction(read_only: false, &block)\n @env.transaction(read_only, &block)\n end",
"def with_connection\n ActiveRecord::Base.connection_pool.with_connection do |connection|\n connection.transaction do\n if connection.adapter_name == \"PostgreSQL\"\n connection.execute \"SET TRANSACTION READ ONLY\"\n connection.execute \"SET LOCAL statement_timeout = 100\"\n # TODO support equivalent options for other adapters (such as mysql)\n end\n\n yield connection\n end\n end\n end",
"def transaction\n use do |connection|\n connection.transaction do |conn|\n begin\n yield conn\n rescue Rollback\n return\n end\n end\n end\n end",
"def perform_atomic_update(&block)\n Edition.connection.execute \"set transaction isolation level serializable\"\n Edition.connection.transaction do\n yield\n end\n end",
"def transaction\n raise Mysql::Error, 'Not Connected' if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end",
"def locked_transaction(&block)\n unless connection.open_transactions.zero?\n raise \"locked_transaction cannot be called from within another transaction!\"\n end\n unsafe_locked_transaction do\n yield if block_given?\n end\n end",
"def transaction(&block)\n self['AutoCommit'] = false\n self.do_transaction(&block)\n self['AutoCommit'] = true\n end",
"def transaction( &block )\n connect do | conn |\n conn.transaction do | conn |\n yield SqlRunner.new(SingleConnectionPool.new( conn ))\n end\n end\n end",
"def begin_db_transaction\n log('begin transaction', nil) do\n begin_isolated_db_transaction(default_transaction_isolation)\n end\n end",
"def transaction\n raise Mysql2::Error, 2002 if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql2::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end",
"def begin_transaction(conn, opts=OPTS)\n if in_savepoint?(conn)\n _trans(conn)[:savepoints][-1][:obj] = log_connection_yield('Transaction.savepoint', conn){conn.set_savepoint}\n else\n log_connection_yield('Transaction.begin', conn){conn.setAutoCommit(false)}\n set_transaction_isolation(conn, opts)\n end\n end",
"def transaction(&block)\n @in_transaction += 1\n begin\n yield self\n self.commit if @in_transaction > 0\n rescue => e\n self.rollback\n raise e\n ensure\n @in_transaction -= 1 unless @in_transaction == 0\n end\n end",
"def transaction(session, access_mode = :write)\n if !block_given?\n tx = self.class.transaction_class.new(session)\n tx.access_mode = access_mode\n tx.begin\n return tx\n end\n\n begin\n tx = transaction(session, access_mode)\n yield tx\n rescue => e\n tx.mark_failed if tx\n raise e\n ensure\n tx.close if tx\n end\n end",
"def begin_transaction(conn, opts={})\n log_yield(TRANSACTION_BEGIN){conn.setAutoCommit(false)}\n conn\n end",
"def start_transaction!\n fail DbMod::Exceptions::AlreadyInTransaction if @in_transaction\n @in_transaction = true\n\n query 'BEGIN'\n end",
"def transaction(start_db_transaction=true)\n yield\n end",
"def transaction(opts={:auto_enable => false})\n @transaction_level||=0\n @transaction_level+=1\n session do\n raise_if_transaction_running\n @transaction ||= true\n raise Error.cannot_obtain_transaction_lock if not lock_transaction\n begin\n yield\n # get_transaction in case cfgsave or cfgenable was run in transaction block\n # if there is no transaction we do not need to run it\n # if there is transaction but opend by someone else then t\n cfg_save if @transaction_level==1 && get_transaction\n rescue => e\n abort_transaction\n raise e\n end \n end\n ensure\n @transaction_level-=1\n @transaction = nil if @transaction_level==0\n end",
"def isolation_context\n self.class.isolation_context\n end",
"def transaction(object)\n object.db.transaction {raise ::Sequel::Error::Rollback unless yield}\n end",
"def begin_db_transaction\n @connection.autocommit = false\n end",
"def set_locking_strategy\n if @config.enable_locking\n @lock = Aws::SessionStore::DynamoDB::Locking::Pessimistic.new(@config)\n else\n @lock = Aws::SessionStore::DynamoDB::Locking::Null.new(@config)\n end\n end",
"def apply_causal_consistency!(selector, connection)\n return unless selector[:startTransaction]\n\n apply_causal_consistency_if_possible(selector, connection)\n end",
"def begin_transaction(conn)\n log_info(TRANSACTION_BEGIN)\n conn.setAutoCommit(false)\n conn\n end",
"def begin_new_transaction(conn, opts)\n super\n if opts.has_key?(:synchronous)\n case sync = opts[:synchronous]\n when true\n sync = :on\n when false\n sync = :off\n when nil\n return\n end\n\n log_connection_execute(conn, \"SET LOCAL synchronous_commit = #{sync}\")\n end\n end",
"def transaction\n start\n yield self\n rescue Object => ex\n rollback\n debug \"#{ex.class}: #{ex.message}\"\n ex.backtrace.each { |line| debug line }\n else\n commit\n end",
"def transaction\n @transaction ||= PerseveringTransaction.new(connection)\n end",
"def transaction(mode = :deferred, &block)\n @db.transaction(mode, &block)\n end",
"def lock(mode, &block)\n sql = LOCK % [@opts[:from], mode]\n @db.synchronize do\n if block # perform locking inside a transaction and yield to block\n @db.transaction {@db.execute(sql); yield}\n else\n @db.execute(sql) # lock without a transaction\n self\n end\n end\n end",
"def transaction\n start_transaction\n\n yield\n ensure\n end_transaction if transaction_started?\n end",
"def with_transaction\n ActiveRecord::Base.transaction { yield }\n end",
"def begin_db_transaction\n log('BEGIN', 'TRANSACTION') { @connection.begin }\n end",
"def transaction\n @pool.hold do |conn|\n @transactions ||= []\n if @transactions.include? Thread.current\n return yield(conn)\n end\n log_info(SQL_BEGIN)\n conn.execute(SQL_BEGIN)\n begin\n @transactions << Thread.current\n yield(conn)\n rescue Exception => e\n log_info(SQL_ROLLBACK)\n conn.execute(SQL_ROLLBACK)\n raise e unless Error::Rollback === e\n ensure\n unless e\n log_info(SQL_COMMIT)\n conn.execute(SQL_COMMIT)\n end\n @transactions.delete(Thread.current)\n end\n end\n end",
"def begin_transaction\n return System.begin_transaction\n end",
"def transaction\n @pool.hold do |conn|\n @transactions ||= []\n if @transactions.include? Thread.current\n return yield(conn)\n end\n conn.execute(SQL_BEGIN)\n begin\n @transactions << Thread.current\n result = yield(conn)\n conn.execute(SQL_COMMIT)\n result\n rescue => e\n conn.execute(SQL_ROLLBACK)\n raise e unless Error::Rollback === e\n ensure\n @transactions.delete(Thread.current)\n end\n end\n end",
"def transaction(&block)\n ActiveRecord::Base.transaction(&block)\n end",
"def testTransaction1()\n t = Scalaris::Transaction.new()\n t.close_connection()\n end",
"def testTransaction1()\n t = Scalaris::Transaction.new()\n t.close_connection()\n end",
"def commit(txn)\n @current_transaction = txn.outer\n if @current_transaction\n @mode = @current_transaction.mode\n @current_transaction.read_cache = @read_cache\n @current_transaction.write_cache = @write_cache\n @current_transaction.delete_cache = @delete_cache\n else\n close\n end\n end",
"def testTransaction3()\n t = Scalaris::Transaction.new(conn = Scalaris::JSONConnection.new(url = Scalaris::DEFAULT_URL))\n t.close_connection()\n end",
"def testTransaction3()\n t = Scalaris::Transaction.new(conn = Scalaris::JSONConnection.new(url = Scalaris::DEFAULT_URL))\n t.close_connection()\n end",
"def transaction(mode = READ_CACHING)\n @default_caching_mode = mode\n open(mode)\n\n outer = @current_transaction\n if outer\n # freeze old cache\n outer.read_cache = @read_cache.dup\n outer.write_cache = @write_cache.dup\n outer.delete_cache = @delete_cache.dup\n end\n\n if iterator?\n if @transaction_mode == DYNAMIC_TRANSACTION_MODE\n\tODBM.Fail ErrMixedTransaction \n end\n @transaction_mode = STATIC_TRANSACTION_MODE\n @current_transaction = StaticTransaction.new(self, mode, outer)\n# @current_transaction.transaction do\n#\tyield @current_transaction\n# end\n @current_transaction.transaction do |txn|\n\tyield txn\n end\n else\n if @transaction_mode == STATIC_TRANSACTION_MODE\n\tODBM.Fail ErrMixedTransaction \n end\n \n @transaction_mode = DYNAMIC_TRANSACTION_MODE\n\n @current_transaction = DynamicTransaction.new(self, mode, outer)\n @current_transaction.start\n return @current_transaction\n end\n\n end",
"def with_transaction(&block)\n base_model.transaction(&block)\n end",
"def ddl_transaction(migration)\n if use_transaction?(migration)\n ActiveRecord::Base.transaction { yield }\n else\n yield\n end\n end",
"def begin_savepoint(conn, opts)\n super\n\n unless (read_only = opts[:read_only]).nil?\n log_connection_execute(conn, \"SET TRANSACTION READ #{read_only ? 'ONLY' : 'WRITE'}\")\n end\n end",
"def transaction\n begin\n if block_given?\n begin_db_transaction\n result = yield\n commit_db_transaction\n result\n end\n rescue Exception => database_transaction_rollback\n rollback_db_transaction\n raise\n end\n end",
"def transaction\n @database.transaction { yield self }\n end",
"def transaction\n start_transaction!\n\n result = yield\n\n query 'COMMIT'\n\n result\n rescue\n query 'ROLLBACK'\n raise\n\n ensure\n end_transaction!\n end",
"def test_transaction3()\n t = Scalaroid::Transaction.new(conn = Scalaroid::JSONConnection.new(url = Scalaroid::DEFAULT_URL))\n t.close_connection()\n end",
"def begin_db_transaction\n # PG driver doesn't really do anything on setAutoCommit(false)\n # except for commit-ing a previous pending transaction if any\n log('/* BEGIN */') { @connection.begin }\n end",
"def in_transaction(opts = {})\n yield\n end",
"def use_transactions; end",
"def use_transactions; end",
"def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end",
"def checked_transaction(opts=OPTS)\n use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield\n end",
"def test_transaction1()\n t = Scalaroid::Transaction.new()\n t.close_connection()\n end",
"def start_transaction!(opts)\n tgi = threadlocal_graph_info\n tx_depth = tgi[:tx_depth] ||= 0\n tgi[:tx_depth] += 1\n begin\n if (not disable_transactions) and blueprints_graph.is_a? TransactionalGraph\n if tx_depth == 0\n close_implicit_transaction\n base_tx_finalizers\n elsif opts[:nesting] == true\n nested_tx_finalizers\n else\n fail NestedTransactionError, \"To use nested transactions, use nesting: true\"\n end\n else\n if tx_depth == 0\n mock_base_tx_finalizers\n else\n mock_nested_tx_finalizers\n end\n end\n rescue Exception\n tgi[:tx_depth] -= 1\n raise\n end\n end",
"def spy_transaction!\n @within_transaction = nil\n allow(TestSqlCaller).to receive(:transaction).and_wrap_original do |meth, *args, &block|\n @within_transaction = true\n meth.call(*args, &block)\n ensure\n @within_transaction = false\n end\n end",
"def transaction(start_db_transaction = true)\n transaction_open = false\n begin\n if block_given?\n if start_db_transaction\n begin_db_transaction\n transaction_open = true\n end\n yield\n end\n rescue Exception => database_transaction_rollback\n if transaction_open\n transaction_open = false\n rollback_db_transaction\n end\n raise unless database_transaction_rollback.is_a? ActiveRecord::Rollback\n end\n ensure\n if transaction_open\n begin\n commit_db_transaction\n rescue Exception => database_transaction_rollback\n rollback_db_transaction\n raise\n end\n end\n end",
"def outside_transaction\n # ActiveRecord manages connections per-thread, so the only way to\n # convince it to open another connection is to start another thread.\n thread = Thread.new do\n begin\n yield\n end\n end\n thread.value\n end",
"def transaction(&block)\n yield\n commit\n end",
"def with_lock_retries(*args, **kwargs, &block)\n if transaction_open?\n if enable_lock_retries?\n Gitlab::AppLogger.warn 'Lock retries already enabled, executing the block directly'\n yield\n else\n raise <<~EOF\n #{__callee__} can not be run inside an already open transaction\n\n Use migration-level lock retries instead, see https://docs.gitlab.com/ee/development/migration_style_guide.html#retry-mechanism-when-acquiring-database-locks\n EOF\n end\n else\n super(*args, **kwargs.merge(allow_savepoints: false), &block)\n end\n end",
"def transaction(options={}, &block)\n connection.transaction(options.update(:requires_new => true), &block)\n end",
"def lock(mode, &block)\n sql = LOCK % [source_list(@opts[:from]), mode]\n @db.synchronize do\n if block # perform locking inside a transaction and yield to block\n @db.transaction {@db.execute(sql); yield}\n else\n @db.execute(sql) # lock without a transaction\n self\n end\n end\n end",
"def autocommit=(value)\n IBM_DB.autocommit(@conn, value ? IBM_DB::SQL_AUTOCOMMIT_ON : IBM_DB::SQL_AUTOCOMMIT_OFF)\n end",
"def set_lock\n klass = self.class\n lock_column_name = klass.locking_column\n\n if has_attribute?(lock_column_name) && klass.parent_relation_keys.count > 0\n criteria = klass.parent_relation_keys.inject({}) do |result, parent_key|\n result[parent_key] = read_attribute(parent_key)\n result\n end\n relation = klass.unscoped.where(criteria)\n\n sql = <<-SQL\n #{relation.select(\"COUNT(#{lock_column_name}) AS value, 0 sort_order\").to_sql}\n UNION\n #{relation.select(\"MAX(#{lock_column_name}) AS value, 1 sort_order\").to_sql}\n ORDER BY sort_order\n SQL\n\n result = klass.connection.execute(sql)\n current_count = result[0][\"value\"]\n current_max = result[1][\"value\"]\n\n lock_value = (current_count.to_s == \"0\" ? 0 : current_max.to_i + 1)\n\n write_attribute(lock_column_name, lock_value)\n end\n end",
"def within_transaction; end",
"def within_transaction; end",
"def transaction(options = {}, &block)\n @klass.connection.run_queries_on_shard(@shard) do\n @klass = @klass.connection.transaction(options, &block)\n end\n end",
"def commit_transaction(conn)\n log_connection_execute(conn, commit_transaction_sql) unless Thread.current[:sequel_transaction_depth] > 1\n end",
"def transaction\n raise ArgumentError, 'No block was given' unless block_given?\n\n with_client do |client|\n begin\n client.query('BEGIN')\n yield client\n client.query('COMMIT')\n rescue StandardError => e\n client.query('ROLLBACK')\n raise e\n end\n end\n end",
"def restart_transaction\n ActiveRecord::Base.connection.execute(\"COMMIT\")\n ActiveRecord::Base.connection.execute(\"BEGIN\")\n end",
"def autoflush=(autoflush)\n @connection.autoflush = autoflush\n end",
"def in rails_env = Bj.rails_env, &block\n transaction(:rails_env => rails_env.to_s, &block)\n end",
"def transaction(&block)\n @@semaphore.synchronize{\n block.call\n }\n end",
"def synchronize(*)\n if ActiveRecord.version >= Gem::Version.new(\"5.1.0\")\n activerecord_connection.lock.synchronize do\n yield activerecord_raw_connection\n end\n else\n yield activerecord_raw_connection\n end\n end",
"def transaction(opts = {})\n commit, rollback = start_transaction! opts\n tx_depth = nil\n begin\n if Pacer.verbose == :very\n tx_depth = threadlocal_graph_info[:dx_depth]\n puts \"--#{self.class.name} transaction #{ tx_depth } --> \"\n puts caller[0,3]\n end\n r = yield commit, rollback\n commit.call(false)\n r\n rescue Exception => e\n rollback.call e.message\n raise\n ensure\n puts \"--#{self.class.name} #{ tx_depth } <-- \" if Pacer.verbose == :very\n finish_transaction!\n end\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end",
"def begin_transaction_sql\n SQL_BEGIN\n end",
"def force_begin_read_write\n @grpc_transaction = @connection.session.create_transaction\n end",
"def begin_transaction(mutable: false, graph_name: nil)\n raise NotImplementedError\n end"
] |
[
"0.78659075",
"0.6963964",
"0.6858327",
"0.6758857",
"0.6758857",
"0.6613616",
"0.657389",
"0.65524185",
"0.62963855",
"0.6283287",
"0.6274031",
"0.6241779",
"0.6174861",
"0.616061",
"0.6112377",
"0.59543675",
"0.58255845",
"0.58255845",
"0.56776065",
"0.56605256",
"0.5651041",
"0.55683225",
"0.5516384",
"0.54890686",
"0.5459672",
"0.54505754",
"0.54402435",
"0.54296273",
"0.5416206",
"0.5404849",
"0.53798956",
"0.5370092",
"0.5359423",
"0.53492963",
"0.53340995",
"0.5302415",
"0.52850837",
"0.52771485",
"0.5268003",
"0.52250147",
"0.5180783",
"0.5161099",
"0.51541245",
"0.5153845",
"0.5125867",
"0.51238036",
"0.50898206",
"0.5077335",
"0.50663245",
"0.506167",
"0.50470036",
"0.5042037",
"0.5037643",
"0.5036256",
"0.50330424",
"0.50306875",
"0.50306875",
"0.50263566",
"0.5002542",
"0.5002542",
"0.5001922",
"0.49969974",
"0.49833614",
"0.49812323",
"0.49734405",
"0.4970999",
"0.49631009",
"0.4949651",
"0.4949368",
"0.49335542",
"0.49094176",
"0.49094176",
"0.49078017",
"0.49078017",
"0.48993504",
"0.48988777",
"0.4841707",
"0.48380795",
"0.481307",
"0.4806693",
"0.47801146",
"0.47714418",
"0.47712722",
"0.47676992",
"0.47434688",
"0.47410324",
"0.47410324",
"0.47406134",
"0.47402012",
"0.47365728",
"0.47346234",
"0.4734314",
"0.47306436",
"0.47304276",
"0.47283313",
"0.47265273",
"0.47263342",
"0.47263342",
"0.47255516",
"0.47171813"
] |
0.7724414
|
1
|
Turns an array of argument specifiers into an SQL fragment used for function arguments. See create_function_sql.
|
def sql_function_args(args)
"(#{Array(args).map{|a| Array(a).reverse.join(' ')}.join(', ')})"
end
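
A minimal sketch of the fragments this produces, assuming the [type, name, mode] argument-specifier convention used by create_function_sql (each specifier is reversed into PostgreSQL's "[mode] [name] type" order):

sql_function_args(nil)                            # => "()"
sql_function_args(:integer)                       # => "(integer)"
sql_function_args([[:integer, :a], [:text, :b]])  # => "(a integer, b text)"
sql_function_args([[:integer, :total, :OUT]])     # => "(OUT total integer)"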
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def function_sql(f)\n args = f.args\n \"#{f.f}#{args.empty? ? '()' : literal(args)}\"\n end",
"def irregular_function_sql(f)\n \"#{f.f}(#{literal(f.arg1)} #{f.joiner} #{literal(f.arg2)})\"\n end",
"def compose_sql_array(ary)\n statement, *values = ary\n if values.first.is_a?(Hash) and statement =~ /:\\w+/\n replace_named_bind_variables(statement, values.first)\n elsif statement.include?('?')\n replace_bind_variables(statement, values)\n else\n statement % values.collect { |value| client.escape(value.to_s) }\n end\n end",
"def convert_to_sql(array_of_values)\n array_of_values.inspect[1...-1].gsub('[', '(').gsub(']', ')')\n end",
"def to_php\n \"#{function}(#{arguments.join(', ')})\"\n end",
"def to_sql(arel, binds = [])\n if arel.respond_to?(:ast)\n binds = binds.dup\n visitor.accept(arel.ast) do\n quote(*binds.shift.reverse)\n end\n else\n arel\n end\n end",
"def create_function_sql(name, definition, opts=OPTS)\n args = opts[:args]\n if !opts[:args].is_a?(Array) || !opts[:args].any?{|a| Array(a).length == 3 and %w'OUT INOUT'.include?(a[2].to_s)}\n returns = opts[:returns] || 'void'\n end\n language = opts[:language] || 'SQL'\n <<-END\n CREATE#{' OR REPLACE' if opts[:replace]} FUNCTION #{name}#{sql_function_args(args)}\n #{\"RETURNS #{returns}\" if returns}\n LANGUAGE #{language}\n #{opts[:behavior].to_s.upcase if opts[:behavior]}\n #{'STRICT' if opts[:strict]}\n #{'SECURITY DEFINER' if opts[:security_definer]}\n #{\"PARALLEL #{opts[:parallel].to_s.upcase}\" if opts[:parallel]}\n #{\"COST #{opts[:cost]}\" if opts[:cost]}\n #{\"ROWS #{opts[:rows]}\" if opts[:rows]}\n #{opts[:set].map{|k,v| \" SET #{k} = #{v}\"}.join(\"\\n\") if opts[:set]}\n AS #{literal(definition.to_s)}#{\", #{literal(opts[:link_symbol].to_s)}\" if opts[:link_symbol]}\n END\n end",
"def escape_sql(args)\n return @text if args.empty?\n sql = @text.dup\n vars = args.dup\n\n replacements = 0\n mismatch = false\n\n sql.gsub!(/'[^']*'|\"[^\"]*\"|`[^`]*`|\\?/) do |x|\n next x unless x == '?'\n replacements += 1\n if vars.empty?\n mismatch = true\n else\n var = vars.shift\n connection.quote_value(var)\n end\n end\n\n if !vars.empty? || mismatch\n raise ArgumentError, \"Binding mismatch: #{args.size} for #{replacements}\"\n else\n sql\n end\n end",
"def values_sql_for_column_names_and_attributes( columns, array_of_attributes ) # :nodoc:\n values = []\n array_of_attributes.each do |arr|\n my_values = []\n arr.each_with_index do |val,j|\n my_values << quote( val, columns[j] )\n end\n values << my_values\n end \n values_arr = values.map{ |arr| '(' + arr.join( ',' ) + ')' }\n end",
"def format_args\n required_max = arity < 0 ? -arity-1 : arity\n args = 0.upto(required_max-1).map do |i| \n local_name(i)\n end.join(', ')\n \n last = local_table_size-1\n if last >= required_max\n opt_args = required_max.upto(last).map do |i| \n local_name(i)\n end.join(', ')\n args += '; ' + opt_args\n else\n args = '?'\n end\n end",
"def escaped_sql\n sql % binds.reduce({}) { |a, (col, val)|\n a[col.to_sym] = if val.is_a? Array\n val.map { |x| @conn.quote x }.join(', ')\n else\n @conn.quote val\n end\n a\n }\n end",
"def sql_array(array)\n Arel.sql(ActiveRecord::Base.sanitize_sql([\"ARRAY[?]\", array]))\n end",
"def to_sql\n @applied.reverse.map{ |e| \"#{e.type}(#{e.args.join(\", \")})\" }.join('.')\n end",
"def sanitize_sql_array(ary)\n statement, *values = ary\n if values.first.is_a?(Hash) and statement =~ /:\\w+/\n replace_named_bind_variables(statement, values.first)\n elsif statement.include?('?')\n replace_bind_variables(statement, values)\n else\n statement % values.collect { |value| quote_string(value) }\n end\n end",
"def bound_variable_arg(arg, conn)\n case arg\n when ArrayRow\n \"(#{arg.map{|v| bound_variable_array(v) if v}.join(',')})\"\n when HashRow\n arg.check_columns!\n \"(#{arg.values_at(*arg.columns).map{|v| bound_variable_array(v) if v}.join(',')})\"\n else\n super\n end\n end",
"def sql_op(*args)\n column, expr, bind = (args.size >= 3 ? args : [nil] + args)\n (num_bind, builder) = _compile_builder(expr)\n croak(\"the operator expects num_bind but got #{bind.size}\") if num_bind != bind.size\n return _sql_op(\"sql_op\", builder, column, bind)\n end",
"def sql_literal(*)\n @dataset.sql\n end",
"def to_sql(rel_exp=nil)\n rel_exp ||= @rel_exp\n sql = ''\n if rel_exp[0].is_a?(Array) then\n sql << \"(#{to_sql(rel_exp[0])})\"\n else\n sql << rel_exp[0].to_s\n end\n sql << \" #{rel_exp[2]} \"\n if rel_exp[3].is_a?(Array) then\n sql << \"(#{to_sql(rel_exp[3])})\"\n else\n sql << \"'#{rel_exp[3]}'\"\n end\n end",
"def run_sql(sql, arr = [])\n db = PG.connect(ENV['DATABASE_URL'] || {dbname: 'native_plants'})\n results = db.exec_params(sql, arr)\n db.close\n return results\nend",
"def replace_params(sql, *params)\n sql.gsub!(/\\?/) { |a| quote(params.shift) }\n end",
"def to_ast\n identifier = fn.is_a?(::Proc) ? fn : fn.name\n [identifier, args]\n end",
"def subscript_sql(s)\n \"#{s.f}[#{s.sub.join(COMMA_SEPARATOR)}]\"\n end",
"def info_sql\n \"#{INFO_SQL} AND pg_catalog.pg_get_function_arguments(p.oid) = '#{to_s}'\"\n end",
"def sql_raw(*args)\n sql, bind = parse_args(*args)\n return SQL::QueryMaker.new(nil, Proc.new { sql }, bind)\n end",
"def triggerStatements _args\n \"triggerStatements _args;\" \n end",
"def as_sql(expression, aliaz)\n \"#{expression} #{quote_identifier(aliaz)}\"\n end",
"def create_trigger_sql(table, name, function, opts=OPTS)\n events = opts[:events] ? Array(opts[:events]) : [:insert, :update, :delete]\n whence = opts[:after] ? 'AFTER' : 'BEFORE'\n if filter = opts[:when]\n raise Error, \"Trigger conditions are not supported for this database\" unless supports_trigger_conditions?\n filter = \" WHEN #{filter_expr(filter)}\"\n end\n \"CREATE #{'OR REPLACE ' if opts[:replace]}TRIGGER #{name} #{whence} #{events.map{|e| e.to_s.upcase}.join(' OR ')} ON #{quote_schema_table(table)}#{' FOR EACH ROW' if opts[:each_row]}#{filter} EXECUTE PROCEDURE #{function}(#{Array(opts[:args]).map{|a| literal(a)}.join(', ')})\"\n end",
"def complex_expression_sql(op, args)\n case op\n when *TWO_ARITY_OPERATORS\n \"(#{literal(args.at(0))} #{op} #{literal(args.at(1))})\"\n when *N_ARITY_OPERATORS\n \"(#{args.collect{|a| literal(a)}.join(\" #{op} \")})\"\n when :NOT\n \"NOT #{literal(args.at(0))}\"\n when :NOOP\n literal(args.at(0))\n else\n raise(Sequel::Error, \"invalid operator #{op}\")\n end\n end",
"def run_sql(sql, arr = []) # array is for security feature\n db = PG.connect(ENV['DATABASE_URL'] || {dbname: 'picgallary'})\n results = db.exec_params(sql, arr)\n db.close\n return results\n end",
"def odb_format_str(*args)\n \"format('#{to_s}', #{args.map{|x| quote(x)}.join(', ')})\"\n end",
"def protected_args(args)\n args.map { |a| \"'#{a}'\" }\n end",
"def to_ast\n identifier = Proc === fn ? fn : fn.name\n [identifier, args]\n end",
"def flatten_to_sql(*values)\n flatten_safely(values) do |value|\n value = yield value if block_given?\n to_arel_sql(value)\n end\n end",
"def generate_sql_query(stored_procedure_call, *attributes)\n sql_query = 'select * from '\n sql_query << stored_procedure_call << '('\n\n sql_query << attributes.map { |attribute|\n if attribute.kind_of?(Fixnum) || attribute.kind_of?(Float) || attribute.kind_of?(TrueClass) || \n attribute.kind_of?(FalseClass)\n attribute\n elsif attribute.nil?\n \"null\"\n else\n \"'\" + escape_quotes(attribute) + \"'\"\n end\n }.join(', ')\n\n sql_query << ');'\n end",
"def column_list_sql(columns)\n columns.map{|c| column_definition_sql(c)}.join(COMMA_SEPARATOR)\n end",
"def function_arguments(node, scope)\n args = node.function_arguments\n if args.respond_to?(:argument_list)\n args.argument_list.elements.flat_map do |elem|\n if elem.respond_to?(:ruby_ast)\n elem.ruby_ast(scope)\n elsif !elem.elements.nil? and !elem.elements.empty?\n elem.elements.map { |e| p e; e.expression.ruby_ast(scope) }\n else\n []\n end\n end\n elsif args.respond_to?(:attribute_list)\n members = args.attribute_list.elements.map do |elem|\n [elem.elements[1].identifier.as_symbol, elem.elements[1].expression.ruby_ast(scope)]\n end\n [Yugo::Ruby::HashLiteral.new(members, include_braces: false, symbol_keys: true)]\n else\n raise \"Don't know how to process these function arguments: #{node.inspect}\"\n end\n end",
"def quote(*args)\n arr = args.map {|x| '\"' + x + '\"'}\n return arr.join(\" \")\nend",
"def column_list_sql(columns)\n columns.map{|c| column_definition_sql(c)}.join(COMMA_SEPARATOR)\n end",
"def column_list_sql(columns)\n columns.map{|c| column_definition_sql(c)}.join(COMMA_SEPARATOR)\n end",
"def to_sql\n return nil unless filters.present? and valid?\n filters.map { |n, f| sql_for(n) }.reject(&:blank?).join(' AND ')\n end",
"def build_sql(structure)\n structure = structure.is_a?(Constructor) ? structure.structure : structure\n sql = ''\n [:with, :union, :select, :insert, :update, :delete, :set, :from,\n :join, :where, :returning, :group, :order, :limit, :offset].each do |i|\n next unless structure[i]\n sql += send(\"build_#{i}\", structure[i], structure)\n end\n sql\n end",
"def visitFunction func,args=nil\n code=Code.new\n tname=func.type.accept(self)\n fname=func.name.accept(self)\n args=func.args.collect{|arg| arg.accept(self)}\n args=args.join(\",\")\n code << \"\\n#{tname} #{fname}(#{args})\"\n code.indent=2\n code << func.body.accept(self)\n code.indent=0\n return code\n end",
"def function(name, *args)\n SQL::Function.new(name, *args)\n end",
"def parameters_and_values_sql_string\n #first get an array of equal signs\n c = Array.new(self_values.length, \"=\")\n final_array = []\n # Then zip all three arrays together\n # Ex. field_names =[p1, p2, p3] \n # c = [\"=\", \"=\", \"=\"]\n # self_values = [1, 3, \"'string'\"]\n # zip => [[[p1, \"=\"], 1], [[p2, \"=\"], 3, [[p3, \"=\"], \"'string'\"]]\n zip_array = database_field_names.zip(c).zip(quoted_string_self_values)\n zip_array.each do |row|\n # => [[\"p1 = 1\"], [\"p2 = 3\"], [\"p3 = 'string'\"]]\n final_array << row.flatten.join(\" \")\n end\n # => \"p1 = 1, p2 = 3, p3 = 'string'\"\n final_array.join(\", \")\n end",
"def to_sql(prefix = \"\")\n prefix += \"_\" unless prefix.blank?\n Array.wrap(sql).join(\" \").gsub(/__/, prefix)\n end",
"def args_to_string(array)\n\tarray.map {|x| x.class == Array && x.size > 1 ? x[0].size > 1 ? '--' + x.join(' ') : '-' + x.join(' ') : x }.join(' ')\nend",
"def arglists\n if @call_seq then\n @call_seq\n elsif @params then\n \"#{name}#{param_seq}\"\n end\n end",
"def arglists\n if @call_seq then\n @call_seq\n elsif @params then\n \"#{name}#{param_seq}\"\n end\n end",
"def format_arguments(arguments, is_const)\n return nil if arguments.empty?\n\n result = arguments.map do |name, value|\n \"#{name}: #{format_value(value, is_const)}\"\n end\n\n \"(#{result.join(', ')})\"\n end",
"def sql(method, query, *args)\n query = ActiveRecord::Base.send(:sanitize_sql_array, [query] + args)\n with_connection do |conn|\n conn.send(method, query)\n end\n end",
"def evaled_arguments\n to_eval = if arguments_or_options =~ /[^,]+=>/\n %Q~{ #{arguments_or_options} }~\n else\n %Q~[ #{arguments_or_options} ]~\n end\n precompiler.instance_eval to_eval\n end",
"def column_list_sql(generator)\n (generator.columns.map{|c| column_definition_sql(c)} + generator.constraints.map{|c| constraint_definition_sql(c)}).join(', ')\n end",
"def tuple(*args)\n \"( #{args.map { |b| if b.nil? then 'NULL' else b end }.join(', ')} )\"\nend",
"def args_to_params(sexp)\n result = []\n sexp.each do |arg|\n if arg[0] == :lasgn\n ref = variable(arg[1])\n scope.add_arg ref\n result << ref\n elsif arg[0] == :array\n result << scope.next_temp\n else\n raise \"Bad js_block_arg: #{arg[0]}\"\n end\n end\n\n result\n end",
"def to_php\n body = children? ? ('{ ' << children.map(&:to_php).join('; ') << ' }') : '{}'\n if anonymous?\n \"function(#{parameters.join(', ')}) \" << body # FIXME\n else\n \"function #{name}(#{parameters.join(', ')}) \" << body # FIXME\n end\n end",
"def generate_sql\n sql = 'guid, obj_id'\n @fields.each do |field|\n sql << ',v'\n sql << field.num.to_s\n sql << ' as '\n sql << field.name\n end\n return sql\n end",
"def create_queries\n gen_rulename\n [\"\n -- FN for sync updates \n CREATE FUNCTION fn_#{suffix}()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n UPDATE #{dest_table}\n SET #{ cols.map{|src, dest| \"\\\"#{dest}\\\" = NEW.\\\"#{src}\\\"\" }.join(', ') }\n WHERE \\\"#{map_dest}\\\" = NEW.\\\"#{map_src}\\\";\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for sync updates\n CREATE TRIGGER tr_#{suffix}\n AFTER INSERT OR UPDATE ON #{src_table} \n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}();\",\n \"\n -- FN for cleaner\n CREATE FUNCTION fn_#{suffix}_cleaner()\n RETURNS TRIGGER\n LANGUAGE plpgsql\n AS $function$\n BEGIN\n IF OLD.sid = #{sid_src} OR OLD.sid = #{sid_dest} THEN\n #{delete_queries.join(' ')}\n END IF;\n RETURN NULL;\n END;\n $function$;\",\n \"\n -- TR for cleaner\n CREATE TRIGGER tr_#{suffix}_cleaner\n AFTER DELETE ON #{surveys_table}\n FOR EACH ROW EXECUTE PROCEDURE fn_#{suffix}_cleaner();\n \"]\n end",
"def arglist (sexp, level)\n code, work = '', []\n\n until sexp.empty?\n splat = sexp.first.first == :splat\n arg = process sexp.shift, :expression\n\n if splat\n if work.empty?\n if code.empty?\n code += (arg[0] == \"[\" ? arg : \"#{arg}#{mid_to_jsid :to_a}()\")\n else\n code += \".concat(#{arg})\"\n end\n else\n join = \"[#{work.join ', '}]\"\n code += (code.empty? ? join : \".concat(#{join})\")\n code += \".concat(#{arg})\"\n end\n\n work = []\n else\n work.push arg\n end\n end\n\n unless work.empty?\n join = work.join ', '\n code += (code.empty? ? join : \".concat([#{work}])\")\n end\n\n code\n end",
"def varargs(arg1, *rest)\n \"arg1=#{arg1}. rest=#{rest.inspect}\"\nend",
"def multiple(sql, values = [])\n r = $db.exec_params(sql, values)\n return [] if r.ntuples == 0\n r.map { |row| convert_to_ruby_types(row) }\nend",
"def apply_function_concat(scope, ast)\n args = ast.children.map { |arg|\n scope, sql = apply_ast(scope, arg)\n \"(#{sql}::text)\"\n }\n\n return scope, \"(concat(#{args.join(',')}))\"\n end",
"def _construct_multiple_update_sql(table, where_update_fields, where_update_values)\n id_collector = Array.new\n update_sql = \"UPDATE #{table} SET\"\n \n (where_update_fields.length - 1).times do |field_i|\n update_sql += \", \" if field_i > 0\n update_sql += \" #{where_update_fields[field_i + 1]} = CASE #{where_update_fields[0]}\"\n \n where_update_values.each_index do |i|\n update_sql += \" WHEN \" + _construct_sql_value(where_update_values[i][0]) + \" THEN \" + _construct_sql_value(where_update_values[i][field_i + 1])\n id_collector << _construct_sql_value(where_update_values[i][0]) if field_i == 0\n end\n \n update_sql += \" END\"\n end\n \n update_sql += \" WHERE #{where_update_fields[0]} in (\" + id_collector.join(\",\") + \")\"\n \n update_sql\n end",
"def sql\n @sql ||= begin\n bind_params = []\n i = 1\n (selector_keys.length + setter_keys.length).times do\n bind_params << \"$#{i}\"\n i += 1\n end\n hstore_delete_handlers.length.times do\n bind_params << \"$#{i}::text[]\"\n i += 1\n end\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def sql\n @sql ||= begin\n bind_params = []\n i = 1\n (selector_keys.length + setter_keys.length).times do\n bind_params << \"$#{i}\"\n i += 1\n end\n hstore_delete_handlers.length.times do\n bind_params << \"$#{i}::text[]\"\n i += 1\n end\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def varargs(arg1,*rest)\n puts \"Got #{arg1} and #{rest.join(',')}\"\nend",
"def code\n @args.map(&:inspect).join(\", \")\n end",
"def columns_param(column_names)\n column_names.map! { |name| escape(name) }\n \"columns:!(#{column_names.join(',')})\"\n end",
"def format_arguments(data)\n data_array = []\n data.each_pair do |parameter, argument|\n if argument.is_a? Array\n argument.each do |single_argument|\n data_array << \"#{parameter}=#{single_argument}\"\n end\n else\n data_array << \"#{parameter}=#{argument}\"\n end\n end\n data = data_array.join(\"&\")\n return data\n end",
"def execute_prepared(sql,params_arr)\n prepared_statement = @database_handle.prepare(sql)\n params_arr.each_with_index do |param,index|\n prepared_statement.bind_param(index+1,param)\n end \n prepared_statement.execute\n end",
"def argument_list\n recursive_expression(:identifier, :comma)\n end",
"def escape_for_tsquery(array)\n keywords = array.map do |token|\n next if token.empty?\n escaped_token = token.gsub(/\\\\|'/, '\\0\\0\\0\\0')\n \"'\" + escaped_token + \"'\"\n end.compact\n query = []\n if keywords.any?\n query << \"(\" + keywords.join(\" & \") + \")\"\n end\n query\n end",
"def creating_jql_string arr, str, type\n if !arr.empty? && str.empty?\n array_to_string arr, type\n elsif !arr.empty? && !str.empty?\n \" AND #{array_to_string(arr, type)}\"\n end\n end",
"def array_to_bash array\n quoted_array = array.map {|item| \"'\" + item + \"'\"}\n \"(\" + quoted_array.join(\" \") + \")\"\n end",
"def varargs(arg1, *args)\n # \"arg1=#{arg1}, args=#{args.inspect}\"\n \"arg1 = #{arg1}, arg2 = #{args}\"\nend",
"def args_replace command, name, args, expression\n initial_offset = offset = (command =~ /\\b#{Regexp.escape name}\\(/) + name.length + 1\n bracket_count = 1\n\n # find the last bracket\n while offset < command.length\n if command[offset] == ?(\n bracket_count += 1\n elsif command[offset] == ?)\n bracket_count -= 1\n break if bracket_count == 0\n end\n\n offset += 1\n end\n\n args_expr = command[initial_offset..(offset - 1)].split(\",\").map(&:strip)\n\n # passed the wrong number of arguments to this function\n if args_expr.length != args.length\n raise Exception.new(\"Error: wrong number of arguments for call to #{name} in command '#{command}'\")\n end\n\n # do the substitutions\n command[0..(initial_offset - name.length - 2)] + \"(\" + args.zip(args_expr).inject(expression) do |result, (find, repl)|\n result.gsub(/\\b#{Regexp.escape find}\\b/, \"(\" + repl + \")\")\n end + \")\" + command[(offset + 1)..-1]\nend",
"def format_thrift_arg_strings ( args )\n params = []\n #puts args.inspect \n args.each do | arg | \n split = arg.split(' ')\n #next if i%2 > 0\n params.push( [split[1], convert_thrift_type_to_as3_type(split[0])] ) \n #puts i\n end\n #puts params.inspect\n #asdf.g\n params\nend",
"def sql(options={})\n get_location\n # TODO: validate options\n @params[:sql] = FEATURE_DEFAULTS[:sql].merge(options)\n @params[:sql][:generate] = true\n end",
"def ready_for_sql(hash)\n single_variables = []\n\n hash.each do |k, v|\n if v.is_a?(String)\n single_variables << \"#{k} = '#{v}'\"\n else\n single_variables << \"#{k} = #{v}\"\n end\n end\n return vars_to_sql = single_variables.join(\", \")\n end",
"def sql_of( expr, dataset)\n dataset.send :literal_expression_append, rv=String.new, expr\n rv\n end",
"def gen_each_arg(joinwith=\"\\n\")\n s = []\n args.each do |name|\n t = yield name, argtype[name]\n s << t unless t.nil?\n end\n return s.join(joinwith)\n end",
"def funify(array)\n\tarray << \"fun\"\n\t\nend",
"def map_to_prepared_args(bind_vars)\n prepared_args.map{|v, t| [bind_vars[v], t]}\n end",
"def filters_sql\n if filters.blank?\n ''\n else\n conditions = JSON.parse filters\n sql_array = []\n conditions.each do |condition|\n if condition['values']\n values = condition['values'].map { |x| \"'#{x}'\" }.join(',')\n sql_array << \" #{condition['name']} in (#{values})\"\n else\n sql_array << \" #{condition['name']} between #{condition['from']} and #{condition['to']}\"\n end\n end\n sql_array.join(' AND ')\n end\n end",
"def my_function(argument_a, argument_b, *args)\n puts argument_a\n puts argument_b\n p args.class.name # this is now an array\n puts args\nend",
"def sql\n @sql ||= begin\n bind_params = []\n 1.upto(selector_keys.length + setter_keys.length) { |i| bind_params << \"$#{i}\" }\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def update_function(name, args)\n version = args[:version]\n sql_definition = args[:sql_definition]\n revert_to_version = args[:revert_to_version]\n\n if version.nil? && sql_definition.nil?\n raise(\n ArgumentError,\n \"version or sql_definition must be specified\",\n )\n end\n\n sql_definition = sql_definition.strip_heredoc if sql_definition\n sql_definition ||= Fx::Definition.new(\n name: name,\n version: version,\n ).to_sql\n\n Fx.database.update_function(name, sql_definition)\n end",
"def query_literal(args)\n case (s = args[0])\n when LiteralString, SQL::Blob\n nil\n when String\n if args.length == 1\n LiteralString.new(s)\n else\n SQL::PlaceholderLiteralString.new(s, args[1..-1])\n end\n end\n end",
"def to_sql(options = {})\n \"#{field.to_sql(options)} = #{expression.to_sql(options)}\"\n end",
"def conditions_to_code(conditions)\n code = ''\n case conditions\n when Array\n case conditions[0]\n when String # SQL\n code += '[\"'+conditions[0].to_s+'\"'\n code += ', '+conditions[1..-1].collect{|p| sanitize_condition(p)}.join(', ') if conditions.size>1\n code += ']'\n when Symbol # Method\n code += conditions[0].to_s+'('\n code += conditions[1..-1].collect{|p| sanitize_condition(p)}.join(', ') if conditions.size>1\n code += ')'\n else\n raise ArgumentError.new(\"First element of an Array can only be String or Symbol.\")\n end\n when Hash # SQL\n code += '{'+conditions.collect{|key, value| ':'+key.to_s+'=>'+sanitize_condition(value)}.join(',')+'}'\n when Symbol # Method\n code += conditions.to_s+\"(options)\"\n when String\n code += \"(\"+conditions.gsub(/\\s*\\n\\s*/,';')+\")\"\n else\n raise ArgumentError.new(\"Unsupported type for conditions: #{conditions.inspect}\")\n end\n code\n end",
"def function(name, *args)\n SQL::Function.new(function_name(name), *args)\n end",
"def add_expressions(*expressions); end",
"def varargs(arg1, *rest)\n puts \"Got #{arg1} and #{rest.join(', ')}\"\nend",
"def prepare_arguments(expression, arguments)\n case expression\n when Array\n expression.each do |item|\n prepare_arguments(item, arguments)\n end\n when Fast::FindFromArgument\n expression.arguments = arguments\n when Fast::Find\n prepare_arguments expression.token, arguments\n end\n end",
"def to_sql\n [@sql_insert, @sql_on_conflict, @sql_returning].reject(&:nil?).join(' ')\n end",
"def lambdacall_args(sexp)\n__args_index(car(sexp)) + lambdacall_index(cadr(sexp), [])\n end",
"def args_path\n scope_definition.parameters.each_with_index.map do |parameter, i| \n if args[i].is_a? ActiveRecord::Base\n [parameter.to_s, args[i].send(args[i].class.primary_key).to_s]\n else\n [parameter.to_s, args[i].to_s] \n end\n end.flatten\n end",
"def convert_expr(expr, values = [], entry = nil)\n if expr_conjunction?(expr)\n return convert_conjunction(expr, values, entry)\n end\n\n column = qualified_db_column(expr, entry)\n op = expr.op\n arg = case expr.op\n when 'in', 'not in' then '(?)'\n when 'between', 'not between' then '? AND ?'\n else\n '?'\n end\n\n values << expr.value\n [\"#{column} #{op} #{arg}\", values]\n end",
"def build_method_call(name, *args)\n \"#{name.to_s}(#{args.join(\",\")})\"\n end",
"def to_sql\n @sql ||= case @kind\n when :target, :comp_op, :bin_bool_op, :term\n child(0).to_sql\n when :target_set\n # array of fragments, one per target\n [child(0).to_sql] + (child(1) ? child(2).to_sql : [])\n when :qual_term\n # child(2) will be an array (target_set)\n \"(\" + child(2).to_sql.collect{|sql| comparison(child(0), child(1).child(0), sql)}.join(\" OR \") + \")\"\n when :unqual_term\n \"(\" + default_quals.collect{|q| comparison(q, EQUAL_TOKEN, child(0).to_sql)}.join(\" OR \") + \")\"\n when :query\n # first form\n if child(0).is?(:lparen)\n @children.collect{|c| c.to_sql}.join\n # second form\n elsif child(1) && child(1).is?(:bin_bool_op)\n @children.collect{|c| c.to_sql}.join(\" \")\n # third form\n elsif child(1) && child(1).is?(:query)\n child(0).to_sql + \" AND \" + child(1).to_sql\n # fourth form\n else\n child(0).to_sql\n end\n end\n end",
"def process_arglist(exp)\n return '' if exp.empty?\n return process_array(exp)\n end"
] |
[
"0.68161327",
"0.6770824",
"0.6261491",
"0.6212632",
"0.6161116",
"0.59570205",
"0.58529675",
"0.58524",
"0.5774345",
"0.5739209",
"0.5678828",
"0.56407475",
"0.5639164",
"0.5581457",
"0.55652213",
"0.5481259",
"0.5450736",
"0.5443238",
"0.5410283",
"0.53626436",
"0.5294864",
"0.5287387",
"0.5280469",
"0.52702266",
"0.52323925",
"0.5223321",
"0.5213896",
"0.5210712",
"0.5207332",
"0.5197518",
"0.51966244",
"0.5191485",
"0.51889366",
"0.5177886",
"0.5168873",
"0.51619625",
"0.5126739",
"0.51234853",
"0.51234853",
"0.51048833",
"0.5100088",
"0.5088513",
"0.5070623",
"0.50685775",
"0.5056334",
"0.5045101",
"0.50306684",
"0.50306684",
"0.5019822",
"0.50152904",
"0.5012387",
"0.49943897",
"0.49859902",
"0.49820837",
"0.49784246",
"0.49745587",
"0.49644473",
"0.49524984",
"0.49450195",
"0.49329534",
"0.491689",
"0.49127913",
"0.4907767",
"0.4907767",
"0.49020278",
"0.48997706",
"0.48955983",
"0.48904932",
"0.48896134",
"0.4879168",
"0.4866543",
"0.4863673",
"0.48629478",
"0.48536986",
"0.48525065",
"0.4851187",
"0.48500463",
"0.48488536",
"0.48450565",
"0.48429838",
"0.4834429",
"0.48293114",
"0.48208657",
"0.48153946",
"0.4812168",
"0.48114705",
"0.48097542",
"0.48065332",
"0.48007503",
"0.47976923",
"0.4796907",
"0.4786657",
"0.47865483",
"0.47685868",
"0.47668403",
"0.47651502",
"0.4761563",
"0.47559935",
"0.47530055",
"0.47441998"
] |
0.7663649
|
0
|
PostgreSQL can combine multiple alter table ops into a single query.
|
def supports_combining_alter_table_ops?
true
end
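
An illustrative sketch, not from the source: because this method returns true, an alter_table block with several operations can be folded into a single statement on PostgreSQL, roughly ALTER TABLE "items" ADD COLUMN "qty" integer, DROP COLUMN "legacy".

# DB is a hypothetical Sequel::Database handle connected to PostgreSQL.
DB.alter_table(:items) do
  add_column :qty, Integer
  drop_column :legacy
end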
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def apply_alter_table(name, ops)\n alter_table_sql_list(name, ops).each{|sql| execute_ddl(sql)}\n end",
"def alter_table_sql_list(table, operations)\n operations.map{|op| alter_table_sql(table, op)}\n end",
"def alter_table_sql_list(table, operations)\n operations.map{|op| alter_table_sql(table, op)}\n end",
"def alter_table_sql_list(table, operations)\n operations.map{|op| alter_table_sql(table, op)}\n end",
"def alter_table_sql(table, op)\n quoted_name = quote_identifier(op[:name]) if op[:name]\n alter_table_op = case op[:op]\n when :add_column\n \"ADD COLUMN #{column_definition_sql(op)}\"\n when :drop_column\n \"DROP COLUMN #{quoted_name}\"\n when :rename_column\n \"RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}\"\n when :set_column_type\n \"ALTER COLUMN #{quoted_name} TYPE #{op[:type]}\"\n when :set_column_default\n \"ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}\"\n when :set_column_null\n \"ALTER COLUMN #{quoted_name} #{op[:null] ? 'DROP' : 'SET'} NOT NULL\"\n when :add_index\n return index_definition_sql(table, op)\n when :drop_index\n return drop_index_sql(table, op)\n when :add_constraint\n \"ADD #{constraint_definition_sql(op)}\"\n when :drop_constraint\n \"DROP CONSTRAINT #{quoted_name}\"\n else\n raise Error, \"Unsupported ALTER TABLE operation\"\n end\n \"ALTER TABLE #{quote_schema_table(table)} #{alter_table_op}\"\n end",
"def supports_combining_alter_table_ops?\n false\n end",
"def alter_table(name, &block)\n g = Schema::AlterTableGenerator.new(self, &block)\n alter_table_sql_list(name, g.operations).each {|sql| execute(sql)}\n end",
"def alter_table_sql_list(table, operations, options=nil)\n\t return super(table, operations) unless Hash===options\n\t \n\t prologue = \"ALTER TABLE #{quote_schema_table(table)} \"\n\t sql = operations.map do |op|\n\t frag = alter_table_sql table, op\n\t raise ArgumentError unless frag.slice![0,prologue.length] == prologue\n\t frag\n\t end\n\t sql.push(table_options_sql(options)).join ' '\n end",
"def alter_table_sql(table, op)\n quoted_table = quote_identifier(table)\n quoted_name = quote_identifier(op[:name]) if op[:name]\n case op[:op]\n when :add_column\n \"ALTER TABLE #{quoted_table} ADD COLUMN #{column_definition_sql(op)}\"\n when :drop_column\n \"ALTER TABLE #{quoted_table} DROP COLUMN #{quoted_name}\"\n when :rename_column\n \"ALTER TABLE #{quoted_table} RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}\"\n when :set_column_type\n \"ALTER TABLE #{quoted_table} ALTER COLUMN #{quoted_name} TYPE #{op[:type]}\"\n when :set_column_default\n \"ALTER TABLE #{quoted_table} ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}\"\n when :add_index\n index_definition_sql(table, op)\n when :drop_index\n \"DROP INDEX #{default_index_name(table, op[:columns])}\"\n when :add_constraint\n \"ALTER TABLE #{quoted_table} ADD #{constraint_definition_sql(op)}\"\n when :drop_constraint\n \"ALTER TABLE #{quoted_table} DROP CONSTRAINT #{quoted_name}\"\n else\n raise Error, \"Unsupported ALTER TABLE operation\"\n end\n end",
"def alter_table_statement(table_name, operations)\n add_line \"alter_table #{table_name.inspect} do\"\n indent do\n operations.compact.each {|op| add_line op }\n end\n add_line \"end\"\n end",
"def alter_table_sql(table, op)\n quoted_name = quote_identifier(op[:name]) if op[:name]\n alter_table_op = case op[:op]\n when :add_column\n \"ADD COLUMN #{column_definition_sql(op)}\"\n when :drop_column\n \"DROP COLUMN #{quoted_name}\"\n when :rename_column\n \"RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}\"\n when :set_column_type\n \"ALTER COLUMN #{quoted_name} TYPE #{type_literal(op)}\"\n when :set_column_default\n \"ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}\"\n when :set_column_null\n \"ALTER COLUMN #{quoted_name} #{op[:null] ? 'DROP' : 'SET'} NOT NULL\"\n when :add_index\n return index_definition_sql(table, op)\n when :drop_index\n return drop_index_sql(table, op)\n when :add_constraint\n \"ADD #{constraint_definition_sql(op)}\"\n when :drop_constraint\n \"DROP CONSTRAINT #{quoted_name}\"\n else\n raise Error, \"Unsupported ALTER TABLE operation\"\n end\n \"ALTER TABLE #{quote_schema_table(table)} #{alter_table_op}\"\n end",
"def alter_table_sql(table, op)\n\t if op[:op] == :add_column\n subclause = \"ADD #{column_definition_sql(op)}\"\n else\n quoted_name = quote_identifier(op[:name]) if op[:name]\n case op[:op]\n when :set_column_type\n subclause = \"MODIFY #{quoted_name} #{type_literal(op)}\"\n when :set_column_default\n subclause = \"MODIFY #{quoted_name} DEFAULT #{literal(op[:default])}\"\n when :set_column_null\n subclause = \"MODIFY #{quoted_name} #{op[:null] ? 'NULL' : 'NOT NULL'}\"\n when :modify_constraint\n subclause = \"MODIFY #{constraint_definition_sql(op)}\"\n else\n\t return super(table, op)\n end\n end\n \"ALTER TABLE #{quote_schema_table(table)} #{subclause}\"\n\t end",
"def alter_table_sql(table, op)\n case op[:op]\n when :add_index\n index_definition_sql(table, op)\n when :drop_index\n drop_index_sql(table, op)\n else\n if sql = alter_table_op_sql(table, op)\n \"ALTER TABLE #{quote_schema_table(table)} #{sql}\"\n end\n end\n end",
"def alter_table_sql_list(table, operations)\n if supports_combining_alter_table_ops?\n grouped_ops = []\n last_combinable = false\n operations.each do |op|\n if combinable_alter_table_op?(op)\n if sql = alter_table_op_sql(table, op)\n grouped_ops << [] unless last_combinable\n grouped_ops.last << sql\n last_combinable = true\n end\n elsif sql = alter_table_sql(table, op)\n Array(sql).each{|s| grouped_ops << s}\n last_combinable = false\n end\n end\n grouped_ops.map do |gop|\n if gop.is_a?(Array)\n \"ALTER TABLE #{quote_schema_table(table)} #{gop.join(', ')}\"\n else\n gop\n end\n end\n else\n operations.map{|op| alter_table_sql(table, op)}.flatten.compact\n end\n end",
"def alter_table_sql(table, op)\n case op[:op]\n when :add_column\n \"ALTER TABLE #{quote_schema_table(table)} ADD #{column_definition_sql(op)}\"\n when :drop_column\n \"ALTER TABLE #{quote_schema_table(table)} DROP #{column_definition_sql(op)}\"\n when :rename_column\n \"ALTER TABLE #{quote_schema_table(table)} ALTER #{quote_identifier(op[:name])} TO #{quote_identifier(op[:new_name])}\"\n when :set_column_type\n \"ALTER TABLE #{quote_schema_table(table)} ALTER #{quote_identifier(op[:name])} TYPE #{type_literal(op)}\"\n else\n super(table, op)\n end\n end",
"def alter_table_sql(table, op)\n case op[:op]\n when :add_column\n \"ALTER TABLE #{quote_schema_table(table)} ADD #{column_definition_sql(op)}\"\n when :rename_column\n \"SP_RENAME #{literal(\"#{quote_schema_table(table)}.#{quote_identifier(op[:name])}\")}, #{literal(op[:new_name].to_s)}, 'COLUMN'\"\n when :set_column_type\n \"ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} #{type_literal(op)}\"\n when :set_column_null\n sch = schema(table).find{|k,v| k.to_s == op[:name].to_s}.last\n type = {:type=>sch[:db_type]}\n type[:size] = sch[:max_chars] if sch[:max_chars]\n \"ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} #{type_literal(type)} #{'NOT ' unless op[:null]}NULL\"\n when :set_column_default\n \"ALTER TABLE #{quote_schema_table(table)} ADD CONSTRAINT #{quote_identifier(\"sequel_#{table}_#{op[:name]}_def\")} DEFAULT #{literal(op[:default])} FOR #{quote_identifier(op[:name])}\"\n else\n super(table, op)\n end\n end",
"def alter_table_sql(table, op)\n case op[:op]\n when :rename_column\n unless sch = op[:schema]\n raise(Error, \"can't find existing schema entry for #{op[:name]}\") unless sch = op[:schema] || schema(table).find{|c| c.first == op[:name]}\n sch = sch.last\n end\n [\n alter_table_sql(table, :op=>:add_column, :name=>op[:new_name], :default=>sch[:ruby_default], :type=>sch[:db_type], :null=>sch[:allow_null]),\n from(table).update_sql(op[:new_name]=>op[:name]),\n alter_table_sql(table, :op=>:drop_column, :name=>op[:name])\n ]\n when :set_column_null, :set_column_default\n raise(Error, \"can't find existing schema entry for #{op[:name]}\") unless sch = op[:schema] || schema(table).find{|c| c.first == op[:name]}\n sch = sch.last\n\n sch = if op[:op] == :set_column_null\n sch.merge(:allow_null=>op[:null])\n else\n sch.merge(:ruby_default=>op[:default])\n end\n\n [\n alter_table_sql(table, :op=>:rename_column, :name=>op[:name], :new_name=>:sequel_access_backup_column, :schema=>sch),\n alter_table_sql(table, :op=>:rename_column, :new_name=>op[:name], :name=>:sequel_access_backup_column, :schema=>sch)\n ]\n else\n super\n end\n end",
"def alter_tables(current_table_names, tables)\n each_table(current_table_names, tables) do |table_name, table, last_table|\n hsh = table.dup\n hsh[:columns] = hsh[:columns].map {|c| Schema::DbColumn.build_from_hash(c) }\n operations = Schema::AlterTableOperations.\n build(@db_tables[table_name], hsh, :immutable_columns => @immutable_columns)\n unless operations.empty?\n all_operations = if @separate_alter_table_statements\n operations.map {|o| [o] }\n else\n [operations]\n end\n\n all_operations.each_with_index do |o, i|\n alter_table_statement table_name, o\n add_blank_line unless last_table && i + 1 == all_operations.size\n end\n end\n end\n end",
"def alter_table_op_sql(table, op)\n meth = \"alter_table_#{op[:op]}_sql\"\n if respond_to?(meth, true)\n # Allow calling private methods as alter table op sql methods are private\n send(meth, table, op)\n else\n raise Error, \"Unsupported ALTER TABLE operation: #{op[:op]}\"\n end\n end",
"def alter_table(name, generator=nil, options=nil, &block)\n if Hash === options\n generator ||= Schema::AlterTableGenerator.new(self, &block)\n\t\t alter_table_sql_list(name, generator.operations, options).\n\t\t flatten.each {|sql| execute_ddl(sql)}\n\t\t remove_cached_schema(name)\n\t\t nil\n else\n\t super(name, generator, &block)\n end\n end",
"def update_database\n unless @statements.empty?\n alter_table_sql = \"ALTER TABLE #{@base.quote_table_name(@table_name)}\"\n result = @base.execute(alter_table_sql + \" \" + @statements.join(', '))\n @statements.clear\n result\n end\n end",
"def alter(field)\n sql = build_sql(field)\n execute(sql)\nend",
"def add_table_actions(table)\n action_col_edit(table)\n action_col_destroy(table)\n end",
"def apply_changes\n changes.each do |method, args|\n connection.send(method, new_table, *args)\n end\n end",
"def update_audit_tables\n tables.each(&:update_audit_table)\n end",
"def change_table_with_versions(*args, &block)\n SchemaStatements.apply_versionable_option!(:change_table, self, *args, &block)\n end",
"def alter(table_name_str, wait = true, *args)\n # Table name should be a string\n raise(ArgumentError, 'Table name must be of type String') unless\n table_name_str.is_a?(String)\n\n # Table should exist\n raise(ArgumentError, \"Can't find a table: #{table_name_str}\") unless exists?(table_name_str)\n\n # There should be at least one argument\n raise(ArgumentError, 'There should be at least one argument but the table name') if args.empty?\n\n table_name = TableName.valueOf(table_name_str)\n\n # Get table descriptor\n tdb = TableDescriptorBuilder.newBuilder(@admin.getDescriptor(table_name))\n hasTableUpdate = false\n\n # Process all args\n args.each do |arg|\n # Normalize args to support column name only alter specs\n arg = { NAME => arg } if arg.is_a?(String)\n\n # Normalize args to support shortcut delete syntax\n arg = { METHOD => 'delete', NAME => arg['delete'] } if arg['delete']\n\n # There are 3 possible options.\n # 1) Column family spec. Distinguished by having a NAME and no METHOD.\n method = arg.delete(METHOD)\n if method.nil? && arg.key?(NAME)\n descriptor = cfd(arg, tdb)\n column_name = descriptor.getNameAsString\n\n # If column already exist, then try to alter it. Create otherwise.\n if tdb.build.hasColumnFamily(column_name.to_java_bytes)\n tdb.modifyColumnFamily(descriptor)\n else\n tdb.setColumnFamily(descriptor)\n end\n hasTableUpdate = true\n next\n end\n\n # 2) Method other than table_att, with some args.\n name = arg.delete(NAME)\n if !method.nil? && method != 'table_att'\n # Delete column family\n if method == 'delete'\n raise(ArgumentError, 'NAME parameter missing for delete method') unless name\n tdb.removeColumnFamily(name.to_java_bytes)\n hasTableUpdate = true\n # Unset table attributes\n elsif method == 'table_att_unset'\n raise(ArgumentError, 'NAME parameter missing for table_att_unset method') unless name\n if name.is_a?(Array)\n name.each do |key|\n if tdb.build.getValue(key).nil?\n raise ArgumentError, \"Could not find attribute: #{key}\"\n end\n tdb.removeValue(key)\n end\n else\n if tdb.build.getValue(name).nil?\n raise ArgumentError, \"Could not find attribute: #{name}\"\n end\n tdb.removeValue(name)\n end\n hasTableUpdate = true\n elsif method == 'table_remove_coprocessor'\n classname = arg.delete(CLASSNAME)\n raise(ArgumentError, 'CLASSNAME parameter missing for table_remove_coprocessor method') unless classname\n if classname.is_a?(Array)\n classname.each do |key|\n tdb.removeCoprocessor(key)\n end\n else\n tdb.removeCoprocessor(classname)\n end\n hasTableUpdate = true\n # Unset table configuration\n elsif method == 'table_conf_unset'\n raise(ArgumentError, 'NAME parameter missing for table_conf_unset method') unless name\n if name.is_a?(Array)\n name.each do |key|\n if tdb.build.getValue(key).nil?\n raise ArgumentError, \"Could not find configuration: #{key}\"\n end\n tdb.removeValue(key)\n end\n else\n if tdb.build.getValue(name).nil?\n raise ArgumentError, \"Could not find configuration: #{name}\"\n end\n tdb.removeValue(name)\n end\n hasTableUpdate = true\n # Unknown method\n else\n raise ArgumentError, \"Unknown method: #{method}\"\n end\n\n arg.each_key do |unknown_key|\n puts(format('Unknown argument ignored: %s', unknown_key))\n end\n\n next\n end\n\n # 3) Some args for the table, optionally with METHOD => table_att (deprecated)\n update_tdb_from_arg(tdb, arg)\n\n # set a coprocessor attribute\n valid_coproc_keys = []\n next unless arg.is_a?(Hash)\n arg.each do |key, value|\n k = String.new(key) # prepare to strip\n k.strip!\n\n # Uses insensitive 
matching so we can accept lowercase 'coprocessor' for compatibility\n next unless k =~ /#{COPROCESSOR}/i\n if value.is_a? String\n # Specifying a coprocessor by this \"spec string\" is here for backwards compatibility\n v = String.new value\n v.strip!\n cp = coprocessor_descriptor_from_spec_str v\n elsif value.is_a? Hash\n cp = coprocessor_descriptor_from_hash value\n else\n raise ArgumentError.new 'coprocessor must be provided as a String or Hash'\n end\n tdb.setCoprocessor cp\n valid_coproc_keys << key\n end\n\n valid_coproc_keys.each do |key|\n arg.delete(key)\n end\n\n hasTableUpdate = true\n\n arg.each_key do |unknown_key|\n puts(format('Unknown argument ignored: %s', unknown_key))\n end\n\n next\n end\n\n # Bulk apply all table modifications.\n if hasTableUpdate\n future = @admin.modifyTableAsync(tdb.build)\n\n if wait == true\n puts 'Updating all regions with the new schema...'\n future.get\n end\n end\n end",
"def update_audit_table\n logger.info \"Updating audit schema with modifications in table '#{name}'\"\n\n unversioned_filter = versioning_enabled ? '1 = 1' : '`_has_violation` = 0' \n\n q_rows_to_update = <<-EOS\n SELECT #{pkey_selection(audit)} \n FROM #{audit} \n WHERE `_has_delta`= 1 AND #{unversioned_filter} \n EOS\n\n db.query(q_rows_to_update).each do |row|\n watched_row = db.query(\"SELECT * FROM #{watched} WHERE #{condition_from_hashes(row)}\").to_a[0]\n db.query(\"UPDATE #{audit} SET #{assignment_from_hash(watched_row, self)} WHERE #{condition_from_hashes(row)}\")\n end\n end",
"def add_table_actions(table)\n action_col_show(table)\n action_col_edit(table)\n action_col_destroy(table)\n end",
"def post_sql_statements( table_name, options ) # :nodoc:\n post_sql_statements = []\n if options[:on_duplicate_key_update]\n post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update] )\n end\n\n #custom user post_sql\n post_sql_statements << options[:post_sql] if options[:post_sql]\n\n #with rollup\n post_sql_statements << rollup_sql if options[:rollup]\n\n post_sql_statements\n end",
"def combinable_alter_table_op?(op)\n (super || op[:op] == :validate_constraint) && op[:op] != :rename_column\n end",
"def update_table_sql(sql)\n sql << ' '\n source_list_append(sql, @opts[:from][0..0])\n end",
"def combinable_alter_table_op?(op)\n COMBINABLE_ALTER_TABLE_OPS.include?(op[:op])\n end",
"def post_sql_statements( table_name, options ) # :nodoc:\n post_sql_statements = []\n\n if supports_on_duplicate_key_update? && options[:on_duplicate_key_update]\n post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update], options[:model], options[:primary_key], options[:locking_column] )\n elsif logger && options[:on_duplicate_key_update]\n logger.warn \"Ignoring on_duplicate_key_update because it is not supported by the database.\"\n end\n\n # custom user post_sql\n post_sql_statements << options[:post_sql] if options[:post_sql]\n\n # with rollup\n post_sql_statements << rollup_sql if options[:rollup]\n\n post_sql_statements\n end",
"def chrono_alter_index(table_name, options)\n if is_chrono?(table_name) && !options[:on_current_schema]\n on_temporal_schema { yield }\n on_history_schema { yield }\n else\n yield\n end\n end",
"def functional_update_schema # abstract\n raise 'abstract'\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def standard_table_actions(table)\n table.edit_action_col\n table.destroy_action_col\n end",
"def modify_column_families instance_id, table_id, modifications\n execute do\n tables.modify_column_families(\n table_path(instance_id, table_id),\n modifications\n )\n end\n end",
"def undo_cleanup_concurrent_column_rename(table, old_column, new_column, type: nil, batch_column_name: :id)\n Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode!\n\n setup_renamed_column(__callee__, table, new_column, old_column, type, batch_column_name)\n\n with_lock_retries do\n install_bidirectional_triggers(table, old_column, new_column)\n end\n end",
"def change_table(*args, &block)\n apply_translatable_option!(:change_table, block, *args) do |definition|\n super(*args, &definition)\n end\n end",
"def replace_statement(target, stage)\n <<-SQLREPLACE\n begin transaction;\n drop table #{target};\n alter table #{stage} rename to #{target};\n end transaction;\n SQLREPLACE\n end",
"def apply_alter_table_generator(name, generator)\n ops = generator.operations\n\n unless can_add_primary_key_constraint_on_nullable_columns?\n if add_pk = ops.find{|op| op[:op] == :add_constraint && op[:type] == :primary_key}\n ops = add_pk[:columns].map{|column| {:op => :set_column_null, :name => column, :null => false}} + ops\n end\n end\n\n apply_alter_table(name, ops)\n end",
"def update!(**args)\n @postgresql_tables = args[:postgresql_tables] if args.key?(:postgresql_tables)\n @schema = args[:schema] if args.key?(:schema)\n end",
"def modify_column_families instance_id, table_id, modifications\n tables.modify_column_families name: table_path(instance_id, table_id), modifications: modifications\n end",
"def generate_migration_body(tables)\n current_tables, new_tables = table_names(tables).partition do |table_name| \n @db_table_names.include?(table_name)\n end\n\n add_line \"change do\"\n create_new_tables(new_tables, tables)\n alter_tables(current_tables, tables)\n add_line \"end\"\n end",
"def execute(sql, *args, &block)\n @db.execute(rewrite_table_names(sql), *args, &block)\n end",
"def update!(changes)\n Mao.sql(with_options(:update => changes).sql) do |pg_result|\n pg_result.cmd_tuples\n end\n end",
"def build_sql(field)\n statement_sql = ''\n keys = primary_keys(field)\n puts \" Altering #{keys.length} records for: #{field.name} => #{field.output_type}\".blue\n keys.each do |primary_key|\n record_sql = \"UPDATE #{field.table} \"\n record_sql += \"SET #{field.column} = '#{out_val(field)}' \"\n record_sql += \"#{where_and(record_sql)} #{field.primary_key_col} = #{primary_key};\\n\"\n statement_sql += record_sql\n end \n statement_sql\nend",
"def update!(**args)\n @postgresql_columns = args[:postgresql_columns] if args.key?(:postgresql_columns)\n @table = args[:table] if args.key?(:table)\n end",
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}\"\n end",
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}\"\n end",
"def rebuild(table); end",
"def add_tables_to_publication(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} ADD TABLE #{safe_list(tables)}\")\n end",
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_identifier(schema_and_table(new_name).last)}\"\n end",
"def update!(**args)\n @oracle_tables = args[:oracle_tables] if args.key?(:oracle_tables)\n @schema = args[:schema] if args.key?(:schema)\n end",
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}\"\n end",
"def update!(**args)\n @existing_schema = args[:existing_schema] if args.key?(:existing_schema)\n @new_schema = args[:new_schema] if args.key?(:new_schema)\n @sampled_data_locations = args[:sampled_data_locations] if args.key?(:sampled_data_locations)\n @schema_change = args[:schema_change] if args.key?(:schema_change)\n @table = args[:table] if args.key?(:table)\n end",
"def rename_table_sql(name, new_name)\n \"ALTER TABLE #{quote_identifier(name)} RENAME TO #{quote_identifier(new_name)}\"\n end",
"def sql_update(sql)\n exec(sql)\n # return affected rows.\n end",
"def rename_table(*args)\n execute(rename_table_sql(*args))\n end",
"def update!(**args)\n @oracle_columns = args[:oracle_columns] if args.key?(:oracle_columns)\n @table = args[:table] if args.key?(:table)\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n\n selector_column_definitions = column_definitions.select { |cd| selector_keys.include?(cd.name) }\n setter_column_definitions = column_definitions.select { |cd| setter_keys.include?(cd.name) }\n update_column_definitions = setter_column_definitions.select { |cd| cd.name !~ CREATED_COL_REGEX && !options[\"ignore_on_update\"].include?(cd.name) }\n\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def copy_tables(table_names, from_db, to_db)\n return if table_names.empty?\n \n # For efficiency, turn off time consuming options.\n sql_connection.execute(\"set autocommit = 0;\")\n sql_connection.execute(\"set unique_checks = 0;\")\n sql_connection.execute(\"set foreign_key_checks = 0;\")\n\n from_escaped = sql_connection.identifier(from_db)\n to_escaped = sql_connection.identifier(to_db)\n\n table_names.each { |name| \n print \".\"\n # Think about whether we should drop/create/re-add triggers, or just truncate.\n tbl = sql_connection.identifier(name)\n\n to_create, to_autoincr = show_create_table_without_increment(to_db, name)\n from_create, from_autoincr = show_create_table_without_increment(from_db, name)\n\n if to_create.nil?\n # table doesn't exist, create it.\n op = :create\n sql_connection.execute(\"CREATE TABLE IF NOT EXISTS #{to_escaped}.#{tbl} LIKE #{from_escaped}.#{tbl}\")\n elsif from_create == to_create\n # table is the same, truncate it.\n op = :truncate\n sql_connection.execute(\"TRUNCATE TABLE #{to_escaped}.#{tbl}\")\n else\n # table is different, drop and create.\n op = :drop_and_create\n sql_connection.execute(\"DROP TABLE #{to_escaped}.#{tbl}\")\n sql_connection.execute(\"CREATE TABLE IF NOT EXISTS #{to_escaped}.#{tbl} LIKE #{from_escaped}.#{tbl}\")\n end\n\n if block_given?\n yield name, op\n end\n\n sql_connection.execute(\"INSERT INTO #{to_escaped}.#{tbl} SELECT * FROM #{from_escaped}.#{tbl}\")\n #\n # if from_create == to_create and from_autoincr != to_autoincr\n # puts \"Warning: set auto_increment not implemented yet.\"\n # For many purposes it won't matter because TRUNCATE TABLE\n # will reset auto_increment (see docs for TRUNCATE TABLE).\n # If it does matter then either implement this or\n # provide an option to drop the table.\n # end\n\n }\n\n sql_connection.execute(\"COMMIT;\")\n sql_connection.execute(\"set foreign_key_checks = 1;\")\n sql_connection.execute(\"set unique_checks = 1;\")\n sql_connection.execute(\"set autocommit = 1;\")\n end",
"def merge_or_replace(mode:)\n target = replication.target_table\n stage = temporary_table_name\n sql = send(\"#{mode}_statement\", target, stage)\n ::RailsRedshiftReplicator.connection.exec sql\n end",
"def update_sql\n update_manager = Arel::UpdateManager.new(ActiveRecord::Base)\n update_manager.table(arel_table)\n update_manager.where(arel_table[:id].eq(id))\n update_manager.set([[arel_table[:retoure_reason], retoure_reason]])\n update_manager.to_sql\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{buffer.parent.table_name.inspect} for selector #{selector.map(&:inspect).join(', ')} and columns #{columns.map(&:inspect).join(', ')}\"\n column_definitions = get_column_definitions\n connection.execute <<-EOS\nCREATE OR REPLACE FUNCTION #{name}(#{column_definitions.map { |c| \"#{c.quoted_input_name} #{c.sql_type} DEFAULT #{c.default || 'NULL'}\" }.join(',') }) RETURNS VOID AS\n$$\nBEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{column_definitions.map { |c| \"#{c.quoted_name} = #{c.quoted_input_name}\" }.join(',')}\n WHERE #{selector.map { |k| \"#{connection.quote_ident(k)} = #{connection.quote_ident([k,'input'].join('_'))}\" }.join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{column_definitions.map { |c| c.quoted_name }.join(',')}) VALUES (#{column_definitions.map { |c| c.quoted_input_name }.join(',')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\nEND;\n$$\nLANGUAGE plpgsql;\nEOS\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg) + hstore_delete_handlers.map(&:to_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def set_publication_tables(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} SET TABLE #{safe_list(tables)}\")\n end",
"def update_schema\n create_table unless table_exists?\n (schema_columns - column_names).each { |column| ActiveRecord::Migration.add_column(table_name, column, :string) }\n (column_names - protected_columns - schema_columns).each { |column| ActiveRecord::Migration.remove_column(table_name, column) }\n end",
"def to_updates(set_columns, args={})\n args[:pk] ||= [0, 'id']\n args[:table] ||= Pathname.new(@filename).basename.to_s.downcase.gsub(/\\W/, '_')\n args[:before] ||= @@defaults[:before]\n args[:after] ||= @@defaults[:after]\n args[:values_glue] ||= \", \"\n args[:row_format] ||= lambda do |values|\n r = []\n set_columns.each_with_index { |set_column, i| r << \"#{set_column} = #{values[i]}\" if set_column }\n \"update #{args[:table]} set #{r.join(', ')} where #{args[:pk][1]} = #{values[args[:pk][0]]}\"\n end\n args[:row_glue] ||= \";\\n\"\n to_any args\n end",
"def add_additions(statements)\n statements.each do |statement|\n add_addition(statement)\n end\n end",
"def migration_update_table\n added, removed, changed, prev_fields = field_changes\n\n if table_comments\n new_table_comment = table_comment_changes\n new_fields_comments = fields_comments_changes\n end\n\n unless added.present? || removed.present? || changed.present? ||\n new_table_comment || new_fields_comments.present? || table_name_changed || no_master_association_changed\n return\n end\n\n new_fields_comments ||= {}\n\n <<~ARCONTENT\n self.no_master_association = #{!!no_master_association}\n #{table_name_changed ? \" self.prev_table_name = '#{prev_table_name}'\" : ''}\n #{table_name_changed ? ' update_table_name' : ''}\n #{table_name_changed ? '' : \" self.prev_fields = %i[#{prev_fields.join(' ')}]\"}\n \\# added: #{added}\n \\# removed: #{removed}\n \\# changed type: #{changed}\n #{new_table_comment ? \" \\# new table comment: #{new_table_comment.gsub(\"\\n\", '\\n')}\" : ''}\n #{new_fields_comments.present? ? \" \\# new fields comments: #{new_fields_comments.keys}\" : ''}\n update_fields\n ARCONTENT\n end",
"def flag_row_deltas\n tables.each(&:flag_row_deltas)\n end",
"def rename_column_concurrently(table, old_column, new_column, type: nil, batch_column_name: :id)\n Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode!\n\n setup_renamed_column(__callee__, table, old_column, new_column, type, batch_column_name)\n\n with_lock_retries do\n install_bidirectional_triggers(table, old_column, new_column)\n end\n end",
"def execute_batch(sql, *args, &block)\n @db.execute_batch(rewrite_table_names(sql), *args, &block)\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def set_operation(other, oper = :+, distinct: true, add_boundaries: true, inherit_boundaries: false)\n unless columns.size == other.columns.size\n msg = \"can't apply set ops to tables with a different number of columns\"\n raise UserError, msg\n end\n unless columns.map(&:type) == other.columns.map(&:type)\n msg = \"can't apply a set ops to tables with different column types.\"\n raise UserError, msg\n end\n other_rows = other.rows.map { |r| r.replace_keys(headers) }\n result = empty_dup\n new_rows = rows.send(oper, other_rows)\n new_rows.each_with_index do |row, k|\n result << row\n result.mark_boundary if k == size - 1 && add_boundaries\n end\n if inherit_boundaries\n result.explicit_boundaries = boundaries\n result.append_boundaries(other.boundaries, shift: size)\n end\n result.normalize_boundaries\n distinct ? result.distinct : result\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( table_name, primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target or :constraint_name to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def alter_table(name, *)\n super\n remove_cached_schema(name)\n nil\n end",
"def collins_set_being_altered!(database, table, alter, skip_rename)\n self.collins_osc_state = {\n 'running' => true,\n 'started' => Time.now.to_i,\n 'database' => database,\n 'table' => table,\n 'alter' => alter,\n 'current_state' => \"being_altered\",\n 'next_state' => skip_rename ? \"needs_rename\" : \"can_be_altered\"\n }\n end",
"def update(table,cond,op)\n connection.update(path(table),cond,op)\n end",
"def create_tables!\n migrate(:up)\n end",
"def update_sql(sql, name = nil)\n super\n end",
"def update(sql, name = nil) end",
"def chrono_rename_temporal_indexes(name, new_name)\n on_temporal_schema do\n temporal_indexes = indexes(new_name)\n temporal_indexes.map(&:name).each do |old_idx_name|\n if old_idx_name =~ /^index_#{name}_on_(?<columns>.+)/\n new_idx_name = \"index_#{new_name}_on_#{$~['columns']}\"\n execute \"ALTER INDEX #{old_idx_name} RENAME TO #{new_idx_name}\"\n end\n end\n end\n end",
"def compare_columns(table)\n t1_cols = query(@db1, \"DESCRIBE #{table}\")\n t2_cols = query(@db2, \"DESCRIBE #{table}\")\n \n # reject cols that already exist in the table\n add_to_t2 = t1_cols.reject {|t| t2_cols.include?(t) }\n add_to_t1 = t2_cols.reject {|t| t1_cols.include?(t) }\n \n changes = false\n add_to_t1.each do |c|\n # find the previous column in the other table so it stays in order\n previous_col = t2_cols.at(t2_cols.index(c) - 1)\n @db1_output << \"ALTER TABLE #{table} ADD COLUMN #{c[0]} #{c[1]}\" \n @db1_output << \" AFTER #{previous_col[0]};\" if previous_col[0]\n @db1_output << \" FIRST\" if ! previous_col[0]\n @db1_output << \"\\n\"\n changes = true\n end\n @db1_output << \"\\n\" if changes\n \n chnages = false\n add_to_t2.each do |c|\n # find the previous column in the other table so it stays in order\n previous_col = t1_cols.at(t1_cols.index(c) - 1)\n @db2_output << \"ALTER TABLE #{table} ADD COLUMN #{c[0]} #{c[1]}\" \n @db2_output << \" AFTER #{previous_col[0]};\" if previous_col[0]\n @db2_output << \" FIRST\" if ! previous_col[0]\n @db2_output << \"\\n\"\n changes = true\n end\n @db2_output << \"\\n\" if changes\n \n # TODO - stw - Compare column types and modify if necessary\n \n # store away new column layout for data diff\n @columns[table] = t1_cols | t2_cols\n end",
"def add_select_into_table(new_table_name, sql_query)\n \"CREATE TABLE #{new_table_name} \" + sql_query\n end",
"def extended_table(extra = nil, **opt)\n sql = sql_extended_table(extra, **opt)\n ActiveRecord::Base.connection.exec_query(sql)\n end",
"def exec_update(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def update!(**args)\n @base_table = args[:base_table] if args.key?(:base_table)\n @sql_query = args[:sql_query] if args.key?(:sql_query)\n @view_type = args[:view_type] if args.key?(:view_type)\n end",
"def update_all(updates, conditions = nil)\n sql = \"UPDATE #{table_name} SET #{updates} \"\n add_conditions!(sql, conditions)\n connection.update(sql, \"#{name} Update\")\n end",
"def update_db\n ## Sequel\n drop_table\n create_table\n db.transaction do\n table.delete_sql\n install_manual\n install_updates\n end\n ## AR\n # self.transaction do\n # self.delete_all\n # self.install_manual\n # self.install_updates\n # end\n end",
"def splice_table(*tables)\n table = tables.shift\n tables.each do |new_table|\n table.each_with_index do |row, i|\n table[i] = row + new_table[i]\n end\n end\n table\nend",
"def update_where(table, what, where, *bindvars)\n sql = \"update #{table.name}\\nset #{what} where #{where}\"\n#$stderr.puts sql\n db.do(sql, *bindvars)\n end",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end"
] |
[
"0.7256814",
"0.68517065",
"0.68125564",
"0.68125564",
"0.67696595",
"0.67665815",
"0.6752053",
"0.67360526",
"0.67310065",
"0.6724511",
"0.669938",
"0.66552204",
"0.6649486",
"0.6588922",
"0.6582951",
"0.6451879",
"0.64394355",
"0.62767357",
"0.60828197",
"0.5993038",
"0.5988151",
"0.59689957",
"0.5934984",
"0.5854244",
"0.5743144",
"0.5698778",
"0.5605594",
"0.55666006",
"0.55191225",
"0.54564524",
"0.54558873",
"0.54266155",
"0.5414035",
"0.5387888",
"0.53497744",
"0.53315395",
"0.53258884",
"0.53258884",
"0.53258884",
"0.52261955",
"0.52255005",
"0.51937884",
"0.517355",
"0.5171295",
"0.51631933",
"0.51464367",
"0.5129549",
"0.5098824",
"0.50773644",
"0.5070826",
"0.50665766",
"0.50612694",
"0.5041756",
"0.5041756",
"0.5041563",
"0.5033719",
"0.5009877",
"0.5008098",
"0.49961638",
"0.4971663",
"0.49638954",
"0.49595934",
"0.4946994",
"0.49419272",
"0.49395335",
"0.4939436",
"0.49364728",
"0.4934757",
"0.49332595",
"0.49130732",
"0.48977607",
"0.48767734",
"0.4870745",
"0.48611903",
"0.48578718",
"0.48513794",
"0.48506063",
"0.4842367",
"0.48307386",
"0.4829695",
"0.48228773",
"0.48120824",
"0.48067552",
"0.48009208",
"0.47928917",
"0.47891328",
"0.47776902",
"0.47713792",
"0.47658655",
"0.4762172",
"0.47551417",
"0.4731038",
"0.4730623",
"0.47278103",
"0.47245952",
"0.46781266",
"0.46776372",
"0.4664941",
"0.46581396",
"0.46581396"
] |
0.69321823
|
1
|
PostgreSQL supports CREATE OR REPLACE VIEW.
|
def supports_create_or_replace_view?
true
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def create_or_replace_view(name, source)\n source = source.sql if source.is_a?(Dataset)\n execute(\"CREATE OR REPLACE VIEW #{name} AS #{source}\")\n end",
"def create_or_replace_view(name, source, options = OPTS)\n if supports_create_or_replace_view?\n options = options.merge(:replace=>true)\n else\n swallow_database_error{drop_view(name)}\n end\n\n create_view(name, source, options)\n nil\n end",
"def supports_create_or_replace_view?\n false\n end",
"def recreate_view name\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n if view_structure\n execute \"DROP VIEW IF EXISTS #{name}\"\n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end\n end",
"def create_view(view_name, definition, options={})\n SchemaMonkey::Middleware::Migration::CreateView.start(connection: self, view_name: view_name, definition: definition, options: options) do |env|\n definition = env.definition\n view_name = env.view_name\n options = env.options\n definition = definition.to_sql if definition.respond_to? :to_sql\n\n if options[:materialized] && options[:allow_replace]\n raise ArgumentError, 'allow_replace is not supported for materialized views'\n end\n\n if options[:force]\n drop_view(view_name, {if_exists: true}.merge(options.slice(:materialized)))\n end\n\n command = if options[:materialized]\n \"CREATE MATERIALIZED\"\n elsif options[:allow_replace]\n \"CREATE OR REPLACE\"\n else\n \"CREATE\"\n end\n\n execute \"#{command} VIEW #{quote_table_name(view_name)} AS #{definition}\"\n end\n end",
"def create_view_prefix_sql(name, options)\n create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}VIEW #{quote_schema_table(name)}\", options[:columns])\n end",
"def update_view name, type, columns, options={}\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n raise ViewNotExistException(\"View #{name} does not exist in current db\") unless view_structure\n \n columns_str = columns.is_a?(Array) ? columns.join(',') : columns\n \n select_pattern = /select (.*) from/i\n select_str = view_structure[select_pattern,1]\n\n case type\n when :add\n view_structure.gsub!(select_pattern, \"SELECT #{select_str}, #{columns_str} FROM\")\n when :remove\n select_str.gsub!(\", #{columns_str}\", '')\n view_structure.gsub!(select_pattern, \"SELECT #{select_str} FROM\")\n when :replace\n view_structure.gsub!(select_pattern, \"SELECT #{columns_str} FROM\")\n end\n\n drop_views name, options[:dependent_views] \n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end",
"def create_view(name, body = nil, force: false, **kwargs, &block)\n kwargs[:sqlite3] = !!(adapter_name =~ /sqlite/i)\n\n drop_view(name) if force && table_exists?(name)\n\n execute build_create_view_query(name, body, **kwargs, &block)\n end",
"def create_view(name, source)\n source = source.sql if source.is_a?(Dataset)\n execute(\"CREATE VIEW #{name} AS #{source}\")\n end",
"def create_view(name, source, options = OPTS)\n execute_ddl(create_view_sql(name, source, options))\n remove_cached_schema(name)\n nil\n end",
"def create_materialized_view(name, body = nil, force: false, **kwargs, &block)\n supports_materialized_view!\n\n drop_materialized_view(name) if force && table_exists?(name)\n\n execute build_create_materialized_view_query(name, body, **kwargs, &block)\n end",
"def create_view_prefix_sql(name, options)\n sql = create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}#{'TEMPORARY 'if options[:temp]}#{'RECURSIVE ' if options[:recursive]}#{'MATERIALIZED ' if options[:materialized]}VIEW #{quote_schema_table(name)}\", options[:columns] || options[:recursive])\n\n if options[:security_invoker]\n sql += \" WITH (security_invoker)\"\n end\n\n if tablespace = options[:tablespace]\n sql += \" TABLESPACE #{quote_identifier(tablespace)}\"\n end\n\n sql\n end",
"def create_view table_id, query, name: nil, description: nil\n options = { query: query, name: name, description: description }\n insert_table table_id, options\n end",
"def drop_view(name)\n execute(\"DROP VIEW #{name}\")\n end",
"def write_view_definition(stream, table_schema, table_name, view_definition)\n stream << \" create_view \\\"#{table_schema}.#{table_name}\\\", <<-SQL\\n\" \\\n \" #{view_definition}\\n\" \\\n \" SQL\\n\"\n end",
"def write_foods_view()\n puts <<SQL\nDROP VIEW IF EXISTS foods;\n\nCREATE VIEW foods AS\nSELECT food_description.id, food_description.description,\n nutrient_data.nutrient_value AS kcal,\n food_group.description AS food_group,\n food_description.refuse_percentage, food_description.refuse_description\n FROM food_description, nutrient_definition, nutrient_data, food_group\n WHERE food_description.id = nutrient_data.food_id\n AND food_group.id = food_description.food_group_id\n AND nutrient_definition.id = nutrient_data.nutrient_id\n AND nutrient_definition.id = '208'\n AND food_group.id NOT IN ('0300', '2100', '2200', '3600');\nSQL\nend",
"def view(name)\n new_view = view_old(name)\n new_view.table_name = name\n new_view\n end",
"def tableView(aView, validateDrop:info, proposedRow:row, proposedDropOperation:op)\n NSDragOperationEvery\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def view_select_statement(view, name=nil)\n row = execute(\"SELECT VIEW_DEFINITION FROM SYSIBM.VIEWS WHERE TABLE_NAME = '#{view}'\", name).each do |row|\n return row[0]\n end\n raise \"No view called #{view} found\"\n end",
"def alter_materialized_view_owner(name, role, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :owner_to => role\n }, options).to_sql\n end",
"def merge_or_replace(mode:)\n target = replication.target_table\n stage = temporary_table_name\n sql = send(\"#{mode}_statement\", target, stage)\n ::RailsRedshiftReplicator.connection.exec sql\n end",
"def drop_view(name, **kwargs)\n kwargs[:sqlite3] = !!(adapter_name =~ /sqlite/i)\n\n execute build_drop_view_query(name, **kwargs)\n end",
"def rename_materialized_view(name, new_name, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :rename_to => new_name\n }, options).to_sql\n end",
"def create_view(id,doc)\n resp = get_design_doc(id)\n ddoc = set_views(resp,doc)\n\n create_design_doc(id,ddoc)\n end",
"def create_view_sql(name, source, options)\n source = source.sql if source.is_a?(Dataset)\n sql = String.new\n sql << \"#{create_view_prefix_sql(name, options)} AS #{source}\"\n if check = options[:check]\n sql << \" WITH#{' LOCAL' if check == :local} CHECK OPTION\"\n end\n sql\n end",
"def view_select_statement(view, name=nil)\n raise NotImplementedError, \"view_select_statement is an abstract method\"\n end",
"def new_or_edit\nend",
"def supports_materialized_views?\n false\n end",
"def create\n @postgresql_view_person = PostgresqlViewPerson.new(postgresql_view_person_params)\n\n respond_to do |format|\n if @postgresql_view_person.save\n format.html { redirect_to @postgresql_view_person, notice: 'Postgresql view person was successfully created.' }\n format.json { render :show, status: :created, location: @postgresql_view_person }\n else\n format.html { render :new }\n format.json { render json: @postgresql_view_person.errors, status: :unprocessable_entity }\n end\n end\n end",
"def alter_materialized_view_schema(name, schema, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_schema => schema\n }, options).to_sql\n end",
"def materialize_view(name, sql)\n unless @enduser\n raise Empire::MissingEnduserError.new\n end\n path = \"view/#{name}\"\n data = {'query' => sql}\n request path, :put, {}, data\n end",
"def replaces_views?\n return false\n end",
"def set_postgresql_view_person\n @postgresql_view_person = PostgresqlViewPerson.find(params[:id])\n end",
"def create_view()\n raise RuntimeError.new(\"Class #{self.class.name} must implement a ::create_view() method.\")\n end",
"def conflicting_or_created_record\n conflict || create\n end",
"def create_view(view_name, map_function, reduce_function = nil)\n design_doc = database.get \"_design/#{self.name.underscore}\" rescue nil\n if design_doc\n design_doc[\"views\"][view_name] = {:map => map_function, :reduce => reduce_function}\n else\n design_doc = {\n \"_id\" => \"_design/#{self.name.underscore}\",\n :views => {\n view_name => {\n :map => map_function,\n :reduce => reduce_function\n }\n }\n }\n end\n database.save(design_doc)\n end",
"def refresh_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::RefreshView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n sql = \"REFRESH MATERIALIZED VIEW #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def create_view(db)\n begin\n db.get('_design/todos')\n rescue RestClient::ResourceNotFound => nfe\n db.save_doc({\n \"_id\" => \"_design/todos\",\n :views => {\n :allTodos => {\n :reduce => \"_count\",\n :map => \"function(doc){if(doc.name != null){emit(doc.order,{name: doc.name})}}\"\n }\n }\n })\n end\n end",
"def view_select_statement(view, name=nil)\n q =<<-ENDSQL\n SELECT\n SM.definition\n FROM\n sys.objects O\n JOIN\n sys.sql_modules SM ON o.object_id = SM.object_id\n WHERE\n o.type = 'V' AND o.name = '#{view}'\n ENDSQL\n \n view_def = select_value(q, name)\n \n if view_def\n return convert_statement(view_def)\n else\n raise \"No view called #{view} found\"\n end\n end",
"def table_or_view\n return unless Admin::MigrationGenerator.table_or_view_exists? table_name\n\n return :table if Admin::MigrationGenerator.table_exists? table_name\n\n :view\n end",
"def alter_materialized_view_set_options(name, set_options, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_options => set_options\n }, options).to_sql\n end",
"def create_view(view, cspec)\n\tKernel.system(\"perl #{$scripts}/ct-mkview.pl -raw -name #{view} -dynamic -spec #{cspec}\")\n\treturn $? == 0\nend",
"def create_extension_view_and_class\n self.const_get(\"Extended#{to_s}\")\n rescue\n clause = view_builder\n #this needs to be moved into the specific db adapter files\n connection.execute %{\n create or replace algorithm = merge SQL SECURITY DEFINER view #{extended_table_name} as select #{clause[:view_select]} from #{table_name} #{clause[:view_joins]}#{clause[:view_conditions]}\n }\n class_eval %{\n class Extended#{to_s} < #{to_s}\n set_table_name \"#{extended_table_name}\"\n def self.descends_from_active_record?\n true\n end\n end\n }\n true\n end",
"def refresh_materialized_view(name, options = {})\n options = {\n :with_data => true\n }.merge(options)\n\n sql = \"REFRESH MATERIALIZED VIEW #{quote_view_name(name)}\"\n sql << \" WITH NO DATA\" unless options[:with_data]\n\n execute \"#{sql};\"\n end",
"def create_or_replace_replication_trigger_function(params)\n execute(<<-end_sql)\n DROP PROCEDURE IF EXISTS `#{params[:trigger_name]}`;\n end_sql\n \n activity_check = \"\"\n if params[:exclude_rr_activity] then\n activity_check = <<-end_sql\n DECLARE active INT;\n SELECT count(*) INTO active FROM #{params[:activity_table]};\n IF active <> 0 THEN\n LEAVE p;\n END IF;\n end_sql\n end\n\n execute(<<-end_sql)\n CREATE PROCEDURE `#{params[:trigger_name]}`(change_key varchar(2000), change_new_key varchar(2000), change_type varchar(1))\n p: BEGIN\n #{activity_check}\n INSERT INTO #{params[:log_table]}(change_table, change_key, change_new_key, change_type, change_time)\n VALUES('#{params[:table]}', change_key, change_new_key, change_type, now());\n END;\n end_sql\n \n end",
"def drop_materialized_view(*args)\n options = args.extract_options!\n args.flatten!\n\n sql = 'DROP MATERIALIZED VIEW '\n sql << 'IF EXISTS ' if options[:if_exists]\n sql << Array.wrap(args).collect { |v| quote_view_name(v) }.join(', ')\n sql << ' CASCADE' if options[:cascade]\n execute(\"#{sql};\")\n end",
"def materialized_view_definition(matview_name, name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; end",
"def create\n create_or_update\n end",
"def create\n create_or_update\n end",
"def table_modifier_in_create(o)\n \" TEMPORARY\" if o.temporary\n end",
"def test_materialized_view_metadata_drop\n skip(\"Materialized views were introduced in Cassandra 3.0.0\") if CCM.cassandra_version < '3.0.0'\n\n @session.execute(\"CREATE TABLE simplex.test (pk int PRIMARY KEY, c int)\")\n @session.execute(\"CREATE MATERIALIZED VIEW simplex.mv1 AS SELECT c,pk FROM simplex.test WHERE c IS NOT NULL AND pk IS NOT NULL PRIMARY KEY (pk, c)\")\n\n @listener.wait_for_materialized_view('simplex', 'mv1')\n assert @cluster.keyspace('simplex').has_materialized_view?('mv1')\n\n @session.execute(\"DROP MATERIALIZED VIEW simplex.mv1\")\n @cluster.refresh_schema\n refute @cluster.keyspace('simplex').has_materialized_view?('mv1')\n end",
"def refresh_view(name, opts=OPTS)\n run \"REFRESH MATERIALIZED VIEW#{' CONCURRENTLY' if opts[:concurrently]} #{quote_schema_table(name)}\"\n end",
"def recreate\n drop if exists?\n create\n end",
"def create_view(db) \n begin\n db.get('_design/todos')\n rescue RestClient::ResourceNotFound => nfe\n db.save_doc({\n \"_id\" => \"_design/todos\",\n :views => {\n :allTodos => {\n :reduce => \"_count\",\n :map => \"function(doc){if(doc.title && doc.completed != null){emit(doc.order,{title: doc.title,completed: doc.completed})}}\"\n }\n }\n })\n end\nend",
"def migration_update_view\n _added, _removed, _changed, prev_fields = field_changes\n\n if table_comments\n new_table_comment = table_comment_changes\n new_fields_comments = fields_comments_changes\n end\n\n new_fields_comments ||= {}\n\n <<~ARCONTENT\n #{table_name_changed ? \" self.prev_table_name = '#{prev_table_name}'\" : ''}\n #{table_name_changed ? ' update_table_name' : ''}\n self.prev_fields = %i[#{prev_fields.join(' ')}]\n #{new_table_comment ? \" \\# new table comment: #{new_table_comment.gsub(\"\\n\", '\\n')}\" : ''}\n #{new_fields_comments.present? ? \" \\# new fields comments: #{new_fields_comments.keys}\" : ''}\n create_or_update_dynamic_model_view\n ARCONTENT\n end",
"def create_or_update\n raise ReadOnlyRecord if readonly?\n result = new_record? ? create_record : update_record\n result != false\n end",
"def copy_vassal_view(view)\n # Copy this view to a new one. Both views needs to point to the same\n # buffer.\n new_view = CSTE::View.new(\n view.buffer, view.init_col, view.init_line, view.cols, view.lines)\n\n # This container is the lord of those views.\n view.lord = self\n new_view.lord = self\n\n return new_view\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{buffer.parent.table_name.inspect} for selector #{selector.map(&:inspect).join(', ')} and columns #{columns.map(&:inspect).join(', ')}\"\n column_definitions = get_column_definitions\n connection.execute <<-EOS\nCREATE OR REPLACE FUNCTION #{name}(#{column_definitions.map { |c| \"#{c.quoted_input_name} #{c.sql_type} DEFAULT #{c.default || 'NULL'}\" }.join(',') }) RETURNS VOID AS\n$$\nBEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{column_definitions.map { |c| \"#{c.quoted_name} = #{c.quoted_input_name}\" }.join(',')}\n WHERE #{selector.map { |k| \"#{connection.quote_ident(k)} = #{connection.quote_ident([k,'input'].join('_'))}\" }.join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{column_definitions.map { |c| c.quoted_name }.join(',')}) VALUES (#{column_definitions.map { |c| c.quoted_input_name }.join(',')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\nEND;\n$$\nLANGUAGE plpgsql;\nEOS\n end",
"def drop_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::DropView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n options = env.options\n materialized = options[:materialized] ? 'MATERIALIZED' : ''\n sql = \"DROP #{materialized} VIEW\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def save_or_create(db, doc_id)\n begin\n puts \"Trying to create a new testing design document with a view by_automation...\".cyan\n db.save_doc({\"_id\" => doc_id, :views => { :by_automation => { :map => \"function(doc) { if (doc.content.testing == 'SELCUKE') { emit(doc.content.general.oi, doc._id); } }\" } } })\n rescue CouchRest::Conflict => nfe\n puts \"Design document already exists, updating existing design document with view...\".yellow\n doc = db.get('_design/testing')\n rev = db.get(doc['_id'])['_rev']\n db.save_doc({\"_id\" => doc_id, \"_rev\" => rev, :views => { :by_automation => { :map => \"function(doc) { if (doc.content.testing == 'SELCUKE') { emit(doc.content.general.oi, doc._id); } }\" } } })\n puts \"Updated testing design document with rev# \\\"#{rev}\\\"\".cyan\n end\n end",
"def create_views(overrides={})\n views = new_views(overrides)\n views.site.save\n Views.find(views.id)\n end",
"def create(view_or_constant, style = nil, opts = {})\n # TODO, refactor so that add_subview uses create, not backwards like it is now\n opts[:do_not_add] = true\n opts[:style] = style\n add_subview view_or_constant, opts\n end",
"def replace_statement(target, stage)\n <<-SQLREPLACE\n begin transaction;\n drop table #{target};\n alter table #{stage} rename to #{target};\n end transaction;\n SQLREPLACE\n end",
"def views(stream)\n # Don't create \"system\" views.\n view_names = PgSaurus::Tools.views\n view_names.each do |options|\n write_view_definition(stream,\n options[\"table_schema\"],\n options[\"table_name\"],\n options[\"view_definition\"])\n end\n stream << \"\\n\"\n end",
"def target_postgresql_version=(_arg0); end",
"def paths_to_view view\n replaceable_paths_to :views, \"#{view}.*\"\n end",
"def create_full_rst_tbl(preserve_null_pk = true)\n unless defined? @full_rst_tbl\n self.all_cols_select\n self.pk_full_list\n if preserve_null_pk\n renamed_pk_col = @pk_full_list.map { |pk| \"#{pk['col']} as #{pk['alias']}_pk\" }.join(', ')\n else\n renamed_pk_col = @pk_full_list.map do |pk|\n pkcol = @all_cols.find{|col| col.colname == pk['colname'] and col.relname==pk['relname']}\n \"COALESCE(#{pk['col']},#{pkcol.null_replacement}) as #{pk['alias']}_pk\"\n end.join(',')\n end\n targetListReplacement = \"#{renamed_pk_col},#{@all_cols_select}\"\n query = ReverseParseTree.reverseAndreplace(@parseTree, targetListReplacement, '')\n @full_rst_tbl = \"#{@table}_full_rst\"\n pk = @pk_full_list.map { |pk| \"#{pk['alias']}_pk\" }.join(', ')\n # binding.pry\n DBConn.tblCreation(@full_rst_tbl, pk, query)\n\n # unless preserve_null_pk\n # DBConn.update_null_columns(@full_rst_tbl,pk)\n # end\n # if is_plain_query()\n # query = QueryBuilder.create_tbl(@full_rst_tbl, pk, query)\n # DBConn.exec(query)\n # else\n # query = QueryBuilder.create_tbl(@full_rst_tbl, '', query)\n # DBConn.exec(query)\n\n # # not_null_query = pk_list.flat.map{|pk| \"#{pk} is not null\"}.join(' AND ')\n # # add index on not null columns\n # pk_not_null = @pk_full_list.map { |pk| \"#{pk['alias']}_pk is not null\"}.join(' OR ')\n # create_indx = \"CREATE UNIQUE INDEX idx_#{@full_rst_tbl} on #{@full_rst_tbl} (#{pk}) where #{pk_not_null}\"\n # pp create_indx\n # DBConn.exec(create_indx)\n\n # end\n end\n return @full_rst_tbl\n end",
"def materialized_views(name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; [] end",
"def views(opts=OPTS, &block)\n tables_or_views('VIEW', opts, &block)\n end",
"def drop_materialized_view(name, **kwargs)\n supports_materialized_view!\n\n execute build_drop_materialized_view_query(name, **kwargs)\n end",
"def test_materialized_view_metadata_updates\n skip(\"Materialized views were introduced in Cassandra 3.0.0\") if CCM.cassandra_version < '3.0.0'\n\n @session.execute(\"CREATE TABLE simplex.test (pk int PRIMARY KEY, c int)\")\n @session.execute(\"CREATE MATERIALIZED VIEW simplex.mv1 AS SELECT c,pk FROM simplex.test WHERE c IS NOT NULL AND pk IS NOT NULL PRIMARY KEY (pk, c)\")\n\n @listener.wait_for_materialized_view('simplex', 'mv1')\n\n assert @cluster.keyspace('simplex').has_materialized_view?('mv1')\n mv_meta = @cluster.keyspace('simplex').materialized_view('mv1')\n assert_equal 'SizeTieredCompactionStrategy', mv_meta.options.compaction_strategy.class_name\n\n @session.execute(\"ALTER MATERIALIZED VIEW simplex.mv1 WITH compaction = { 'class' : 'LeveledCompactionStrategy' }\")\n @cluster.refresh_schema\n mv_meta = @cluster.keyspace('simplex').materialized_view('mv1')\n assert_equal 'LeveledCompactionStrategy', mv_meta.options.compaction_strategy.class_name\n end",
"def create_or_update\n\n return if @deleted == true\n\n cf = connect\n\n if @record_id.nil?\n rec = cf.rec_new('ec.vg','A',name, content, self.ttl)\n logger.debug rec\n self.record_id = rec[\"response\"][\"rec\"][\"obj\"][\"rec_id\"].to_i\n else\n rec = cf.rec_edit('ec.vg', 'A', record_id, name, content, self.ttl, false)\n logger.debug rec\n end\n\n end",
"def view(name, opts = {})\n design_doc.create_view(name, opts)\n end",
"def drop_views name, defs=nil\n defs = defs.delete(:dependent_views) if defs.is_a?(Hash)\n defs.each do |dependent_view|\n execute \"DROP VIEW IF EXISTS #{dependent_view}\"\n end if defs\n \n execute \"DROP VIEW IF EXISTS #{name}\"\n\n end",
"def update\n respond_to do |format|\n if @postgresql_view_person.update(postgresql_view_person_params)\n format.html { redirect_to @postgresql_view_person, notice: 'Postgresql view person was successfully updated.' }\n format.json { render :show, status: :ok, location: @postgresql_view_person }\n else\n format.html { render :edit }\n format.json { render json: @postgresql_view_person.errors, status: :unprocessable_entity }\n end\n end\n end",
"def views(opts=OPTS)\n relkind = opts[:materialized] ? 'm' : 'v'\n pg_class_relname(relkind, opts)\n end",
"def create_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"test\")\n c.exec %q{\n CREATE TABLE movies (\n id PRIMARY KEY,\n title TEXT,\n description TEXT,\n rating INTEGER\n );\n }\n c.close\nend",
"def update\n create_or_update\n end",
"def add_to_graph\n connection.execute <<-EOS\n REPLACE INTO #{oqgraph_table_name} (origid, destid, weight) \n VALUES (#{self.send(self.class.from_key)}, #{self.send(self.class.to_key)}, #{self.send(self.class.weight_column) || 1.0})\n EOS\n end",
"def begin_view_new(object_locale_key)\n begin_view(t(:Create),\" \",t(object_locale_key))\n end",
"def do_query_view(view_name, view_options)\n database.view \"#{self.name.underscore}/#{view_name}\", view_options\n end",
"def quote_table_or_view(name, options)\n schema = options[:schema]\n if schema\n \"\\\"#{schema}\\\".\\\"#{name}\\\"\"\n else\n \"\\\"#{name}\\\"\"\n end\n end",
"def anonymize_work_view_event( view_event )\n\n # find an anomomyzed version\n event = find_existing_view_event( view_event.date, view_event.work_id, nil )\n if event.nil? == false\n event.work_views += view_event.work_views\n else\n event = create_new_view_event( view_event.work_id, nil )\n event.date = view_event.date\n end\n save_safely( event )\n\n view_event.destroy\n end",
"def drop_view(*names)\n options = names.last.is_a?(Hash) ? names.pop : OPTS\n names.each do |n|\n execute_ddl(drop_view_sql(n, options))\n remove_cached_schema(n)\n end\n nil\n end",
"def _view; end",
"def edit\n raise NotImplementedError\n end",
"def store_preferred_view\n return if %w[hierarchy online_contents].include?(params[:view])\n super\n end",
"def create_edit(commit, key, value)\n ActiveRecord::Base.transaction do\n DatasetEdit.create!(\n commit_id: commit.id,\n key: key,\n value: value\n )\n end\n end",
"def create_edit(commit, key, value)\n ActiveRecord::Base.transaction do\n DatasetEdit.create!(\n commit_id: commit.id,\n key: key,\n value: value\n )\n end\n end",
"def set_new_record(record)\n replace(record)\n end",
"def view(ddoc,view,*opts)\n q = \"#{database}/_design/#{ddoc}/_view/#{view}\"\n q << build_query_string(opts.first,\"view\") if opts && opts.any? && opts.first.is_a?(Hash)\n\n @conn.query({url_path: q, method: :get})\n end",
"def reload! view: nil\n view ||= :SCHEMA_VIEW\n @grpc = service.get_table instance_id, name, view: view\n @loaded_views = Set[view]\n self\n end",
"def alter_materialized_view_reset_options(name, *args)\n options = args.extract_options!\n\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :reset_options => args\n }, options).to_sql\n end",
"def create_view(name, content, engine=:erb)\n FileUtils.mkdir_p(default_views_path)\n file = File.join(default_views_path, name.to_s + \".#{engine}\")\n File.open(file, 'w') { |io| io.write content }\n end",
"def cd_id_table\n view_name = cd_id_table_tn\n if @cd_id_table_tn_exists.nil?\n @cd_id_table_tn_exists=true\n if !$db.table_exists?(view_name)\n $db.run(\"CREATE VIEW #{view_name} AS SELECT DISTINCT(r.canonical_document_id) FROM records r INNER JOIN records_searches br ON r.id=br.record_id INNER JOIN searches b ON br.search_id=b.id WHERE b.systematic_review_id=#{self[:id]} AND b.valid=1\n\n UNION\n\n SELECT DISTINCT r.canonical_document_id FROM searches b INNER JOIN records_searches br ON b.id=br.search_id INNER JOIN records_references rr ON br.record_id=rr.record_id INNER JOIN bib_references r ON rr.reference_id=r.id WHERE b.systematic_review_id=#{self[:id]} and r.canonical_document_id IS NOT NULL and b.valid=1 GROUP BY r.canonical_document_id\")\n end\n end\n $db[view_name.to_sym]\n end",
"def create_tbilisi_precincts(month)\n vl_name = \"#{@year} #{month} voters list\"\n view_name = \"#{vl_name} - #{@shapes[:tbilisi_precinct]}\"\n @client.query(\"drop view if exists `#{view_name}`\")\n sql = \"create view `#{view_name}` as\n select `region` AS `region`,\n `district_id` AS `district_id`,\n `district_name` AS `district_name`,\n `precinct_id` AS `precinct_id`,\n concat(cast(`district_id` as char charset utf8),\n '.',\n cast(`precinct_id` as char charset utf8)) AS `precinct_name`,\n `prec_id_from_data` AS `prec_id_from_data`,\n `avg_age` AS `avg_age`,\n `greater_99` AS `greater_99`,\n `85_99` AS `85_99`,\n `less_than_85` AS `less_than_85`,\n `no_birthdate` AS `no_birthdate`,\n `total_voters` AS `total_voters`,\n `duplicates` AS `duplicates`\n from `#{vl_name} - raw`\n where (`district_id` between 1 and 22)\n order by `district_id`\"\n\n results = @client.query(sql)\nend",
"def test_f11_Delete_missing_view\n W('f11a');\n\n Metakit::Storage.open(\"f11a\", 1) {|s1|\n v1 = s1.get_as(\"a\");\n v1.set_size(10);\n\n s1.commit();\n }\n # D(f11a);\n R('f11a');\n end",
"def alter_materialized_view_set_column_default(name, column, default, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :column => column,\n :set_default => default\n }, options).to_sql\n end"
] |
[
"0.78645104",
"0.74009097",
"0.7034088",
"0.6799619",
"0.65479636",
"0.650351",
"0.642942",
"0.6428404",
"0.6259404",
"0.6029196",
"0.59640574",
"0.57344437",
"0.5711872",
"0.56566465",
"0.5653659",
"0.56536067",
"0.55815",
"0.54361945",
"0.5410468",
"0.5246351",
"0.52235186",
"0.5175634",
"0.5162842",
"0.5156768",
"0.5144696",
"0.5143121",
"0.5142825",
"0.51395434",
"0.5102223",
"0.50994265",
"0.5098409",
"0.5096665",
"0.50957483",
"0.50917876",
"0.5076416",
"0.5058139",
"0.5034481",
"0.50221884",
"0.5007544",
"0.50024676",
"0.49946094",
"0.49805108",
"0.49746466",
"0.49695614",
"0.49615252",
"0.49506065",
"0.49407566",
"0.49350813",
"0.4930819",
"0.49228993",
"0.49228993",
"0.49134657",
"0.49103954",
"0.48906982",
"0.48651117",
"0.485964",
"0.48553237",
"0.4843347",
"0.48355317",
"0.48330572",
"0.482217",
"0.48013303",
"0.4773333",
"0.47613776",
"0.47540605",
"0.47494978",
"0.4748068",
"0.47477457",
"0.47265756",
"0.4725938",
"0.47210664",
"0.47195134",
"0.47126487",
"0.47080272",
"0.469689",
"0.4679347",
"0.46392557",
"0.46125898",
"0.45885292",
"0.4582078",
"0.45714",
"0.45713505",
"0.45677665",
"0.45666304",
"0.4565459",
"0.4519369",
"0.4510968",
"0.44987088",
"0.44862592",
"0.44841707",
"0.44841707",
"0.44839615",
"0.44823563",
"0.44802198",
"0.44791636",
"0.44769937",
"0.44732356",
"0.44699815",
"0.4453874",
"0.44363287"
] |
0.71346927
|
2
|
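The drop_view_sql variants among the negatives above differ only in whether a MATERIALIZED prefix can be emitted; the remaining flags (:if_exists, :cascade) compose the same way in both. A minimal, self-contained sketch of that composition, where the module name and the deliberately naive quote_schema_table stand-in are invented for illustration (real adapters apply their own identifier quoting):

# Sketch only: models the option handling seen in the drop_view_sql negatives.
module ViewSQLSketch
  OPTS = {}.freeze

  # Naive stand-in; a real adapter would quote identifiers properly.
  def self.quote_schema_table(name)
    name.to_s
  end

  def self.drop_view_sql(name, opts = OPTS)
    "DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW" \
      "#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}" \
      "#{' CASCADE' if opts[:cascade]}"
  end
end

ViewSQLSketch.drop_view_sql(:items, if_exists: true, cascade: true)
# => "DROP VIEW IF EXISTS items CASCADE"
ViewSQLSketch.drop_view_sql(:items_mv, materialized: true)
# => "DROP MATERIALIZED VIEW items_mv"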
Handle bigserial type if :serial option is present
|
def type_literal_generic_bignum_symbol(column)
column[:serial] ? :bigserial : super
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_serial?\n true\n end",
"def type_literal_generic_bignum(column)\n column[:serial] ? :bigserial : super\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def is_serial?\n @job.metadata_hash['type_of_record'] == \"as\"\n end",
"def serial?\n @serial\n end",
"def serial?\n @serial\n end",
"def serial; end",
"def serial; end",
"def serial=(_arg0); end",
"def serial\n fetch('device.serial')\n end",
"def serial_value(serial)\n serial.is_a?(SerialNumber) ? serial.value : serial\n end",
"def serial_primary_key_options\n {:primary_key => true, :serial => true, :type=>Integer}\n end",
"def set_serial\n @serial = Serial.find(params[:id])\n end",
"def set_serial\n @serial = Serial.find(params[:id])\n end",
"def lot_code_from_serial\n if lot_code.blank? && serial.present?\n self[:lot_code] = serial\n end\n end",
"def set_serial\n @serial = Serial.find(params[:id])\n end",
"def serial_primary_key_options\n # :nocov:\n auto_increment_key = server_version >= 100002 ? :identity : :serial\n # :nocov:\n {:primary_key => true, auto_increment_key => true, :type=>Integer}\n end",
"def serial_types\n types = []\n types << 'SX' if %w[b s].include?(record.leader[7])\n types.concat journal_types\n types.concat newspaper_types\n types.uniq!\n return types\n end",
"def new_serial\n @soa[:serial] = self.class.next_serial(@soa[:serial])\n end",
"def serial_number; Common.serial_number(@handle); end",
"def lot_code_from_serial\n if lot_code.blank? && serial.present?\n self[:lot_code] = serial\n end\n self\n end",
"def serial_number=(value)\n @serial_number = value\n end",
"def serial_params\n params.require(:serial).permit(:name, :date, :genre, :description)\n end",
"def serial_params\n params.require(:serial).permit(:name, :date, :genre, :description)\n end",
"def convert_to_native_type(data_type, s)\r\n return kb_nil if s == KB_NIL\r\n\r\n # I added this line to keep KBTable#import_csv working after I made\r\n # the kb_nil changes.\r\n return nil if s.nil?\r\n\r\n case data_type\r\n when :String\r\n if s =~ UNENCODE_RE\r\n return s.gsub('&linefeed;', \"\\n\").gsub('&carriage_return;',\r\n \"\\r\").gsub('&substitute;', \"\\032\").gsub('&pipe;', \"|\"\r\n ).gsub('&', \"&\")\r\n else\r\n return s\r\n end\r\n when :Integer\r\n return s.to_i\r\n when :Float\r\n return s.to_f\r\n when :Boolean\r\n if ['false', 'False', nil, false].include?(s)\r\n return false\r\n else\r\n return true\r\n end\r\n when :Time\r\n return Time.parse(s) \r\n when :Date\r\n return Date.parse(s)\r\n when :DateTime\r\n return DateTime.parse(s)\r\n when :YAML\r\n # This code is here in case the YAML field is the last\r\n # field in the record. Because YAML normally defines a\r\n # nil value as \"--- \", but KirbyBase strips trailing\r\n # spaces off the end of the record, so if this is the\r\n # last field in the record, KirbyBase will strip the\r\n # trailing space off and make it \"---\". When KirbyBase\r\n # attempts to convert this value back using to_yaml,\r\n # you get an exception.\r\n if s == \"---\"\r\n return nil\r\n elsif s =~ UNENCODE_RE\r\n y = s.gsub('&linefeed;', \"\\n\").gsub('&carriage_return;',\r\n \"\\r\").gsub('&substitute;', \"\\032\").gsub('&pipe;', \"|\"\r\n ).gsub('&', \"&\")\r\n return YAML.load(y)\r\n else\r\n return YAML.load(s)\r\n end\r\n when :Memo\r\n memo = KBMemo.new(@tbl.db, s)\r\n memo.read_from_file\r\n return memo\r\n when :Blob\r\n blob = KBBlob.new(@tbl.db, s)\r\n blob.read_from_file\r\n return blob\r\n else\r\n raise \"Invalid field type: %s\" % data_type\r\n end\r\n end",
"def get_serial\n return serial if serial.present?\n\n serial = \\\n if platform =~ /^jingdong/\n \"#{Date.today.to_s.gsub('-','')}_001\"\n else\n \"#{Date.today.to_s.gsub('-','')}\"\n end\n\n self.update_attribute(:serial, serial)\n serial\n end",
"def check_serial\n <<-CODE\n t1 = stack_pop();\n next_literal;\n next_int;\n stack_push(cpu_check_serial(state, c, t1, _lit, _int));\n CODE\n end",
"def convert_type(v)\n case v\n when Java::NetSourceforgeJtdsJdbc::ClobImpl\n convert_type(v.getSubString(1, v.length))\n else\n super\n end\n end",
"def test_serial_port\n master = SerialPort.new \"/home/ralph/dev/vmodem0\", 38400\n\n master.write \"AT\\r\\n\"\n\n master.write \"AT\\r\\n\"\n master.write \"AT\\r\\n\"\n #slave = SerialPort.new \"/dev/pts/10\", 38400\n\n assert_equal \"AT\", master.read\n end",
"def serial_params\n params.require(:serial).permit(:name,\n :publisher,\n :place_published,\n :primary_language_id,\n :first_year_of_issue,\n :last_year_of_issue,\n :alternate_values_attributes => [:id,\n :value,\n :type,\n :language_id,\n :alternate_value_object_type,\n :alternate_value_object_id,\n :alternate_value_object_attribute,\n :_destroy])\n end",
"def require_serial_number?\n !!(self.registered_download.require_serial_number?)\n end",
"def create_serial_ilv(s_mode = @s_mode, pkt_type = @pkt_type, ter_id = @ter_id, pkt_first = true, pkt_last = true)\n\n $test_logger.log(\"Create Serial ILV Packet Format..\")\n $test_logger.log(\"Serial Comm Mode :- #{s_mode}, Packet type:- #{pkt_type}, Terminal Identifier:- #{ter_id}\")\n raise \"Specify at least one serial protocol RS422/RS485\" if s_mode == nil\n raise \"Specify at least one Packet type\" if pkt_type == nil\n raise \"Specify Terminal Identifier\" if ter_id == nil\n\n #set flag to true for serial comunication\n @chk_serial_comm = true\n\n # create Packet Identifier\n pkt_id = create_packet_idenfier(pkt_first, pkt_last, pkt_type)\n\n # store all ILV element and hex string of ILV command\n ilv_elem = @xml_doc.elements[REQ_TAG].to_a #Here using xml_doc so there could be issue with null character in future\n hex_str = @ilv_hex_str\n\n #create Data node assign whole ILV Command into Data Tag element\n add_tag(REQ_TAG, DATA_TAG, \"//#{ID_TAG}\")\n ilv_elem.each do |elem|\n @xml_ilv_node.root.elements[\"//#{DATA_TAG}\"].add(elem)\n end\n\n #delete ILV command from Request Tag element\n @xml_ilv_node.root.elements[REQ_TAG].delete_element \"/Request/Identifier\" if ID_TAG\n @xml_ilv_node.root.elements[REQ_TAG].delete_element \"/Request/Length\" if LEN_TAG\n @xml_ilv_node.root.elements[REQ_TAG].delete_element \"/Request/Values\" if VALUE_TAG\n\n #Add Start text node with value and attributes\n add_tag(REQ_TAG, STX_TAG, \"//#{DATA_TAG}\")\n set_tag_value(\"//#{STX_TAG}\", \"0x02\")\n set_tag_attr(\"//#{STX_TAG}\", SIZE_ATTR, \"1\")\n\n #Add Packet Identifier Node with value and attributes\n add_tag(REQ_TAG, PKTID_TAG, \"//#{DATA_TAG}\")\n set_tag_value(\"//#{PKTID_TAG}\", pkt_id)\n set_tag_attr(\"//#{PKTID_TAG}\", SIZE_ATTR, \"1\")\n\n #Add Terminal Identifier Node or Request counter Node based on serial communication RS485/RS422\n ter_id = ter_id.to_s(16)\n if(s_mode == \"RS485\")\n add_tag(REQ_TAG, TID_TAG, \"//#{DATA_TAG}\")\n set_tag_value(\"//#{TID_TAG}\", \"0x\" + ter_id)\n set_tag_attr(\"//#{TID_TAG}\", SIZE_ATTR, \"1\")\n elsif (s_mode == \"RS422\")\n add_tag(REQ_TAG, RC_TAG, \"//#{DATA_TAG}\")\n set_tag_value(\"//#{RC_TAG}\", \"0x\" + ter_id)\n set_tag_attr(\"//#{RC_TAG}\", SIZE_ATTR, \"1\")\n else\n $test_logger.log(\"Please provide Serial Protocol mode RS485/RS422\")\n raise \"Specify at least one Serial Protocol mode RS485/RS422\"\n end\n\n #Add End Text Node with value and attributes\n add_tag(REQ_TAG, ETX_TAG, \"//#{DATA_TAG}\", false)\n set_tag_value(\"//#{ETX_TAG}\", \"0x02\")\n set_tag_attr(\"//#{ETX_TAG}\", SIZE_ATTR, \"1\")\n\n #Add Data Link Escape Node with value and attributes\n add_tag(REQ_TAG, DLE_TAG, \"//#{DATA_TAG}\", false)\n set_tag_value(\"//#{DLE_TAG}\", \"0x1b\")\n set_tag_attr(\"//#{DLE_TAG}\", SIZE_ATTR, \"1\")\n\n #Calculate checksum and Add CRC Node with value and attributes\n crc = calc_checksum(hex_str)\n add_tag(REQ_TAG, CRC_TAG, \"//#{DATA_TAG}\", false)\n set_tag_value(\"//#{CRC_TAG}\", \"0x\" + crc)\n set_tag_attr(\"//#{CRC_TAG}\", SIZE_ATTR, \"2\")\n #puts to_s\n #Notify changes to load other formats like RAW and HEX\n notify_change(InputChannel::XML)\n #puts \"hex:-#{@ilv_hex_str}\"\n @ilv_hex_str\n end",
"def protocol_type\n self[:p_type].to_endian(:big)\n end",
"def serial_chronology_params\n params.require(:serial_chronology).permit(:preceding_serial_id, :succeeding_serial_id)\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => Integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => Integer, :auto_increment => true}\n end",
"def phone_microdata_for(type)\n type == 'fax' ? 'faxNumber' : 'telephone'\n end",
"def credit_card_type; end",
"def update_serial!\n update_serial\n save\n end",
"def schema_column_type(db_type)\n case db_type\n when 'json'\n :json\n when 'jsonb'\n :jsonb\n else\n super\n end\n end",
"def serial?\n !@enumcron.empty?\n end",
"def needs_encoding?\n ![ :binary, :date, :datetime, :boolean, :float, :integer ].include?(type)\n end",
"def initialize(serial, secret)\n raise Bnet::BadInputError.new(\"bad serial #{serial}\") unless Util.is_valid_serial?(serial)\n raise Bnet::BadInputError.new(\"bad secret #{secret}\") unless Util.is_valid_secret?(secret)\n\n @normalized_serial = Util.normalize_serial(serial)\n @secret = secret\n end",
"def needs_encoding?\n ![ :binary, :date, :datetime, :boolean, :float, :integer ].include?(type)\n end",
"def serial_port\n\t\t\treturn @sp if port_initialized?\n\t\tend",
"def serial_number\n return @serial_number\n end",
"def schema_data_type\n case type\n when \"N\", \"F\"\n decimal > 0 ? \":float\" : \":integer\"\n when \"I\"\n \":integer\"\n when \"D\"\n \":date\"\n when \"T\"\n \":datetime\"\n when \"L\"\n \":boolean\"\n when \"M\"\n \":text\"\n else\n \":string, :limit => #{length}\"\n end\n end",
"def set_card_type\n self.cc_type ||= CardDetector.brand?(number)\n end",
"def disableSerialization \n \"disableSerialization\" \n end",
"def baud_rate=(baud_rate)\n validator = EnumAttributeValidator.new('String', [\"platform-default\", \"9600\", \"19200\", \"38400\", \"57600\", \"115200\"])\n unless validator.valid?(baud_rate)\n fail ArgumentError, \"invalid value for \\\"baud_rate\\\", must be one of #{validator.allowable_values}.\"\n end\n @baud_rate = baud_rate\n end",
"def part_type=(value)\n\n end",
"def bin_mode=(bool)\n @telnet_options[:bin_mode] = bool\n end",
"def initialize(port, options = {})\n baudrate = options[:baudrate] || 57600\n @serial_port = port.is_a?(String) ? SerialPort.new(port, baudrate, 8, 1, SerialPort::NONE) : port\n @serial_port.read_timeout = 2\n @major_version = 0\n @minor_version = 0\n @pins = []\n @analog_pins = []\n @connected = false\n end",
"def spec_type(desc, *additional); end",
"def interactivekit_params\n params.require(:interactivekit).permit(:serial)\n end",
"def marshal_load(serialised); end",
"def serial_read_int()\n return serial_read_line().hex\n end",
"def standard_port?; end",
"def optional_port; end",
"def update_serial\n return if self.type != 'SOA'\n\n a = self.content.split(' ')\n\n # Last 4 digits of serial (serial update counter)\n i = a[2][6..-1].to_i\n\n # 201801 -> 201821\n t = Time.now.strftime('%Y%m').to_i + 20\n\n # Reset serial update counter if serial is on \"old\" format (e.g.\n # 2018010199)\n i = 0 if a[2][4..5].to_i <= 12\n\n # Increment serial update counter\n i += 1\n\n a[2] = t.to_s + \"%04d\" % i\n self.content = a.join(' ')\n end",
"def serial_begin(opts={})\n rate = opts[:rate] ? opts[:rate] : 9600\n @other_setup << \"Serial.begin(#{rate});\"\n end",
"def identifier\n # TODO: Don't parse ser_type every time\n r_class = Thales::Datamodel::CLASS_FOR[record.ser_type]\n data = r_class.new.deserialize(record.ser_data)\n\n data.identifier\n end",
"def cmd_type(param)\n send_unauthorised and return unless logged_in?\n send_param_required and return if param.nil?\n if param.upcase.eql?(\"A\")\n send_response \"200 Type set to ASCII\"\n elsif param.upcase.eql?(\"I\")\n send_response \"200 Type set to binary\"\n else\n send_response \"500 Invalid type\"\n end\n end",
"def bin_mode?\n @telnet_options[:bin_mode]\n end",
"def set_serial_chronology\n @serial_chronology = SerialChronology.find(params[:id])\n end",
"def read\n @serial_port.read()\n end",
"def arduino_params\n params.require(:arduino).permit(:model, :serial)\n end",
"def type_literal_generic_file(column)\n :bytea\n end",
"def serial_console\n return nil if serial_baud_rate.nil?\n [serial_baud_rate, serial_dce_dte, serial_flow_control]\n end",
"def schema_column_type(db_type)\n Sequel::Mysql2.convert_tinyint_to_bool && db_type == 'tinyint(1)' ? :boolean : super\n end",
"def type(msg) # :yields: type\n if (msg == 'A')\n thread[:mode] = :ascii\n \"200 Type set to ASCII\"\n elsif (msg == 'I')\n thread[:mode] = :binary\n \"200 Type set to binary\"\n end\n end",
"def short_binary_type; end",
"def create\n @serial = Serial.new(serial_params)\n\n respond_to do |format|\n if @serial.save\n format.html { redirect_to @serial, notice: \"Serial '#{@serial.name}' was successfully created.\" }\n format.json { render action: 'show', status: :created, location: @serial }\n else\n format.html { render action: 'new' }\n format.json { render json: @serial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def can_be?(btype)\n backend_types.keys.include?(btype)\n end",
"def initialize(port=:auto, verbosity=:warn, baud=9600, cmd_delay=0.1)\n\t\t\n\t\t# if no port was specified, we'll attempt to iterate\n\t\t# all of the serial ports that i've ever seen gsm\n\t\t# modems mounted on. this is kind of shaky, and\n\t\t# only works well with a single modem. for now,\n\t\t# we'll try: ttyS0, ttyUSB0, ttyACM0, ttyS1...\n\t\tif port == :auto\n\t\t\t@device, @port = catch(:found) do\n\t\t\t\t0.upto(8) do |n|\n\t\t\t\t\t[\"ttyS\", \"ttyUSB\", \"ttyACM\"].each do |prefix|\n\t\t\t\t\t\ttry_port = \"/dev/#{prefix}#{n}\"\n\t\t\t\n\t\t\t\t\t\tbegin\n\t\t\t\t\t\t\t# serialport args: port, baud, data bits, stop bits, parity\n\t\t\t\t\t\t\tdevice = SerialPort.new(try_port, baud, 8, 1, SerialPort::NONE)\n\t\t\t\t\t\t\tthrow :found, [device, try_port]\n\t\t\t\t\t\t\n\t\t\t\t\t\trescue ArgumentError, Errno::ENOENT\n\t\t\t\t\t\t\t# do nothing, just continue to\n\t\t\t\t\t\t\t# try the next port in order\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\n\t\t\t\t# tried all ports, nothing worked\n\t\t\t\traise AutoDetectError\n\t\t\tend\n\t\t\n\t\t# if the port was a port number or file\n\t\t# name, initialize a serialport object\n\t\telsif port.is_a?(String) or port.is_a?(Fixnum)\n\t\t\t@device = SerialPort.new(port, baud, 8, 1, SerialPort::NONE)\n\t\t\t@port = port\n\t\t\t\n\t\t# otherwise, we'll assume that the object passed\n\t\t# was an object ready to quack like a serial modem\n\t\telse\n\t\t\t@device = port\n\t\t\t@port = nil\n\t\tend\n\t\t\n\t\t@cmd_delay = cmd_delay\n\t\t@verbosity = verbosity\n\t\t@retry_commands = 6\n\t\t@read_timeout = 10\n\t\t@locked_to = false\n\t\t\n\t\t# keep track of the depth which each\n\t\t# thread is indented in the log\n\t\t@log_indents = {}\n\t\t@log_indents.default = 0\n\t\t\n\t\t# to keep multi-part messages until\n\t\t# the last part is delivered\n\t\t@multipart = {}\n\t\t\n\t\t# start logging to file\n\t\tlog_init\n\t\t\n\t\t# to store incoming messages\n\t\t# until they're dealt with by\n\t\t# someone else, like a commander\n\t\t@incoming = []\n\t\t\n\t\t# initialize the modem; rubygsm is (supposed to be) robust enough to function\n\t\t# without these working (hence the \"try_\"), but they make different modems more\n\t\t# consistant, and the logs a bit more sane.\n\t\ttry_command \"ATE0\" # echo off\n\t\ttry_command \"AT+CMEE=1\" # useful errors\n\t\ttry_command \"AT+WIND=0\" # no notifications\n\t\t\n\t\t# PDU mode isn't supported right now (although\n\t\t# it should be, because it's quite simple), so\n\t\t# switching to text mode (mode 1) is MANDATORY\n\t\tcommand \"AT+CMGF=1\"\n\tend",
"def data_type\n\tend",
"def set_card_type\n self.cc_type ||= Spree::Creditcard::CardDetector.type?(self.number.to_s.gsub(/\\s/,''))\n end",
"def prepare_column_options(column)\n super.tap do |spec|\n spec[:encoding] = \"'#{column.sql_type_metadata.encoding}'\" if column.sql_type_metadata.encoding.present?\n end\n end",
"def get_serial_number(data)\n data['product']['serial_number'] || 'n/a'\n end",
"def process_data(type, data)\n case type\n when :boolean\n MuseekBindings::BinUtils.pack_boolean(data)\n when :uint32\n MuseekBindings::BinUtils.pack_uint32(data)\n when :string\n MuseekBindings::BinUtils.pack_string(data)\n end\n end",
"def read\n serial_port.read_nonblock(4096)\n rescue EOFError\n end",
"def software_serial(rx, tx, opts={})\n raise ArgumentError, \"can only define rx from Fixnum, got #{rx.class}\" unless rx.is_a?(Fixnum)\n raise ArgumentError, \"can only define tx from Fixnum, got #{tx.class}\" unless tx.is_a?(Fixnum)\n \n output_pin(tx)\n \n rate = opts[:rate] ? opts[:rate] : 9600\n \t\tif opts[:as]\n \t\t\t@declarations << \"SoftwareSerial _#{opts[ :as ]} = SoftwareSerial(#{rx}, #{tx});\"\n \t\t\taccessor = []\n \t\t\taccessor << \"SoftwareSerial& #{opts[ :as ]}() {\"\n \t\t\taccessor << \"\\treturn _#{opts[ :as ]};\"\n \t\t\taccessor << \"}\"\n \t\t\t@@swser_inc ||= FALSE\n \t\t\tif (@@swser_inc == FALSE) # on second instance this stuff can't be repeated\n \t\t\t\t@@swser_inc = TRUE\n\t \t\t\taccessor << \"int read(SoftwareSerial& s) {\"\n \t\t\t\taccessor << \"\\treturn s.read();\"\n \t\t\t\taccessor << \"}\"\n \t\t\t\taccessor << \"void println( SoftwareSerial& s, char* str ) {\"\n \t\t\t\taccessor << \"\\treturn s.println( str );\"\n \t\t\t\taccessor << \"}\"\n \t\t\t\taccessor << \"void print( SoftwareSerial& s, char* str ) {\"\n \t\t\t\taccessor << \"\\treturn s.print( str );\"\n \t\t\t\taccessor << \"}\"\n \t\t\t\taccessor << \"void println( SoftwareSerial& s, int i ) {\"\n \t\t\t\taccessor << \"\\treturn s.println( i );\"\n \t\t\t\taccessor << \"}\"\n \t\t\t\taccessor << \"void print( SoftwareSerial& s, int i ) {\"\n \t\t\t\taccessor << \"\\treturn s.print( i );\"\n \t\t\t\taccessor << \"}\"\n \t\t\tend\n \t\t\t@accessors << accessor.join( \"\\n\" )\n \t\t\t\n \t\t\t@signatures << \"SoftwareSerial& #{opts[ :as ]}();\"\n \n \t\t\t@other_setup << \"_#{opts[ :as ]}.begin(#{rate});\"\n \t\tend\n \tend",
"def test_find_id_by_serial_num\n d = Dog.new({\"name\"=>\"Phoebe\", \"breed\"=>\"Heeler x\", \"age\"=>3, \"serial_num\"=>3, \n \"colour\"=>\"Black tri\", \"description\"=>\"Fearful of people.\", \"temperament_id\"=>13, \"owner_id\"=>1})\n x = Dog.find_id_by_serial_num({\"serial_num\"=>3})\n assert_kind_of(Integer, x)\n end",
"def device_type=(s)\n self[:type] = s\n end",
"def find_type_wellsfargo\n batch_id = csv[0][(config_hash['BANK_OF_AMERICA']['PAYMENT']['BATCH']['batchid'])].to_i\n (900..999).include?(batch_id) ? 'CORRESP' : 'PAYMENT'\n end",
"def value_type\n 'BINARY'\n end",
"def deserialize(arg)\n raise NotImplementedError\n end",
"def nonregular_type; end",
"def find_type\n if @job_condition\n @type = (row[88].chr == 'C') ? 'PAYMENT' : 'CORRESP'\n end\n end",
"def schema_column_type(db_type)\n if convert_smallint_to_bool && db_type =~ /smallint/i \n :boolean\n else\n super\n end\n end",
"def type_supported?(type)\n Sequel.synchronize{return @supported_types[type] if @supported_types.has_key?(type)}\n supported = from(:pg_type).where(:typtype=>'b', :typname=>type.to_s).count > 0\n Sequel.synchronize{return @supported_types[type] = supported}\n end",
"def read_binary\n raise NotImplementedError\n end",
"def create\n @serial = Serial.new(serial_params)\n\n respond_to do |format|\n if @serial.save\n format.html { redirect_to @serial, notice: 'Serial was successfully created.' }\n format.json { render action: 'show', status: :created, location: @serial }\n else\n format.html { render action: 'new' }\n format.json { render json: @serial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def convert_serial_to_identity(table, opts=OPTS)\n raise Error, \"convert_serial_to_identity is only supported on PostgreSQL 10.2+\" unless server_version >= 100002\n\n server = opts[:server]\n server_hash = server ? {:server=>server} : OPTS\n ds = dataset\n ds = ds.server(server) if server\n\n raise Error, \"convert_serial_to_identity requires superuser permissions\" unless ds.get{current_setting('is_superuser')} == 'on'\n\n table_oid = regclass_oid(table)\n im = input_identifier_meth\n unless column = (opts[:column] || ((sch = schema(table).find{|_, sc| sc[:primary_key] && sc[:auto_increment]}) && sch[0]))\n raise Error, \"could not determine column to convert from serial to identity automatically\"\n end\n column = im.call(column)\n\n column_num = ds.from(:pg_attribute).\n where(:attrelid=>table_oid, :attname=>column).\n get(:attnum)\n\n pg_class = Sequel.cast('pg_class', :regclass)\n res = ds.from(:pg_depend).\n where(:refclassid=>pg_class, :refobjid=>table_oid, :refobjsubid=>column_num, :classid=>pg_class, :objsubid=>0, :deptype=>%w'a i').\n select_map([:objid, Sequel.as({:deptype=>'i'}, :v)])\n\n case res.length\n when 0\n raise Error, \"unable to find related sequence when converting serial to identity\"\n when 1\n seq_oid, already_identity = res.first\n else\n raise Error, \"more than one linked sequence found when converting serial to identity\"\n end\n\n return if already_identity\n\n transaction(server_hash) do\n run(\"ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(column)} DROP DEFAULT\", server_hash)\n\n ds.from(:pg_depend).\n where(:classid=>pg_class, :objid=>seq_oid, :objsubid=>0, :deptype=>'a').\n update(:deptype=>'i')\n\n ds.from(:pg_attribute).\n where(:attrelid=>table_oid, :attname=>column).\n update(:attidentity=>'d')\n end\n\n remove_cached_schema(table)\n nil\n end",
"def bs_type_params\n params.require(:bs_type).permit(:bs_db_id, :name, :short_name, :is_precision, :is_scale, :delflag)\n end",
"def buffer?\n defined?(buffer_part_type)\n end",
"def data_type\n\t\tend"
] |
[
"0.68647045",
"0.6242564",
"0.5943363",
"0.5943363",
"0.57919806",
"0.5643944",
"0.5643944",
"0.5393611",
"0.5393611",
"0.5379735",
"0.5183374",
"0.5166281",
"0.512758",
"0.506078",
"0.506078",
"0.50425655",
"0.5034772",
"0.49538374",
"0.49437237",
"0.4926396",
"0.4922839",
"0.49088174",
"0.4899686",
"0.4883607",
"0.4866105",
"0.48602453",
"0.480907",
"0.47812817",
"0.47772908",
"0.47734487",
"0.47607154",
"0.47503895",
"0.46669713",
"0.4662439",
"0.46333826",
"0.4614709",
"0.4614709",
"0.46018288",
"0.46018288",
"0.45932007",
"0.45897254",
"0.45850712",
"0.45680386",
"0.45666435",
"0.4557004",
"0.45449287",
"0.45323822",
"0.452114",
"0.45031145",
"0.45021984",
"0.44911304",
"0.44885498",
"0.4485963",
"0.44798672",
"0.44789478",
"0.44671923",
"0.44634792",
"0.44627067",
"0.44591984",
"0.44521555",
"0.44518092",
"0.44398713",
"0.4437275",
"0.44277096",
"0.4426774",
"0.442304",
"0.44194505",
"0.44155547",
"0.44099805",
"0.44002372",
"0.43937042",
"0.43914005",
"0.43821442",
"0.43794003",
"0.43687233",
"0.4354148",
"0.43527928",
"0.4342081",
"0.43393865",
"0.43372595",
"0.43364224",
"0.4336277",
"0.43356368",
"0.43233135",
"0.43142602",
"0.43116784",
"0.4308348",
"0.43018472",
"0.42962882",
"0.4290009",
"0.42857742",
"0.42826465",
"0.42791334",
"0.42788476",
"0.42775914",
"0.42715457",
"0.42710024",
"0.42653686",
"0.4260789",
"0.4260735"
] |
0.63120544
|
1
|
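For the bigserial row above, the override is exercised whenever a generic bignum column carries the :serial option. A short usage sketch, assuming DB is a Sequel PostgreSQL connection; the table and column names are invented:

# Sketch: with the type_literal_generic_bignum_symbol override in effect,
# a 64-bit integer column declared with serial: true renders as bigserial.
DB.create_table(:events) do
  Bignum :counter, serial: true   # emitted as: counter bigserial
  Bignum :plain                   # emitted as: plain bigint
end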
PostgreSQL uses the bytea data type for blobs
|
def type_literal_generic_file(column)
:bytea
end
|
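A usage sketch for the row above, assuming DB is a Sequel PostgreSQL connection (the table name is invented): the generic File type renders as bytea, and values wrapped with Sequel.blob, shown among the negatives below, literalize with bytea escaping instead of being treated as text.

# Sketch: generic File columns become bytea on PostgreSQL; wrap raw bytes
# in Sequel.blob so they are escaped as bytea rather than as a text string.
DB.create_table(:attachments) do
  primary_key :id
  File :data    # emitted as: data bytea
end
DB[:attachments].insert(data: Sequel.blob("\x00\xFFbinary".b))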
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def typecast_value_blob(value)\n value.is_a?(Sequel::SQL::Blob) ? value : Sequel::SQL::Blob.new(value)\n end",
"def type_literal_generic_file(column)\n :blob\n end",
"def blob(s)\n if s.is_a?(SQL::Blob)\n s\n else\n SQL::Blob.new(s)\n end\n end",
"def blob(s)\n SQL::Blob.new(s)\n end",
"def blob; end",
"def blob; end",
"def to_blob; end",
"def to_blob; end",
"def to_blob; end",
"def to_blob; end",
"def to_blob; end",
"def to_blob; end",
"def postgres_base64_data(data)\n [data].pack(\"m*\").gsub(/\\r?\\n/,\"\")\n end",
"def insert_bytea(column, value = :no_value)\n @columns << column\n @values << Translate.escape_bytea(value, @connection.pgconn) unless value == :no_value\n end",
"def value_type\n 'BINARY'\n end",
"def string_to_binary(value)\n # Escape data prior to insert into a bytea column\n if value\n res = ''\n value.each_byte { |b| res << sprintf('\\\\\\\\%03o', b) }\n res\n end\n end",
"def literal_blob_append(sql, v)\n sql << \"'\" << v.gsub(/[\\000-\\037\\047\\134\\177-\\377]/n){|b| \"\\\\#{(\"%o\" % b[0..1].unpack(\"C\")[0]).rjust(3, '0')}\"} << \"'\"\n end",
"def literal_blob_append(sql, v)\n sql << \"'\" << db.synchronize(@opts[:server]){|c| c.escape_bytea(v)} << \"'\"\n end",
"def literal_blob_append(sql, v)\n if v.empty?\n sql << \"''\"\n else\n sql << \"x'#{v.unpack('H*').first}'\"\n end\n end",
"def blob\n dbf.blob\n end",
"def to_sequel_blob\n SQL::Blob.new(self)\n end",
"def blob(val)\n val.gsub(/[\\000-\\037\\047\\134\\177-\\377]/) do |b|\n \"\\\\#{ b[0].to_s(8).rjust(3, '0') }\"\n end\n end",
"def visit_blob(binding_type)\n #TODO: consider moving the conversion right in here.\n self.result = binding_type.to_ruby(input)\n end",
"def binary_to_string(value)\n # TODO: Figure out what Cassandra's blobs look like\n value\n end",
"def convert_type(v)\n case v\n when Java::NetSourceforgeJtdsJdbc::ClobImpl\n convert_type(v.getSubString(1, v.length))\n else\n super\n end\n end",
"def to_sequel_blob\n self\n end",
"def byte cols\n decode_values :byte, cols\n end",
"def data= blob\n $postgres.exec_prepared('wsfile_update', [self.id, {value: blob, format: 1}])\n end",
"def convert_binary\n end",
"def binary_string; end",
"def to_blob(constraints = {})\n to_datastream(constraints).to_blob\n end",
"def to_blob(constraints = {})\n to_datastream(constraints).to_blob\n end",
"def pg_jsonb(v)\n case v\n when Postgres::JSONBObject\n v\n when Array\n Postgres::JSONBArray.new(v)\n when Hash\n Postgres::JSONBHash.new(v)\n when Postgres::JSONObject\n v = v.__getobj__\n Postgres::JSONDatabaseMethods.jsonb_primitive_wrapper(v).new(v)\n else\n Sequel.pg_jsonb_op(v)\n end\n end",
"def to_blob\n #@blob ||= Net::SSH::Buffer.from(:string, ssh_type,\n # :bignum, p, :bignum, q, :bignum, g, :bignum, pub_key).to_s\n SSHUtils.convert_string(ssh_type)<<\n SSHUtils.convert_bignum(p)<<\n SSHUtils.convert_bignum(q)<<\n SSHUtils.convert_bignum(g)<<\n SSHUtils.convert_bignum(pub_key)\n end",
"def type_literal_generic_string(column)\n if column[:text]\n uses_clob_for_text? ? :clob : :text\n elsif column[:fixed]\n \"char(#{column[:size]||default_string_column_size})\"\n else\n \"varchar(#{column[:size]||default_string_column_size})\"\n end\n end",
"def revert_postgres_type( type )\n case type\n when /\\Acharacter varying/\n return :String, :default_size => 255\n when /\\Acharacter/\n return :String, :fixed => true, :default_size => 255\n when /\\Atext\\z/\n return :String, :text => true\n when /\\Abytea\\z/\n return :blob\n when /\\Atimestamp/\n return :timestamp\n end\n end",
"def type_literal_generic_bignum(column)\n column[:serial] ? :bigserial : super\n end",
"def postgres_upload_binary_data(data, remote_fname=nil)\n remote_fname ||= Rex::Text::rand_text_alpha(8) + \".dll\"\n\n # From the Postgres documentation:\n # SELECT lo_creat(-1); -- returns OID of new, empty large object\n # Doing it this way instead of calling lo_create with a random number\n # ensures that we don't accidentally hit the id of a real object.\n resp = postgres_query \"select lo_creat(-1)\"\n unless resp and resp[:complete] and resp[:complete].rows[0]\n print_error \"Failed to get a new loid\"\n return\n end\n\n oid = resp[:complete].rows[0][0].to_i\n\n queries = [ \"delete from pg_largeobject where loid=#{oid}\" ]\n\n # Break the data into smaller chunks that can fit in the size allowed in\n # the pg_largeobject data column.\n # From the postgres documentation:\n # \"The amount of data per page is defined to be LOBLKSIZE (which is\n # currently BLCKSZ/4, or typically 2 kB).\"\n # Empirically, it seems that 8kB is fine on 9.x, but we play it safe and\n # stick to 2kB.\n chunks = []\n while ((c = data.slice!(0..2047)) && c.length > 0)\n chunks.push c\n end\n\n chunks.each_with_index do |chunk, pageno|\n b64_data = postgres_base64_data(chunk)\n insert = \"insert into pg_largeobject (loid,pageno,data) values(%d, %d, decode('%s', 'base64'))\"\n queries.push( \"#{insert}\"%[oid, pageno, b64_data] )\n end\n queries.push \"select lo_export(#{oid}, '#{remote_fname}')\"\n\n # Now run each of the queries we just built\n queries.each do |q|\n resp = postgres_query(q)\n if resp && resp[:sql_error]\n print_error \"Could not write the library to disk.\"\n print_error resp[:sql_error]\n # Can't really recover from this, bail\n return nil\n end\n end\n return remote_fname\n end",
"def data_as_bytes\n raise \"subclass responsibility\"\n end",
"def to_blob\n #@blob ||= Net::SSH::Buffer.from(:string, ssh_type, :bignum, e, :bignum, n).to_s\n SSHUtils.convert_string(ssh_type)<<\n SSHUtils.convert_bignum(e)<<\n SSHUtils.convert_bignum(n)\n end",
"def type_to_sql_with_notext(*args)\n type = type_to_sql_without_notext(*args)\n if type =~ /(text|blob)/\n 'varchar(2500)' # If this is bigger than about 21000 it always fails, and sometimes hits a row limit anyway if too large\n else\n type\n end\n end",
"def field_to_binary(field_type, field_value)\n case field_type\n when \"boolean\"\n if field_value == \"true\"\n \"\\x01\".b\n elsif field_value == \"false\"\n \"\\x00\".b\n else\n raise \"Expected true/false, got #{field_value}\"\n end\n when \"float\"\n # Works with any of: \"123\", \"123.4\", \"123,4\"\n [field_value.gsub('\"', '').sub(\",\", \".\").to_f].pack(\"f\")\n when \"int\"\n [field_value.gsub('\"', '').to_i].pack(\"l\")\n when \"short\"\n [field_value.gsub('\"', '').to_i].pack(\"v\")\n when \"string\"\n if field_value =~ /\\A\".*\"\\z/\n str = eval(field_value).unpack(\"U*\")\n else\n str = field_value.unpack(\"U*\")\n end\n [str.size].pack(\"v\") + str.pack(\"v*\")\n when \"optstring\"\n # this is really stupid\n if field_value == \"nil\"\n \"\\x00\"\n else\n if field_value =~ /\\A\".*\"\\z/\n str = eval(field_value).unpack(\"U*\")\n else\n str = field_value.unpack(\"U*\")\n end\n \"\\x01\" + [str.size].pack(\"v\") + str.pack(\"v*\")\n end\n when /\\Ablob:(\\d+)/\n bloblen = $1.to_i\n if field_value =~ /\\A\".*\"\\z/\n str = eval(field_value)\n else\n str = eval('\"' + field_value + '\"')\n end\n raise \"Blob expected to have #{bloblen} bytes, had #{str.size}\" unless str.size == bloblen\n str\n else\n warn \"Unknown ft/fv #{field_type} #{field_value}\"\n \"x\"\n end\n end",
"def bytes; end",
"def literal_blob(v)\n blob = '0x'\n v.each_byte{|x| blob << sprintf('%02x', x)}\n blob\n end",
"def to_blob\n @blob\n end",
"def bytes(*args); data(1, *args); end",
"def bytes cols\n decode_values :byte, cols, true\n end",
"def bytea(value)\n \"'\\\\x#{value.unpack1('H*')}'::bytea\"\n end",
"def as_binary\n raise Error::UnsupportedOperation, \"Method Geometry#as_binary not defined.\"\n end",
"def prepare_column_options(column)\n super.tap do |spec|\n spec[:encoding] = \"'#{column.sql_type_metadata.encoding}'\" if column.sql_type_metadata.encoding.present?\n end\n end",
"def type_literal_generic_string(column)\n if column[:text]\n :text\n elsif column[:fixed]\n \"char(#{column[:size]||default_string_column_size})\"\n elsif column[:text] == false || column[:size]\n \"varchar(#{column[:size]||default_string_column_size})\"\n else\n :text\n end\n end",
"def to_bytes; end",
"def to_bytes; end",
"def database_column_type\n :string\n end",
"def short_binary_type; end",
"def read_keyblob(type); end",
"def type_literal_generic_bignum_symbol(column)\n column[:serial] ? :bigserial : super\n end",
"def pg_jsonb\n Sequel::Postgres::JSONBArray.new(self)\n end",
"def bytes name, description: nil, mode: :nullable, policy_tags: nil, max_length: nil\n record_check!\n\n add_field name,\n :bytes,\n description: description,\n mode: mode,\n policy_tags: policy_tags,\n max_length: max_length\n end",
"def result_value_of(declared_type, value)\n if value.is_a?(::Amalgalite::Blob)\n SQL::Blob.new(value.to_s)\n elsif value.is_a?(String) && declared_type\n (meth = self.class.sql_to_method(declared_type.downcase)) ? send(meth, value) : value\n else\n super\n end\n end",
"def typecast_value_jsonb(value)\n case value\n when JSONBObject\n value\n when String\n if typecast_json_strings\n JSONBString.new(value)\n else\n _wrap_jsonb(_parse_json(value))\n end\n when *JSON_WRAP_CLASSES\n JSONDatabaseMethods.jsonb_primitive_wrapper(value).new(value)\n when JSONObject\n value = value.__getobj__\n JSONDatabaseMethods.jsonb_primitive_wrapper(value).new(value)\n else\n raise Sequel::InvalidValue, \"invalid value for jsonb: #{value.inspect}\"\n end\n end",
"def bind_string(stmt, index, value)\n case value.encoding\n when Encoding.utf_8, Encoding.us_ascii\n API.sqlite3_bind_text(stmt, index, value, value.bytesize, TRANSIENT)\n when Encoding.utf_16le, Encoding.utf_16be\n value = add_byte_order_mask(value)\n API.sqlite3_bind_text16(stmt, index, value, value.bytesize, TRANSIENT)\n else\n API.sqlite3_bind_blob(stmt, index, value, value.bytesize, TRANSIENT)\n end\n end",
"def blob\n nil\n end",
"def quote(value, column = nil)\n case value\n when String, ActiveSupport::Multibyte::Chars\n value_S = value.to_s\n if column && column.type == :binary && column.class.respond_to?(:string_to_binary)\n \"'#{column.class.string_to_binary(value_S)}'\"\n else\n super(value, column)\n end\n else\n super(value, column)\n end\n end",
"def __rubyrel_from_physical_value(physical_value)\n return physical_value if Sequel::Schema::Generator::GENERIC_TYPES.include?(self)\n Marshal.load(Base64.decode64(physical_value))\n end",
"def to_string_or_binary(value)\n encoding = value.encoding\n if encoding == Encoding::ASCII_8BIT\n Puppet::Pops::Types::PBinaryType::Binary.from_binary_string(value).to_s\n else\n # Transform to UTF-8 (do not assume UTF-8 is correct) with source invalid byte\n # sequences and UTF-8 undefined characters replaced by the default unicode uFFFD character\n # (black diamond with question mark).\n value.encode(Encoding::UTF_8, encoding, :invalid => :replace, :undef => :replace)\n end\n end",
"def to_bytes v\n HBase.import_java_classes!\n\n case v\n when Array\n v.to_java(Java::byte)\n when String, ByteArray\n v.to_java_bytes\n when Fixnum\n Bytes.java_send :toBytes, [Java::long], v\n when Symbol\n v.to_s.to_java_bytes\n when Float\n Bytes.java_send :toBytes, [Java::double], v\n when true, false, ByteBuffer\n Bytes.to_bytes v\n when nil\n ''.to_java_bytes\n when Bignum\n raise ArgumentError, \"Integer too large. Consider storing it as a BigDecimal.\"\n when BigDecimal\n Bytes.java_send :toBytes, [java.math.BigDecimal], v.to_java\n when java.math.BigDecimal\n Bytes.java_send :toBytes, [java.math.BigDecimal], v\n when Hash\n len = v.length\n raise ArgumentError, \"Unknown value format\" unless len == 1\n\n val = v.values.first\n raise ArgumentError, \"Unknown value format\" unless val.is_a?(Numeric)\n\n case v.keys.first\n when :byte\n [val].to_java(Java::byte)\n when :int\n Bytes.java_send :toBytes, [Java::int], val\n when :short\n Bytes.java_send :toBytes, [Java::short], val\n when :long, :fixnum\n Bytes.java_send :toBytes, [Java::long], val\n when :float\n Bytes.java_send :toBytes, [Java::float], val\n when :double\n Bytes.java_send :toBytes, [Java::double], val\n else\n raise ArgumentError, \"Invalid value format\"\n end\n else\n if java_bytes?(v)\n v\n else\n raise ArgumentError.new(\"Don't know how to convert #{v.class} into Java bytes\")\n end\n end\n end",
"def as_bytes\n raise NotImplementedError\n end",
"def type_literal_generic_bigdecimal(column)\n type_literal_generic_numeric(column)\n end",
"def initialize(*)\n super.force_encoding(Encoding::BINARY)\n end",
"def encoding\n select_value(\n \"SELECT pg_encoding_to_char(pg_database.encoding)\" <<\n \" FROM pg_database\" <<\n \" WHERE pg_database.datname LIKE '#{current_database}'\",\n 'SCHEMA')\n end",
"def encoding\n select_value(\"SELECT pg_encoding_to_char(encoding) FROM pg_database WHERE datname LIKE '#{current_database}'\", 'SCHEMA')\n end",
"def type_literal_generic_file(column)\n :image\n end",
"def type_literal(column)\n column[:size] ||= 255 if column[:type] == :varchar\n elements = column[:size] || column[:elements]\n \"#{type_literal_base(column)}#{literal(Array(elements)) if elements}#{UNSIGNED if column[:unsigned]}\"\n end",
"def binary?\n t = @type.downcase\n !!((t =~ /binary/) || (t =~ /blob/))\n end",
"def blob?\n @blob\n end",
"def type_literal(column)\n type = type_literal_base(column)\n column[:size] ||= 255 if type.to_s == 'varchar'\n elements = column[:size] || column[:elements]\n \"#{type}#{literal(Array(elements)) if elements}#{UNSIGNED if column[:unsigned]}\"\n end",
"def as_hex_wkb\r\n as_hex_ewkb(false,false,false)\r\n end",
"def encode\n type_byte + encode_data\n end",
"def schema_column_type(db_type)\n case db_type\n when 'json'\n :json\n when 'jsonb'\n :jsonb\n else\n super\n end\n end",
"def convert_data(v, opts = {})\n v.is_a?(BSON::Binary) ? v : BSON::Binary.new(to_hex(v['$hex'], opts), :generic)\n end",
"def type_literal_generic_bignum_symbol(column)\n :bigint\n end",
"def documento_params\n params.permit(:nome, :blob)\n end",
"def bytes\n self\n end",
"def blob_format=(value)\n @children['blob-format'][:value] = value\n end",
"def aggregate_db_storage_type; end",
"def var_bytes(val)\n val = val.to_s\n\n raise ArgumentError, \"var_bytes() cannot encode data longer \" +\n \"than 2**32-1 bytes\" \\\n unless val.length <= 2**32-1\n\n # While strings are still byte sequences, this is the same as a\n # string\n self.string(val)\n end",
"def cast(value)\n if value.is_a?(MySQLBinUUID::Type::Data)\n # It could be a Data object, in which case we should add dashes to the\n # string value from there.\n add_dashes(value.to_s)\n elsif value.is_a?(String) && value.encoding == Encoding::ASCII_8BIT && strip_dashes(value).length != 32\n # We cannot unpack something that looks like a UUID, with or without\n # dashes. Not entirely sure why ActiveRecord does a weird combination of\n # cast and serialize before anything needs to be saved..\n undashed_uuid = value.unpack1('H*')\n add_dashes(undashed_uuid.to_s)\n else\n super\n end\n end",
"def postgres_upload_binary_file(fname, remote_fname=nil)\n data = File.read(fname)\n postgres_upload_binary_data(data, remote_fname)\n end",
"def quote(value, column = nil) #:nodoc:\n if value.kind_of?(String) && column && column.sql_type =~ /^xml$/\n \"xml '#{quote_string(value)}'\"\n elsif value.kind_of?(Numeric) && column && column.sql_type =~ /^money$/\n # Not truly string input, so doesn't require (or allow) escape string syntax.\n \"'#{value.to_s}'\"\n elsif value.kind_of?(String) && column && column.sql_type =~ /^bit/\n case value\n when /^[01]*$/\n \"B'#{value}'\" # Bit-string notation\n when /^[0-9A-F]*$/i\n \"X'#{value}'\" # Hexadecimal notation\n end\n elsif column && column.sql_type =~ /^datetime$/\n if (not value.nil?) && (value.acts_like?(:date) || value.acts_like?(:time))\n \"CONVERT(datetime,'#{quoted_date(value)}',120)\"\n else\n # Fixtures#insert_fixtures sets fields like +updated_now+ to a String instance (\"Time.now.to_s(:db)\")\n super\n end\n elsif column && column.sql_type =~ /^boolean$/\n \"'#{value ? 1 : 0}'\"\n elsif value.class.to_s == 'System::Byte[]' && column && column.sql_type =~ /^binary$/\n \"CONVERT(varbinary(max),'0x#{bytes_to_string(value)}',1)\" \n else\n super\n end\n end",
"def pg_jsonb\n Sequel::Postgres::JSONBHash.new(self)\n end",
"def to_blob\n File.binread(path)\n end",
"def type_literal_generic_object(column)\n type_literal_generic_string(column)\n end",
"def blob_params\n params.require(:blob).permit(:name, :length, :last_modified)\n end",
"def encode(object)\n ''.force_encoding(Encoding::BINARY).tap do |bytes|\n newest_version = schema_versions.keys.max\n schema = schema_versions[newest_version]\n bytes << newest_version.chr if has_version_tag\n write(object, bytes, schema)\n end\n end",
"def to_binary; ''; end",
"def type_literal_specific(column)\n type = column[:type]\n type = \"double precision\" if type.to_s == 'double'\n column[:size] ||= default_string_column_size if type.to_s == 'varchar'\n elements = column[:size] || column[:elements]\n \"#{type}#{literal(Array(elements)) if elements}#{' UNSIGNED' if column[:unsigned]}\"\n end",
"def dbms_type_cast(columns, rows)\n # Cast the values to the correct type\n columns.each_with_index do |column, col_index|\n #puts \" #{column.name} type #{column.type} length #{column.length} nullable #{column.nullable} scale #{column.scale} precision #{column.precision} searchable #{column.searchable} unsigned #{column.unsigned}\"\n rows.each do |row|\n value = row[col_index]\n\n new_value = case\n when value.nil?\n nil\n when [ODBC::SQL_CHAR, ODBC::SQL_VARCHAR, ODBC::SQL_LONGVARCHAR].include?(column.type)\n # Do nothing, because the data defaults to strings\n # This also covers null values, as they are VARCHARs of length 0\n value.is_a?(String) ? value.force_encoding(\"UTF-8\") : value\n when [ODBC::SQL_DECIMAL, ODBC::SQL_NUMERIC].include?(column.type)\n column.scale == 0 ? value.to_i : value.to_f\n when [ODBC::SQL_REAL, ODBC::SQL_FLOAT, ODBC::SQL_DOUBLE].include?(column.type)\n value.to_f\n when [ODBC::SQL_INTEGER, ODBC::SQL_SMALLINT, ODBC::SQL_TINYINT, ODBC::SQL_BIGINT].include?(column.type)\n value.to_i\n when [ODBC::SQL_BIT].include?(column.type)\n value == 1\n when [ODBC::SQL_DATE, ODBC::SQL_TYPE_DATE].include?(column.type)\n value.to_date\n when [ODBC::SQL_TIME, ODBC::SQL_TYPE_TIME].include?(column.type)\n value.to_time\n when [ODBC::SQL_DATETIME, ODBC::SQL_TIMESTAMP, ODBC::SQL_TYPE_TIMESTAMP].include?(column.type)\n value.to_datetime\n # when [\"ARRAY\"?, \"OBJECT\"?, \"VARIANT\"?].include?(column.type)\n # TODO: \"ARRAY\", \"OBJECT\", \"VARIANT\" all return as VARCHAR\n # so we'd need to parse them to make them the correct type\n\n # As of now, we are just going to return the value as a string\n # and let the consumer handle it. In the future, we could handle\n # if here, but there's not a good way to tell what the type is\n # without trying to parse the value as JSON as see if it works\n # JSON.parse(value)\n when [ODBC::SQL_BINARY].include?(column.type)\n # These don't actually ever seem to return, even though they are\n # defined in the ODBC driver, but I left them in here just in case\n # so that future us can see what they should be\n value\n else\n # the use of @connection.types() results in a \"was not dropped before garbage collection\" warning.\n raise \"Unknown column type: #{column.type} #{@connection.types(column.type).first[0]}\"\n end\n\n row[col_index] = new_value\n end\n end\n rows\n end",
"def column_schema_to_ruby_type(schema)\n case t = schema[:db_type].downcase\n when /\\A(?:medium|small)?int(?:eger)?(?:\\((?:\\d+)\\))?(?: unsigned)?\\z/o\n {:type=>Integer}\n when /\\Atinyint(?:\\((\\d+)\\))?\\z/o\n {:type =>schema[:type] == :boolean ? TrueClass : Integer}\n when /\\Abigint(?:\\((?:\\d+)\\))?(?: unsigned)?\\z/o\n {:type=>Bignum}\n when /\\A(?:real|float|double(?: precision)?)\\z/o\n {:type=>Float}\n when 'boolean'\n {:type=>TrueClass}\n when /\\A(?:(?:tiny|medium|long|n)?text|clob)\\z/o\n {:type=>String, :text=>true}\n when 'date'\n {:type=>Date}\n when /\\A(?:small)?datetime\\z/o\n {:type=>DateTime}\n when /\\Atimestamp(?:\\((\\d+)\\))?(?: with(?:out)? time zone)?\\z/o\n {:type=>DateTime, :size=>($1.to_i if $1)}\n when /\\Atime(?: with(?:out)? time zone)?\\z/o\n {:type=>Time, :only_time=>true}\n when /\\An?char(?:acter)?(?:\\((\\d+)\\))?\\z/o\n {:type=>String, :size=>($1.to_i if $1), :fixed=>true}\n when /\\A(?:n?varchar|character varying|bpchar|string)(?:\\((\\d+)\\))?\\z/o\n {:type=>String, :size=>($1.to_i if $1)}\n when /\\A(?:small)?money\\z/o\n {:type=>BigDecimal, :size=>[19,2]}\n when /\\A(?:decimal|numeric|number)(?:\\((\\d+)(?:,\\s*(\\d+))?\\))?\\z/o\n s = [($1.to_i if $1), ($2.to_i if $2)].compact\n {:type=>BigDecimal, :size=>(s.empty? ? nil : s)}\n when /\\A(?:bytea|(?:tiny|medium|long)?blob|(?:var)?binary)(?:\\((\\d+)\\))?\\z/o\n {:type=>File, :size=>($1.to_i if $1)}\n when 'year'\n {:type=>Integer}\n else\n {:type=>String}\n end\n end",
"def store_blob(object,field_name,blob)\n super #=> returns blob[:tempfile]\n end"
] |
[
"0.748311",
"0.7277911",
"0.6994736",
"0.68574053",
"0.6844959",
"0.6844959",
"0.6809552",
"0.6809552",
"0.6809552",
"0.6809552",
"0.6809552",
"0.6809552",
"0.674962",
"0.66967905",
"0.66562515",
"0.64659435",
"0.6404109",
"0.62737864",
"0.6272902",
"0.6261824",
"0.621018",
"0.61972237",
"0.6172352",
"0.6146043",
"0.60632646",
"0.6060197",
"0.6055074",
"0.60020983",
"0.59994346",
"0.5999419",
"0.59552974",
"0.59552974",
"0.59412557",
"0.5921416",
"0.5912886",
"0.5892885",
"0.5881554",
"0.5860017",
"0.5859986",
"0.58587724",
"0.5845743",
"0.5841459",
"0.5793208",
"0.57876885",
"0.57534415",
"0.5702279",
"0.5699257",
"0.5669614",
"0.56695515",
"0.5658049",
"0.56151795",
"0.5610816",
"0.5610816",
"0.5609948",
"0.55829054",
"0.5580993",
"0.55772626",
"0.5577106",
"0.55693746",
"0.55678564",
"0.55664533",
"0.55618674",
"0.5538112",
"0.55376494",
"0.551392",
"0.5491716",
"0.5482944",
"0.5463835",
"0.54524285",
"0.54496104",
"0.5440734",
"0.54317135",
"0.5420889",
"0.54071826",
"0.5387063",
"0.53826964",
"0.5377513",
"0.5350436",
"0.5347841",
"0.5336378",
"0.5332463",
"0.53280133",
"0.53273594",
"0.53270715",
"0.5311075",
"0.5299184",
"0.5294939",
"0.5294387",
"0.5286564",
"0.52718604",
"0.5262815",
"0.5250874",
"0.5249661",
"0.52402943",
"0.52287674",
"0.52273583",
"0.52128726",
"0.5209264",
"0.52031195",
"0.5202966"
] |
0.734235
|
1
|
Handle serial type if :serial option is present
|
def type_literal_generic_integer(column)
column[:serial] ? :serial : super
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_serial?\n true\n end",
"def serial?\n @serial\n end",
"def serial?\n @serial\n end",
"def serial\n fetch('device.serial')\n end",
"def set_serial\n @serial = Serial.find(params[:id])\n end",
"def set_serial\n @serial = Serial.find(params[:id])\n end",
"def set_serial\n @serial = Serial.find(params[:id])\n end",
"def is_serial?\n @job.metadata_hash['type_of_record'] == \"as\"\n end",
"def serial; end",
"def serial; end",
"def serial=(_arg0); end",
"def serial_params\n params.require(:serial).permit(:name, :date, :genre, :description)\n end",
"def serial_params\n params.require(:serial).permit(:name, :date, :genre, :description)\n end",
"def serial_number=(value)\n @serial_number = value\n end",
"def serial_value(serial)\n serial.is_a?(SerialNumber) ? serial.value : serial\n end",
"def serial_number; Common.serial_number(@handle); end",
"def serial_begin(opts={})\n rate = opts[:rate] ? opts[:rate] : 9600\n @other_setup << \"Serial.begin(#{rate});\"\n end",
"def serial_port\n\t\t\treturn @sp if port_initialized?\n\t\tend",
"def get_serial\n return serial if serial.present?\n\n serial = \\\n if platform =~ /^jingdong/\n \"#{Date.today.to_s.gsub('-','')}_001\"\n else\n \"#{Date.today.to_s.gsub('-','')}\"\n end\n\n self.update_attribute(:serial, serial)\n serial\n end",
"def new_serial\n @soa[:serial] = self.class.next_serial(@soa[:serial])\n end",
"def test_serial_port\n master = SerialPort.new \"/home/ralph/dev/vmodem0\", 38400\n\n master.write \"AT\\r\\n\"\n\n master.write \"AT\\r\\n\"\n master.write \"AT\\r\\n\"\n #slave = SerialPort.new \"/dev/pts/10\", 38400\n\n assert_equal \"AT\", master.read\n end",
"def serial_chronology_params\n params.require(:serial_chronology).permit(:preceding_serial_id, :succeeding_serial_id)\n end",
"def create\n @serial = Serial.new(serial_params)\n\n respond_to do |format|\n if @serial.save\n format.html { redirect_to @serial, notice: \"Serial '#{@serial.name}' was successfully created.\" }\n format.json { render action: 'show', status: :created, location: @serial }\n else\n format.html { render action: 'new' }\n format.json { render json: @serial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def require_serial_number?\n !!(self.registered_download.require_serial_number?)\n end",
"def serial_console\n return nil if serial_baud_rate.nil?\n [serial_baud_rate, serial_dce_dte, serial_flow_control]\n end",
"def initialize(port=:auto, verbosity=:warn, baud=9600, cmd_delay=0.1)\n\t\t\n\t\t# if no port was specified, we'll attempt to iterate\n\t\t# all of the serial ports that i've ever seen gsm\n\t\t# modems mounted on. this is kind of shaky, and\n\t\t# only works well with a single modem. for now,\n\t\t# we'll try: ttyS0, ttyUSB0, ttyACM0, ttyS1...\n\t\tif port == :auto\n\t\t\t@device, @port = catch(:found) do\n\t\t\t\t0.upto(8) do |n|\n\t\t\t\t\t[\"ttyS\", \"ttyUSB\", \"ttyACM\"].each do |prefix|\n\t\t\t\t\t\ttry_port = \"/dev/#{prefix}#{n}\"\n\t\t\t\n\t\t\t\t\t\tbegin\n\t\t\t\t\t\t\t# serialport args: port, baud, data bits, stop bits, parity\n\t\t\t\t\t\t\tdevice = SerialPort.new(try_port, baud, 8, 1, SerialPort::NONE)\n\t\t\t\t\t\t\tthrow :found, [device, try_port]\n\t\t\t\t\t\t\n\t\t\t\t\t\trescue ArgumentError, Errno::ENOENT\n\t\t\t\t\t\t\t# do nothing, just continue to\n\t\t\t\t\t\t\t# try the next port in order\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\tend\n\t\n\t\t\t\t# tried all ports, nothing worked\n\t\t\t\traise AutoDetectError\n\t\t\tend\n\t\t\n\t\t# if the port was a port number or file\n\t\t# name, initialize a serialport object\n\t\telsif port.is_a?(String) or port.is_a?(Fixnum)\n\t\t\t@device = SerialPort.new(port, baud, 8, 1, SerialPort::NONE)\n\t\t\t@port = port\n\t\t\t\n\t\t# otherwise, we'll assume that the object passed\n\t\t# was an object ready to quack like a serial modem\n\t\telse\n\t\t\t@device = port\n\t\t\t@port = nil\n\t\tend\n\t\t\n\t\t@cmd_delay = cmd_delay\n\t\t@verbosity = verbosity\n\t\t@retry_commands = 6\n\t\t@read_timeout = 10\n\t\t@locked_to = false\n\t\t\n\t\t# keep track of the depth which each\n\t\t# thread is indented in the log\n\t\t@log_indents = {}\n\t\t@log_indents.default = 0\n\t\t\n\t\t# to keep multi-part messages until\n\t\t# the last part is delivered\n\t\t@multipart = {}\n\t\t\n\t\t# start logging to file\n\t\tlog_init\n\t\t\n\t\t# to store incoming messages\n\t\t# until they're dealt with by\n\t\t# someone else, like a commander\n\t\t@incoming = []\n\t\t\n\t\t# initialize the modem; rubygsm is (supposed to be) robust enough to function\n\t\t# without these working (hence the \"try_\"), but they make different modems more\n\t\t# consistant, and the logs a bit more sane.\n\t\ttry_command \"ATE0\" # echo off\n\t\ttry_command \"AT+CMEE=1\" # useful errors\n\t\ttry_command \"AT+WIND=0\" # no notifications\n\t\t\n\t\t# PDU mode isn't supported right now (although\n\t\t# it should be, because it's quite simple), so\n\t\t# switching to text mode (mode 1) is MANDATORY\n\t\tcommand \"AT+CMGF=1\"\n\tend",
"def interactivekit_params\n params.require(:interactivekit).permit(:serial)\n end",
"def software_serial(rx, tx, opts={})\n raise ArgumentError, \"can only define rx from Fixnum, got #{rx.class}\" unless rx.is_a?(Fixnum)\n raise ArgumentError, \"can only define tx from Fixnum, got #{tx.class}\" unless tx.is_a?(Fixnum)\n \n output_pin(tx)\n \n rate = opts[:rate] ? opts[:rate] : 9600\n \t\tif opts[:as]\n \t\t\t@declarations << \"SoftwareSerial _#{opts[ :as ]} = SoftwareSerial(#{rx}, #{tx});\"\n \t\t\taccessor = []\n \t\t\taccessor << \"SoftwareSerial& #{opts[ :as ]}() {\"\n \t\t\taccessor << \"\\treturn _#{opts[ :as ]};\"\n \t\t\taccessor << \"}\"\n \t\t\t@@swser_inc ||= FALSE\n \t\t\tif (@@swser_inc == FALSE) # on second instance this stuff can't be repeated\n \t\t\t\t@@swser_inc = TRUE\n\t \t\t\taccessor << \"int read(SoftwareSerial& s) {\"\n \t\t\t\taccessor << \"\\treturn s.read();\"\n \t\t\t\taccessor << \"}\"\n \t\t\t\taccessor << \"void println( SoftwareSerial& s, char* str ) {\"\n \t\t\t\taccessor << \"\\treturn s.println( str );\"\n \t\t\t\taccessor << \"}\"\n \t\t\t\taccessor << \"void print( SoftwareSerial& s, char* str ) {\"\n \t\t\t\taccessor << \"\\treturn s.print( str );\"\n \t\t\t\taccessor << \"}\"\n \t\t\t\taccessor << \"void println( SoftwareSerial& s, int i ) {\"\n \t\t\t\taccessor << \"\\treturn s.println( i );\"\n \t\t\t\taccessor << \"}\"\n \t\t\t\taccessor << \"void print( SoftwareSerial& s, int i ) {\"\n \t\t\t\taccessor << \"\\treturn s.print( i );\"\n \t\t\t\taccessor << \"}\"\n \t\t\tend\n \t\t\t@accessors << accessor.join( \"\\n\" )\n \t\t\t\n \t\t\t@signatures << \"SoftwareSerial& #{opts[ :as ]}();\"\n \n \t\t\t@other_setup << \"_#{opts[ :as ]}.begin(#{rate});\"\n \t\tend\n \tend",
"def set_serial_chronology\n @serial_chronology = SerialChronology.find(params[:id])\n end",
"def serial_number\n return @serial_number\n end",
"def serial?\n !@enumcron.empty?\n end",
"def check_serial\n <<-CODE\n t1 = stack_pop();\n next_literal;\n next_int;\n stack_push(cpu_check_serial(state, c, t1, _lit, _int));\n CODE\n end",
"def arduino_params\n params.require(:arduino).permit(:model, :serial)\n end",
"def lot_code_from_serial\n if lot_code.blank? && serial.present?\n self[:lot_code] = serial\n end\n end",
"def serial_primary_key_options\n {:primary_key => true, :serial => true, :type=>Integer}\n end",
"def serial_params\n params.require(:serial).permit(:name,\n :publisher,\n :place_published,\n :primary_language_id,\n :first_year_of_issue,\n :last_year_of_issue,\n :alternate_values_attributes => [:id,\n :value,\n :type,\n :language_id,\n :alternate_value_object_type,\n :alternate_value_object_id,\n :alternate_value_object_attribute,\n :_destroy])\n end",
"def create\n @serial = Serial.new(serial_params)\n\n respond_to do |format|\n if @serial.save\n format.html { redirect_to @serial, notice: 'Serial was successfully created.' }\n format.json { render action: 'show', status: :created, location: @serial }\n else\n format.html { render action: 'new' }\n format.json { render json: @serial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update_serial!\n update_serial\n save\n end",
"def update_serial\n return if self.type != 'SOA'\n\n a = self.content.split(' ')\n\n # Last 4 digits of serial (serial update counter)\n i = a[2][6..-1].to_i\n\n # 201801 -> 201821\n t = Time.now.strftime('%Y%m').to_i + 20\n\n # Reset serial update counter if serial is on \"old\" format (e.g.\n # 2018010199)\n i = 0 if a[2][4..5].to_i <= 12\n\n # Increment serial update counter\n i += 1\n\n a[2] = t.to_s + \"%04d\" % i\n self.content = a.join(' ')\n end",
"def add_serial_to_parallels_vm(options)\n message = \"Information:\\tAdding Serial Port to \"+options['name']\n command = \"prlctl set \\\"#{options['name']}\\\" --add-device serial --ouput /tmp/#{options['name']}\"\n execute_command(options,message,command)\n return\nend",
"def get_serial_number(data)\n data['product']['serial_number'] || 'n/a'\n end",
"def read\n @serial_port.read()\n end",
"def add_device(serial)\n res = @client.post(\"#{path}/devices\", nil, { serial: serial }, \"Content-Type\" => \"application/json\")\n Device.new(@client, res.json) if res.success?\n end",
"def create_serial_ilv(s_mode = @s_mode, pkt_type = @pkt_type, ter_id = @ter_id, pkt_first = true, pkt_last = true)\n\n $test_logger.log(\"Create Serial ILV Packet Format..\")\n $test_logger.log(\"Serial Comm Mode :- #{s_mode}, Packet type:- #{pkt_type}, Terminal Identifier:- #{ter_id}\")\n raise \"Specify at least one serial protocol RS422/RS485\" if s_mode == nil\n raise \"Specify at least one Packet type\" if pkt_type == nil\n raise \"Specify Terminal Identifier\" if ter_id == nil\n\n #set flag to true for serial comunication\n @chk_serial_comm = true\n\n # create Packet Identifier\n pkt_id = create_packet_idenfier(pkt_first, pkt_last, pkt_type)\n\n # store all ILV element and hex string of ILV command\n ilv_elem = @xml_doc.elements[REQ_TAG].to_a #Here using xml_doc so there could be issue with null character in future\n hex_str = @ilv_hex_str\n\n #create Data node assign whole ILV Command into Data Tag element\n add_tag(REQ_TAG, DATA_TAG, \"//#{ID_TAG}\")\n ilv_elem.each do |elem|\n @xml_ilv_node.root.elements[\"//#{DATA_TAG}\"].add(elem)\n end\n\n #delete ILV command from Request Tag element\n @xml_ilv_node.root.elements[REQ_TAG].delete_element \"/Request/Identifier\" if ID_TAG\n @xml_ilv_node.root.elements[REQ_TAG].delete_element \"/Request/Length\" if LEN_TAG\n @xml_ilv_node.root.elements[REQ_TAG].delete_element \"/Request/Values\" if VALUE_TAG\n\n #Add Start text node with value and attributes\n add_tag(REQ_TAG, STX_TAG, \"//#{DATA_TAG}\")\n set_tag_value(\"//#{STX_TAG}\", \"0x02\")\n set_tag_attr(\"//#{STX_TAG}\", SIZE_ATTR, \"1\")\n\n #Add Packet Identifier Node with value and attributes\n add_tag(REQ_TAG, PKTID_TAG, \"//#{DATA_TAG}\")\n set_tag_value(\"//#{PKTID_TAG}\", pkt_id)\n set_tag_attr(\"//#{PKTID_TAG}\", SIZE_ATTR, \"1\")\n\n #Add Terminal Identifier Node or Request counter Node based on serial communication RS485/RS422\n ter_id = ter_id.to_s(16)\n if(s_mode == \"RS485\")\n add_tag(REQ_TAG, TID_TAG, \"//#{DATA_TAG}\")\n set_tag_value(\"//#{TID_TAG}\", \"0x\" + ter_id)\n set_tag_attr(\"//#{TID_TAG}\", SIZE_ATTR, \"1\")\n elsif (s_mode == \"RS422\")\n add_tag(REQ_TAG, RC_TAG, \"//#{DATA_TAG}\")\n set_tag_value(\"//#{RC_TAG}\", \"0x\" + ter_id)\n set_tag_attr(\"//#{RC_TAG}\", SIZE_ATTR, \"1\")\n else\n $test_logger.log(\"Please provide Serial Protocol mode RS485/RS422\")\n raise \"Specify at least one Serial Protocol mode RS485/RS422\"\n end\n\n #Add End Text Node with value and attributes\n add_tag(REQ_TAG, ETX_TAG, \"//#{DATA_TAG}\", false)\n set_tag_value(\"//#{ETX_TAG}\", \"0x02\")\n set_tag_attr(\"//#{ETX_TAG}\", SIZE_ATTR, \"1\")\n\n #Add Data Link Escape Node with value and attributes\n add_tag(REQ_TAG, DLE_TAG, \"//#{DATA_TAG}\", false)\n set_tag_value(\"//#{DLE_TAG}\", \"0x1b\")\n set_tag_attr(\"//#{DLE_TAG}\", SIZE_ATTR, \"1\")\n\n #Calculate checksum and Add CRC Node with value and attributes\n crc = calc_checksum(hex_str)\n add_tag(REQ_TAG, CRC_TAG, \"//#{DATA_TAG}\", false)\n set_tag_value(\"//#{CRC_TAG}\", \"0x\" + crc)\n set_tag_attr(\"//#{CRC_TAG}\", SIZE_ATTR, \"2\")\n #puts to_s\n #Notify changes to load other formats like RAW and HEX\n notify_change(InputChannel::XML)\n #puts \"hex:-#{@ilv_hex_str}\"\n @ilv_hex_str\n end",
"def determine_name\n name = nil\n\n case @type\n when :computer\n name = `sudo dmidecode -s system-serial-number`.chomp\n when :hard_drive\n `sudo smartctl -i #{@options['device']}`.each_line do |line|\n line =~ /^Serial\\sNumber:\\s+([A-Za-z0-9_-]+)$/\n name = $1\n end\n end\n\n # Check if the id is valid (all word characters plus dash)\n if ( name =~ /^[A-Za-z0-9_-]+$/ )\n name\n else\n nil\n end\n end",
"def serial_types\n types = []\n types << 'SX' if %w[b s].include?(record.leader[7])\n types.concat journal_types\n types.concat newspaper_types\n types.uniq!\n return types\n end",
"def open_serial_port(port = PORT, speed = BAUD, bits = BITS, stopbits = STOPBITS, parity = PARITY)\n begin\n @sp = SerialPort.new(port, speed, bits, stopbits, parity)\n @sp.flow_control = SerialPort::NONE\n @sp.binmode\n @sp.read_timeout = 30000\n #puts \"DTR #{@sp.dtr} DSR #{@sp.dsr} RTS #{@sp.rts} CTS #{@sp.cts} DCD #{@sp.dcd} RI #{@sp.ri}\"\n rescue => error\n STDERR.puts Weather_exception, \"open_serial_port: \" + error.to_s\n @sp = nil\n end\n end",
"def lot_code_from_serial\n if lot_code.blank? && serial.present?\n self[:lot_code] = serial\n end\n self\n end",
"def admin_device_params\n params.require(:admin_device).permit(:serial)\n end",
"def add_device(serial)\n res = @client.post(\"#{path}/devices\", nil, { serial: serial }, \"Content-Type\" => \"application/json\")\n\n ::M2X::Client::Device.new(@client, res.json) if res.success?\n end",
"def serial_primary_key_options\n # :nocov:\n auto_increment_key = server_version >= 100002 ? :identity : :serial\n # :nocov:\n {:primary_key => true, auto_increment_key => true, :type=>Integer}\n end",
"def serial_port_aenable=(serial_port_aenable)\n validator = EnumAttributeValidator.new('String', [\"platform-default\", \"enabled\", \"disabled\"])\n unless validator.valid?(serial_port_aenable)\n fail ArgumentError, \"invalid value for \\\"serial_port_aenable\\\", must be one of #{validator.allowable_values}.\"\n end\n @serial_port_aenable = serial_port_aenable\n end",
"def initialize(port, options = {})\n baudrate = options[:baudrate] || 57600\n @serial_port = port.is_a?(String) ? SerialPort.new(port, baudrate, 8, 1, SerialPort::NONE) : port\n @serial_port.read_timeout = 2\n @major_version = 0\n @minor_version = 0\n @pins = []\n @analog_pins = []\n @connected = false\n end",
"def next_serial\n serial = nil\n\n # This is slightly odd. If the file doesn't exist, our readwritelock creates\n # it, but with a mode we can't actually read in some cases. So, use\n # a default before the lock.\n unless FileTest.exist?(Puppet[:serial])\n serial = 0x1\n end\n\n Puppet.settings.readwritelock(:serial) { |f|\n if FileTest.exist?(Puppet[:serial])\n serial ||= File.read(Puppet.settings[:serial]).chomp.hex\n end\n\n # We store the next valid serial, not the one we just used.\n f << \"%04X\" % (serial + 1)\n }\n\n return serial\n end",
"def update\n respond_to do |format|\n if @serial.update(serial_params)\n format.html { redirect_to @serial, notice: 'Serial was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @serial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def update\n respond_to do |format|\n if @serial.update(serial_params)\n format.html { redirect_to @serial, notice: 'Serial was successfully updated.' }\n format.json { head :no_content }\n else\n format.html { render action: 'edit' }\n format.json { render json: @serial.errors, status: :unprocessable_entity }\n end\n end\n end",
"def _set_serial_number number\n appname = @appname\n pattern = Regexp.new \"^#{appname}:.*$\"\n filename = @app_serial_path || \"serial_numbers\"\n # during testing redo this file does not exist, so i get errors\n if !File.exists? filename\n _get_serial_number\n end\n _backup filename\n change_row filename, pattern, \"#{appname}:#{number}\"\n end",
"def serial?\n @items.any? { |item| item.serial? }\n end",
"def serial_read_int()\n return serial_read_line().hex\n end",
"def set_card_type\n self.cc_type ||= CardDetector.brand?(number)\n end",
"def initialize(serial, secret)\n raise Bnet::BadInputError.new(\"bad serial #{serial}\") unless Util.is_valid_serial?(serial)\n raise Bnet::BadInputError.new(\"bad secret #{secret}\") unless Util.is_valid_secret?(secret)\n\n @normalized_serial = Util.normalize_serial(serial)\n @secret = secret\n end",
"def device_type=(s)\n self[:type] = s\n end",
"def set_serial_number number\n appname = @appname\n pattern = Regexp.new \"^#{appname}:.*$\"\n filename = @app_serial_path || \"serial_numbers\"\n # during testing redo this file does not exist, so i get errors\n if !File.exists? filename\n get_serial_number\n end\n backup filename\n # from Sed\n change_row filename, pattern, \"#{appname}:#{number}\"\n end",
"def tyre_params\n params.require(:tyre).permit(:serial, :brand, :truck_id, :position, :status)\n end",
"def uuid\n return serial_no unless (serial_no.nil? or serial_no == '')\n raise Exception.new('no uuid for you')\n end",
"def initialize(options = {})\n @port = SerialPort.new(options[:port], options[:speed])\n cmd(\"AT\")\n # Set to text mode\n cmd(\"AT+CMGF=1\")\n # Set SMSC number\n cmd(\"AT+CSCA=\\\"#{SMSC}\\\"\") \n end",
"def serial_revoked?(crl, serial)\n raise TypeError, \"crl must be a Ruby OpenSSL::X509::CRL object\" unless crl.is_a?(::OpenSSL::X509::CRL)\n raise TypeError, \"serial must be a Ruby String or Integer object\" unless serial.is_a?(String) || serial.is_a?(Integer)\n\n serial_to_verify = if serial.is_a?(String)\n serial.to_i(16)\n else\n serial\n end\n status = false\n crl.revoked.each do |revoked|\n status = true if revoked.serial == serial_to_verify\n end\n status\n end",
"def close\n if !@serial_port.nil? then\n deafen\n @serial_port.close\n @serial_port = nil\n end\n end",
"def parsed_serial_no\n (no_extension_or_in_end? ? data_file_name.gsub(ext, '').split('.')[1] : data_file_name.split(ext)[1]&.gsub('.', ''))&.to_i\n end",
"def show\n respond_with @serial\n end",
"def device_type=(s)\n self[:type] = s\n end",
"def set_card_type\n @card_type_name = (params[:type] || default_type)\n @full_type_name = @card_type_name\n @card_type = @full_type_name.classify.constantize\n raise(ArgumentError, \"No valid card type\") unless(@card_type <= BaseCard)\n @card_type\n end",
"def set_card_type\n self.cc_type ||= Spree::Creditcard::CardDetector.type?(self.number.to_s.gsub(/\\s/,''))\n end",
"def read\n serial_port.read_nonblock(4096)\n rescue EOFError\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => Integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => Integer, :auto_increment => true}\n end",
"def input_port?\n @type == :input\n end",
"def validate_devices\n [gateway_serial, transmitter_serial].each do |_serial|\n errors.add_to_base(\"Device #{_serial} is not available. Please verify the serial number.\") unless (_serial.blank? || Device.available?( _serial, senior))\n end\n end",
"def device_type=(s)\n self[:type] = s\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end",
"def serial_primary_key_options\n {:primary_key => true, :type => :integer, :auto_increment => true}\n end",
"def generate_serial_number\n self.serial_number = SecureRandom.uuid \n generate_serial_number if Product.find_by(serial_number: serial_number)\n\tend",
"def perform_with_tingyun\n self._ty_serial = true\n perform_without_tingyun\n end",
"def run\n\t\t\t#is the serial port setup?\n\t\t\tputs \"Listening on serial port #{@portname}\"\n\t\t\tif port_initialized?\n\t\t\t\t@sp.flush_input\n \t\t\tbegin \n \t\t\t\twhile data = @sp.readline\n \t\t\t\t\tparse(data)\n \t\t\t\tend\n \t \t\t\trescue Interrupt\n \t\t\t\t\tputs \"Exiting\"\t\n \t\t\t\t\t@sp.close\n \t\t\t\tend\n \t\t\tend\n\t\tend",
"def scaf_serial_columns\n scaf_columns.select { |c| c.primary }\n end",
"def initialize(log_id:, serial:, seqno:)\n super(log_id: log_id, serial: serial, seqno: seqno, type: 'SYSTEM')\n\n self\n end",
"def register_device(device_serial)\n register_form.device_serial_txt.set device_serial\n page.find('#btnSubmitForm', wait: 3).click\n has_act_code_txt?(wait: 10)\n end",
"def update_serial\n unless Record.batch_soa_updates.nil? \n if Record.batch_soa_updates.include?( self.id )\n return\n end\n \n Record.batch_soa_updates << self.id\n end\n \n @serial_updated = true\n\n date_segment = Time.now.strftime( \"%Y%m%d\" )\n\n # Same day change?\n increment = if date_segment == self.serial.to_s[0,8]\n increment = self.serial.to_s[8,2].succ\n else\n \"01\"\n end\n\n self.serial = ( date_segment + increment[-2,2] ).to_i\n \n end",
"def add_serial_number\n serial_number = serial_number_stub\n # Converting to Base 36 can be useful when you want to generate random combinations of letters and numbers, since it counts using every number from 0 to 9 and then every letter from a to z. Read more about base 36 here: https://en.wikipedia.org/wiki/Senary#Base_36_as_senary_compression\n alphanumerics = (0...36).map{ |i| i.to_s 36 }\n 13.times{|t| serial_number << alphanumerics.sample}\n self.update(serial_number: serial_number)\n end",
"def add_serial_number\n serial_number = serial_number_stub\n # Converting to Base 36 can be useful when you want to generate random combinations of letters and numbers, since it counts using every number from 0 to 9 and then every letter from a to z. Read more about base 36 here: https://en.wikipedia.org/wiki/Senary#Base_36_as_senary_compression\n alphanumerics = (0...36).map{ |i| i.to_s 36 }\n 13.times{|t| serial_number << alphanumerics.sample}\n self.update(serial_number: serial_number)\n end",
"def identifier\n # TODO: Don't parse ser_type every time\n r_class = Thales::Datamodel::CLASS_FOR[record.ser_type]\n data = r_class.new.deserialize(record.ser_data)\n\n data.identifier\n end",
"def credit_card_type\n params['CardType']\n end",
"def before_run_command\n return unless @local_serial\n # for `cu` command\n @reader.expect(/^Connected\\./, 1) do\n write_and_logging 'Send enter to connect serial', \"\\r\\n\", true\n end\n end",
"def standard_port?; end",
"def tank_params\n params.require(:tank).permit(:serial_number)\n end",
"def baud_rate=(baud_rate)\n validator = EnumAttributeValidator.new('String', [\"platform-default\", \"9600\", \"19200\", \"38400\", \"57600\", \"115200\"])\n unless validator.valid?(baud_rate)\n fail ArgumentError, \"invalid value for \\\"baud_rate\\\", must be one of #{validator.allowable_values}.\"\n end\n @baud_rate = baud_rate\n end",
"def initialize(system, config)\n super\n\n @input_pins = (0..53).map do |i|\n pin = Pin.new(self)\n pin.config['name'] = DEFAULT_INPUTS[i] if DEFAULT_INPUTS[i]\n pin\n end\n @output_pins = []\n output_pins << OutputPin.new(self)\n\n if config['host']\n @telnet = TCPSocket.new(config['host'], 23)\n @wait_io = @telnet\n elsif config['serial_port']\n @telnet = Serial.new(config['serial_port'])\n # yes, this is terrible, but I want to `select` on this, so need\n # a true FD. I could use File.open, but then I would need to\n # tcsetattr the serial port settings, which would take work that\n # the RubySerial library does\n @wait_io = IO.new(@telnet.instance_variable_get(:@fd))\n end\n\n # I _could_ detect these with the speaker system configuration,\n # but that doesn't work if the receiver is off. so put it in the\n # config\n if config['speaker_b']\n speaker_selector = SpeakerSelector.new\n output_pins.first.connect(speaker_selector.input_pins.first)\n end\n if config['zones']\n if config['zones'].include?(2)\n output_pins << OutputPin.new(self, 2)\n end\n if config['zones'].include?(3)\n output_pins << OutputPin.new(self, 3)\n end\n if config['zones'].include?('hdzone')\n output_pins << OutputPin.new(self, :hdzone)\n end\n end\n\n if @telnet\n # only ask for the status if IP; serial would wake the\n # receiver up if it's not on\n unless config['serial_port']\n # interrogate the input names\n input_pins.length.times do |i|\n @telnet.write(\"?RGB%02d\\r\" % i)\n end\n # populate the initial status\n output_pins.each do |pin|\n pin.send(:poll_status)\n end\n end\n # read all pending messages\n while poll_status(timeout: 0.0); end\n Thread.new do\n loop do\n poll_status\n end\n end\n end\n end",
"def tty\n unless (@connection.nil?) then return @connection end\n \n @serial ||= self.device['org.bluez.Serial']\n devpattern = \"spp\" # TODO: figure out how to get a device string\n @connection = @serial.Connect(devpattern)[0]\n end",
"def device_type\n self[:type]\n end"
] |
[
"0.73630124",
"0.66959614",
"0.66959614",
"0.6484796",
"0.61889386",
"0.6171069",
"0.6171069",
"0.5970394",
"0.59641844",
"0.59641844",
"0.5879537",
"0.58395797",
"0.58285207",
"0.57895595",
"0.5720503",
"0.54991263",
"0.5484193",
"0.54618275",
"0.54382974",
"0.5431286",
"0.5414517",
"0.5404689",
"0.5363121",
"0.5350517",
"0.5349966",
"0.53387934",
"0.53057843",
"0.52875596",
"0.52615774",
"0.5261492",
"0.5255951",
"0.5239292",
"0.52304804",
"0.52281535",
"0.5227259",
"0.52228737",
"0.52115613",
"0.5166959",
"0.51644313",
"0.51569366",
"0.51123196",
"0.5074368",
"0.5044987",
"0.50325674",
"0.5031772",
"0.50285035",
"0.50199217",
"0.500964",
"0.49737516",
"0.49641007",
"0.48845914",
"0.4877245",
"0.48750055",
"0.48699176",
"0.48423988",
"0.48423988",
"0.48240954",
"0.4806259",
"0.4786301",
"0.47820064",
"0.47765857",
"0.47383207",
"0.47199225",
"0.47160894",
"0.47108486",
"0.47095335",
"0.46974534",
"0.4684311",
"0.4683562",
"0.46830386",
"0.4634025",
"0.46286464",
"0.46236897",
"0.46236017",
"0.46123374",
"0.46123374",
"0.46064323",
"0.45964545",
"0.45880666",
"0.4583284",
"0.4583284",
"0.45713875",
"0.45527518",
"0.4551508",
"0.4545462",
"0.45423365",
"0.45301107",
"0.4527166",
"0.45165125",
"0.45165125",
"0.45118704",
"0.45033646",
"0.4502079",
"0.45009926",
"0.44968393",
"0.44939584",
"0.44877642",
"0.44757822",
"0.44725132"
] |
0.5012591
|
48
|
PostgreSQL prefers the text datatype. If a fixed size is requested, the char type is used. If the text type is specifically disallowed or there is a size specified, use the varchar type. Otherwise use the text type.
|
def type_literal_generic_string(column)
if column[:text]
:text
elsif column[:fixed]
"char(#{column[:size]||default_string_column_size})"
elsif column[:text] == false || column[:size]
"varchar(#{column[:size]||default_string_column_size})"
else
:text
end
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def type_literal_generic_string(column)\n if column[:text]\n uses_clob_for_text? ? :clob : :text\n elsif column[:fixed]\n \"char(#{column[:size]||default_string_column_size})\"\n else\n \"varchar(#{column[:size]||default_string_column_size})\"\n end\n end",
"def revert_postgres_type( type )\n case type\n when /\\Acharacter varying/\n return :String, :default_size => 255\n when /\\Acharacter/\n return :String, :fixed => true, :default_size => 255\n when /\\Atext\\z/\n return :String, :text => true\n when /\\Abytea\\z/\n return :blob\n when /\\Atimestamp/\n return :timestamp\n end\n end",
"def type_to_sql_with_notext(*args)\n type = type_to_sql_without_notext(*args)\n if type =~ /(text|blob)/\n 'varchar(2500)' # If this is bigger than about 21000 it always fails, and sometimes hits a row limit anyway if too large\n else\n type\n end\n end",
"def text_type\n return @text_type\n end",
"def cast_string(sql_type = nil)\n cast(sql_type || :text).sql_string\n end",
"def database_column_type\n :string\n end",
"def text_type\n @@text_type_ord_to_text_type[self.text_type_ord]\n end",
"def is_texttype?(); @type == GRT_TEXTTYPE; end",
"def schema_data_type\n case type\n when \"N\", \"F\"\n decimal > 0 ? \":float\" : \":integer\"\n when \"I\"\n \":integer\"\n when \"D\"\n \":date\"\n when \"T\"\n \":datetime\"\n when \"L\"\n \":boolean\"\n when \"M\"\n \":text\"\n else\n \":string, :limit => #{length}\"\n end\n end",
"def text_type=(value)\n @text_type = value\n end",
"def text_type=(value)\n self.text_type_ord = @@text_type_to_text_type_ord[value]\n end",
"def type *val\n return @chars_allowed if val.empty?\n\n dtype = val[0]\n #return self if @chars_allowed # disallow changing\n if dtype.is_a? Regexp \n @chars_allowed = dtype\n return self\n end\n dtype = dtype.to_s.downcase.to_sym if dtype.is_a? String\n case dtype # missing to_sym would have always failed due to to_s 2011-09-30 1.3.1\n when :integer, Integer\n @chars_allowed = /\\d/\n when :numeric, :float, Numeric, Float\n @chars_allowed = /[\\d\\.]/ \n when :alpha\n @chars_allowed = /[a-zA-Z]/ \n when :alnum\n @chars_allowed = /[a-zA-Z0-9]/ \n else\n raise ArgumentError, \"Field type: invalid datatype specified. Use :integer, :numeric, :float, :alpha, :alnum \"\n end\n self\n end",
"def revert_generic_type( type )\n case type\n when /\\Avarchar/\n return :String, :default_size => 255\n when /\\Achar/\n return :String, :fixed => true, :default_size => 255\n when /\\Atext\\z/\n return :String, :text => true\n when /\\A(\\w+)\\([\\s\\d,]+\\)\\z/\n return $1.to_sym\n when /\\A\\w+\\z/\n return type.to_sym\n end\n end",
"def type=(val)\n\n dtype = val\n #return self if @chars_allowed # disallow changing\n # send in a regexp, we just save it.\n if dtype.is_a? Regexp \n @chars_allowed = dtype\n return self\n end\n dtype = dtype.to_s.downcase.to_sym if dtype.is_a? String\n case dtype # missing to_sym would have always failed due to to_s 2011-09-30 1.3.1\n when :integer, Integer\n @chars_allowed = /\\d/\n when :numeric, :float, Numeric, Float\n @chars_allowed = /[\\d\\.]/ \n when :alpha\n @chars_allowed = /[a-zA-Z]/ \n when :alnum\n @chars_allowed = /[a-zA-Z0-9]/ \n else\n raise ArgumentError, \"Field type: invalid datatype specified. Use :integer, :numeric, :float, :alpha, :alnum \"\n end\n self\n end",
"def schema_column_type(db_type)\n case db_type\n when /\\Ainterval\\z/io\n :interval\n when /\\Acitext\\z/io\n :string\n else\n super\n end\n end",
"def type_literal_specific(column)\n type = column[:type]\n type = \"double precision\" if type.to_s == 'double'\n column[:size] ||= default_string_column_size if type.to_s == 'varchar'\n elements = column[:size] || column[:elements]\n \"#{type}#{literal(Array(elements)) if elements}#{' UNSIGNED' if column[:unsigned]}\"\n end",
"def datatype\n @options[:datatype] || (@column ? @column.type : nil)\n end",
"def is_text?(); @type == GRT_TEXT; end",
"def text?\n type == :text\n end",
"def data_type\n if @data_type\n @data_type\n else\n (@is_string ? String : nil)\n end\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n case type.to_s\n when 'binary'\n # PostgreSQL doesn't support limits on binary (bytea) columns.\n # The hard limit is 1Gb, because of a 32-bit size field, and TOAST.\n case limit\n when nil, 0..0x3fffffff; super(type)\n else raise(ActiveRecordError, \"No binary type has byte size #{limit}.\")\n end\n when 'text'\n # PostgreSQL doesn't support limits on text columns.\n # The hard limit is 1Gb, according to section 8.3 in the manual.\n case limit\n when nil, 0..0x3fffffff; super(type)\n else raise(ActiveRecordError, \"The limit on text can be at most 1GB - 1byte.\")\n end\n when 'integer'\n return 'integer' unless limit\n\n case limit\n when 1, 2; 'smallint'\n when 3, 4; 'integer'\n when 5..8; 'bigint'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n when 'datetime'\n return super unless precision\n\n case precision\n when 0..6; \"timestamp(#{precision})\"\n else raise(ActiveRecordError, \"No timestamp type has precision of #{precision}. The allowed range of precision is from 0 to 6\")\n end\n else\n super\n end\n end",
"def simplified_type(field_type)\n case field_type\n # Numeric and monetary types\n when /^(?:real|double precision)$/ then :float\n # Monetary types\n when 'money' then :decimal\n # Character types\n when /^(?:character varying|bpchar)(?:\\(\\d+\\))?$/ then :string\n # Binary data types\n when 'bytea' then :binary\n # Date/time types\n when /^timestamp with(?:out)? time zone$/ then :datetime\n when 'interval' then :string\n # Geometric types\n when /^(?:point|line|lseg|box|\"?path\"?|polygon|circle)$/ then :string\n # Network address types\n when /^(?:cidr|inet|macaddr)$/ then :string\n # Bit strings\n when /^bit(?: varying)?(?:\\(\\d+\\))?$/ then :string\n # XML type\n when 'xml' then :xml\n # tsvector type\n when 'tsvector' then :tsvector\n # Arrays\n when /^\\D+\\[\\]$/ then :string\n # Object identifier types\n when 'oid' then :integer\n # UUID type\n when 'uuid' then :string\n # Small and big integer types\n when /^(?:small|big)int$/ then :integer\n # Pass through all types that are not specific to PostgreSQL.\n else\n super\n end\n end",
"def elasticsearch_datatype\n case self\n when TextArea,TextField\n 'string'\n when IntegerField\n 'integer'\n when NumberField\n 'float'\n when BooleanField\n 'boolean'\n when DateField\n 'date'\n when AttachmentField\n 'attachment'\n when ArrayField\n 'string'\n else\n 'string'\n end\n end",
"def assert_type(type, length, precision, scale)\n if type == 'longvarchar'\n return \"character varying(#{length})\"\n end\n\n if type == 'time'\n return \"time with time zone\"\n end\n\n if type == 'tinyint' || type == 'integer'\n return \"integer\"\n end\n\n if type == 'char'\n return \"\\\"char\\\"\"\n end\n\n if type == 'decimal'\n return \"numeric(#{precision},#{scale})\"\n end\n\n if type == 'date'\n return \"date\"\n end\n\nend",
"def choose_sql_type(type_name, value_constraint, component, options)\n case MM::DataType.intrinsic_type(type_name)\n when MM::DataType::TYPE_Boolean\n data_type_context.boolean_type\n\n when MM::DataType::TYPE_Integer\n # The :auto_assign key is set for auto-assigned types, but with a nil value in foreign keys\n length = options[:length]\n if options.has_key?(:auto_assign)\n options[:default] ||= ' GENERATED ALWAYS AS IDENTITY' if options[:auto_assign]\n length = data_type_context.default_autoincrement_length\n type_name = 'int'\n end\n if chosen = MM::DataType.choose_integer(type_name, length, value_constraint, data_type_context)\n options.delete(:length)\n chosen\n else # No available integer seems to suit. Use the defined type and length\n type_name\n end\n\n when MM::DataType::TYPE_Real\n 'Double'\n\n when MM::DataType::TYPE_Decimal\n 'Decimal'\n\n when MM::DataType::TYPE_Money\n 'Currency'\n\n when MM::DataType::TYPE_Char\n data_type_context.default_char_type\n\n when MM::DataType::TYPE_String\n data_type_context.default_varchar_type\n\n when MM::DataType::TYPE_Text\n options[:length] ||= 'MAX'\n data_type_context.default_text_type\n\n when MM::DataType::TYPE_Date\n 'Date'\n\n when MM::DataType::TYPE_Time\n 'Time'\n\n when MM::DataType::TYPE_DateTime\n 'DateTime'\n\n when MM::DataType::TYPE_Timestamp\n 'Binary'\n\n when MM::DataType::TYPE_Binary\n # If it's a surrogate, that might change the length we use\n binary_surrogate(type_name, value_constraint, options)\n if options[:length]\n 'Binary' # Fixed length\n else\n 'Binary'\n end\n else\n type_name\n end\n end",
"def type_literal(column)\n type = type_literal_base(column)\n column[:size] ||= 255 if type.to_s == 'varchar'\n elements = column[:size] || column[:elements]\n \"#{type}#{literal(Array(elements)) if elements}#{UNSIGNED if column[:unsigned]}\"\n end",
"def try(text)\n type.coerce(text) if match?(text)\n end",
"def string?\n data_type == String\n end",
"def type_for_attribute(_attribute)\n ActiveModel::Type.lookup(:string, limit: 200)\n end",
"def schema_column_type(db_type)\n case db_type\n when 'json'\n :json\n when 'jsonb'\n :jsonb\n else\n super\n end\n end",
"def type_to_sql(type, limit: nil, precision: nil, scale: nil, **) # :nodoc:\n # MSSQL's NVARCHAR(n | max) column supports either a number between 1 and\n # 4000, or the word \"MAX\", which corresponds to 2**30-1 UCS-2 characters.\n #\n # It does not accept NVARCHAR(1073741823) here, so we have to change it\n # to NVARCHAR(MAX), even though they are logically equivalent.\n #\n # See: http://msdn.microsoft.com/en-us/library/ms186939.aspx\n #\n type = type.to_sym if type\n native = native_database_types[type]\n\n if type == :string && limit == 1_073_741_823\n 'nvarchar(max)'\n elsif NO_LIMIT_TYPES.include?(type)\n super(type)\n elsif %i[int integer].include?(type)\n if limit.nil? || limit == 4\n 'int'\n elsif limit == 2\n 'smallint'\n elsif limit == 1\n 'tinyint'\n else\n 'bigint'\n end\n elsif type == :uniqueidentifier\n 'uniqueidentifier'\n elsif %i[datetime time].include?(type)\n precision ||= 7\n column_type_sql = (native.is_a?(Hash) ? native[:name] : native).dup\n if (0..7).include?(precision)\n column_type_sql << \"(#{precision})\"\n else\n raise(\n ArgumentError,\n \"No #{native[:name]} type has precision of #{precision}. The \" \\\n 'allowed range of precision is from 0 to 7, even though the ' \\\n 'sql type precision is 7 this adapter will persist up to 6 ' \\\n 'precision only.'\n )\n end\n else\n super\n end\n end",
"def simplified_type(field_type)\n case field_type\n # Numeric and monetary types\n when /^(?:real|double precision)$/\n :float\n # Monetary types\n when 'money'\n :decimal\n when 'hstore'\n :hstore\n when 'ltree'\n :ltree\n # Network address types\n when 'inet'\n :inet\n when 'cidr'\n :cidr\n when 'macaddr'\n :macaddr\n # Character types\n when /^(?:character varying|bpchar)(?:\\(\\d+\\))?$/\n :string\n when /^citext(?:\\(\\d+\\))?$/\n :citext\n # Binary data types\n when 'bytea'\n :binary\n # Date/time types\n when /^timestamp with(?:out)? time zone$/\n :datetime\n when /^interval(?:|\\(\\d+\\))$/\n :string\n # Geometric types\n when /^(?:point|line|lseg|box|\"?path\"?|polygon|circle)$/\n :string\n # Bit strings\n when /^bit(?: varying)?(?:\\(\\d+\\))?$/\n :string\n # XML type\n when 'xml'\n :xml\n # tsvector type\n when 'tsvector'\n :tsvector\n # Arrays\n when /^\\D+\\[\\]$/\n :string\n # Object identifier types\n when 'oid'\n :integer\n # UUID type\n when 'uuid'\n :uuid\n # JSON type\n when 'json'\n :json\n # Small and big integer types\n when /^(?:small|big)int$/\n :integer\n when /(num|date|tstz|ts|int4|int8)range$/\n field_type.to_sym\n # Pass through all types that are not specific to PostgreSQL.\n else\n super\n end\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil, unsigned = false)\n case type.to_s\n when 'binary'\n case limit\n when 0..0xfff; \"varbinary(#{limit})\"\n when nil; \"blob\"\n when 0x1000..0xffffffff; \"blob(#{limit})\"\n else raise(ActiveRecordError, \"No binary type has character length #{limit}\")\n end\n when 'integer'\n case limit\n when 1\n 'tinyint' + (unsigned ? ' unsigned' : '')\n when 2\n 'smallint' + (unsigned ? ' unsigned' : '')\n when 3\n 'mediumint' + (unsigned ? ' unsigned' : '')\n when nil, 4, 11 # compatibility with MySQL default\n if unsigned\n 'int(10) unsigned'\n else\n 'int(10)'\n end\n when 5..8\n 'bigint' + (unsigned ? ' unsigned' : '')\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}\")\n end\n when 'text'\n case limit\n when 0..0xff; 'tinytext'\n when nil, 0x100..0xffff; 'text'\n when 0x10000..0xffffff; 'mediumtext'\n when 0x1000000..0xffffffff; 'longtext'\n else raise(ActiveRecordError, \"No text type has character length #{limit}\")\n end\n else\n super\n end\n end",
"def bson_type\n String::BSON_TYPE\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n case type.to_s\n when 'integer'\n return 'integer' unless limit\n\n case limit\n when 1, 2; 'smallint'\n when nil, 3, 4; 'integer'\n when 5..8; 'bigint'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n else\n super\n end\n end",
"def simplified_type(field_type)\r\n return :boolean if field_type =~ /logical/i\r\n return :string if field_type =~ /varchar/i\r\n return :binary if field_type =~ /long binary/i\r\n return :datetime if field_type =~ /timestamp/i\r\n return :integer if field_type =~ /short|integer/i\r\n return :integer if field_type =~ /autoinc/i\r\n super\r\n end",
"def type_literal(column)\n column[:size] ||= 255 if column[:type] == :varchar\n elements = column[:size] || column[:elements]\n \"#{type_literal_base(column)}#{literal(Array(elements)) if elements}#{UNSIGNED if column[:unsigned]}\"\n end",
"def text?\n [:ascii, :text].include?(type)\n end",
"def convert_to_data_type_from_string(type, value)\n case type\n when \"boolean\"\n # Since we've determined this is a boolean data type, we can assume that:\n # If the value as an int is 1, return true\n # If the value as an int is 0 (not 1), return false\n value.to_i == 1\n when \"integer\"\n value.to_i\n when \"float\"\n value.to_f\n when \"string\", \"dictionary\"\n value\n when nil\n \"\"\n else\n raise \"Unknown or unsupported data type: #{type.class}\"\n end\n end",
"def schema_column_type(db_type)\n case db_type\n when /\\Atinyint/\n Sequel.convert_tinyint_to_bool ? :boolean : :integer\n when /\\A(int(eger)?|bigint|smallint)/\n :integer\n when /\\A(character( varying)?|varchar|text)/\n :string\n when /\\Adate\\z/\n :date\n when /\\A(datetime|timestamp( with(out)? time zone)?)\\z/\n :datetime\n when /\\Atime( with(out)? time zone)?\\z/\n :time\n when \"boolean\"\n :boolean\n when /\\A(real|float|double( precision)?)\\z/\n :float\n when /\\A(numeric(\\(\\d+,\\d+\\))?|decimal|money)\\z/\n :decimal\n when \"bytea\"\n :blob\n end\n end",
"def content_type\n @record.send( :\"#{@column}_content_type\" ) rescue ''\n end",
"def convert_to_native_type(data_type, s)\r\n return kb_nil if s == KB_NIL\r\n\r\n # I added this line to keep KBTable#import_csv working after I made\r\n # the kb_nil changes.\r\n return nil if s.nil?\r\n\r\n case data_type\r\n when :String\r\n if s =~ UNENCODE_RE\r\n return s.gsub('&linefeed;', \"\\n\").gsub('&carriage_return;',\r\n \"\\r\").gsub('&substitute;', \"\\032\").gsub('&pipe;', \"|\"\r\n ).gsub('&', \"&\")\r\n else\r\n return s\r\n end\r\n when :Integer\r\n return s.to_i\r\n when :Float\r\n return s.to_f\r\n when :Boolean\r\n if ['false', 'False', nil, false].include?(s)\r\n return false\r\n else\r\n return true\r\n end\r\n when :Time\r\n return Time.parse(s) \r\n when :Date\r\n return Date.parse(s)\r\n when :DateTime\r\n return DateTime.parse(s)\r\n when :YAML\r\n # This code is here in case the YAML field is the last\r\n # field in the record. Because YAML normally defines a\r\n # nil value as \"--- \", but KirbyBase strips trailing\r\n # spaces off the end of the record, so if this is the\r\n # last field in the record, KirbyBase will strip the\r\n # trailing space off and make it \"---\". When KirbyBase\r\n # attempts to convert this value back using to_yaml,\r\n # you get an exception.\r\n if s == \"---\"\r\n return nil\r\n elsif s =~ UNENCODE_RE\r\n y = s.gsub('&linefeed;', \"\\n\").gsub('&carriage_return;',\r\n \"\\r\").gsub('&substitute;', \"\\032\").gsub('&pipe;', \"|\"\r\n ).gsub('&', \"&\")\r\n return YAML.load(y)\r\n else\r\n return YAML.load(s)\r\n end\r\n when :Memo\r\n memo = KBMemo.new(@tbl.db, s)\r\n memo.read_from_file\r\n return memo\r\n when :Blob\r\n blob = KBBlob.new(@tbl.db, s)\r\n blob.read_from_file\r\n return blob\r\n else\r\n raise \"Invalid field type: %s\" % data_type\r\n end\r\n end",
"def cast_string(sql_type = nil)\n Cast.new(self, sql_type || String).sql_string\n end",
"def simplified_type(field_type)\n case field_type\n # Numeric and monetary types\n when /^(?:real|double precision)$/\n :float\n # Monetary types\n when 'money'\n :decimal\n # Character types\n when /^(?:character varchar|varying|bpchar)(?:\\(\\d+\\))?$/\n :string\n # Binary data types\n when 'bytea'\n :binary\n when 'binary'\n :binary\n # Date/time types\n when /^timestamp with(?:out)? time zone$/\n :datetime\n when 'interval'\n :string\n # Geometric types\n when /^(?:point|line|lseg|box|\"?path\"?|polygon|circle)$/\n :string\n # Network address types\n when /^(?:cidr|inet|macaddr)$/\n :string\n # Bit strings\n when /^bit(?: varying)?(?:\\(\\d+\\))?$/\n :string\n # XML type\n when 'xml'\n :xml\n # Arrays\n when /^\\D+\\[\\]$/\n :string\n # Object identifier types\n when 'oid'\n :integer\n # UUID type\n when 'uuid'\n :string\n # Small and big integer types\n when /^(?:small|big)int$/\n :integer\n # Pass through all types that are not specific to Vertica.\n else\n super\n end\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super unless type.to_s == 'integer'\n return 'integer' unless limit\n\n case limit\n when 1..8; 'integer'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n end",
"def mime_type_text\n mt = MIME::Types[content_type]&.first if content_type.present?\n\n mt.present? && (mt.friendly || mt.sub_type || mt.media_type) || UnknownMimeTypeText\n end",
"def schema_column_type(db_type)\n case db_type\n when /\\A(int(eger)?|bigint|smallint)\\z/\n :integer\n when /\\A(character( varying)?|varchar|text)\\z/\n :string\n when /\\Adate\\z/\n :date\n when /\\A(datetime|timestamp( with(out)? time zone)?)\\z/\n :datetime\n when /\\Atime( with(out)? time zone)?\\z/\n :time\n when /\\A(boolean|tinyint)\\z/\n :boolean\n when /\\A(real|float|double( precision)?)\\z/\n :float\n when /\\A(numeric|decimal|money)\\z/\n :decimal\n end\n end",
"def expected_type\n 'string'\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:\r\n if native = native_database_types[type]\r\n if type == :integer\r\n column_type_sql = 'integer'\r\n elsif type == :string and !limit.nil?\r\n \"varchar (#{limit})\"\r\n else\r\n super(type, limit, precision, scale)\r\n end\r\n else\r\n super(type, limit, precision, scale)\r\n end\r\n end",
"def type_is_text\n self.stimulus_type=\"text\"\n end",
"def is_strtype?(); @type == GRT_STRTYPE; end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super unless type.to_s == 'integer'\n return 'integer' unless limit\n\n case limit\n when 1, 2; 'smallint'\n when 3, 4; 'integer'\n when 5..8; 'bigint'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n end",
"def is_string?(); @type == GRT_STRING; end",
"def type_literal_generic_object(column)\n type_literal_generic_string(column)\n end",
"def simplified_type(field_type)\n case field_type\n when /tsvector/i\n :text\n when /regprocedure/i\n :string\n else\n simplified_type_base(field_type)\n end\n end",
"def column_schema_to_ruby_type(schema)\n case t = schema[:db_type].downcase\n when /\\A(?:medium|small)?int(?:eger)?(?:\\((?:\\d+)\\))?(?: unsigned)?\\z/o\n {:type=>Integer}\n when /\\Atinyint(?:\\((\\d+)\\))?\\z/o\n {:type =>schema[:type] == :boolean ? TrueClass : Integer}\n when /\\Abigint(?:\\((?:\\d+)\\))?(?: unsigned)?\\z/o\n {:type=>Bignum}\n when /\\A(?:real|float|double(?: precision)?)\\z/o\n {:type=>Float}\n when 'boolean'\n {:type=>TrueClass}\n when /\\A(?:(?:tiny|medium|long|n)?text|clob)\\z/o\n {:type=>String, :text=>true}\n when 'date'\n {:type=>Date}\n when /\\A(?:small)?datetime\\z/o\n {:type=>DateTime}\n when /\\Atimestamp(?:\\((\\d+)\\))?(?: with(?:out)? time zone)?\\z/o\n {:type=>DateTime, :size=>($1.to_i if $1)}\n when /\\Atime(?: with(?:out)? time zone)?\\z/o\n {:type=>Time, :only_time=>true}\n when /\\An?char(?:acter)?(?:\\((\\d+)\\))?\\z/o\n {:type=>String, :size=>($1.to_i if $1), :fixed=>true}\n when /\\A(?:n?varchar|character varying|bpchar|string)(?:\\((\\d+)\\))?\\z/o\n {:type=>String, :size=>($1.to_i if $1)}\n when /\\A(?:small)?money\\z/o\n {:type=>BigDecimal, :size=>[19,2]}\n when /\\A(?:decimal|numeric|number)(?:\\((\\d+)(?:,\\s*(\\d+))?\\))?\\z/o\n s = [($1.to_i if $1), ($2.to_i if $2)].compact\n {:type=>BigDecimal, :size=>(s.empty? ? nil : s)}\n when /\\A(?:bytea|(?:tiny|medium|long)?blob|(?:var)?binary)(?:\\((\\d+)\\))?\\z/o\n {:type=>File, :size=>($1.to_i if $1)}\n when 'year'\n {:type=>Integer}\n else\n {:type=>String}\n end\n end",
"def column_type(type)\n case type\n when :integer then Integer\n when :float, :decimal then Float\n when :string, :text, :uuid then String\n when :datetime, :timestamp, :time then DateTime\n when :date then Date\n when :boolean then Virtus::Attribute::Boolean # Boolean is not a standard Ruby class\n else\n raise \"Could not match column type '#{type}' for #{model_name}\"\n end\n end",
"def type_literal_generic_numeric(column)\n column[:size] ? \"numeric(#{Array(column[:size]).join(', ')})\" : :numeric\n end",
"def default_string_options(method, type) #:nodoc:\n column = self.column_for(method)\n\n if type == :text\n { :cols => @@default_text_field_size, :rows => @@default_text_area_height }\n elsif type == :numeric || column.nil? || column.limit.nil?\n { :size => @@default_text_field_size }\n else\n { :maxlength => column.limit, :size => [column.limit, @@default_text_field_size].min }\n end\n end",
"def cast_string(arg, sql_type = nil)\n cast(arg, sql_type || String).sql_string\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super unless type.to_s == 'integer'\n \n if limit.nil? || limit == 4\n 'integer'\n elsif limit < 4\n 'smallint'\n else\n 'bigint'\n end\n end",
"def allows_text?\n case kind\n when TEXT, IMAGE1, AUDIO, VIDEO1 then true\n else false\n end\n end",
"def datatype(value)\n _datatype(value) or fail ArgumentError, \"Unknown value for datatype: #{value}\"\n end",
"def attribute_type\n (float? || integer? || text? || boolean? ? field_type : 'string').to_sym\n end",
"def column_type_of(attribute)\n column = @ar_class.columns_hash[attribute.to_s]\n return :string unless column\n return :numeric if column.number? or [:datetime,:time,:date,:timestamp].include?(column.type)\n return :string\n end",
"def schema_column_type(db_type)\n case db_type\n when 'inet', 'cidr'\n :ipaddr\n else\n super\n end\n end",
"def schema_column_type(db_type)\n case db_type\n when 'inet', 'cidr'\n :ipaddr\n else\n super\n end\n end",
"def has_a_minimum_one_string_type?\n self.index_table_fields.map(&:field_name).each do |field_name|\n self.table_name.constantize.columns.each {|elt| return true if (elt.name == field_name && (elt.type == :string || elt.type == :text))} \n end\n return false\n end",
"def stradivari_type(column_name)\n raise NotImplementedError\n end",
"def nonregular_type; end",
"def detect_content_type(text)\n #; [!onjro] returns 'text/html; charset=utf-8' when text starts with '<'.\n #; [!qiugc] returns 'application/json' when text starts with '{'.\n #; [!zamnv] returns nil when text starts with neight '<' nor '{'.\n case text\n when /\\A\\s*</ ; return \"text/html; charset=utf-8\" # probably HTML\n when /\\A\\s*\\{/; return \"application/json\" # probably JSON\n else ; return nil\n end\n end",
"def type\n @node[\"type\"] || \"text\"\n end",
"def scaffold_table_column_type(c)\n column = self.properties[c]\n if column then\n if column.type == DataMapper::Property::Text\n :text\n else\n column.class.to_s.split(\"::\").last.downcase.intern\n end\n else\n nil\n end\n end",
"def schema_column_type(db_type)\n if convert_smallint_to_bool && db_type =~ /smallint/i \n :boolean\n else\n super\n end\n end",
"def plain_string?\n (type == :string || type == :text) && # String typed\n !value.nil? &&\n !value.empty? &&\n !is_text_run? && # No inline styles\n !is_formula? &&\n !is_array_formula?\n end",
"def convert_type(v)\n case v\n when Java::NetSourceforgeJtdsJdbc::ClobImpl\n convert_type(v.getSubString(1, v.length))\n else\n super\n end\n end",
"def cast_with_string(object, type)\n object.is_a?(::String) ? :string : cast_without_string(object, type)\n end",
"def convert_text_column_storage_type(table_name, column_name, to_type, temp_column_name)\n quoted_table_name = quote_table_name(table_name)\n orig_col = quote_column_name(column_name)\n temp_col = quote_column_name(temp_column_name || 'oee_temp_col')\n cmds = <<-SQL\n alter table #{quoted_table_name} add #{temp_col} #{to_type}\n update #{quoted_table_name} set #{temp_col} = #{orig_col}\n alter table #{quoted_table_name} drop column #{orig_col}\n alter table #{quoted_table_name} rename column #{temp_col} to #{orig_col}\n SQL\n cmds.split(\"\\n\").map(&:strip).each{|cmd| execute cmd}\n end",
"def type text, options={}\n packet = 0.chr * 8\n packet[0] = 4.chr\n text.split(//).each do |char|\n packet[7] = char[0]\n packet[1] = 1.chr\n socket.write packet\n packet[1] = 0.chr\n socket.write packet\n end\n wait options\n end",
"def type\n if new_record? || image?\n 'image'\n elsif video?\n 'video'\n else\n 'text'\n end\n end",
"def typecast_value_string(value)\n case value\n when Hash, Array\n raise Sequel::InvalidValue, \"invalid value for String: #{value.inspect}\"\n else\n value.to_s\n end\n end",
"def type_literal(column)\n case column[:type]\n when Class\n type_literal_generic(column)\n when :Bignum\n type_literal_generic_bignum_symbol(column)\n else\n type_literal_specific(column)\n end\n end",
"def typecast_value(column, value)\n return value unless typecast_on_assignment && db_schema && (col_schema = db_schema[column])\n value = nil if '' == value and typecast_empty_string_to_nil and col_schema[:type] and ![:string, :blob].include?(col_schema[:type])\n raise(InvalidValue, \"nil/NULL is not allowed for the #{column} column\") if raise_on_typecast_failure && value.nil? && (col_schema[:allow_null] == false)\n begin\n model.db.typecast_value(col_schema[:type], value)\n rescue InvalidValue\n raise_on_typecast_failure ? raise : value\n end\n end",
"def typecast_value(column, value)\n return value unless typecast_on_assignment && db_schema && (col_schema = db_schema[column])\n value = nil if '' == value and typecast_empty_string_to_nil and col_schema[:type] and ![:string, :blob].include?(col_schema[:type])\n raise(InvalidValue, \"nil/NULL is not allowed for the #{column} column\") if raise_on_typecast_failure && value.nil? && (col_schema[:allow_null] == false)\n begin\n model.db.typecast_value(col_schema[:type], value)\n rescue InvalidValue\n raise_on_typecast_failure ? raise : value\n end\n end",
"def convert_to_type(val)\n case type\n when 'NilClass'\n if val != false && val.blank?\n # Leave the type of the Column open. Unfortunately, false counts as\n # blank and we don't want it to. It should be classified as a boolean.\n new_val = nil\n else\n # Only non-blank values are allowed to set the type of the Column\n bool_val = convert_to_boolean(val)\n new_val =\n if bool_val.nil?\n convert_to_date_time(val) ||\n convert_to_numeric(val) ||\n convert_to_string(val)\n else\n bool_val\n end\n @type =\n if [true, false].include?(new_val)\n 'Boolean'\n elsif new_val.is_a?(Date) || new_val.is_a?(DateTime)\n 'DateTime'\n elsif new_val.is_a?(Numeric)\n 'Numeric'\n elsif new_val.is_a?(String)\n 'String'\n else\n msg = \"can't add #{val} of type #{new_val.class.name} to a column\"\n raise UserError, msg\n end\n end\n new_val\n when 'Boolean'\n if val.is_a?(String) && val.blank? || val.nil?\n nil\n else\n new_val = convert_to_boolean(val)\n if new_val.nil?\n msg = \"attempt to add '#{val}' to a column already typed as #{type}\"\n raise UserError, msg\n end\n new_val\n end\n when 'DateTime'\n if val.blank?\n nil\n else\n new_val = convert_to_date_time(val)\n if new_val.nil?\n msg = \"attempt to add '#{val}' to a column already typed as #{type}\"\n raise UserError, msg\n end\n new_val\n end\n when 'Numeric'\n if val.blank?\n nil\n else\n new_val = convert_to_numeric(val)\n if new_val.nil?\n msg = \"attempt to add '#{val}' to a column already typed as #{type}\"\n raise UserError, msg\n end\n new_val\n end\n when 'String'\n if val.nil?\n nil\n else\n new_val = convert_to_string(val)\n if new_val.nil?\n msg = \"attempt to add '#{val}' to a column already typed as #{type}\"\n raise UserError, msg\n end\n new_val\n end\n else\n raise UserError, \"Mysteriously, column has unknown type '#{type}'\"\n end\n end",
"def type_str(c)\n ret = c.type.to_s\n ret << \", primary\" if c.primary\n ret << \", default=#{truncate_default(c.default)}\" if c.default\n ret << \", not null\" unless c.null\n ret << \", limit=#{c.limit}\" if c.limit && (c.limit != 255 && c.type != :string)\n ret\n end",
"def string(value)\n message = \"Invalid typeface: must be a string\"\n raise ArgumentError, message unless value.is_a?(String)\n end",
"def value_field\n \"text\"\n end",
"def normalise_type(type, length)\n sql_type = case type\n when /^Auto ?Counter$/\n 'int'\n\n when /^Unsigned ?Integer$/,\n /^Signed ?Integer$/,\n /^Unsigned ?Small ?Integer$/,\n /^Signed ?Small ?Integer$/,\n /^Unsigned ?Tiny ?Integer$/\n s = case\n when length == nil\n 'int'\n when length <= 8\n 'tinyint'\n when length <= 16\n 'smallint'\n when length <= 32\n 'int'\n else\n 'bigint'\n end\n length = nil\n s\n\n when /^Decimal$/\n 'decimal'\n\n when /^Fixed ?Length ?Text$/, /^Char$/\n 'char'\n when /^Variable ?Length ?Text$/, /^String$/\n 'varchar'\n when /^Large ?Length ?Text$/, /^Text$/\n 'text'\n\n when /^Date ?And ?Time$/, /^Date ?Time$/\n 'datetime'\n when /^Date$/\n 'datetime' # SQLSVR 2K5: 'date'\n when /^Time$/\n 'datetime' # SQLSVR 2K5: 'time'\n when /^Auto ?Time ?Stamp$/\n 'timestamp'\n\n when /^Guid$/\n 'uniqueidentifier'\n when /^Money$/\n 'decimal'\n when /^Picture ?Raw ?Data$/, /^Image$/\n 'image'\n when /^Variable ?Length ?Raw ?Data$/, /^Blob$/\n 'varbinary'\n when /^BIT$/\n 'bit'\n when /^BOOLEAN$/\n 'boolean'\n else type # raise \"SQL type unknown for standard type #{type}\"\n end\n [sql_type, length]\n end",
"def should_be_string(data)\n if data.class != String\n raise \"Data needs to be a string; you passed in a #{data.class}: #{data}\"\n end\n end",
"def oracle_data_type_conversion(in_var, data_type, data_scale)\n \n case\n when data_type == \"VARCHAR2\"\n if in_var.nil? or in_var.empty?\n in_var = \"\"\n end\n this_in_var = in_var.to_s\n this_data_type = String\n \n when data_type == \"CHAR\"\n if in_var.nil? or in_var.empty?\n in_var = \"\"\n end\n this_in_var = in_var.to_s\n this_data_type = String\n \n when data_type == \"NUMBER\"\n if !data_scale.nil? and data_scale > 0\n \n this_in_var = in_var.to_f\n this_data_type = Float\n else\n this_in_var = in_var.to_i\n this_data_type = Fixnum\n end\n \n when data_type == \"TIMESTAMP\"\n this_in_var = in_var\n this_data_type = DateTime\n \n when data_type == \"DATE\"\n this_in_var = in_var\n this_data_type = DateTime\n \n else nil\n end \n \n return this_in_var, this_data_type \n end",
"def column_type(type_indicator)\n case type_indicator\n when :eval; :text\n when :text; :text\n when :string; :string\n when :sec; :double\n when :msec; :double\n when :duration; :double\n when :float; :double\n when :double; :double\n when :integer; :integer\n when :int; :int\n when :timestamp; :datetime\n when :datetime; :datetime\n when :date; :date\n else :string\n end\n end",
"def normalise_type(type, length)\n sql_type = case type\n when /^Auto ?Counter$/\n 'int'\n\n when /^Unsigned ?Integer$/,\n /^Signed ?Integer$/,\n /^Unsigned ?Small ?Integer$/,\n /^Signed ?Small ?Integer$/,\n /^Unsigned ?Tiny ?Integer$/\n s = case\n when length == nil\n 'int'\n when length <= 8\n 'tinyint'\n when length <= 16\n 'smallint'\n when length <= 32\n 'int'\n else\n 'bigint'\n end\n length = nil\n s\n\n when /^Decimal$/\n 'decimal'\n\n when /^Fixed ?Length ?Text$/, /^Char$/\n 'char'\n when /^Variable ?Length ?Text$/, /^String$/\n 'varchar'\n when /^Large ?Length ?Text$/, /^Text$/\n 'text'\n\n when /^Date ?And ?Time$/, /^Date ?Time$/\n 'datetime'\n when /^Date$/\n 'datetime' # SQLSVR 2K5: 'date'\n when /^Time$/\n 'datetime' # SQLSVR 2K5: 'time'\n when /^Auto ?Time ?Stamp$/\n 'timestamp'\n\n when /^Guid$/\n 'uniqueidentifier'\n when /^Money$/\n 'decimal'\n when /^Picture ?Raw ?Data$/, /^Image$/\n 'image'\n when /^Variable ?Length ?Raw ?Data$/, /^Blob$/\n 'varbinary'\n when /^BIT$/\n 'bit'\n else type # raise \"SQL type unknown for standard type #{type}\"\n end\n [sql_type, length]\n end",
"def text?; mediatype == 'text' || child_of?('text/plain'); end",
"def type_from_database(attribute)\n col = @object.column_for_attribute(attribute)\n\n unless col.nil?\n col.type.to_s.downcase\n else\n nil\n end\n end",
"def sniff_content_type str\n if (str.nil? or\n (not str.respond_to? :encoding ) or\n (str.encoding.to_s == \"ASCII-8BIT\"))\n \"application/octet-stream\"\n else\n \"text/plain; charset=#{str.encoding}\"\n end\n end",
"def string?\n type == \"STRING\"\n end",
"def value_field\n \"string\"\n end",
"def test_mysql_text_not_null_defaults_non_strict\n using_strict(false) do\n with_text_blob_not_null_table do |klass|\n assert_equal '', klass.columns_hash['non_null_blob'].default\n assert_equal '', klass.columns_hash['non_null_text'].default\n\n assert_nil klass.columns_hash['null_blob'].default\n assert_nil klass.columns_hash['null_text'].default\n\n instance = klass.create!\n\n assert_equal '', instance.non_null_text\n assert_equal '', instance.non_null_blob\n\n assert_nil instance.null_text\n assert_nil instance.null_blob\n end\n end\n end",
"def replace_type_string(mapping, field)\n if (mapping.has_key?(\"analyzer\")) # analyzer -> text\n mapping[\"type\"] = \"text\"\n else # no analyzer\n if (mapping.has_key?(\"index\")) # index\n case (mapping[\"index\"])\n when \"analyzed\" # index.analyzed -> text\n mapping[\"type\"] = \"text\"\n when \"not_analyzed\" # index.not_analyzed -> keyword \n mapping[\"type\"] = \"keyword\"\n when \"no\" # index.no -> text\n mapping[\"type\"] = \"text\"\n end\n else\t\t\t\t# no alalezer and no index -> in field keyword, otherwise text\n if field\n mapping[\"type\"] = \"keyword\"\n else\n mapping[\"type\"] = \"text\"\n end\n end\n end\n end"
] |
[
"0.7334014",
"0.70625246",
"0.69362247",
"0.6559399",
"0.65411496",
"0.65359974",
"0.6518442",
"0.6473457",
"0.64247924",
"0.637079",
"0.63639545",
"0.6286142",
"0.62203",
"0.6183551",
"0.6152284",
"0.5986913",
"0.59777796",
"0.5955377",
"0.58920866",
"0.58748233",
"0.5829819",
"0.5822843",
"0.57882273",
"0.57831943",
"0.57612014",
"0.57574344",
"0.5731628",
"0.5711726",
"0.568399",
"0.5663169",
"0.5640366",
"0.56358427",
"0.5626653",
"0.56066096",
"0.560138",
"0.5557098",
"0.5554278",
"0.5549526",
"0.5547751",
"0.5537447",
"0.5526396",
"0.5512987",
"0.55059516",
"0.5503484",
"0.55001247",
"0.5497624",
"0.5496259",
"0.5489309",
"0.5473206",
"0.54639554",
"0.5463857",
"0.54621553",
"0.54604626",
"0.5459317",
"0.54529035",
"0.5451463",
"0.5409939",
"0.54081064",
"0.5407634",
"0.5405017",
"0.53856885",
"0.5385237",
"0.5385174",
"0.5373624",
"0.53683233",
"0.53679675",
"0.53679675",
"0.5361749",
"0.53591436",
"0.53473276",
"0.534457",
"0.5338276",
"0.5330689",
"0.5323781",
"0.53217953",
"0.5320336",
"0.5317574",
"0.53070384",
"0.530069",
"0.529534",
"0.52910715",
"0.52841073",
"0.5283203",
"0.5283203",
"0.5277473",
"0.5273983",
"0.52685195",
"0.52626866",
"0.5260913",
"0.52594966",
"0.5259361",
"0.5253363",
"0.52497536",
"0.52383184",
"0.523798",
"0.5226987",
"0.5215829",
"0.5210051",
"0.52070636",
"0.5200299"
] |
0.76549566
|
0
|
PostgreSQL 9.4+ supports views with check option.
|
def view_with_check_option_support
# :nocov:
:local if server_version >= 90400
# :nocov:
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supports_views_with_check_option?\n !!view_with_check_option_support\n end",
"def get_views\n connect_db.fetch(\"SELECT RDB$RELATION_NAME, RDB$VIEW_SOURCE FROM RDB$RELATIONS WHERE RDB$VIEW_BLR IS NOT NULL AND (RDB$SYSTEM_FLAG IS NULL OR RDB$SYSTEM_FLAG = 0)\")\n end",
"def supports_materialized_views?\n false\n end",
"def views(name = nil)\n select_values(\"SELECT table_name FROM information_schema.views\", name)\n end",
"def create_view_sql(name, source, options)\n source = source.sql if source.is_a?(Dataset)\n sql = String.new\n sql << \"#{create_view_prefix_sql(name, options)} AS #{source}\"\n if check = options[:check]\n sql << \" WITH#{' LOCAL' if check == :local} CHECK OPTION\"\n end\n sql\n end",
"def views(opts=OPTS, &block)\n tables_or_views('VIEW', opts, &block)\n end",
"def view_with_check_option_support\n true\n end",
"def view_select_statement(view, name=nil)\n row = execute(\"SELECT VIEW_DEFINITION FROM SYSIBM.VIEWS WHERE TABLE_NAME = '#{view}'\", name).each do |row|\n return row[0]\n end\n raise \"No view called #{view} found\"\n end",
"def view_with_check_option_support\n nil\n end",
"def supports_views?\n true\n end",
"def materialized_views(name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; [] end",
"def views(opts=OPTS)\n relkind = opts[:materialized] ? 'm' : 'v'\n pg_class_relname(relkind, opts)\n end",
"def supports_views_with_local_check_option?\n view_with_check_option_support == :local\n end",
"def create_view_prefix_sql(name, options)\n create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}VIEW #{quote_schema_table(name)}\", options[:columns])\n end",
"def view_select_statement(view, name=nil)\n q =<<-ENDSQL\n SELECT\n SM.definition\n FROM\n sys.objects O\n JOIN\n sys.sql_modules SM ON o.object_id = SM.object_id\n WHERE\n o.type = 'V' AND o.name = '#{view}'\n ENDSQL\n \n view_def = select_value(q, name)\n \n if view_def\n return convert_statement(view_def)\n else\n raise \"No view called #{view} found\"\n end\n end",
"def inline_view?(options, scope)\n options[:use_sql_view] &&\n options[:extra_cols] &&\n (scope.limit_value || scope.where_values_hash.present?) &&\n !scope.table_name&.include?(\".\") &&\n scope.respond_to?(:includes_values)\n end",
"def recreate_view name\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n if view_structure\n execute \"DROP VIEW IF EXISTS #{name}\"\n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end\n end",
"def create_view_prefix_sql(name, options)\n sql = create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}#{'TEMPORARY 'if options[:temp]}#{'RECURSIVE ' if options[:recursive]}#{'MATERIALIZED ' if options[:materialized]}VIEW #{quote_schema_table(name)}\", options[:columns] || options[:recursive])\n\n if options[:security_invoker]\n sql += \" WITH (security_invoker)\"\n end\n\n if tablespace = options[:tablespace]\n sql += \" TABLESPACE #{quote_identifier(tablespace)}\"\n end\n\n sql\n end",
"def can_view?(object)\n false\n end",
"def supports_views?\n return false\n end",
"def drop_view_sql(name, options)\n \"DROP VIEW#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def supports_views?\n false\n end",
"def supports_views?\n false\n end",
"def supports_view_listing?\n respond_to?(:views)\n end",
"def tableView(aView, validateDrop:info, proposedRow:row, proposedDropOperation:op)\n NSDragOperationEvery\n end",
"def table_or_view\n return unless Admin::MigrationGenerator.table_or_view_exists? table_name\n\n return :table if Admin::MigrationGenerator.table_exists? table_name\n\n :view\n end",
"def user_can_view?(_check_user)\n true # everyone can view these\n end",
"def drop_view_sql(name, opts=OPTS)\n \"DROP #{'MATERIALIZED ' if opts[:materialized]}VIEW#{' IF EXISTS' if opts[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if opts[:cascade]}\"\n end",
"def has_view?(view)\n view = view.to_s\n if generated_design_doc['views'][view]\n generated_design_doc['views'][view][\"couchrest-defaults\"]\n end\n end",
"def exists?\n begin\n res=@db.fetch %Q/ SELECT tablename FROM pg_tables WHERE tablename='%s' AND schemaname='%s'/ % [table_name,schema_name]\n if res.count == 0\n res=@db.fetch %Q/ SELECT viewname FROM pg_views WHERE viewname='%s'AND schemaname='%s'/ % [table_name,schema_name]\n end\n rescue PG::Error => err\n return false\n end\n\n # also say false if the table is empty\n if res.count > 0\n return ds.select(1).limit(1).count > 0\n else\n return true\n end\n return false\n end",
"def view_select_statement(view, name=nil)\n raise NotImplementedError, \"view_select_statement is an abstract method\"\n end",
"def create_view(view_name, definition, options={})\n SchemaMonkey::Middleware::Migration::CreateView.start(connection: self, view_name: view_name, definition: definition, options: options) do |env|\n definition = env.definition\n view_name = env.view_name\n options = env.options\n definition = definition.to_sql if definition.respond_to? :to_sql\n\n if options[:materialized] && options[:allow_replace]\n raise ArgumentError, 'allow_replace is not supported for materialized views'\n end\n\n if options[:force]\n drop_view(view_name, {if_exists: true}.merge(options.slice(:materialized)))\n end\n\n command = if options[:materialized]\n \"CREATE MATERIALIZED\"\n elsif options[:allow_replace]\n \"CREATE OR REPLACE\"\n else\n \"CREATE\"\n end\n\n execute \"#{command} VIEW #{quote_table_name(view_name)} AS #{definition}\"\n end\n end",
"def supports_virtual_columns?\n false\n end",
"def supports_virtual_columns?\n false\n end",
"def do_query_view(view_name, view_options)\n database.view \"#{self.name.underscore}/#{view_name}\", view_options\n end",
"def create_view(name, source)\n source = source.sql if source.is_a?(Dataset)\n execute(\"CREATE VIEW #{name} AS #{source}\")\n end",
"def view(name)\n new_view = view_old(name)\n new_view.table_name = name\n new_view\n end",
"def materialized_view_definition(matview_name, name = nil) raise \"Internal Error: Connection adapter didn't override abstract function\"; end",
"def table_or_view_ready?\n Admin::MigrationGenerator.table_or_view_exists?(table_name)\n rescue StandardError => e\n @extra_error = e\n false\n end",
"def update_view name, type, columns, options={}\n view_structure = ActiveRecord::Base.connection.select_value(\"select definition from pg_views where viewname='#{name}'\")\n raise ViewNotExistException(\"View #{name} does not exist in current db\") unless view_structure\n \n columns_str = columns.is_a?(Array) ? columns.join(',') : columns\n \n select_pattern = /select (.*) from/i\n select_str = view_structure[select_pattern,1]\n\n case type\n when :add\n view_structure.gsub!(select_pattern, \"SELECT #{select_str}, #{columns_str} FROM\")\n when :remove\n select_str.gsub!(\", #{columns_str}\", '')\n view_structure.gsub!(select_pattern, \"SELECT #{select_str} FROM\")\n when :replace\n view_structure.gsub!(select_pattern, \"SELECT #{columns_str} FROM\")\n end\n\n drop_views name, options[:dependent_views] \n execute \"CREATE VIEW #{name} AS #{view_structure};\"\n end",
"def view(ddoc,view,*opts)\n q = \"#{database}/_design/#{ddoc}/_view/#{view}\"\n q << build_query_string(opts.first,\"view\") if opts && opts.any? && opts.first.is_a?(Hash)\n\n @conn.query({url_path: q, method: :get})\n end",
"def views(stream)\n # Don't create \"system\" views.\n view_names = PgSaurus::Tools.views\n view_names.each do |options|\n write_view_definition(stream,\n options[\"table_schema\"],\n options[\"table_name\"],\n options[\"view_definition\"])\n end\n stream << \"\\n\"\n end",
"def create_or_replace_view(name, source)\n source = source.sql if source.is_a?(Dataset)\n execute(\"CREATE OR REPLACE VIEW #{name} AS #{source}\")\n end",
"def supports_create_or_replace_view?\n false\n end",
"def create_extension_view_and_class\n self.const_get(\"Extended#{to_s}\")\n rescue\n clause = view_builder\n #this needs to be moved into the specific db adapter files\n connection.execute %{\n create or replace algorithm = merge SQL SECURITY DEFINER view #{extended_table_name} as select #{clause[:view_select]} from #{table_name} #{clause[:view_joins]}#{clause[:view_conditions]}\n }\n class_eval %{\n class Extended#{to_s} < #{to_s}\n set_table_name \"#{extended_table_name}\"\n def self.descends_from_active_record?\n true\n end\n end\n }\n true\n end",
"def supports_create_or_replace_view?\n true\n end",
"def alter_materialized_view_set_options(name, set_options, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_options => set_options\n }, options).to_sql\n end",
"def viewable?\n published? || created? || editing?\n end",
"def create_view(name, source, options = OPTS)\n execute_ddl(create_view_sql(name, source, options))\n remove_cached_schema(name)\n nil\n end",
"def can_view?(u)\n\t\tu and (u.admin? or u.staff? or (u == self.user))\n\t\t# TODO: support users having explicit permission to view a document\n\t\t# TODO: support users belonging to group that has permission to view a document\n\tend",
"def has_views?\n !@views.empty?\n end",
"def has_views?\n !@views.empty?\n end",
"def can_view?(person, object, field=nil)\n if field\n field = field.to_sym if field.is_a? String\n return false if object.is_a?(ActiveRecord::Base) and object.class.never_show?(field)\n else\n # Special support for classes (can view instances?)\n if object.is_a?(Class) and object < ActiveRecord::Base\n object = object.new\n elsif object.is_a?(Array)\n if object.respond_to?(:new_without_appending)\n object = object.new_without_appending\n elsif object.respond_to?(:member_class)\n object = object.member_class.new\n end \n end\n end\n viewable = check_permission(:view, person, object, field)\n if viewable and field and\n ( (field_val = get_field(object, field)).is_a?(ActiveRecord::Base) or field_val.is_a?(Array) )\n # also ask the current value if it is viewable\n can_view?(person, field_val)\n else\n viewable\n end\n end",
"def test_materialized_view_metadata_drop\n skip(\"Materialized views were introduced in Cassandra 3.0.0\") if CCM.cassandra_version < '3.0.0'\n\n @session.execute(\"CREATE TABLE simplex.test (pk int PRIMARY KEY, c int)\")\n @session.execute(\"CREATE MATERIALIZED VIEW simplex.mv1 AS SELECT c,pk FROM simplex.test WHERE c IS NOT NULL AND pk IS NOT NULL PRIMARY KEY (pk, c)\")\n\n @listener.wait_for_materialized_view('simplex', 'mv1')\n assert @cluster.keyspace('simplex').has_materialized_view?('mv1')\n\n @session.execute(\"DROP MATERIALIZED VIEW simplex.mv1\")\n @cluster.refresh_schema\n refute @cluster.keyspace('simplex').has_materialized_view?('mv1')\n end",
"def write_foods_view()\n puts <<SQL\nDROP VIEW IF EXISTS foods;\n\nCREATE VIEW foods AS\nSELECT food_description.id, food_description.description,\n nutrient_data.nutrient_value AS kcal,\n food_group.description AS food_group,\n food_description.refuse_percentage, food_description.refuse_description\n FROM food_description, nutrient_definition, nutrient_data, food_group\n WHERE food_description.id = nutrient_data.food_id\n AND food_group.id = food_description.food_group_id\n AND nutrient_definition.id = nutrient_data.nutrient_id\n AND nutrient_definition.id = '208'\n AND food_group.id NOT IN ('0300', '2100', '2200', '3600');\nSQL\nend",
"def create_or_replace_view(name, source, options = OPTS)\n if supports_create_or_replace_view?\n options = options.merge(:replace=>true)\n else\n swallow_database_error{drop_view(name)}\n end\n\n create_view(name, source, options)\n nil\n end",
"def can_view?(timetable)\n can_see = timetable.public? #Yes, if it's public\n can_see ||= others_timetables_that_can_view.include?(timetable) #Yes, if the user have explicit permission to do so\n end",
"def inherit_views?\n !!read_inheritable_attribute('inherit_views')\n end",
"def is_view_permitted?(user_or_project = nil)\n return false\n end",
"def replaces_views?\n return false\n end",
"def column_virtual? c\n raise NotImplementedError\n end",
"def create_view(name, body = nil, force: false, **kwargs, &block)\n kwargs[:sqlite3] = !!(adapter_name =~ /sqlite/i)\n\n drop_view(name) if force && table_exists?(name)\n\n execute build_create_view_query(name, body, **kwargs, &block)\n end",
"def drop_views name, defs=nil\n defs = defs.delete(:dependent_views) if defs.is_a?(Hash)\n defs.each do |dependent_view|\n execute \"DROP VIEW IF EXISTS #{dependent_view}\"\n end if defs\n \n execute \"DROP VIEW IF EXISTS #{name}\"\n\n end",
"def views(name = nil)\n raise NotImplementedError, \"views is an abstract method\"\n end",
"def view(check = nil, &block)\n raise ArgumentError, \"block required\" unless block_given?\n @views.insert(0, ViewHandler.new(check, block))\n end",
"def collection_is_viewable(collection, user, project=nil)\n if collection == nil\n return false\n end\n\n if collection.user == user\n return true\n end\n \n #collection_has_editor(collection) #makes collection viewable to editor in data view\n \n #If collection is part of a project\n if project != nil && collection.projects.include?(project)\n return true\n end\n \n return false\n end",
"def quote_table_or_view(name, options)\n schema = options[:schema]\n if schema\n \"\\\"#{schema}\\\".\\\"#{name}\\\"\"\n else\n \"\\\"#{name}\\\"\"\n end\n end",
"def refresh_view(name, opts=OPTS)\n run \"REFRESH MATERIALIZED VIEW#{' CONCURRENTLY' if opts[:concurrently]} #{quote_schema_table(name)}\"\n end",
"def index_view\n @options[:view] == :index\n end",
"def drop_view(name)\n execute(\"DROP VIEW #{name}\")\n end",
"def test_materialized_view_metadata_updates\n skip(\"Materialized views were introduced in Cassandra 3.0.0\") if CCM.cassandra_version < '3.0.0'\n\n @session.execute(\"CREATE TABLE simplex.test (pk int PRIMARY KEY, c int)\")\n @session.execute(\"CREATE MATERIALIZED VIEW simplex.mv1 AS SELECT c,pk FROM simplex.test WHERE c IS NOT NULL AND pk IS NOT NULL PRIMARY KEY (pk, c)\")\n\n @listener.wait_for_materialized_view('simplex', 'mv1')\n\n assert @cluster.keyspace('simplex').has_materialized_view?('mv1')\n mv_meta = @cluster.keyspace('simplex').materialized_view('mv1')\n assert_equal 'SizeTieredCompactionStrategy', mv_meta.options.compaction_strategy.class_name\n\n @session.execute(\"ALTER MATERIALIZED VIEW simplex.mv1 WITH compaction = { 'class' : 'LeveledCompactionStrategy' }\")\n @cluster.refresh_schema\n mv_meta = @cluster.keyspace('simplex').materialized_view('mv1')\n assert_equal 'LeveledCompactionStrategy', mv_meta.options.compaction_strategy.class_name\n end",
"def view_only!\n @view_only = true\n end",
"def can_view_employee(employee)\n\t\t\t return (self.can_view_dept? && self.is_same_department(employee) && self.can_view_all?\n\t\t\t end",
"def drop_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::DropView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n options = env.options\n materialized = options[:materialized] ? 'MATERIALIZED' : ''\n sql = \"DROP #{materialized} VIEW\"\n sql += \" IF EXISTS\" if options[:if_exists]\n sql += \" #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def inherit_views?\n read_inheritable_attribute('inherit_views') ? true : false\n end",
"def can_view?(movie)\n end",
"def create_materialized_view(name, body = nil, force: false, **kwargs, &block)\n supports_materialized_view!\n\n drop_materialized_view(name) if force && table_exists?(name)\n\n execute build_create_materialized_view_query(name, body, **kwargs, &block)\n end",
"def view name, query={}, &block\n unless design_doc_fresh\n refresh_design_doc\n end\n query[:raw] = true if query[:reduce] \n raw = query.delete(:raw)\n view_name = \"#{design_doc_slug}/#{name}\"\n fetch_view_with_docs(view_name, query, raw, &block)\n end",
"def write_view_definition(stream, table_schema, table_name, view_definition)\n stream << \" create_view \\\"#{table_schema}.#{table_name}\\\", <<-SQL\\n\" \\\n \" #{view_definition}\\n\" \\\n \" SQL\\n\"\n end",
"def add_view_option(opts)\n opts = check_params(opts,[:view_infos])\n super(opts)\n end",
"def on_view(definition, &block)\n on(definition, true, &block)\n end",
"def xview(index = None)\n if None == index\n execute(:xview)\n else\n execute_only(:xview, index)\n end\n end",
"def view(options = {})\n\t \tget_records('-view', {}, options)\n\t end",
"def start_view(view)\n\treturn true if File.directory?(view_dir(view))\n\tKernel.system(\"cleartool startview #{view} > nul 2>&1\")\n\treturn $? == 0\nend",
"def refresh_view(view_name, options = {})\n SchemaMonkey::Middleware::Migration::RefreshView.start(connection: self, view_name: view_name, options: options) do |env|\n view_name = env.view_name\n sql = \"REFRESH MATERIALIZED VIEW #{quote_table_name(view_name)}\"\n execute sql\n end\n end",
"def is_viewable?(param_tmp_token:)\n ((!tmp_token.nil? && param_tmp_token == tmp_token) || (publication.is_published? && deleted_at.nil? && !accepted.nil? && (visible_after.nil? || visible_after < Date.today)))\n end",
"def path_to_views\n @path_to_views ||= \"db/views\"\n end",
"def view_ready?(name)\n status = view_status(name)\n\n case status['viewStatus']\n when 'ready' then true\n when 'pending' then false\n else\n raise APIError.new(\"Unknown view status: #{response['viewStatus']}\")\n end\n end",
"def view_name=(view_name)\n Domgen.error(\"sql.view_name= invoked on entity #{entity.qualified_name} with no subtypes\") if entity.direct_subtypes.size == 0\n @view_name = view_name\n end",
"def view_query(design_document_name, view_name, options = Options::View::DEFAULT)\n resp = @backend.document_view(@name, design_document_name, view_name, options.namespace, options.to_backend)\n ViewResult.new do |res|\n res.meta_data = ViewMetaData.new do |meta|\n meta.total_rows = resp[:meta][:total_rows]\n meta.debug_info = resp[:meta][:debug_info]\n end\n res.rows = resp[:rows].map do |entry|\n ViewRow.new do |row|\n row.id = entry[:id] if entry.key?(:id)\n row.key = JSON.parse(entry[:key])\n row.value = JSON.parse(entry[:value])\n end\n end\n end\n end",
"def create_view(db) \n begin\n db.get('_design/todos')\n rescue RestClient::ResourceNotFound => nfe\n db.save_doc({\n \"_id\" => \"_design/todos\",\n :views => {\n :allTodos => {\n :reduce => \"_count\",\n :map => \"function(doc){if(doc.title && doc.completed != null){emit(doc.order,{title: doc.title,completed: doc.completed})}}\"\n }\n }\n })\n end\nend",
"def create_view(view, cspec)\n\tKernel.system(\"perl #{$scripts}/ct-mkview.pl -raw -name #{view} -dynamic -spec #{cspec}\")\n\treturn $? == 0\nend",
"def supervisor_check?(user_id)\n db_params = {\n host: ENV['host'],\n port: ENV['port'],\n dbname: ENV['dbname'],\n user: ENV['user'],\n password: ENV['password']\n }\n db = PG::Connection.new(db_params)\n supervisors = db.exec(\"SELECT supervisor FROM supervisor\").values\n alt_supervisors = db.exec(\"SELECT alt_supervisor FROM supervisor\").values\n supervisor_list = supervisors.flatten\n alt_supervisor_list = alt_supervisors.flatten\n db.close\n if supervisor_list.include?(user_id) || alt_supervisor_list.include?(user_id)\n true\n else\n false\n end\nend",
"def alter_materialized_view_schema(name, schema, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :set_schema => schema\n }, options).to_sql\n end",
"def view_table(db, table)\n db.execute(\"SELECT * FROM #{table}\")\nend",
"def view(type)\n views.each { |v| return v if v.type.id =~ /^#{Regexp.escape(type)}$/}\n nil\n end",
"def view_allowed?(item)\n @view_allowed.to_a.map(&:to_sym).include?(item.to_sym)\n end",
"def set_view_option(opts)\n opts = check_params(opts,[:view_infos])\n super(opts)\n end",
"def populated?\n query = <<-SQL\n SELECT ispopulated \n FROM pg_matviews \n WHERE matviewname = '#{table_name}' \n AND schemaname = '#{schema_name.chomp('.')}';\n SQL\n result = connection.execute query\n ActiveRecord::Type::Boolean.new.cast(result.values[0][0])\n end",
"def alter_materialized_view_owner(name, role, options = {})\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :owner_to => role\n }, options).to_sql\n end"
] |
[
"0.67245424",
"0.6375552",
"0.63630784",
"0.6352705",
"0.6345518",
"0.6326178",
"0.6318707",
"0.6247733",
"0.6198591",
"0.610404",
"0.60873914",
"0.60536164",
"0.6031846",
"0.6029833",
"0.597804",
"0.5947009",
"0.5936955",
"0.5927375",
"0.58991086",
"0.58800215",
"0.5850445",
"0.5832947",
"0.5832947",
"0.58203024",
"0.58175397",
"0.5810632",
"0.5794738",
"0.579389",
"0.5785171",
"0.5742428",
"0.57342744",
"0.5662041",
"0.5632221",
"0.5632221",
"0.56275415",
"0.56210375",
"0.5609638",
"0.5607716",
"0.55932236",
"0.5591938",
"0.5574748",
"0.5447751",
"0.5440618",
"0.54347634",
"0.54343694",
"0.5420898",
"0.541465",
"0.53917634",
"0.5358928",
"0.5352627",
"0.5345219",
"0.5345219",
"0.5324179",
"0.5314637",
"0.52741134",
"0.52738416",
"0.52334756",
"0.523258",
"0.5229748",
"0.52146655",
"0.51909924",
"0.51896673",
"0.5173766",
"0.5169731",
"0.51660234",
"0.5159901",
"0.51510215",
"0.515084",
"0.51487505",
"0.5148742",
"0.5136121",
"0.51258624",
"0.51231617",
"0.5106393",
"0.5088425",
"0.50688213",
"0.5061464",
"0.504326",
"0.50144434",
"0.5011033",
"0.5007353",
"0.50022817",
"0.5000484",
"0.49962744",
"0.49907306",
"0.4986789",
"0.4983768",
"0.49827483",
"0.4980411",
"0.49782097",
"0.49715728",
"0.4969059",
"0.49638408",
"0.4959",
"0.49526137",
"0.4950019",
"0.4944256",
"0.49342304",
"0.4928615",
"0.4924248"
] |
0.49060613
|
100
|
Return the results of an EXPLAIN ANALYZE query as a string
|
def analyze
explain(:analyze=>true)
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def explain(opts=OPTS)\n with_sql((opts[:analyze] ? 'EXPLAIN ANALYZE ' : 'EXPLAIN ') + select_sql).map(:'QUERY PLAN').join(\"\\r\\n\")\n end",
"def explain(arel, binds = [])\n sql = \"EXPLAIN #{to_sql(arel, binds)}\"\n ExplainPrettyPrinter.new.pp(exec_query(sql, 'EXPLAIN', binds))\n end",
"def string_explain_plan_results(results)\n [nil, [results]]\n end",
"def explain(arel, binds = [])\n sql = \"EXPLAIN #{to_sql(arel, binds)}\"\n exec_query(sql, 'EXPLAIN', binds)\n end",
"def explain_sql(sql, connection_config, &explainer)\n return nil unless sql && connection_config\n statement = sql.split(\";\\n\")[0] # only explain the first\n explain_plan = explain_statement(statement, connection_config, &explainer)\n return explain_plan || []\n end",
"def analyse_explain(result:, identifier:, query:)\n _id, _select_type, _table, _partitions, type, possible_keys, key, _key_len,\n _ref, _rows, _filtered, extra = result\n\n return if type == \"ref\"\n return if ALLOWED_EXTRA_VALUES.any? { |value| extra&.include?(value) }\n\n if possible_keys.nil?\n @aggregator.add_critical(identifier: identifier, query: query)\n return\n end\n\n if possible_keys == \"PRIMARY\" && key.nil? && type == \"ALL\"\n @aggregator.add_warning(identifier: identifier, query: query)\n end\n end",
"def calculate_plan(queries)\n queries.each do |query|\n if @log_explain\n puts \"Explaining query\"\n puts\n end\n begin\n query.plans << plan(query.statement)\n if @log_explain\n # Pass format to prevent ANALYZE\n puts execute(\"EXPLAIN (FORMAT TEXT) #{safe_statement(query.statement)}\").map {|r| r[\"QUERY PLAN\"]}.join(\"\\n\")\n end\n rescue PG::Error, JSON::NestingError => e\n if @log_explain\n log e.message\n end\n end\n puts if @log_explain\n end\n end",
"def perform_query\n Rails.logger.info queries.to_sql\n queries\n end",
"def analyze\n format_results\n end",
"def result_summary\n options = { style: 'font-size: 25px;' }\n summary = if matches_exist?\n [bold_tag(pluralize(result_count, 'result'), options), filter_text]\n else\n [\n bold_tag(@query, options),\n 'not found -',\n pluralize(result_count, 'similar result'),\n filter_text\n ]\n end\n safe_join(summary, ' ')\n end",
"def exec_query(query, conn = ActiveRecord::Base.connection)\n res = conn.exec_query(query)\n puts res.rows.map { |r| r.map(&:inspect).join(\",\") }.join('\\n')\n res.to_a\nend",
"def explain\n \n end",
"def to_s\n query.to_s\n end",
"def explain(command)\n @connection.call('GRAPH.EXPLAIN', @graphname, command)\n rescue Redis::CommandError => e\n raise ExplainError, e\n end",
"def supports_explain?\n false\n end",
"def supports_explain?\n false\n end",
"def inspect\n \"#{first_seen} #{last_seen} #{query} #{answer} #{rr} #{maptype}\"\n end",
"def test_explain_sql_select_with_mysql_explain_result\n config = {:adapter => 'mysql'}\n sql = 'SELECT foo'\n\n plan = {\n 'select_type' => 'SIMPLE', 'key_len' => nil, 'table' => 'blogs', 'id' => '1',\n 'possible_keys' => nil, 'type' => 'ALL', 'Extra' => '', 'rows' => '2',\n 'ref' => nil, 'key' => nil\n }\n explainer_result = mock('explain plan')\n explainer_result.expects(:each_hash).yields(plan)\n explainer = lambda { |statement| explainer_result }\n\n statement = NewRelic::Agent::Database::Statement.new(sql, config, explainer)\n result = NewRelic::Agent::Database.explain_sql(statement)\n expected_result = [%w[select_type key_len table id possible_keys type\n Extra rows ref key],\n [['SIMPLE', nil, 'blogs', '1', nil, 'ALL', '', '2', nil, nil]]]\n\n assert_equal(expected_result[0].sort, result[0].sort, \"Headers don't match\")\n assert_equal(expected_result[1][0].compact.sort, result[1][0].compact.sort, \"Values don't match\")\n end",
"def display_analyze_output(json, options={})\n return unless json['tokens']\n\n output = [] << ''\n\n max_length = json['tokens'].map { |d| d['token'].to_s.size }.max\n\n output << Helpers.table(json['tokens'].map do |t|\n [\n t['position'],\n t['token'].ljust(max_length+5).ansi(:bold),\n \"#{t['start_offset']}–#{t['end_offset']}\",\n t['type']\n ]\n end).to_s\n output.join(\"\\n\")\n end",
"def aql\n aql = Visitor.run(relation).aql\n adapter.logger.debug { \"AQL: #{aql}\" }\n aql\n end",
"def to_s\n self.query.to_s\n end",
"def test_explain_sql_with_mysql2_activerecord_result\n return unless defined?(::ActiveRecord::Result)\n\n config = {:adapter => 'mysql2'}\n sql = 'SELECT * FROM spells where id=1'\n\n columns = %w[id select_type table type possible_keys key key_len ref rows Extra]\n rows = [['1', 'SIMPLE', 'spells', 'const', 'PRIMARY', 'PRIMARY', '4', 'const', '1', '']]\n activerecord_result = ::ActiveRecord::Result.new(columns, rows)\n explainer = lambda { |statement| activerecord_result }\n\n statement = NewRelic::Agent::Database::Statement.new(sql, config, explainer)\n result = NewRelic::Agent::Database.explain_sql(statement)\n\n assert_equal([columns, rows], result)\n end",
"def explain\n explain_limit = limit || 0\n opts = @opts.merge(:limit => -explain_limit.abs, :explain => true)\n @collection.explain(Scope.new(@collection, @selector, opts))\n end",
"def result\n ActiveRecord::Base.connection.select_all(sql).entries\n end",
"def analyzed_best_bet_query(query)\n analyzed_query = @client.indices.analyze(\n index: @index_name,\n body: {\n text: query,\n analyzer: \"best_bet_stemmed_match\",\n },\n )\n\n analyzed_query.fetch(\"tokens\", []).map { |token_info|\n token_info[\"token\"]\n }.join(\" \")\n rescue Elasticsearch::Transport::Transport::Errors::BadRequest\n \"\"\n end",
"def get_query_results(query)\n hits = execute_query(query).hpath('hits.hits').first\n return [] unless hits && !hits.empty?\n hits.map do |hit|\n hit['_source'].expand.merge(hit.only('_id', '_type', '_index')).kmap do |key|\n key.to_s.gsub('@', '_').to_sym\n end\n end\n end",
"def set_explain\n @explain = Explain.find(params[:id])\n end",
"def info_sql\n INFO_SQL\n end",
"def query sql\n result = db[sql].all\n return result\n end",
"def query_string\n ast.to_query_string\n end",
"def oracle_sql_results(sql, conn = VACOLS::Case.connection)\n result = conn.execute(sql)\n output = []\n while r = result.fetch_hash\n output << r\n end\n output\nend",
"def extract_sql_queries\n sql_queries = Lograge::Sql.store[:lograge_sql_queries]\n return {} unless sql_queries\n\n Lograge::Sql.store[:lograge_sql_queries] = nil\n {\n sql_queries: Lograge::Sql.formatter.call(sql_queries),\n sql_queries_count: sql_queries.length\n }\n end",
"def stats(query = nil, target = nil)\n raw \"STATS #{query} #{target}\".strip << \"\\r\\n\"\n end",
"def generate_text_sql\n ThinkingSphinx.context.indexed_models.each do |model|\n model = model.constantize\n model.define_indexes\n model.sphinx_indexes.each do |idx|\n idx.sources.each do |src|\n puts \"#{model.to_s} SQL => \"\n puts src.to_sql\n end\n end\n end\nend",
"def raw_query(query) #:nodoc:\n logger.benchmark \"cypher: #{query}\" do\n result = connection.execute_query(query)\n if result\n result[\"data\"]\n else\n []\n end\n end\n end",
"def analyzed_best_bet_query(query)\n analyzed_query = JSON.parse(@client.get_with_payload(\n \"_analyze?analyzer=best_bet_stemmed_match\", query))\n\n analyzed_query[\"tokens\"].map { |token_info|\n token_info[\"token\"]\n }.join(\" \")\n end",
"def to_s\n PgQuery::Deparse.from ast\n end",
"def explain(arel, binds = [])\n end",
"def to_s\n return 'No Results' if page_count.all? { |_key, count| count.empty? }\n\n output = []\n page_count.each_pair do |type, counts|\n output << type\n sorted(counts).each do |path, count|\n output << \"#{path} #{count} visit#{'s' if count.zero? || count > 1}\"\n end\n end\n output.join(\"\\n\")\n end",
"def print_results(db, athlete_name)\n ath_reps = db.execute 'SELECT athletes.name, repetitions.distance, repetitions.effort, athlete_repetitions.elapsed_time, repetition_sets.date FROM athletes JOIN athlete_repetitions ON athletes.id = athlete_repetitions.athlete_id JOIN repetition_sets ON athlete_repetitions.repetition_set_id = repetition_sets.id JOIN repetitions ON repetition_sets.repetition_id = repetitions.id WHERE athletes.name = (?)', [athlete_name]\n ath_reps.each { |row| p \"#{row[0]} ran #{row[1]}m at #{row[2]}% effort in #{row[3]}s on #{row[4]}\" }\n end",
"def to_s\n analyses = beam + completed\n analyses = analyses.sort\n \"Beam Search: #{length + @completed.length} beams, \" +\n \"#{length} active\\n\" + analyses.join(\"\\n\\n\")\n end",
"def get_query_result_docs(query_obj)\n response = @query_server.query(query_obj)\n response.getResults\n end",
"def queries_summary(project_id)\n get \"projects/#{project_id}/queries/summary\"\n end",
"def to_stdout\n\t\t\tresult_string = String.new\n\t\t\thashes = Array.new\n\n\t\t\t@results.sort_by {|k| k[:scanner] }.each do |result|\n\t\t\t\tunless hashes.include? result[:hash].downcase\n\t\t\t\t\tresult_string << \"#{result[:hash]}:\\n\"\n\t\t\t\t\thashes << result[:hash].downcase\n\t\t\t\tend\n\t\t\t\tresult_string << \"#{result[:scanner]}: \".rjust(25) + \"#{result[:result]}\\n\"\n\t\t\tend if @results != nil\n\n\t\t\tresult_string\n\t\tend",
"def inspect\n\t\tstr = self.to_s\n\t\tstr[-1,0] = if finished?\n\t\t\t\" finished\"\n\t\telse\n\t\t\tstats = []\n\t\t\tstats << \" status=#{ PG.constants.grep(/CONNECTION_/).find{|c| PG.const_get(c) == status} }\" if status != CONNECTION_OK\n\t\t\tstats << \" transaction_status=#{ PG.constants.grep(/PQTRANS_/).find{|c| PG.const_get(c) == transaction_status} }\" if transaction_status != PG::PQTRANS_IDLE\n\t\t\tstats << \" nonblocking=#{ isnonblocking }\" if isnonblocking\n\t\t\tstats << \" pipeline_status=#{ PG.constants.grep(/PQ_PIPELINE_/).find{|c| PG.const_get(c) == pipeline_status} }\" if respond_to?(:pipeline_status) && pipeline_status != PG::PQ_PIPELINE_OFF\n\t\t\tstats << \" client_encoding=#{ get_client_encoding }\" if get_client_encoding != \"UTF8\"\n\t\t\tstats << \" type_map_for_results=#{ type_map_for_results.to_s }\" unless type_map_for_results.is_a?(PG::TypeMapAllStrings)\n\t\t\tstats << \" type_map_for_queries=#{ type_map_for_queries.to_s }\" unless type_map_for_queries.is_a?(PG::TypeMapAllStrings)\n\t\t\tstats << \" encoder_for_put_copy_data=#{ encoder_for_put_copy_data.to_s }\" if encoder_for_put_copy_data\n\t\t\tstats << \" decoder_for_get_copy_data=#{ decoder_for_get_copy_data.to_s }\" if decoder_for_get_copy_data\n\t\t\t\" host=#{host} port=#{port} user=#{user}#{stats.join}\"\n\t\tend\n\t\treturn str\n\tend",
"def display_query_sql(users)\n tag.p('SQL:') + tag.code(users.to_sql)\n end",
"def explain\n \"#{ enumerable_type.safe_name }<#{ entry_type.explain }>\"\n end",
"def a2s_info\n encode_message('T', \"Source Engine Query\\x00\")\n end",
"def execute\n # build the query string\n # run the query\n # return the results\n end",
"def interpret(expression)\n \t\tresult = [\"Query Result: \"]\n\t\tmainQuery = ' '\n\t\tsubQuery = ' '\n\t\tforUsed = false\n\t\tsearchString = nil\n\t\tsearchStarted = false\n\t\tsearchEnded = false\n\n\t\ttokens = expression.split(' ')\n\t\tfor currentToken in tokens\n \t\t\tif currentToken == \"show\"\n \t\t\t\tnext\n\t\t\tend\n\n\t\t\t#//show in all queries, not really used\n\t\t\tif currentToken == \"title\"\n \t\t\t\tif mainQuery == ' '\n \t\t\t\t\tmainQuery = 'T'\n\t\t\t\telsif forUsed and subQuery == ' '\n \t\t\t\t\t\tsubQuery = 'T'\n\t\t\t\tend\n\t\t\telsif currentToken == \"actor\"\n \t\t\t\tif mainQuery == ' '\n \t\t\t\t\tmainQuery = 'A'\n\t\t\t\telsif forUsed and subQuery == ' '\n \t\t\t\t\t\tsubQuery = 'A'\n\t\t\t\tend\n\n\t\t\telsif currentToken == 'for'\n \t\t\t\tforUsed = true\n\n\t\t\telsif searchString == nil and subQuery != ' ' and currentToken.start_with?(\"<\")\n \t\t\t\tsearchString = currentToken\n\t\t\t\tsearchStarted = true\n\t\t\t\tif currentToken.end_with?(\">\")\n \t\t\t\t\tsearchEnded = true \n\t\t\t\tend\n\n\t\t\telsif searchStarted and not searchEnded\n \t\t\t\tsearchString += \" \" + currentToken\n\t\t\t\tif currentToken.end_with?(\">\")\n\t\t\t\t\tsearchEnded = true \n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\n\t\t#//remove <>\n\t\tif searchString != nil\n\t\t\tsearchString = searchString.slice(1..-2)\n\t\t\t#searchString = searchString.substring(1,(searchString.length() - 1)) \n\t\tend\n\n\t\texpr = nil\n\t\tif mainQuery == 'A'\n \t\t\tif subQuery == 'T'\n \t\t\t\texpr = DvdActorTitleExpression.new(searchString) \n\t\t\telse\n \t\t\t\texpr = DvdActorExpression.new()\n\t\t\tend\n\t\telsif mainQuery == 'T'\n \t\t\tif subQuery == 'A'\n \t\t\t\texpr = DvdTitleActorExpression.new(searchString) \n\t\t\telse\n \t\t\t\texpr = DvdTitleExpression.new()\n\t\t\tend\n\t\telse\n \t\t\treturn str(result)\n\t\tend\n\n\t\tresult.push(expr.interpret(@ctx))\n\t\tresult.join('')\n\tend",
"def to_s\n regex = REPLACEMENT\n\n if Gitlab::Database.mysql?\n regex = Regexp.union(regex, MYSQL_REPLACEMENTS)\n end\n\n sql = @sql.gsub(regex, '?').gsub(CONSECUTIVE) do |match|\n \"#{match.count(',') + 1} values\"\n end\n\n # InfluxDB escapes double quotes upon output, so lets get rid of them\n # whenever we can.\n if Gitlab::Database.postgresql?\n sql = sql.delete('\"')\n end\n\n sql.tr(\"\\n\", ' ')\n end",
"def query_hits(options = {})\n options = { sort: false, transpose: false, subject_regex: nil }.merge(options)\n \n # Get the hits\n hits_hash = {}\n each_query(sort: options[:sort], subject_regex: options[:subject_regex]) do |query, hits| \n # hits.map!(&:transpose!) if options[:transpose]\n hits_hash[query] = hits\n end\n\n # TODO: Transpose. This needs to actually interchange subject and query on the hash level\n \n hits_hash\n end",
"def summary\n \"Results: #{results[:critical].size} critical, \" \\\n \"#{results[:warning].size} warning, \" \\\n \"#{results[:unknown].size} unknown, #{results[:ok].size} ok\"\n end",
"def exercise1\n @content = ActiveRecord::Base.connection.execute(\"\n SELECT\n u.name as user_name,\n COUNT(gr.name) as groups_count,\n CONCAT('[', COALESCE(STRING_AGG(gr.name, ', ' ), ''),']') as groups\n FROM ((users as u\n LEFT JOIN groups_users as gu ON u.id=gu.user_id)\n LEFT JOIN groups as gr ON gr.id = gu.group_id)\n GROUP BY user_name\n ORDER BY groups_count;\");\n\n @results1 = []\n\n index = 0\n @content.each do |r|\n @results1[index] = Result1.new r\n index = index + 1;\n end\n\n return @results1\n end",
"def blogs_weblog_explain; \"List your weblogs.\"; end",
"def to_s\n \"#{id}:#{query}\"\n end",
"def query(query, options = {})\n GRel::Debugger.debug \"QUERYING DESCRIBE...\"\n GRel::Debugger.debug query\n GRel::Debugger.debug \"** LIMIT #{@last_query_context.limit}\" if @last_query_context.limit\n GRel::Debugger.debug \"** OFFSET #{@last_query_context.offset}\" if @last_query_context.offset\n GRel::Debugger.debug \"----------------------\"\n args = {:describe => true}\n #args = {}\n args[:accept] = options[:accept] if options[:accept]\n args[:offset] = @last_query_context.offset if @last_query_context.offset\n args[:limit] = @last_query_context.limit if @last_query_context.limit\n @connection.query(@db_name,query, args).body\n end",
"def analyze\n analyze_text\n @analyzed = true\n nil\n end",
"def estimate_cost(sql, *args)\n explanation = ask \"EXPLAIN (FORMAT JSON) #{sql}\", *args\n explanation.first.dig \"Plan\", \"Total Cost\"\n end",
"def recalculate_usage\n # For some reason, ANALYZE TABLE doesn't update statistics in Travis' environment\n ActiveRecord::Base.connection.execute(\"OPTIMIZE TABLE #{binding.database}.stuff\")\n end",
"def get_sql_shorttext_by_sql_id(sql_id)\n # Connect zur DB nachhollen wenn noch auf NullAdapter steht, da Zugriff auf gecachte Werte ohne DB-Connect möglich ist\n open_oracle_connection if ActiveRecord::Base.connection.class != ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter\n\n # erster Versuch direkt aus SGA zu lesen\n sqls = sql_select_all [\"\\\n SELECT /*+ Panorama-Tool Ramm */ SUBSTR(SQL_FullText, 1, 150) SQL_Text\n FROM v$SQLArea\n WHERE SQL_ID = ?\",\n sql_id]\n\n if sqls.size == 0 # Wenn nicht gefunden, dann in AWR-History suchen\n sqls = sql_select_all [\"\\\n SELECT /*+ Panorama-Tool Ramm */ SUBSTR(SQL_Text, 1, 150) SQL_Text\n FROM DBA_Hist_SQLText\n WHERE DBID = ?\n AND SQL_ID = ?\",\n session[:database][:dbid], sql_id]\n end\n\n if sqls.size == 0\n \"< Kein SQL-Text zu ermitteln füer SQL-ID='#{sql_id}' >\"\n else\n sqls[0].sql_text\n end\n end",
"def index\n if (params[:redirect] == \"sql\")\n sql = <<-SQL\n WITH src AS (SELECT id, cow_no, date, state, owner_id FROM ai_logs\n WHERE owner_id = #{params[:search_owner]})\n SELECT json_agg(src) FROM src;\n SQL\n render json: ActiveRecord::Base.connection.select_value(sql)\n else\n @ai_logs = AiLog.all\n end\n end",
"def sql\n @parser.sql\n end",
"def sql\n @parser.sql\n end",
"def indexusage\n sql = %q(SELECT\n relname,\n 100 * idx_scan / (seq_scan + idx_scan) percent_of_times_index_used,\n n_live_tup rows_in_table\n FROM\n pg_stat_user_tables\n ORDER BY\n n_live_tup DESC;)\n exec_sql(sql, find_uri)\n end",
"def log_query(sql)\n pad = ' '\n puts Paint[pad + sql, :cyan, :bold]\n # @loggers[0]&.debug(' ' + sql)\n end",
"def sql\n @stmt_api.sql\n end",
"def query(soql)\n response = api_get 'query', :q => soql\n mashify? ? response.body : response.body['records']\n end",
"def to_s\n \"#<ResultSet::#{@collection} :total_results => #{@results.size}>\"\n end",
"def to_sql_query_info(offset)\n \"SELECT * FROM #{@model.quoted_table_name} WHERE \" +\n \" #{quote_column(@model.primary_key)} = (($id - #{offset}) / #{ThinkingSphinx.indexed_models.size})\"\n end",
"def to_s\n flags = [ ]\n flags << 'qr' if (response?)\n flags << 'aa' if (authorative?)\n flags << 'tc' if (truncated?)\n flags << 'rd' if (recursion_desired?)\n flags << 'ra' if (recursion_available?)\n \n \";; HEADER:\\n;; opcode: #{opcode.to_s.upcase} status: #{response_code.to_s.upcase} id: #{id} \\n\" +\n \";; flags: #{flags.join(' ')}; QUERY: #{questions.length}, ANSWER: #{answers.length}, AUTHORITY: #{nameservers.length}, ADDITIONAL: #{additional_records.length}\" +\n \"\\n\" +\n \";; QUESTION SECTION:\\n\" +\n questions.collect(&:to_s).join(\"\\n\") + \"\\n\" +\n \";; ANSWER SECTION:\\n\" +\n answers.collect(&:to_s).join(\"\\n\") + \"\\n\" +\n \";; NAMESERVER SECTION:\\n\" +\n nameservers.collect(&:to_s).join(\"\\n\") + \"\\n\" +\n \";; ADDITIONAL SECTION:\\n\" +\n additional_records.collect(&:to_s).join(\"\\n\") + \"\\n\"\n end",
"def get_analyzer_results(analyzer, the_samples)\n output = Array.new\n analyzer_base = analyzer.sub('.py', '')\n the_samples.each do |sample|\n output << \"results/#{$jobid}/#{analyzer_base}/#{sample}.root\"\n end\n return output\nend",
"def get_analyzer_results(analyzer, the_samples)\n output = Array.new\n analyzer_base = analyzer.sub('.py', '')\n the_samples.each do |sample|\n output << \"results/#{$jobid}/#{analyzer_base}/#{sample}.root\"\n end\n return output\nend",
"def get_analyzer_results(analyzer, the_samples)\n output = Array.new\n analyzer_base = analyzer.sub('.py', '')\n the_samples.each do |sample|\n output << \"results/#{$jobid}/#{analyzer_base}/#{sample}.root\"\n end\n return output\nend",
"def get_analyzer_results(analyzer, the_samples)\n output = Array.new\n analyzer_base = analyzer.sub('.py', '')\n the_samples.each do |sample|\n output << \"results/#{$jobid}/#{analyzer_base}/#{sample}.root\"\n end\n return output\nend",
"def get_analyzer_results(analyzer, the_samples)\n output = Array.new\n analyzer_base = analyzer.sub('.py', '')\n the_samples.each do |sample|\n output << \"results/#{$jobid}/#{analyzer_base}/#{sample}.root\"\n end\n return output\nend",
"def get_analyzer_results(analyzer, the_samples)\n output = Array.new\n analyzer_base = analyzer.sub('.py', '')\n the_samples.each do |sample|\n output << \"results/#{$jobid}/#{analyzer_base}/#{sample}.root\"\n end\n return output\nend",
"def sql! sql=nil\n require 'niceql'\n puts Niceql::Prettifier.prettify_sql sql || $last_sql_command\n end",
"def run_query(query_file , exp , o = {})\n index_path = o[:index_path] || @index_path\n cmd = fwrite('cmd_galago_run_query.log' , \"#{$galago_path}/bin/galago batch-search --index=#{index_path} #{o[:param_query]} \\\n #{to_path(query_file)} |grep -e ^[0-9] > #{to_path(exp+'.res')}\" , :mode=>'a')\n `#{cmd}`\n end",
"def stats\n native.command('dbstats' => 1).documents.first\n end",
"def stats\n native.command('dbstats' => 1).documents.first\n end",
"def query_string\n _f = @params.fields.include?(:full_text) ? [:full_text] : fields\n # byebug\n a = query.gsub('/', '').scan( /\"[^\"]+\"|[^ ]+/ ).map do |word|\n if word[0] === '\"'\n m = word.match( /^\"(.*)\"$/ );\n word = m ? m[1] : word;\n end\n Unicode.downcase(word.gsub('\"', ''))\n end\n _q = '(' + a.join('* AND ') + '*)'\n # _q = '/(?=.*?'+a.join( ')(?=.*?' )+').*/';\n #byebug\n index.filter{ ~q(query_string: {fields: _f, query: \"#{_q}\", default_operator: 'or'}) } if _q.present? && _f.present?\n\n #index.query(multi_match: {query: \"#{_q}*\", fields: _f}) if _q.present? && _f.present\n end",
"def explain(**opts)\n self.class.new(collection, selector, options.merge(explain_options(**opts))).first\n end",
"def describe_query\n h = {component: @options[:component]}.compact\n if h.empty?\n describe_num_and_type\n else\n \"#{describe_num_and_type} #{h}\"\n end\n end",
"def index\n @analyzers = Analyzer.all\n end",
"def exec_query(sql, name = 'SQL', binds = [])\n log(sql, name, binds) do\n result = without_prepared_statement?(binds) ? exec_no_cache(sql) :\n exec_cache(sql, binds)\n result_array = result_as_array(result)\n if ActiveRecord::VERSION::MAJOR >= 4\n column_types = compute_field_types(result)\n ret = ActiveRecord::Result.new(result.fields, result_array, column_types)\n else\n ret = ActiveRecord::Result.new(result.fields, result_array)\n end\n result.clear\n ret\n end\n end",
"def log_indexes(indexes)\n if indexes.any?\n indexes.map {|i| \"#{i[:table]} (#{i[:columns].join(\", \")})\"}.join(\", \")\n else\n \"None\"\n end\n end",
"def results_summary_sentence\n summaries =\n ocurring_result_types.map do |result_type|\n summary_msg = result_summary_msg(result_type)\n block_given? ? yield(summary_msg, result_type) : summary_msg\n end\n to_sentence(summaries)\n end",
"def snapshots_redact_sql_queries; end",
"def auto_analyze; end",
"def get_analyzer_results(analyzer, the_samples)\n output = Array.new\n analyzer_base = analyzer.sub('.py', '')\n puts the_samples\n puts analyzer\n the_samples.each do |sample|\n output << \"results/#{$jobid}/#{$selection}#{$jetcorrection}/iso#{$isolation}/#{analyzer_base}/#{sample}.root\"\n #output << \"results/#{$jobid}/#{analyzer_base}/#{sample}.root\"\n end\n return output\nend",
"def db_query_transform__count query\n tmp_table = \"resultset_table\"\n make_tmp_table = db_query_transform__subquery query, tmp_table\n \"SELECT COUNT(*) FROM #{make_tmp_table}\"\n end",
"def to_s\n statements.collect { |s| s.to_s }.join(\";\\n\")\n end",
"def condensed_to_s \n concat = \"\"\n tmp = \"\"\n count = 0\n \n @verticies.keys.each do |v| \n if v.edges.length > 0 # only display connected portions of graph\n \n v.edges.each do |edge|\n if edge.passable\n tmp += \" #{edge.vertex.to_s}, w=#{edge.weight}\" \n count+=1\n end\n end\n if count > 0\n concat += \"v: \" + v.to_s + \" edges: \"\n concat += tmp + \"\\n\"\n count = 0\n end\n tmp = \"\"\n end\n end\n concat\n end",
"def sql\n <<-SQL\n -- Search learning paths\n SELECT DISTINCT\n c.id,\n c.name,\n c.course_code,\n c.settings,\n cc.content,\n 'learning_path' AS content_type,\n c.id AS learning_path_id,\n 0 AS learning_objective_id\n FROM courses c\n LEFT OUTER JOIN fearless_taggings ts\n ON ts.taggable_id = c.id AND ts.taggable_type = 'LearningPath'\n LEFT OUTER JOIN fearless_tags t\n ON t.id = ts.tag_id\n LEFT OUTER JOIN fearless_custom_contents cc\n ON cc.contentable_id = c.id AND cc.contentable_type = 'LearningPath'\n WHERE 0=0\n #{construct_account_clause}\n #{construct_course_worklow_clause}\n #{construct_name_sql}\n #{construct_all_tags_search('t', 'name')}\n UNION ALL\n -- Search learning objectives\n SELECT DISTINCT\n cm.id,\n cm.name,\n c.course_code,\n c.settings,\n cc.content,\n 'learning_objective' AS content_type,\n cm.context_id::bigint AS learning_path_id,\n cm.id::bigint AS learning_objective_id\n FROM context_modules cm\n INNER JOIN courses c\n ON c.id = cm.context_id\n AND cm.context_type = 'Course'\n LEFT OUTER JOIN fearless_taggings ts\n ON ts.taggable_id = cm.id AND ts.taggable_type = 'LearningObjective'\n LEFT OUTER JOIN fearless_tags t\n ON t.id = ts.tag_id\n LEFT OUTER JOIN fearless_custom_contents cc\n ON cc.contentable_id = cm.id AND cc.contentable_type = 'LearningObjective'\n WHERE 0=0\n #{construct_account_clause}\n #{construct_generic_workflow_clause('cm')}\n #{construct_name_sql('cm')}\n #{construct_all_tags_search('t', 'name')}\n UNION ALL\n -- Search learning learning_event\n SELECT DISTINCT\n ct.id,\n ct.title AS name,\n c.course_code,\n c.settings,\n cc.content,\n 'learning_event' AS content_type,\n ct.context_id::bigint AS learning_path_id,\n ct.context_module_id::bigint AS learning_objective_id\n FROM content_tags ct\n INNER JOIN courses c\n ON c.id = ct.context_id\n AND ct.context_type = 'Course'\n LEFT OUTER JOIN fearless_taggings ts\n ON ts.taggable_id = ct.id AND ts.taggable_type = 'LearningEvent'\n LEFT OUTER JOIN fearless_tags t\n ON t.id = ts.tag_id\n LEFT OUTER JOIN fearless_custom_contents cc\n ON cc.contentable_id = ct.id AND cc.contentable_type = 'LearningEvent'\n WHERE 0=0\n #{construct_account_clause}\n #{construct_generic_workflow_clause('ct')}\n #{construct_name_sql('ct', 'title')}\n #{construct_all_tags_search('t', 'name')}\n SQL\n end",
"def documentation_search_summary(result)\n t('documentation.helpers.documentation_search_summary.text', :total_results => result.total_results, :start_result => result.start_result_number, :end_result => result.end_result_number, :query => result.query)\n end",
"def generate_array_query(accessions: [])\n \"SELECT DISTINCT id, name FROM(SELECT id_col AS id, name_col as name FROM #{CellMetadatum::BIGQUERY_TABLE}, \" \\\n \"UNNEST(#{big_query_id_column}) AS id_col WITH OFFSET id_pos, UNNEST(#{big_query_name_column}) AS name_col \" \\\n \"WITH OFFSET name_pos WHERE id_pos = name_pos #{accessions.any? ? \"AND #{format_accession_list(accessions)}\" : nil}) \" \\\n 'WHERE id IS NOT NULL ORDER BY LOWER(name)'\n end",
"def tsvector_construction_string\n self.class.fulltext_search_plan.collect do |weight, methods|\n if methods.to_a.any?\n keywords = methods.collect { |method| self.send(method) }.join(\" \")\n self.class.send(:sanitize_sql_array, [\n %{setweight(to_tsvector('%s'), '%s')},\n keywords, weight\n ])\n end\n end.compact.join(\" || \").squish\n end",
"def analytics_top_queries(engine_id, options={})\n get(\"engines/#{engine_id}/analytics/top_queries.json\", options)\n end",
"def format_result(obj)\n formatted_str = \"\\n\\nSearch Results: \\n\\n\"\n obj.each do |key, value|\n unless key == \"_id\"\n\t key_str = key.capitalize\n\t if value.kind_of?(Array)\n\t\t \tformatted_str << key_str << \": \"\n\t\t \tvalue.each do |var|\n\t\t \t\tformatted_str << var.to_s << ((var == value.last) ? \"\" : \",\")\n\t\t \tend\n\t\t \tformatted_str << \"\\n\"\n\t elsif value.is_a?(Hash)\n\t\t formatted_str << key_str << \": \" << \"\\n\"\n\t\t value.each do |var_key, var_value|\n\t\t formatted_str << \"\\t\" << var_key << \":\" << var_value << \"\\n\"\n\t\t end\n\t else\n\t\t formatted_str << key_str << \": \" << value.to_s << \"\\n\"\n\t end\n\t end\n end\n formatted_str\n end"
] |
[
"0.6823953",
"0.6274661",
"0.6253358",
"0.610667",
"0.59947056",
"0.5803804",
"0.5746523",
"0.57279754",
"0.5630922",
"0.54320306",
"0.54110736",
"0.53897107",
"0.5338646",
"0.53190017",
"0.5312021",
"0.5312021",
"0.52763075",
"0.5218137",
"0.5196163",
"0.5192691",
"0.5188185",
"0.5179327",
"0.5163724",
"0.5163495",
"0.5126807",
"0.51044625",
"0.5093439",
"0.5072663",
"0.50382775",
"0.5020023",
"0.5004944",
"0.49992377",
"0.4990708",
"0.4949799",
"0.49410743",
"0.49323806",
"0.491289",
"0.48856112",
"0.48811287",
"0.48554784",
"0.48498935",
"0.48340437",
"0.48216474",
"0.48202103",
"0.48179063",
"0.48159125",
"0.48150623",
"0.47781456",
"0.47734046",
"0.47434068",
"0.4731438",
"0.47120917",
"0.47060096",
"0.4704394",
"0.46989673",
"0.46970007",
"0.46847045",
"0.4673975",
"0.46687236",
"0.46683478",
"0.4663329",
"0.46516186",
"0.46449375",
"0.46449375",
"0.46402326",
"0.46329787",
"0.4625837",
"0.46236983",
"0.46196228",
"0.4612739",
"0.46121714",
"0.46105623",
"0.46105623",
"0.46105623",
"0.46105623",
"0.46105623",
"0.46105623",
"0.4610466",
"0.46103358",
"0.45956942",
"0.45956942",
"0.45892155",
"0.4584255",
"0.45811766",
"0.45807385",
"0.45765963",
"0.45760605",
"0.45708063",
"0.45703167",
"0.4569916",
"0.45657513",
"0.45625502",
"0.4559371",
"0.45562947",
"0.45451975",
"0.45436722",
"0.45216775",
"0.4518395",
"0.45045418",
"0.450364"
] |
0.65946364
|
1
|
Handle converting the ruby xor operator (^) into the PostgreSQL xor operator (#), and use the ILIKE and NOT ILIKE operators.
|
def complex_expression_sql_append(sql, op, args)
  case op
  when :^
    # PostgreSQL spells bitwise xor as #, so join the arguments with it
    j = ' # '
    c = false
    args.each do |a|
      sql << j if c
      literal_append(sql, a)
      c ||= true
    end
  when :ILIKE, :'NOT ILIKE'
    # Emit (arg0 ILIKE arg1) or (arg0 NOT ILIKE arg1) directly
    sql << '('
    literal_append(sql, args[0])
    sql << ' ' << op.to_s << ' '
    literal_append(sql, args[1])
    sql << ')'
  else
    super
  end
end
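
A minimal usage sketch (my illustration, not part of the source): with Sequel's mock PostgreSQL adapter, the operator mapping above can be observed in the generated SQL. The table name :items is assumed.

require 'sequel'

DB = Sequel.connect('mock://postgres') # mock adapter: builds SQL without a real server
ds = DB[:items]

# Ruby ^ on a numeric expression is emitted as PostgreSQL's # operator
ds.select(Sequel.expr(:a).sql_number ^ :b).sql
# => SELECT "a" # "b" FROM "items"

# Case-insensitive matching uses ILIKE; the inverted form uses NOT ILIKE
ds.where(Sequel.ilike(:name, '%foo%')).sql
# => SELECT * FROM "items" WHERE ("name" ILIKE '%foo%')
ds.exclude(Sequel.ilike(:name, '%foo%')).sql
# => SELECT * FROM "items" WHERE ("name" NOT ILIKE '%foo%')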
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def ^(p0) end",
"def ^(p0) end",
"def ^(p0) end",
"def ^(p0) end",
"def ^(p0) end",
"def xor(*args)\n args.inject(self, :^)\n end",
"def xor_c\n end",
"def bit_xor\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 52 )\n return_value = BitXorReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n char_literal238 = nil\n bit_and237 = nil\n bit_and239 = nil\n\n tree_for_char_literal238 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 593:5: bit_and ( '^' bit_and )*\n @state.following.push( TOKENS_FOLLOWING_bit_and_IN_bit_xor_3918 )\n bit_and237 = bit_and\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, bit_and237.tree )\n end\n # at line 593:13: ( '^' bit_and )*\n while true # decision 55\n alt_55 = 2\n look_55_0 = @input.peek( 1 )\n\n if ( look_55_0 == HAT )\n alt_55 = 1\n\n end\n case alt_55\n when 1\n # at line 593:16: '^' bit_and\n char_literal238 = match( HAT, TOKENS_FOLLOWING_HAT_IN_bit_xor_3923 )\n if @state.backtracking == 0\n\n tree_for_char_literal238 = @adaptor.create_with_payload( char_literal238 )\n root_0 = @adaptor.become_root( tree_for_char_literal238, root_0 )\n\n end\n @state.following.push( TOKENS_FOLLOWING_bit_and_IN_bit_xor_3927 )\n bit_and239 = bit_and\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, bit_and239.tree )\n end\n\n else\n break # out of loop for decision 55\n end\n end # loop for decision 55\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 52 )\n\n end\n \n return return_value\n end",
"def xor_l\n end",
"def xor_cypher(str)\n # try rewrite using Array#pack 'H*' or String#unpack\n str_byte_arr = str.scan(/../).map { |h| h.to_i(16) }\n processed_buffers = []\n \n #instead of taking two arrays, take in 1\n string_xor_ascii(processed_buffers, str_byte_arr)\n find_plaintext(processed_buffers)\n end",
"def xor_e\n end",
"def xordecrypt\n return match(@payload,@prefix,@keys,@keywords)\n end",
"def xor_b\n end",
"def op_xor(t_sym, f_sym, num)\n chars = num.split('')\n num_of_trues = 0\n chars.each do |c|\n if c == \"1\"\n num_of_trues += 1\n end\n end\n\n if num_of_trues.to_i.even?\n return f_sym\n else\n return t_sym\n end\nend",
"def xor(x, y)\n\nend",
"def bit_or\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 51 )\n return_value = BitOrReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n char_literal235 = nil\n bit_xor234 = nil\n bit_xor236 = nil\n\n tree_for_char_literal235 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 589:5: bit_xor ( '|' bit_xor )*\n @state.following.push( TOKENS_FOLLOWING_bit_xor_IN_bit_or_3894 )\n bit_xor234 = bit_xor\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, bit_xor234.tree )\n end\n # at line 589:13: ( '|' bit_xor )*\n while true # decision 54\n alt_54 = 2\n alt_54 = @dfa54.predict( @input )\n case alt_54\n when 1\n # at line 589:15: '|' bit_xor\n char_literal235 = match( PIPE, TOKENS_FOLLOWING_PIPE_IN_bit_or_3898 )\n if @state.backtracking == 0\n\n tree_for_char_literal235 = @adaptor.create_with_payload( char_literal235 )\n root_0 = @adaptor.become_root( tree_for_char_literal235, root_0 )\n\n end\n @state.following.push( TOKENS_FOLLOWING_bit_xor_IN_bit_or_3902 )\n bit_xor236 = bit_xor\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, bit_xor236.tree )\n end\n\n else\n break # out of loop for decision 54\n end\n end # loop for decision 54\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 51 )\n\n end\n \n return return_value\n end",
"def xor(a,b)\n (a | b) - (a & b)\n end",
"def xor?(arg1, arg2)\r\n\r\nend",
"def XOR(str1,str2)\n ret =\"\"\n str1.split(//).each_with_index do |c, i|\n ret[i] = (str1[i].ord ^ str2[i].ord).chr\n end\n return ret\nend",
"def xor(codes, string)\n Array.new([codes.size, string.size].min) { |i| codes[i] ^ string[i].ord }.map(&:chr).join\nend",
"def xor_a\n end",
"def not_regexp(left, right)\n # could be DRYer, but this is more readable than: \"NOT #{self.=~(left,right)}\"\n raise if right.is_a? Regexp\n \"NOT #{left}#{quotify right}\"\n end",
"def xor(e1, e2)\n eval_ex(e1) ^ eval_ex(e2)\n end",
"def ^ other\n (self.to_s ^ other).to_sym\n end",
"def xor_d\n end",
"def exclusive_or (p,q)\n p ^ q\nend",
"def xor(*others)\n self.class.xor(self, *others)\n end",
"def xor?(op1, op2)\n (op1 || op2) && !(op1 && op2)\nend",
"def xor(argument1, argument2)\n argument1 && !argument2 || !argument1 && argument2\n end",
"def inv_xor(str1, str2)\n #result = ''\n #for i in 0..(str1.size - 1) do\n # result += mod_sub(str1[i].chr, str2[i].chr)\n #end\n #result\n\n # Probably slower than above:\n str1.split(//).zip(str2.split(//)).inject('') do |acc, ch| \n acc += mod_sub(ch.first, ch.last) \n end\n end",
"def ^(arg0)\n end",
"def ^(arg0)\n end",
"def ^(arg0)\n end",
"def xor?(one, two)\n if one && two\n false\n elsif one || two\n true\n else\n false\n end\nend",
"def p1(x, y, z)\n # x or y\n xor(x, y)\nend",
"def xor?(a,b)\n if a && !b\n true\n elsif !a && b\n true\n else\n false\n end\nend",
"def xor?(op_1, op_2)\n (op_1 == true && op_2 == false) || (op_1 == false && op_2 == true) \nend",
"def sql_operator(operator, field)\n raise ScopedSearch::QueryNotSupported, \"the operator '#{operator}' is not supported for field type '#{field.type}'\" if !field.virtual? and [:like, :unlike].include?(operator) and !field.textual?\n return '@@' if [:like, :unlike].include?(operator) && field.full_text_search\n case operator\n when :like then 'ILIKE'\n when :unlike then 'NOT ILIKE'\n else super(operator, field)\n end\n end",
"def xor?(input1, input2)\n if input1 && input2\n false\n elsif \n input1 || input2\n true\n else \n false\n end \nend",
"def ^(o)\n\t\tcase o\n\t\twhen Integer\n\t\t\tBuffer.__assert_range(o)\n\t\t\treturn __xor_byte(o)\n\t\twhen Buffer\n\t\t\treturn __xor_buffer(o)\n\t\telse\n\t\t\traise TypeError.new\n\t\tend\n\tend",
"def test_xor\n assert_false F ^ F, 'F ^ F'\n assert_maybe F ^ M, 'F ^ M'\n assert_true F ^ T, 'F ^ T'\n\n assert_maybe M ^ F, 'M ^ F'\n assert_maybe M ^ M, 'M ^ M'\n assert_maybe M ^ T, 'M ^ T'\n\n assert_true T ^ F, 'T ^ F'\n assert_maybe T ^ M, 'T ^ M'\n assert_false T ^ T, 'T ^ T'\n end",
"def xor?(x, y)\n (x || !y) || (y && !x)\nend",
"def =~(p0) end",
"def =~(p0) end",
"def =~(p0) end",
"def ^(other)\n `return other.$r ? Qtrue : Qfalse;`\n end",
"def xor?(arg1, arg2)\n if arg1 && !arg2\n true\n elsif !arg1 && arg2\n true\n else \n false\n end\nend",
"def xor_hl\n end",
"def match_operator(p, value)\n if p[0] == '!'\n @@operator_map[p[0]].call(_match(p[1], value))\n elsif p[0] == '^'\n @@operator_map[p[0]].call(_match(p[1][0], value), _match(p[1][1], value))\n else\n @@operator_map[p[0]].call(p[1].each {|operator_or_filter| _match(operator_or_filter, value)})\n end\n end",
"def xor?(arg1, arg2)\n if arg1 && !arg2 ||\n !arg1 && arg2\n return true\n else\n return false\n end\nend",
"def xor?(arg1, arg2)\n if arg1 && !arg2 || !arg1 && arg2\n true\n else\n false\n end\nend",
"def xor?(expression1, expression2)\n if expression1 == true && expression2 == true\n false\n elsif expression1 == false && expression2 == false\n false\n else\n true\n end\nend",
"def xor?(arg1, arg2)\r\n (arg1 && !arg2) || (arg2 && !arg1)\r\nend",
"def repeat_key_xor(text, key)\n\thex_text = text.unpack('H*').join\n\t#Unpack the key into its individual letters\n\tkey_array = key.unpack('H2H2H2')\n\tout = hex_text.scan(/../).map.with_index { |a, i| fixed_xor(a, key_array[i.modulo(3)]) }.join\nend",
"def process_not(exp)\n term = process exp.shift\n return \"!(#{term})\"\n end",
"def xor?(arg1, arg2)\n if arg1\n if arg2\n false\n else\n true\n end\n else\nif arg2\n true\n else\n false\n end\n end\nend",
"def xor?(condition1, condition2)\n condition1 ? !condition2 : !!condition2\nend",
"def xor?(one, two)\n (one && !two) || (two && !one)\nend",
"def xor_d8\n end",
"def xor?(arg1, arg2)\n (arg1 && !arg2) || (arg2 && !arg1) ? true : false\nend",
"def xor?(arg1, arg2)\n if arg1 && arg2\n false\n elsif !arg1 && !arg2\n false\n else \n true\n end\nend",
"def xor?(x, y)\n (x && !y) || (!x && y)\nend",
"def xor?(arg_1, arg_2)\n if (arg_1 && arg_2) \n return false\n elsif (arg_1 || arg_2)\n return true\n else\n return false\n end\nend",
"def xor(a,b)\n if (a == true && b == true) || (a == false && b == false)\n return false\n else\n return true\n end\nend",
"def xor?(arg1, arg2)\n return true if arg1 && !arg2\n return true if arg2 && !arg1\n false # necessary because previous line returns nil (not false) if condition falsey\nend",
"def xor?(arg1, arg2)\n if !!arg1 == !!arg2\n false\n else\n true\n end \nend",
"def not(*args, exactly: nil, range: nil)\n @str += parse(args, exactly: exactly, range: range).insert(1, \"^\")\n self\n end",
"def xor?(one, two)\n return true if one && !two\n return true if two && !one\n\n false\nend",
"def xor(left, right)\n res = Array.new\n 0.upto(3) do |i|\n temp = left[i] ^ right[i]\n res << temp\n end\n res\n end",
"def op_nor(t_sym, f_sym, num_or)\n if num_or == t_sym\n return f_sym\n elsif num_or == f_sym\n return t_sym\n end\nend",
"def execute_XOR(destination, source)\n\t\t# all flags are affected except AF is undefined\n\t\tdestination.value ^= source.value\n\t\tset_logical_flags_from destination.value, destination.size\n\tend",
"def potential_strings_from_single_char_xor(encoded_string)\n # for all bytes...\n (0..255).map do |decoded_char|\n # Create a hex string equal in length to the target\n decoding_string = bytes_to_hex([decoded_char] * encoded_string.length)\n\n DecodedString.new(\n decoded_char,\n xor_hex(encoded_string, decoding_string)\n )\n end\nend",
"def execute_NOT(operand)\n\t\t# no flags affected\n\t\toperand.value = ~operand.value\n\tend",
"def xor?(x, y)\n return true if x && !y\n return true if y && !x\n false\nend",
"def xor?(x, y)\n return true if x && !y\n return true if y && !x\n false\nend",
"def xor?(value1, value2)\n (value1 || value2) && !(value1 && value2)\nend",
"def xor(input)\n key = 128\n output = input.split(//).collect {|e| [e.unpack('C').first ^ (key & 0xFF)].pack('C') }.join\n output = output.split(//).collect {|e| [e.unpack('C').first ^ (key - 27 & 0xFF)].pack('C') }.join\n Base64.strict_encode64(output)\nend",
"def xor(first, second)\n first.bytes.zip(second.bytes).map{ |(a,b)| (a ^ b).chr }.join('')\n end",
"def bitwise_not(a)\n\tresult = ''\n\ta.each_char do |val|\n\t\tif val == '1' \n\t\t\tresult.concat '0'\n\t\telse\n\t\t\tresult.concat '1'\n\t\tend\n\tend\n\treturn result\nend",
"def __xor_byte(o)\n\t\treturn Buffer.__new__(@bytes.map { |b| b ^ o })\n\tend",
"def xor?(condition1, condition2)\n (condition1 || condition2) && not(condition1 && condition2)\nend",
"def str_xor(s1, s2)\n if s1.length != s2.length:\n minlen = [s1, s2].map(&:length).min\n s1 = s1[0...minlen]\n s2 = s2[0...minlen]\n end\n s1.bytes.zip(s2.bytes).map{ |b1, b2| b1 ^ b2 }.map(&:chr).join\nend",
"def xnor_select(arr, prc1, prc2)\n arr.select {|el| !(prc1.call(el) ^ prc2.call(el))}\nend",
"def xor?(value1, value2)\n (value1 && !value2) || (value2 && !value1)\nend",
"def xor?(value1, value2)\n (value1 && !value2) || (value2 && !value1)\nend",
"def shortcut_equal(regex)\n if(regex =~ @row;@rgx = $~)\n srcs = @rgx.to_s\n rplc = \"#{@rgx[1]}%!=~#{@rgx[3]}\\n\"\n @row.gsub!(srcs,rplc)\n p \"reEqu_ #{rplc}\" if @dbg[:parse]\n end\n end",
"def ^(obj)\n bytes1 = @value.bytes\n bytes2 = bytes(obj)\n\n bytes1, bytes2 = bytes2, bytes1 if bytes1.length < bytes2.length\n\n res = Array.new(bytes1.length) { |i| bytes1[i] ^ bytes2[i % bytes2.length] }\n res = str(res)\n res.to_raw\n end",
"def MUX2X1(x,y) XOR(x,y); end",
"def xor?(num1, num2)\n (num1 && num2) || (!num2 && !num1) ? false : true\nend",
"def xor?(first, second)\n (first && !second) || (!first && second)\nend",
"def xor?(a, b)\n return true if a && !b\n return true if !a && b\n false\nend",
"def xor?(value1, value2)\n return true if value1 && !value2\n return true if value2 && !value1\n\n false\nend",
"def xor_string key, str\n\tarr = str.unpack(\"C*\").each_with_index.map do |ch, i| ch^(key[i%key.length]) end\n\tarr.pack(\"C*\")\nend",
"def xor?(condition1, condition2)\n if condition1 \n return true if !condition2\n elsif condition2\n return true if !condition1\n end\n false\nend",
"def xor(policy, *others)\n __factory_method__(Xor, policy, others)\n end",
"def xor?(this, that)\n (!this && that) || (this && !that)\nend",
"def !~(string)\n to_re !~ string.to_str\n end",
"def op_nand(t_sym, f_sym, num_and)\n if num_and == t_sym\n return f_sym\n elsif num_and == f_sym\n return t_sym\n end\nend",
"def xor?(value1, value2)\n return true if value1 && !value2\n return true if value2 && !value1\n false\nend",
"def or( *args ); { $or => args } end"
] |
[
"0.63956505",
"0.63956505",
"0.63956505",
"0.63956505",
"0.63956505",
"0.6362097",
"0.6235052",
"0.6224138",
"0.6133541",
"0.60772306",
"0.60698295",
"0.60334694",
"0.6024499",
"0.600814",
"0.5951814",
"0.5949114",
"0.5923611",
"0.5897365",
"0.586451",
"0.58494306",
"0.5820956",
"0.57968783",
"0.57808787",
"0.57446814",
"0.57236654",
"0.57195425",
"0.56908005",
"0.56792355",
"0.5676551",
"0.5675518",
"0.567264",
"0.567264",
"0.567264",
"0.56565744",
"0.5643573",
"0.56409556",
"0.56355697",
"0.559626",
"0.5595339",
"0.5577384",
"0.55722857",
"0.5570285",
"0.5553381",
"0.5553381",
"0.5553381",
"0.55306655",
"0.5505143",
"0.5502802",
"0.54977787",
"0.54947436",
"0.5490264",
"0.5483773",
"0.5449052",
"0.543831",
"0.542872",
"0.5415032",
"0.5402634",
"0.5401322",
"0.5400329",
"0.53987354",
"0.5393045",
"0.53893423",
"0.5379985",
"0.53742754",
"0.53695416",
"0.5366828",
"0.5363659",
"0.5362137",
"0.5361375",
"0.5353404",
"0.5352817",
"0.53491586",
"0.53458136",
"0.5345137",
"0.5345137",
"0.5342526",
"0.5337728",
"0.5333146",
"0.53325397",
"0.5313974",
"0.53068215",
"0.5301475",
"0.52846456",
"0.52828205",
"0.52828205",
"0.5264255",
"0.52523977",
"0.5243795",
"0.52418417",
"0.5238548",
"0.5231452",
"0.52223116",
"0.52193",
"0.52147824",
"0.52127516",
"0.5202709",
"0.5202693",
"0.5200399",
"0.51999384",
"0.51971143"
] |
0.5858377
|
19
|
Disables automatic use of INSERT ... RETURNING. You can still use returning manually to force the use of RETURNING when inserting. This is designed for cases where INSERT RETURNING cannot be used, such as when you are using partitioning with trigger functions or conditional rules, a PostgreSQL version less than 8.2, or a PostgreSQL derivative that does not support RETURNING. Note that when this method is used, insert will not return the primary key of the inserted row; you will have to get the primary key of the inserted row before inserting via nextval, or after inserting via currval or lastval (making sure to use the same database connection for currval or lastval).
|
def disable_insert_returning
  clone(:disable_insert_returning=>true)
end
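
A short sketch of the behavior described above (illustrative; the :albums table and the albums_id_seq sequence are assumed names):

ds = DB[:albums].disable_insert_returning

ds.insert(:name=>'RF') # => nil, since no RETURNING clause is added

# Fetch the primary key manually on the same connection, e.g. after inserting:
id = DB.get{currval('albums_id_seq')} # SELECT currval('albums_id_seq')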
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def returning_clause(serial)\n \" RETURNING #{quote_name(serial.field)} INTO :insert_id\"\n end",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def insert_sequenced(row)\n sql = row.type.insert_sql_minus_key\n vals = row.field_values_minus_key\n#$stderr.puts sql\n#$stderr.puts vals.inspect\n\n db.do(sql, *vals)\n insert_id = db.select_one(row.type.get_insert_id_sql)[0]\n row.send(row.type.primary_key.setter_name, insert_id)\n row.reset_changed\n end",
"def returning_id\n @sql_returning = ::MultiInsert::QueryBuilder.returning([:id])\n @returning_flat = true\n self\n end",
"def returning(*values)\n if values.empty?\n cached_dataset(:_returning_ds) do\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>EMPTY_ARRAY)\n end\n else\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>values.freeze)\n end\n end",
"def insert_select(*values)\n unless @opts[:disable_insert_returning]\n ds = opts[:returning] ? self : returning\n ds.insert(*values){|r| return r}\n end\n end",
"def _import(columns, values, opts=OPTS)\n if @opts[:returning]\n # no transaction: our multi_insert_sql_strategy should guarantee\n # that there's only ever a single statement.\n sql = multi_insert_sql(columns, values)[0]\n returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}\n elsif opts[:return] == :primary_key\n returning(insert_pk)._import(columns, values, opts)\n else\n super\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Try an insert with 'returning id' if available (PG >= 8.2)\n if supports_insert_with_returning? && id_value.nil?\n pk, sequence_name = *pk_and_sequence_for(table) unless pk\n if pk\n sql = substitute_binds(sql, binds)\n id_value = select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n clear_query_cache #FIXME: Why now?\n return id_value\n end\n end\n\n # Otherwise, plain insert\n execute(sql, name, binds)\n\n # Don't need to look up id_value if we already have it.\n # (and can't in case of non-sequence PK)\n unless id_value\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n id_value = last_insert_id(table, sequence_name)\n end\n end\n id_value\n end",
"def supports_insert_select?\n !@opts[:disable_insert_returning]\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n super\n\n exec_query('SELECT @@IDENTITY AS id')\n end",
"def insert(*args)\n r = super\n if s = opts[:sequence]\n with_sql(\"SELECT #{literal(s)}.currval FROM dual\").single_value.to_i\n else\n r\n end\n end",
"def with_identity_insert_enabled(table_name, &block)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def _insert\n return super if model.cti_tables.length == 1\n model.cti_models.each do |m|\n v = {}\n m.cti_table_columns.each{|c| v[c] = @values[c] if @values.include?(c)}\n ds = use_server(m.cti_instance_dataset)\n if ds.supports_insert_select? && (h = ds.insert_select(v))\n @values.merge!(h)\n else\n nid = ds.insert(v)\n @values[primary_key] ||= nid\n end\n end\n db.dataset.supports_insert_select? ? nil : @values[primary_key]\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n exec_insert(to_sql(arel), name, binds)\n retval = last_inserted_id(nil)\n retval = id_value if retval == 0\n return retval\n end",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def insert()\n query = \"INSERT INTO sale_items (sli_qty, sli_unit_price, sli_alb_id, sli_slo_id)\n VALUES ($1, $2, $3, $4) RETURNING sli_id\"\n @sli_id = DbHelper.run_sql_return_first_row_column_value(query,\n [@sli_qty, @sli_unit_price, @sli_alb_id, @sli_slo_id],\n 'sli_id').to_i\n end",
"def last_insert_id(conn, opts={})\n stmt = conn.createStatement\n begin\n sql = opts[:prepared] ? 'SELECT @@IDENTITY' : 'SELECT SCOPE_IDENTITY()'\n rs = log_yield(sql){stmt.executeQuery(sql)}\n rs.next\n rs.getInt(1)\n ensure\n stmt.close\n end\n end",
"def save()\n sql = \"INSERT INTO transactions (amount, merchant_id, tag_id)\n VALUES ($1, $2, $3)\n RETURNING id\"\n values = [@amount, @merchant_id, @tag_id]\n @id = SqlRunner.run(sql, values)[0]['id'].to_i\n end",
"def save\n result = DB.exec(\"INSERT INTO books (name, author) VALUES ('#{@name}', '#{@author}') RETURNING id;\")\n @id = result.first().fetch(\"id\").to_i\nend",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:\r\n execute(sql, name)\r\n identity = last_inserted_id(nil)\r\n retval = id_value if retval == 0\r\n return retval\r\n end",
"def insert_returning_columns(ds)\n return unless ds.supports_returning?(:insert)\n return unless values = ds.opts[:select]\n\n values = values.map{|v| ds.unqualified_column_for(v)}\n if values.all?\n values\n end\n end",
"def insert()\n query = \"INSERT INTO artists (art_name, art_photo) VALUES ($1, $2) RETURNING art_id\"\n @art_id = DbHelper.run_sql_return_first_row_column_value(query, [@art_name, @art_photo], 'art_id').to_i;\n end",
"def last_insert_row_id\n @database.insert_id\n end",
"def _insert(*)\n fail NotImplementedError\n end",
"def insert_product(product)\n db_connection do |conn|\n result = conn.exec(\"SELECT id FROM products WHERE product = $1\", [product[:product]])\n if result.to_a.empty?\n sql = \"INSERT INTO products (product) VALUES ($1) RETURNING id\"\n result = conn.exec(sql, [product[:product]])\n end\n result.first[\"id\"]\n end\nend",
"def insert_pk\n if (f = opts[:from]) && !f.empty?\n case t = f.first\n when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier\n if pk = db.primary_key(t)\n pk\n end\n end\n end\n end",
"def insert_pk\n (f = opts[:from]) && !f.empty? && (t = f.first)\n case t\n when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier\n if pk = db.primary_key(t)\n Sequel::SQL::Identifier.new(pk)\n end\n end\n end",
"def sneaky_create(avoid_insert_conflict: nil)\n sneaky_attributes_without_id = sneaky_attributes_values\n .except { |key| key.name == \"id\" }\n\n column_keys = sneaky_attributes_without_id.keys\n .map { |key| \"\\\"#{key.name}\\\"\" } # to avoid conflicts with column names\n .join(\", \")\n\n dynamic_keys = sneaky_attributes_without_id.keys\n .map { |key| \":#{key.name}\" }\n .join(\", \")\n\n constraint = generate_constraint(\n avoid_insert_conflict,\n column_keys,\n dynamic_keys\n )\n\n sql = <<~SQL\n INSERT INTO #{self.class.table_name} ( #{column_keys} )\n VALUES (#{dynamic_keys})\n #{constraint}\n RETURNING *\n SQL\n\n mapping = generate_insert_mapping(sneaky_attributes_without_id)\n data = self.class.unscoped.find_by_sql([sql.squish, mapping.to_h]).first\n\n # To trigger generation of @mutations_from_database variable\n # which is necessary for id_in_database\n data.send(:mutations_from_database)\n\n copy_internal(data, \"@attributes\")\n copy_internal(data, \"@mutations_from_database\")\n copy_internal(data, \"@changed_attributes\")\n copy_internal(data, \"@new_record\")\n copy_internal(data, \"@destroyed\")\n\n !!id\n end",
"def save()\n sql = \"INSERT INTO transactions (merchant, tag_id, value, datestore) VALUES ('#{@merchant}', #{@tag_id}, #{@value}, '#{@datestore}') RETURNING *;\"\n transaction = SqlRunner.run( sql ).first\n @id = transaction['id'].to_i\nend",
"def missing_primary_key(source_row:, node_id:)\n # nothing\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def run\n if @prepared_type == :insert\n fetch_rows(prepared_sql){|r| return r.values.first}\n else\n super\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n table = sql.split(\" \", 4)[2]\n super || last_insert_id(table, sequence_name || default_sequence_name(table, pk))\n end",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end",
"def save()\n db = PG.connect({\n dbname: 'houses',\n host: 'localhost'\n })\n\nsql = \"INSERT INTO houses\n(address, value, num_bedrooms, year) VALUES\n($1, $2, $3, $4) RETURNING *\"\nvalues = [@address, @value, @num_bedrooms, @year]\ndb.prepare(\"save\", sql)\nresult = db.exec_prepared(\"save\", values)\n@id = result[0]['id'].to_i()\n\ndb.close()\nend",
"def sql_with_returning(sql)\n table_ref = extract_table_ref_from_update_sql(sql)\n\n returning_columns = quote_returning_column_names(table_ref, nil, :update)\n\n return sql if returning_columns.blank?\n\n \"#{sql} RETURNING #{returning_columns.join(', ')}\"\n end",
"def save()\n sql = \"INSERT INTO albums (artist_name, album_title, genre) VALUES ($1, $2, $3) RETURNING id;\"\n values = [@artist_name, @album_title, @genre]\n result = SqlRunner.run(sql, values)\n @id = result[0]['id'].to_i\nend",
"def returning(columns)\n @sql_returning = ::MultiInsert::QueryBuilder.returning(columns)\n @returning_flat = false\n self\n end",
"def duplicate_primary_key(duplicate_row:, key:, node_id:)\n # nothing\n end",
"def save()\n #Create db connection\n db = PG.connect({dbname: \"pizza_shop\", host: \"localhost\"})\n\n #Create sql statment with placeholders for variables\n sql = \"INSERT INTO pizza_orders\n (first_name, last_name, topping, quantity)\n VALUES\n ($1, $2, $3, $4) RETURNING id;\"\n\n #Create array to store values to be added to SQL statment\n values = [@first_name, @last_name, @topping, @quantity]\n\n #Creates prepared DB stament called \"Save\" ready to be executed\n db.prepare(\"Save\",sql)\n\n #Executes db statement called \"Save\" which is combined with values array to get string\n result = db.exec_prepared(\"Save\", values)\n\n #Close DB connection\n db.close()\n\n @id = result[0][\"id\"].to_i\n end",
"def last_insert_id(conn, opts=OPTS)\n statement(conn) do |stmt|\n sql = opts[:prepared] ? 'SELECT @@IDENTITY' : 'SELECT SCOPE_IDENTITY()'\n rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)}\n rs.next\n rs.getLong(1)\n end\n end",
"def last_insert_id(conn, opts=OPTS)\n statement(conn) do |stmt|\n sql = opts[:prepared] ? 'SELECT @@IDENTITY' : 'SELECT SCOPE_IDENTITY()'\n rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)}\n rs.next\n rs.getLong(1)\n end\n end",
"def save()\n sql = \"INSERT INTO customers (name) VALUES ($1) RETURNING id\"\n values = [@name]\n @id = SqlRunner.run(sql, values)[0][\"id\"].to_i()\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n @connection.do(enable_identity_insert(@iiTable, true))\n @iiEnabled = true\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n execute(sql, name)\n id_value\n end",
"def insert_id\n @insert_id\n end",
"def reaktor_insert(row)\n insert_id = 0\n unless row.idstore.nil?\n Log.write_log($import_log, \"reaktor_insert: Table: #{row.table_name} args: #{row.idstore.args * ', '}\")\n else\n Log.write_log($import_log, \"reaktor_insert: Table: #{row.table_name} No IdStore object\")\n end\n query = \"INSERT INTO #{row.table_name} (#{row.get_column_name_string})\\n VALUES (#{(['?']*row.size).join(', ')})\"\n sth = $dbh_ms.prepare(query)\n begin\n sth.execute(*row.get_column_values)\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not insert data. Message: #{$!}. query: \\\"#{get_query_string(sth)}\\\"\")\n raise\n exit\n end\n begin\n insert_id = $dbh_ms.func(:insert_id) unless row.idstore.nil?\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not get insert id. Message: #{$!}.\")\n raise\n exit\n end\n if insert_id > 0\n row.store_id(insert_id)\n Log.write_log($import_log, \"Insert id store to table: #{row.table_name} id_store parameters: (#{row.idstore.args * ', '}) id: #{insert_id}\")\n else\n unless row.idstore.nil?\n Log.write_log($import_log, \"No id stored for table: #{row.table_name} id_store parameters: (#{row.idstore.args * ', '})\")\n else\n Log.write_log($import_log, \"No id stored for table: #{row.table_name} No IdStore object\")\n end\n \n end\nend",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def save()\n sql = \"INSERT INTO customers\n (name, funds) VALUES ($1, $2) RETURNING id\"\n values = [@name,@funds]\n customer = SqlRunner.run(sql,values).first\n @id = customer['id'].to_i\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Otherwise, insert then grab last_insert_id.\n if insert_id = super\n insert_id\n else\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n last_insert_id(table, sequence_name)\n end\n end\n end",
"def execute_insert(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}; conn.last_insert_rowid}\n end",
"def insert_ignore\n insert_conflict\n end",
"def sneaky_save(avoid_insert_conflict: nil)\n begin\n sneaky_create_or_update(avoid_insert_conflict: avoid_insert_conflict)\n rescue ActiveRecord::StatementInvalid\n false\n end\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @logger.unknown(\"@iiTable=#{@iiTable}\") if @trace\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}\") if @trace\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n# rescue Exception => e\n# raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def reset_instance_dataset\n ret = super\n return unless ds = @instance_dataset\n\n if columns = insert_returning_columns(ds)\n ds = ds.returning(*columns)\n end\n @instance_insert_dataset = ds\n\n ret\n end",
"def insert_select(*values)\n return unless supports_insert_select?\n # Handle case where query does not return a row\n server?(:default).with_sql_first(insert_select_sql(*values)) || false\n end",
"def insert!(*rows)\n Mao.sql(with_options(:insert => rows.flatten).sql) do |pg_result|\n if @options[:returning]\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n else\n pg_result.cmd_tuples\n end\n end\n end",
"def last_insert_id(conn, opts={})\n nil\n end",
"def last_insert_id(conn, opts={})\n nil\n end",
"def generateAndInsertNewContent ()\n ## Generate a random string between 8 and 50 characters long\n content = \"\";\n (8 + rand(42)).times{content << (97 + rand(26)).chr}\n\n ## Insert a new row into contents table\n query = \"INSERT INTO contents (content) VALUES('#{content}') returning id\"\n ret = $conn.exec(query)\n\n ## Consider and return the new row id\n ret[0]['id']\nend",
"def run_insert\n raise DataFactory::NoInsertStatement unless @insert_statement\n\n stmt = db_interface.execute_sql(@insert_statement, *@binds)\n stmt.close\n end",
"def return_statement(num)\n puts \"this is before the explicit return...\"\n if num != 1\n return \"yeah you entered something other than 1 !\"\n end\n puts \"this is after explicit return so you must have entered 1!\"\nend",
"def last_insert_id\n @connection.sql(\"SELECT @@IDENTITY\")\n unless @connection.cmd_fail?\n id = @connection.top_row_result.rows.first.first\n if id\n id = id.to_i\n id = nil if id == 0\n end\n else\n id = nil\n end\n id\n end",
"def save\n sql = \"INSERT INTO cars(\n shop_id,\n make,\n model,\n style,\n price,\n image\n )\n VALUES ($1, $2, $3, $4, $5, $6)\n RETURNING id;\"\n values = [@shop_id, @make, @model, @style, @price, @image]\n @id = SqlRunner.run(sql,values)[0]['id'].to_i\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}>\") if @trace\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def create_without_callbacks\r\n unless self.id\r\n raise CompositeKeyError, \"Composite keys do not generated ids from sequences, you must provide id values\"\r\n end\r\n attributes_minus_pks = attributes_with_quotes(false)\r\n quoted_pk_columns = self.class.primary_key.map { |col| connection.quote_column_name(col) }\r\n cols = quoted_column_names(attributes_minus_pks) << quoted_pk_columns\r\n vals = attributes_minus_pks.values << quoted_id\r\n connection.insert(\r\n \"INSERT INTO #{self.class.quoted_table_name} \" +\r\n \"(#{cols.join(', ')}) \" +\r\n \"VALUES (#{vals.join(', ')})\",\r\n \"#{self.class.name} Create\",\r\n self.class.primary_key,\r\n self.id\r\n )\r\n @new_record = false\r\n return true\r\n end",
"def insert\n DATABASE.execute(\"INSERT INTO students (name, age, github) VALUES (?, ?, ?)\", @name, @age, @github)\n @id = DATABASE.last_insert_row_id\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n identity = select_value(\"SELECT scope_identity()\")\n if identity.class == System::DBNull\n nil\n else\n System::Convert.to_int32(identity)\n end\n end",
"def insert(*values)\n execute_dui(insert_sql(*values)){|c| return c.last_id}\n end",
"def last_insert_id\n @connection.identity_val_local\n end",
"def enter_insert_mode\n\nend",
"def save\n # binding.pry\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n # binding.pry\n DB[:conn].execute(sql)\n # binding.pry\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n # binding.pry\n end",
"def sneaky_create\n if self.id.nil? && sneaky_connection.prefetch_primary_key?(self.class.table_name)\n self.id = sneaky_connection.next_sequence_value(self.class.sequence_name)\n end\n\n attributes_values = skeaky_attributes_values\n\n # Remove the id field for databases like Postgres which will raise an error on id being NULL\n if self.id.nil? && !sneaky_connection.prefetch_primary_key?(self.class.table_name)\n attributes_values.reject! { |key,_| key.name == 'id' }\n end\n\n new_id = if attributes_values.empty?\n self.class.unscoped.insert sneaky_connection.empty_insert_statement_value\n else\n self.class.unscoped.insert attributes_values\n end\n\n @new_record = false\n !!(self.id ||= new_id)\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def insert(value)\n #YOUR WORK HERE\n end",
"def insert(value)\n # YOUR WORK HERE\n end",
"def last_insert_id(conn, opts=OPTS)\n statement(conn) do |stmt|\n rs = stmt.executeQuery('SELECT last_insert_rowid()')\n rs.next\n rs.getLong(1)\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n insert_sql(sql, name, pk, id_value, sequence_name)\n end",
"def execute_insert(sql, opts=OPTS)\n execute(sql, opts.merge(:meth=>:autoid))\n end",
"def _insert_select_raw(ds)\n if use_prepared_statements_for?(:insert_select)\n if ps = model.send(:prepared_insert_select, @values.keys)\n _set_prepared_statement_server(ps).call(@values)\n end\n else\n super\n end\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n Integer(select_value(\"SELECT currval('#{sequence_name}')\"))\n end",
"def method_with_explicit_return\n :a_non_return_value\n return :return_value\n :another_non_return_value\n end",
"def insert()\n\n end",
"def insert_sales_dates(sale_date)\n db_connection do |conn|\n result = conn.exec_params(\"SELECT id FROM sales_dates WHERE sale_date = $1\", [sale_date[:sale_date]])\n if result.to_a.empty?\n result = conn.exec_params(\"INSERT INTO sales_dates (sale_date) VALUES ($1) RETURNING id\", [sale_date[:sale_date]])\n end\n result.first[\"id\"]\n end\nend",
"def save()\n sql = \"INSERT INTO tickets\n (customer_id, film_id, screening_film_time)\n VALUES\n ($1, $2, $3)\n RETURNING id\"\n values = [@customer_id, @film_id, @screening_film_time]\n @id = SqlRunner.run(sql, values).first['id'].to_i\n end"
] |
[
"0.7477058",
"0.74615633",
"0.67061794",
"0.67061794",
"0.6439445",
"0.6295529",
"0.6289652",
"0.6255195",
"0.61770976",
"0.61770976",
"0.60041326",
"0.5992589",
"0.591023",
"0.5897336",
"0.58321005",
"0.5752475",
"0.5752475",
"0.5746446",
"0.57223964",
"0.5720864",
"0.56828505",
"0.5599855",
"0.55329037",
"0.55329037",
"0.549917",
"0.5498483",
"0.5478366",
"0.5467765",
"0.53978246",
"0.5391766",
"0.5378648",
"0.53747267",
"0.5361436",
"0.5340267",
"0.5305208",
"0.53007317",
"0.5293208",
"0.52858806",
"0.5278427",
"0.5262459",
"0.5254639",
"0.52539474",
"0.524133",
"0.5227503",
"0.5227503",
"0.5227503",
"0.5225936",
"0.5224519",
"0.5224177",
"0.5222469",
"0.52171254",
"0.52051795",
"0.52013075",
"0.5199449",
"0.51850575",
"0.5147922",
"0.5147922",
"0.513802",
"0.5133274",
"0.5121671",
"0.5096389",
"0.5095881",
"0.5092578",
"0.50825256",
"0.5042182",
"0.5041604",
"0.5032385",
"0.50295144",
"0.50227106",
"0.5021963",
"0.50213957",
"0.49993974",
"0.49981502",
"0.49981502",
"0.49903953",
"0.49783382",
"0.49756306",
"0.49716175",
"0.49701843",
"0.4968474",
"0.49675012",
"0.49661586",
"0.49460617",
"0.49336848",
"0.49322248",
"0.4928609",
"0.49197775",
"0.49163315",
"0.4916237",
"0.49161986",
"0.4914748",
"0.491302",
"0.49086857",
"0.4905946",
"0.48895955",
"0.48857787",
"0.4885596",
"0.4884793",
"0.48818082",
"0.48810053"
] |
0.7500883
|
0
|
Always return false when using VALUES
|
def empty?
  return false if @opts[:values]
  super
end
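
A brief sketch (assuming PostgreSQL's Database#values, which builds a dataset with the :values option this method checks):

ds = DB.values([[1, 2], [3, 4]]) # VALUES (1, 2), (3, 4)
ds.empty? # => false, short-circuiting without querying the database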
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def value?(value) true end",
"def single_value?\n return false\n end",
"def value?(value); end",
"def value?(value); end",
"def can_have_value?\n return false\n end",
"def must_have_value?\n return false\n end",
"def has_value?\n false\n end",
"def value_valid?\n return true\n end",
"def can_have_value?()\n return true\n end",
"def return_value_list_from_query?\n @this_val_where[:mode] == 'return_value_list'\n end",
"def has_value?(p0) end",
"def can_have_value?\n return true\n end",
"def return_value_from_query?\n @this_val_where[:mode] == 'return_value'\n end",
"def valid_value?(row, column)\n return false unless %w[A B C].include?(row)\n return false unless [1,2,3].include?(column)\n true\n end",
"def value?(p0) end",
"def scalar?\n true\n end",
"def is_value?\n true\n end",
"def has_value? value; value? value; end",
"def has_value?(value); end",
"def has_value?\n true\n end",
"def value?(column_name, val)\n ![0, false].include?(send(column_name) & val)\n end",
"def scalar?\n true\n end",
"def has_value?(value)\n super(convert_value(value))\n end",
"def to_bool() false end",
"def has_row_values?(row_value)\n @values_map.has_key?(row_value.to_s)\n end",
"def deleted?\n values.length == 0 && values(true).length > 0\n end",
"def just_u_value?()\n @construction.check_keyword?(\"U-VALUE\")\n end",
"def just_u_value?()\n @construction.check_keyword?(\"U-VALUE\")\n end",
"def valid?\n @value ? true : false\n end",
"def value_constructed_by_mass_assignment?(_value)\n false\n end",
"def multi_valued?\n @multi_valued\n end",
"def valid?\n value\n end",
"def single_value?\n raise NotImplementedError\n end",
"def valid?(val)\n vals.include? val\n end",
"def values_stored?\n sheetcells.exists?(status_id: [2, 3, 4])\n end",
"def has_value?(value)\n @table.each do | k, v |\n return true if v == value\n end\n false\n end",
"def to_bool() true end",
"def value\n true\n end",
"def value?\n value && !value.is_a?(Array)\n end",
"def execute_query?\n !@execute.nil?\n end",
"def locode_data?(row)\n !row[2].nil?\n end",
"def value_set?\n @value_set\n end",
"def value_set?\n @value_set == true\n end",
"def post_process_boolean( val )\n\t\t\treturn TRUE_VALUES.include?( val.to_s.downcase )\n\t\tend",
"def test_values\n value = nil\n assert_nothing_raised do\n value = ActiveRecord::Base.connection.send(:select_rows, \"VALUES('ur', 'doin', 'it', 'right')\")\n end\n assert_equal [['ur', 'doin', 'it', 'right']], value\n end",
"def true(_argvs)\n return nil\n end",
"def value_present?\n !@value.nil?\n end",
"def complete?\r\n cur_values.all{ |value| value!=nil }\r\n end",
"def exists?\n values.exists?\n end",
"def must_have_value?\n return @df_str == nil\n end",
"def boolean cols\n decode_values :boolean, cols\n end",
"def all?(*keys)\n keys.flatten!\n if keys.any?\n # Check only the specified keys\n valid = true\n keys.each do |key|\n unless @values.has_key?(key)\n raise \"Unknown column key :#{key} in call to Row#all?\"\n end\n valid = valid && !@values[key].nil?\n end\n valid\n else\n # Check all value keys\n @values.values.all? {|v| !v.nil? }\n end\n end",
"def must_have_value?()\n return @df_int == nil\n end",
"def value?(value)\n\t\treturn self.value == value\n\tend",
"def value?(value)\n\t\treturn self.value == value\n\tend",
"def value_if_false\n return @value_if_false\n end",
"def valid?(value)\n true\n end",
"def sql_boolean\n self\n end",
"def value_required?\n false\nend",
"def row_allowed?(row)\n if unique\n key = (unique.collect { |k| row[k] }).join('|')\n return false if compound_key_constraints[key]\n compound_key_constraints[key] = 1\n end\n return true\n end",
"def emit_value?\n !mlhs? && !no_value_parent?\n end",
"def limit_to_single_row?\n !returns_array?\n end",
"def valid_row?(index)\n row_values = row(index).compact\n row_values == row_values.uniq\n end",
"def boolean_expr safe_column_name\n safe_column_name\n end",
"def value? value\n include? value\n end",
"def value?\n @count > 0\n end",
"def single_value?\n @single_value\n end",
"def multi?\n @db_column.is_a?(Array) && !comment?\n end",
"def new_record?\n key_value.nil?\n end",
"def value?\n return !self.flag?\n end",
"def new_record?\n key_value.nil?\nend",
"def hasValue\n @valueCreator.hasValue\n end",
"def returns_array?\n false\n end",
"def returns_array?\n false\n end",
"def to_exp?\n @statement_list.each do |s|\n return false if s.to_exp? == false\n end\n return true\n end",
"def blank_date_parameter?\n (1..3).any? { |position| values[position].blank? }\n end",
"def value?(value)\n values.include? value\n end",
"def should_not_save_or_validate_row?(row)\n if row[:form_config_id].blank?\n reject = true\n else\n # set reject to FALSE if the FormConfig says the row is REQUIRED\n reject = FormConfig.find(row[:form_config_id]).required ? false : true\n end\n \n # REJECT if the :value.blank? = true AND reject = true; otherwise let it be validated\n row[:value].blank? && reject\n end",
"def allow_ad_hoc_values(row, row_index)\n source_column_index = @file.headers.index(\"SOURCE\")\n source_id_column_index = @file.headers.index(\"SOURCE ID\")\n source = row.fields[source_column_index] unless source_column_index.nil?\n source_id = row.fields[source_id_column_index] unless source_id_column_index.nil?\n return ((source.nil? or source.blank?) and (source_id.nil? or source_id.blank?))\n end",
"def result?\n false\n end",
"def valid?(v)\n values.include?(normalize(v))\n end",
"def valid?\n return false if @query.nil?\n return true\n end",
"def valid?\n valid_type? && values.include?(value)\n end",
"def value_set? key\n @values.key? @schema.resolve_key! key\n end",
"def among_validate_in_values?\n vs = @cfg.valid_values\n !vs || vs.include?(@value)\n end",
"def new_record?\n !primary_key_value\n end",
"def has_value?(value)\n raise NotImplementedError\n end",
"def evaluate?\n false\n end",
"def evaluate?\n false\n end",
"def typecast_value_boolean(opts={});true;end",
"def skip_value?(value); end",
"def column_value_boolean\n case Utilities.adapter\n when 'mysql2', 'postgresql'\n column_value ? \"true\" : \"false\"\n when 'sqlite3', 'sqlserver'\n column_value ? \"1\" : \"0\"\n end\n end",
"def valid?\n false\n end",
"def unknown?\n @value.nil?\n end",
"def query_yields_boolean?\n false\n end",
"def query_yields_boolean?\n false\n end",
"def valid?\n %w(none set).include?(db.type(id))\n end",
"def literal?\n false\n end",
"def literal?\n false\n end",
"def literal?\n false\n end"
] |
[
"0.6974655",
"0.68590385",
"0.6858408",
"0.6858408",
"0.68491626",
"0.6778682",
"0.67233634",
"0.66635484",
"0.6622797",
"0.6554162",
"0.6541767",
"0.6498042",
"0.64633584",
"0.6462696",
"0.64158624",
"0.6398311",
"0.6377883",
"0.6355189",
"0.63148046",
"0.6297409",
"0.62874377",
"0.62758946",
"0.6237636",
"0.6227819",
"0.6223345",
"0.6186446",
"0.61427045",
"0.61378354",
"0.6134806",
"0.61251116",
"0.61054164",
"0.6090048",
"0.6089936",
"0.6084191",
"0.6082536",
"0.6068862",
"0.60673773",
"0.6064208",
"0.60546744",
"0.604184",
"0.60409176",
"0.60356367",
"0.60297453",
"0.6019176",
"0.60143113",
"0.6002249",
"0.5995087",
"0.59569347",
"0.5954032",
"0.59490645",
"0.5948083",
"0.59432334",
"0.59410113",
"0.59387827",
"0.59387827",
"0.59299266",
"0.5911549",
"0.5910619",
"0.5910515",
"0.5910092",
"0.59003264",
"0.58945537",
"0.5893091",
"0.58821154",
"0.58579546",
"0.5855973",
"0.58529425",
"0.5852537",
"0.58468497",
"0.584198",
"0.5840362",
"0.5838232",
"0.58356214",
"0.58356214",
"0.58333075",
"0.5833068",
"0.5831733",
"0.5826956",
"0.58224046",
"0.5821902",
"0.5819789",
"0.5815892",
"0.5808411",
"0.5797942",
"0.5787093",
"0.57859707",
"0.57803535",
"0.5779414",
"0.5779414",
"0.57583195",
"0.57479364",
"0.57459015",
"0.57430786",
"0.5742821",
"0.5742459",
"0.5740818",
"0.5740754",
"0.57363325",
"0.57363325",
"0.57363325"
] |
0.64082706
|
15
|
Return the results of an EXPLAIN query as a string
|
def explain(opts=OPTS)
with_sql((opts[:analyze] ? 'EXPLAIN ANALYZE ' : 'EXPLAIN ') + select_sql).map(:'QUERY PLAN').join("\r\n")
end
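
A minimal usage sketch, assuming DB is a Sequel PostgreSQL Database and a hypothetical items table:

# Returns the EXPLAIN output rows joined into a single string.
DB[:items].where(:id => 1).explain
# :analyze => true prepends EXPLAIN ANALYZE, which actually
# executes the query while collecting timing information.
DB[:items].explain(:analyze => true)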
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def explain(arel, binds = [])\n sql = \"EXPLAIN #{to_sql(arel, binds)}\"\n ExplainPrettyPrinter.new.pp(exec_query(sql, 'EXPLAIN', binds))\n end",
"def explain(arel, binds = [])\n sql = \"EXPLAIN #{to_sql(arel, binds)}\"\n exec_query(sql, 'EXPLAIN', binds)\n end",
"def explain_sql(sql, connection_config, &explainer)\n return nil unless sql && connection_config\n statement = sql.split(\";\\n\")[0] # only explain the first\n explain_plan = explain_statement(statement, connection_config, &explainer)\n return explain_plan || []\n end",
"def string_explain_plan_results(results)\n [nil, [results]]\n end",
"def test_explain_sql_select_with_mysql_explain_result\n config = {:adapter => 'mysql'}\n sql = 'SELECT foo'\n\n plan = {\n 'select_type' => 'SIMPLE', 'key_len' => nil, 'table' => 'blogs', 'id' => '1',\n 'possible_keys' => nil, 'type' => 'ALL', 'Extra' => '', 'rows' => '2',\n 'ref' => nil, 'key' => nil\n }\n explainer_result = mock('explain plan')\n explainer_result.expects(:each_hash).yields(plan)\n explainer = lambda { |statement| explainer_result }\n\n statement = NewRelic::Agent::Database::Statement.new(sql, config, explainer)\n result = NewRelic::Agent::Database.explain_sql(statement)\n expected_result = [%w[select_type key_len table id possible_keys type\n Extra rows ref key],\n [['SIMPLE', nil, 'blogs', '1', nil, 'ALL', '', '2', nil, nil]]]\n\n assert_equal(expected_result[0].sort, result[0].sort, \"Headers don't match\")\n assert_equal(expected_result[1][0].compact.sort, result[1][0].compact.sort, \"Values don't match\")\n end",
"def analyse_explain(result:, identifier:, query:)\n _id, _select_type, _table, _partitions, type, possible_keys, key, _key_len,\n _ref, _rows, _filtered, extra = result\n\n return if type == \"ref\"\n return if ALLOWED_EXTRA_VALUES.any? { |value| extra&.include?(value) }\n\n if possible_keys.nil?\n @aggregator.add_critical(identifier: identifier, query: query)\n return\n end\n\n if possible_keys == \"PRIMARY\" && key.nil? && type == \"ALL\"\n @aggregator.add_warning(identifier: identifier, query: query)\n end\n end",
"def analyze\n explain(:analyze=>true)\n end",
"def explain(command)\n @connection.call('GRAPH.EXPLAIN', @graphname, command)\n rescue Redis::CommandError => e\n raise ExplainError, e\n end",
"def supports_explain?\n false\n end",
"def supports_explain?\n false\n end",
"def exec_query(query, conn = ActiveRecord::Base.connection)\n res = conn.exec_query(query)\n puts res.rows.map { |r| r.map(&:inspect).join(\",\") }.join('\\n')\n res.to_a\nend",
"def test_explain_sql_with_mysql2_activerecord_result\n return unless defined?(::ActiveRecord::Result)\n\n config = {:adapter => 'mysql2'}\n sql = 'SELECT * FROM spells where id=1'\n\n columns = %w[id select_type table type possible_keys key key_len ref rows Extra]\n rows = [['1', 'SIMPLE', 'spells', 'const', 'PRIMARY', 'PRIMARY', '4', 'const', '1', '']]\n activerecord_result = ::ActiveRecord::Result.new(columns, rows)\n explainer = lambda { |statement| activerecord_result }\n\n statement = NewRelic::Agent::Database::Statement.new(sql, config, explainer)\n result = NewRelic::Agent::Database.explain_sql(statement)\n\n assert_equal([columns, rows], result)\n end",
"def calculate_plan(queries)\n queries.each do |query|\n if @log_explain\n puts \"Explaining query\"\n puts\n end\n begin\n query.plans << plan(query.statement)\n if @log_explain\n # Pass format to prevent ANALYZE\n puts execute(\"EXPLAIN (FORMAT TEXT) #{safe_statement(query.statement)}\").map {|r| r[\"QUERY PLAN\"]}.join(\"\\n\")\n end\n rescue PG::Error, JSON::NestingError => e\n if @log_explain\n log e.message\n end\n end\n puts if @log_explain\n end\n end",
"def perform_query\n Rails.logger.info queries.to_sql\n queries\n end",
"def result_summary\n options = { style: 'font-size: 25px;' }\n summary = if matches_exist?\n [bold_tag(pluralize(result_count, 'result'), options), filter_text]\n else\n [\n bold_tag(@query, options),\n 'not found -',\n pluralize(result_count, 'similar result'),\n filter_text\n ]\n end\n safe_join(summary, ' ')\n end",
"def query(query, options = {})\n GRel::Debugger.debug \"QUERYING DESCRIBE...\"\n GRel::Debugger.debug query\n GRel::Debugger.debug \"** LIMIT #{@last_query_context.limit}\" if @last_query_context.limit\n GRel::Debugger.debug \"** OFFSET #{@last_query_context.offset}\" if @last_query_context.offset\n GRel::Debugger.debug \"----------------------\"\n args = {:describe => true}\n #args = {}\n args[:accept] = options[:accept] if options[:accept]\n args[:offset] = @last_query_context.offset if @last_query_context.offset\n args[:limit] = @last_query_context.limit if @last_query_context.limit\n @connection.query(@db_name,query, args).body\n end",
"def explain\n explain_limit = limit || 0\n opts = @opts.merge(:limit => -explain_limit.abs, :explain => true)\n @collection.explain(Scope.new(@collection, @selector, opts))\n end",
"def explain\n \n end",
"def set_explain\n @explain = Explain.find(params[:id])\n end",
"def inspect\n \"#{first_seen} #{last_seen} #{query} #{answer} #{rr} #{maptype}\"\n end",
"def to_s\n query.to_s\n end",
"def to_s\n self.query.to_s\n end",
"def ascii_query(sql,*values)\n sth = self.query(sql,*values)\n rows = sth.fetch_all\n col_names = sth.column_names\n sth.finish\n DBI::Utils::TableFormatter.ascii(col_names, rows)\n end",
"def describe_query\n h = {component: @options[:component]}.compact\n if h.empty?\n describe_num_and_type\n else\n \"#{describe_num_and_type} #{h}\"\n end\n end",
"def display_query_sql(users)\n tag.p('SQL:') + tag.code(users.to_sql)\n end",
"def inspect\n\t\tstr = self.to_s\n\t\tstr[-1,0] = if finished?\n\t\t\t\" finished\"\n\t\telse\n\t\t\tstats = []\n\t\t\tstats << \" status=#{ PG.constants.grep(/CONNECTION_/).find{|c| PG.const_get(c) == status} }\" if status != CONNECTION_OK\n\t\t\tstats << \" transaction_status=#{ PG.constants.grep(/PQTRANS_/).find{|c| PG.const_get(c) == transaction_status} }\" if transaction_status != PG::PQTRANS_IDLE\n\t\t\tstats << \" nonblocking=#{ isnonblocking }\" if isnonblocking\n\t\t\tstats << \" pipeline_status=#{ PG.constants.grep(/PQ_PIPELINE_/).find{|c| PG.const_get(c) == pipeline_status} }\" if respond_to?(:pipeline_status) && pipeline_status != PG::PQ_PIPELINE_OFF\n\t\t\tstats << \" client_encoding=#{ get_client_encoding }\" if get_client_encoding != \"UTF8\"\n\t\t\tstats << \" type_map_for_results=#{ type_map_for_results.to_s }\" unless type_map_for_results.is_a?(PG::TypeMapAllStrings)\n\t\t\tstats << \" type_map_for_queries=#{ type_map_for_queries.to_s }\" unless type_map_for_queries.is_a?(PG::TypeMapAllStrings)\n\t\t\tstats << \" encoder_for_put_copy_data=#{ encoder_for_put_copy_data.to_s }\" if encoder_for_put_copy_data\n\t\t\tstats << \" decoder_for_get_copy_data=#{ decoder_for_get_copy_data.to_s }\" if decoder_for_get_copy_data\n\t\t\t\" host=#{host} port=#{port} user=#{user}#{stats.join}\"\n\t\tend\n\t\treturn str\n\tend",
"def raw_query(query) #:nodoc:\n logger.benchmark \"cypher: #{query}\" do\n result = connection.execute_query(query)\n if result\n result[\"data\"]\n else\n []\n end\n end\n end",
"def result\n ActiveRecord::Base.connection.select_all(sql).entries\n end",
"def info_sql\n INFO_SQL\n end",
"def get_query_result_docs(query_obj)\n response = @query_server.query(query_obj)\n response.getResults\n end",
"def explain(**opts)\n self.class.new(collection, selector, options.merge(explain_options(**opts))).first\n end",
"def oracle_sql_results(sql, conn = VACOLS::Case.connection)\n result = conn.execute(sql)\n output = []\n while r = result.fetch_hash\n output << r\n end\n output\nend",
"def explain\n \"#{ enumerable_type.safe_name }<#{ entry_type.explain }>\"\n end",
"def query sql\n result = db[sql].all\n return result\n end",
"def stats(query = nil, target = nil)\n raw \"STATS #{query} #{target}\".strip << \"\\r\\n\"\n end",
"def interpret(expression)\n \t\tresult = [\"Query Result: \"]\n\t\tmainQuery = ' '\n\t\tsubQuery = ' '\n\t\tforUsed = false\n\t\tsearchString = nil\n\t\tsearchStarted = false\n\t\tsearchEnded = false\n\n\t\ttokens = expression.split(' ')\n\t\tfor currentToken in tokens\n \t\t\tif currentToken == \"show\"\n \t\t\t\tnext\n\t\t\tend\n\n\t\t\t#//show in all queries, not really used\n\t\t\tif currentToken == \"title\"\n \t\t\t\tif mainQuery == ' '\n \t\t\t\t\tmainQuery = 'T'\n\t\t\t\telsif forUsed and subQuery == ' '\n \t\t\t\t\t\tsubQuery = 'T'\n\t\t\t\tend\n\t\t\telsif currentToken == \"actor\"\n \t\t\t\tif mainQuery == ' '\n \t\t\t\t\tmainQuery = 'A'\n\t\t\t\telsif forUsed and subQuery == ' '\n \t\t\t\t\t\tsubQuery = 'A'\n\t\t\t\tend\n\n\t\t\telsif currentToken == 'for'\n \t\t\t\tforUsed = true\n\n\t\t\telsif searchString == nil and subQuery != ' ' and currentToken.start_with?(\"<\")\n \t\t\t\tsearchString = currentToken\n\t\t\t\tsearchStarted = true\n\t\t\t\tif currentToken.end_with?(\">\")\n \t\t\t\t\tsearchEnded = true \n\t\t\t\tend\n\n\t\t\telsif searchStarted and not searchEnded\n \t\t\t\tsearchString += \" \" + currentToken\n\t\t\t\tif currentToken.end_with?(\">\")\n\t\t\t\t\tsearchEnded = true \n\t\t\t\tend\n\t\t\tend\n\t\tend\n\n\n\t\t#//remove <>\n\t\tif searchString != nil\n\t\t\tsearchString = searchString.slice(1..-2)\n\t\t\t#searchString = searchString.substring(1,(searchString.length() - 1)) \n\t\tend\n\n\t\texpr = nil\n\t\tif mainQuery == 'A'\n \t\t\tif subQuery == 'T'\n \t\t\t\texpr = DvdActorTitleExpression.new(searchString) \n\t\t\telse\n \t\t\t\texpr = DvdActorExpression.new()\n\t\t\tend\n\t\telsif mainQuery == 'T'\n \t\t\tif subQuery == 'A'\n \t\t\t\texpr = DvdTitleActorExpression.new(searchString) \n\t\t\telse\n \t\t\t\texpr = DvdTitleExpression.new()\n\t\t\tend\n\t\telse\n \t\t\treturn str(result)\n\t\tend\n\n\t\tresult.push(expr.interpret(@ctx))\n\t\tresult.join('')\n\tend",
"def log_query(sql)\n pad = ' '\n puts Paint[pad + sql, :cyan, :bold]\n # @loggers[0]&.debug(' ' + sql)\n end",
"def query_hits(options = {})\n options = { sort: false, transpose: false, subject_regex: nil }.merge(options)\n \n # Get the hits\n hits_hash = {}\n each_query(sort: options[:sort], subject_regex: options[:subject_regex]) do |query, hits| \n # hits.map!(&:transpose!) if options[:transpose]\n hits_hash[query] = hits\n end\n\n # TODO: Transpose. This needs to actually interchange subject and query on the hash level\n \n hits_hash\n end",
"def to_s\n flags = [ ]\n flags << 'qr' if (response?)\n flags << 'aa' if (authorative?)\n flags << 'tc' if (truncated?)\n flags << 'rd' if (recursion_desired?)\n flags << 'ra' if (recursion_available?)\n \n \";; HEADER:\\n;; opcode: #{opcode.to_s.upcase} status: #{response_code.to_s.upcase} id: #{id} \\n\" +\n \";; flags: #{flags.join(' ')}; QUERY: #{questions.length}, ANSWER: #{answers.length}, AUTHORITY: #{nameservers.length}, ADDITIONAL: #{additional_records.length}\" +\n \"\\n\" +\n \";; QUESTION SECTION:\\n\" +\n questions.collect(&:to_s).join(\"\\n\") + \"\\n\" +\n \";; ANSWER SECTION:\\n\" +\n answers.collect(&:to_s).join(\"\\n\") + \"\\n\" +\n \";; NAMESERVER SECTION:\\n\" +\n nameservers.collect(&:to_s).join(\"\\n\") + \"\\n\" +\n \";; ADDITIONAL SECTION:\\n\" +\n additional_records.collect(&:to_s).join(\"\\n\") + \"\\n\"\n end",
"def to_s\n \"#<ResultSet::#{@collection} :total_results => #{@results.size}>\"\n end",
"def postgres_print_reply(resp=nil,sql=nil)\n ip = datastore['RHOST']\n port = datastore['RPORT']\n verbose = datastore['VERBOSE']\n return :error unless resp.kind_of? Connection::Result\n if resp.rows and resp.fields\n print_status \"#{ip}:#{port} Rows Returned: #{resp.rows.size}\" if verbose\n if resp.rows.size > 0\n tbl = Rex::Text::Table.new(\n 'Indent' => 4,\n 'Header' => \"Query Text: '#{sql}'\",\n 'Columns' => resp.fields.map {|x| x.name}\n )\n resp.rows.each {|row| tbl << row.map { |x| x.nil? ? \"NIL\" : x } }\n print_line(tbl.to_s)\n end\n end\n return :complete\n end",
"def get_query_results(query)\n hits = execute_query(query).hpath('hits.hits').first\n return [] unless hits && !hits.empty?\n hits.map do |hit|\n hit['_source'].expand.merge(hit.only('_id', '_type', '_index')).kmap do |key|\n key.to_s.gsub('@', '_').to_sym\n end\n end\n end",
"def to_stdout\n\t\t\tresult_string = String.new\n\t\t\thashes = Array.new\n\n\t\t\t@results.sort_by {|k| k[:scanner] }.each do |result|\n\t\t\t\tunless hashes.include? result[:hash].downcase\n\t\t\t\t\tresult_string << \"#{result[:hash]}:\\n\"\n\t\t\t\t\thashes << result[:hash].downcase\n\t\t\t\tend\n\t\t\t\tresult_string << \"#{result[:scanner]}: \".rjust(25) + \"#{result[:result]}\\n\"\n\t\t\tend if @results != nil\n\n\t\t\tresult_string\n\t\tend",
"def db_query_transform__count query\n tmp_table = \"resultset_table\"\n make_tmp_table = db_query_transform__subquery query, tmp_table\n \"SELECT COUNT(*) FROM #{make_tmp_table}\"\n end",
"def to_s\n \"#{id}:#{query}\"\n end",
"def exec_query(sql, name = 'SQL', binds = [])\n log(sql, name, binds) do\n result = without_prepared_statement?(binds) ? exec_no_cache(sql) :\n exec_cache(sql, binds)\n result_array = result_as_array(result)\n if ActiveRecord::VERSION::MAJOR >= 4\n column_types = compute_field_types(result)\n ret = ActiveRecord::Result.new(result.fields, result_array, column_types)\n else\n ret = ActiveRecord::Result.new(result.fields, result_array)\n end\n result.clear\n ret\n end\n end",
"def to_s\n regex = REPLACEMENT\n\n if Gitlab::Database.mysql?\n regex = Regexp.union(regex, MYSQL_REPLACEMENTS)\n end\n\n sql = @sql.gsub(regex, '?').gsub(CONSECUTIVE) do |match|\n \"#{match.count(',') + 1} values\"\n end\n\n # InfluxDB escapes double quotes upon output, so lets get rid of them\n # whenever we can.\n if Gitlab::Database.postgresql?\n sql = sql.delete('\"')\n end\n\n sql.tr(\"\\n\", ' ')\n end",
"def analyzed_best_bet_query(query)\n analyzed_query = @client.indices.analyze(\n index: @index_name,\n body: {\n text: query,\n analyzer: \"best_bet_stemmed_match\",\n },\n )\n\n analyzed_query.fetch(\"tokens\", []).map { |token_info|\n token_info[\"token\"]\n }.join(\" \")\n rescue Elasticsearch::Transport::Transport::Errors::BadRequest\n \"\"\n end",
"def execute\n # build the query string\n # run the query\n # return the results\n end",
"def query_string\n ast.to_query_string\n end",
"def extract_sql_queries\n sql_queries = Lograge::Sql.store[:lograge_sql_queries]\n return {} unless sql_queries\n\n Lograge::Sql.store[:lograge_sql_queries] = nil\n {\n sql_queries: Lograge::Sql.formatter.call(sql_queries),\n sql_queries_count: sql_queries.length\n }\n end",
"def query_string\n _f = @params.fields.include?(:full_text) ? [:full_text] : fields\n # byebug\n a = query.gsub('/', '').scan( /\"[^\"]+\"|[^ ]+/ ).map do |word|\n if word[0] === '\"'\n m = word.match( /^\"(.*)\"$/ );\n word = m ? m[1] : word;\n end\n Unicode.downcase(word.gsub('\"', ''))\n end\n _q = '(' + a.join('* AND ') + '*)'\n # _q = '/(?=.*?'+a.join( ')(?=.*?' )+').*/';\n #byebug\n index.filter{ ~q(query_string: {fields: _f, query: \"#{_q}\", default_operator: 'or'}) } if _q.present? && _f.present?\n\n #index.query(multi_match: {query: \"#{_q}*\", fields: _f}) if _q.present? && _f.present\n end",
"def print_query(sql)\n IRB::Pager.pager {\n query(sql) do |l|\n puts l\n end\n }\n end",
"def display_analyze_output(json, options={})\n return unless json['tokens']\n\n output = [] << ''\n\n max_length = json['tokens'].map { |d| d['token'].to_s.size }.max\n\n output << Helpers.table(json['tokens'].map do |t|\n [\n t['position'],\n t['token'].ljust(max_length+5).ansi(:bold),\n \"#{t['start_offset']}–#{t['end_offset']}\",\n t['type']\n ]\n end).to_s\n output.join(\"\\n\")\n end",
"def blogs_weblog_explain; \"List your weblogs.\"; end",
"def benchmark_format_mysql_queries(n, conn, sql, params_arr, format_regex, format_param_index, with = true)\n db_type = \"PSQL\"\n db_type = \"MySQL\" if conn.instance_of? Mysql2::Client\n puts sql[0]\n puts format_regex\n puts \"****#{db_type}:****#{sql[1]}**format check**on #{n}******\"\n format_parameters = params_arr.map { |row| row[format_param_index] }\n puts params_arr.select { |x| x[format_param_index] =~ format_regex }.length\n sql_queries = []\n for i in 0...n\n sql_queries << generate_query(sql[0], params_arr[i])\n end\n plan_before = conn.query(\"explain #{sql_queries[0]}\")\n if with\n time = Benchmark.bm do |x|\n x.report { for i in 0...n; conn.query(sql_queries[i]); end }\n x.report { for i in 0...n; execute_mysql_with_format(conn, sql_queries[i], format_regex, format_parameters[i]); end }\n end\n else\n time = Benchmark.bm do |x|\n x.report { for i in 0...n; conn.query(sql_queries[i]); end }\n x.report { for i in 0...n; execute_mysql_without_format(conn, sql_queries[i], format_regex, format_parameters[i]); end }\n end\n end\n return time, [plan_before, nil]\nend",
"def explain_specification\n { selector: { explain: find_command }, db_name: database.name, read: read }\n end",
"def queries_summary(project_id)\n get \"projects/#{project_id}/queries/summary\"\n end",
"def exec_query(sql, name = nil, binds = [])\n result = without_prepared_statement?(binds) ? exec_no_cache(sql, name, binds) :\n exec_cache(sql, name, binds)\n result_array = result_as_array(result)\n if ArVer::GTEQ_4\n column_types = compute_field_types(result)\n ret = ActiveRecord::Result.new(result.fields, result_array, column_types)\n else\n ret = ActiveRecord::Result.new(result.fields, result_array)\n end\n result.clear\n ret\n end",
"def sql! sql=nil\n require 'niceql'\n puts Niceql::Prettifier.prettify_sql sql || $last_sql_command\n end",
"def sections\n reply = @index.byte_num.keys\n if @index.has_queries?\n reply.push('queries')\n end\n reply.map(&:to_sym)\n end",
"def execute_hive_query(query)\n $log.debug \"executing hive query: #{query}\"\n\n # temporary hive query file\n hive_sql_file = \"/tmp/hive_listing_quality_calculator.sql\"\n\n # write the query to the temp file\n File.open(hive_sql_file, 'w') { |f|\n f.puts query\n }\n\n # execute the query thru \"hive -f\" and capture the return status\n return system(\"/home/t/hive_current/bin/hive -f #{hive_sql_file}\")\nend",
"def log_indexes(indexes)\n if indexes.any?\n indexes.map {|i| \"#{i[:table]} (#{i[:columns].join(\", \")})\"}.join(\", \")\n else\n \"None\"\n end\n end",
"def to_s\n PgQuery::Deparse.from ast\n end",
"def dumpDataTables\n print \" ------ users\" + Array.new(34).join('-') + \" \" + Array.new(21).join('-')\n $conn.exec(\"SELECT * FROM users order by id\") do |result|\n result.each do |row|\n print \"\\n %6d %-33s \" % row.values_at('id', 'email')\n end\n end\n print \"\\n ------ contents\" + Array.new(50).join('-')\n $conn.exec(\"SELECT * FROM contents order by id\") do |result|\n result.each do |row|\n print \"\\n %6d %s\" % row.values_at('id', 'text')\n end\n end\n print \"\\n ------ user_responses SHARE\" + Array.new(50).join('-')\n $conn.exec(\"select user_id, string_agg(concat(content_id), ',') from user_responses where response = TRUE group by user_id order by user_id\") do |result|\n result.each do |row|\n print \"\\n %6d %s\" % row.values_at('user_id', 'string_agg')\n end\n end\n print \"\\n ------ user_responses KILL\" + Array.new(50).join('-')\n $conn.exec(\"select user_id, string_agg(concat(content_id), ',') from user_responses where response = FALSE group by user_id order by user_id\") do |result|\n result.each do |row|\n print \"\\n %6d %s\" % row.values_at('user_id', 'string_agg')\n end\n end\n print \"\\n ------ user_responses IGNORE\" + Array.new(50).join('-')\n $conn.exec(\"select user_id, string_agg(concat(content_id), ',') from user_responses where response ISNULL group by user_id order by user_id\") do |result|\n result.each do |row|\n print \"\\n %6d %s\" % row.values_at('user_id', 'string_agg')\n end\n end\nend",
"def format_result(obj)\n formatted_str = \"\\n\\nSearch Results: \\n\\n\"\n obj.each do |key, value|\n unless key == \"_id\"\n\t key_str = key.capitalize\n\t if value.kind_of?(Array)\n\t\t \tformatted_str << key_str << \": \"\n\t\t \tvalue.each do |var|\n\t\t \t\tformatted_str << var.to_s << ((var == value.last) ? \"\" : \",\")\n\t\t \tend\n\t\t \tformatted_str << \"\\n\"\n\t elsif value.is_a?(Hash)\n\t\t formatted_str << key_str << \": \" << \"\\n\"\n\t\t value.each do |var_key, var_value|\n\t\t formatted_str << \"\\t\" << var_key << \":\" << var_value << \"\\n\"\n\t\t end\n\t else\n\t\t formatted_str << key_str << \": \" << value.to_s << \"\\n\"\n\t end\n\t end\n end\n formatted_str\n end",
"def show_and_create_indexes(new_indexes, queries, tables)\n # print summary\n if new_indexes.any?\n new_indexes.each do |index|\n log \"Index found: #{index[:table]} (#{index[:columns].join(\", \")})\"\n end\n else\n log \"No new indexes found\"\n end\n\n # debug info\n if @log_level.start_with?(\"debug\")\n index_queries = new_indexes.flat_map {|i| i[:queries].sort_by(&:fingerprint)}\n if @log_level == \"debug2\"\n fingerprints = Set.new(index_queries.map(&:fingerprint))\n index_queries.concat(queries.reject {|q| fingerprints.include?(q.fingerprint)}.sort_by(&:fingerprint))\n end\n index_queries.each do |query|\n log \"-\" * 80\n log \"Query #{query.fingerprint}\"\n log \"Total time: #{(query.total_time / 60000.0).round(1)} min, avg time: #{(query.total_time / query.calls.to_f).round} ms, calls: #{query.calls}\" if query.total_time\n\n if query.fingerprint == \"unknown\"\n log \"Could not parse query\"\n elsif query.tables.empty?\n log \"No tables\"\n elsif query.missing_tables\n log \"Tables not present in current database\"\n elsif !query.candidate_tables\n log \"No candidate tables for indexes\"\n elsif query.explainable? && !query.high_cost?\n log \"Low initial cost: #{query.initial_cost}\"\n elsif query.explainable?\n query_indexes = query.indexes || []\n log \"Start: #{query.costs[0]}\"\n log \"Pass1: #{query.costs[1]} : #{log_indexes(query.pass1_indexes || [])}\"\n log \"Pass2: #{query.costs[2]} : #{log_indexes(query.pass2_indexes || [])}\"\n if query.costs[3]\n log \"Pass3: #{query.costs[3]} : #{log_indexes(query.pass3_indexes || [])}\"\n end\n log \"Final: #{query.new_cost} : #{log_indexes(query.suggest_index ? query_indexes : [])}\"\n if query_indexes.size == 1 && !query.suggest_index\n log \"Need #{@min_cost_savings_pct}% cost savings to suggest index\"\n end\n else\n log \"Could not run explain\"\n end\n log\n log query.statement\n log\n end\n end\n\n # create\n if @create && new_indexes.any?\n # 1. create lock\n # 2. refresh existing index list\n # 3. create indexes that still don't exist\n # 4. release lock\n with_advisory_lock do\n new_indexes.each do |index|\n unless index_exists?(index)\n statement = \"CREATE INDEX CONCURRENTLY ON #{quote_ident(index[:table])} (#{index[:columns].map {|c| quote_ident(c)}.join(\", \")})\"\n log \"Creating index: #{statement}\"\n started_at = Time.now\n begin\n execute(statement)\n log \"Index created: #{((Time.now - started_at) * 1000).to_i} ms\"\n rescue PG::LockNotAvailable\n log \"Could not acquire lock: #{index[:table]}\"\n end\n end\n end\n end\n end\n\n new_indexes\n end",
"def to_sql_query_info(offset)\n \"SELECT * FROM #{@model.quoted_table_name} WHERE \" +\n \" #{quote_column(@model.primary_key)} = (($id - #{offset}) / #{ThinkingSphinx.indexed_models.size})\"\n end",
"def reformat_query_results(results)\n return embed_table_in_results results\n end",
"def explain(arel, binds = [])\n end",
"def assert_node_has_explain_plan(node, msg = nil)\n msg = \"Expected #{node.inspect} to have an explain plan\"\n\n assert_match SQLITE_EXPLAIN_PLAN_COLUMNS_RE, node.params[:explain_plan].join, msg\n end",
"def get_sql_shorttext_by_sql_id(sql_id)\n # Connect zur DB nachhollen wenn noch auf NullAdapter steht, da Zugriff auf gecachte Werte ohne DB-Connect möglich ist\n open_oracle_connection if ActiveRecord::Base.connection.class != ActiveRecord::ConnectionAdapters::OracleEnhancedAdapter\n\n # erster Versuch direkt aus SGA zu lesen\n sqls = sql_select_all [\"\\\n SELECT /*+ Panorama-Tool Ramm */ SUBSTR(SQL_FullText, 1, 150) SQL_Text\n FROM v$SQLArea\n WHERE SQL_ID = ?\",\n sql_id]\n\n if sqls.size == 0 # Wenn nicht gefunden, dann in AWR-History suchen\n sqls = sql_select_all [\"\\\n SELECT /*+ Panorama-Tool Ramm */ SUBSTR(SQL_Text, 1, 150) SQL_Text\n FROM DBA_Hist_SQLText\n WHERE DBID = ?\n AND SQL_ID = ?\",\n session[:database][:dbid], sql_id]\n end\n\n if sqls.size == 0\n \"< Kein SQL-Text zu ermitteln füer SQL-ID='#{sql_id}' >\"\n else\n sqls[0].sql_text\n end\n end",
"def summary\n \"Results: #{results[:critical].size} critical, \" \\\n \"#{results[:warning].size} warning, \" \\\n \"#{results[:unknown].size} unknown, #{results[:ok].size} ok\"\n end",
"def ask query\n print query.concat \"\\s\"\n end",
"def get_sql_result_str(mysql_res)\n returning String.new do |str|\n while row = mysql_res.fetch_row\n str << row.compact.reject {|s| s.blank? || (s == \"0\")}.join(' ') rescue ''\n end\n end\nend",
"def run_query(query_file , exp , o = {})\n index_path = o[:index_path] || @index_path\n cmd = fwrite('cmd_galago_run_query.log' , \"#{$galago_path}/bin/galago batch-search --index=#{index_path} #{o[:param_query]} \\\n #{to_path(query_file)} |grep -e ^[0-9] > #{to_path(exp+'.res')}\" , :mode=>'a')\n `#{cmd}`\n end",
"def db_query__show_tables__count\n db_query_transform__count db_query__show_tables\n end",
"def query(db, query, type = \"array\")\n begin\n result = db.query(query)\n return (type == \"hash\" ? result.to_hash : result.to_array)\n rescue Mysql::Error => e\n $stderr.puts \"Error code: #{e.errno}\"\n $stderr.puts \"Error message: #{e.error}\"\n $stderr.puts \"Error SQLSTATE: #{e.sqlstate}\" if e.respond_to?(\"sqlstate\")\n exit\n end\n end",
"def search_explain_get(type, id, opts = {})\n data, _status_code, _headers = search_explain_get_with_http_info(type, id, opts)\n return data\n end",
"def info\n if @filtered_rset.empty?\n @columns = {}\n return\n end\n\n query = @filtered_rset.to_sql\n\n @columns = @filtered_rset.first.attributes.each_with_object({}) do |c, m|\n m[c[0].to_sym] = nil\n m\n end\n\n tables = ActiveRecord::Base.connection.tables.select do |t|\n Regexp.new('\\b' + t + '\\b') =~ query\n end\n\n tables.each do |t|\n ActiveRecord::Base.connection.columns(t).each do |c|\n @columns[c.name.to_sym] = c.type if @columns.key?(c.name.to_sym)\n end\n end\n\n unless (extra = @columns.values.reject(&:present?)).empty?\n msg = extra.join(', ') + 'have missing types, perhaps these are aliased?'\n raise KilterError, msg\n end\n end",
"def run_query()\n return nil unless @query\n \n gres = @query.execute()\n if @filterClass \n fres = @filterClass.filter(gres)\n res = fres.kind_of?(Array) ? fres.join(\"\\n\") : fres.to_s\n elsif @filterBlock \n fres = @filterBlock.call(gres)\n res = fres.kind_of?(Array) ? fres.join(\"\\n\") : fres.to_s\n else\n res = fres.result_s\n end\n res\n end",
"def sql\n @sql ||= begin\n bind_params = []\n i = 1\n (selector_keys.length + setter_keys.length).times do\n bind_params << \"$#{i}\"\n i += 1\n end\n hstore_delete_handlers.length.times do\n bind_params << \"$#{i}::text[]\"\n i += 1\n end\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def sql\n @sql ||= begin\n bind_params = []\n i = 1\n (selector_keys.length + setter_keys.length).times do\n bind_params << \"$#{i}\"\n i += 1\n end\n hstore_delete_handlers.length.times do\n bind_params << \"$#{i}::text[]\"\n i += 1\n end\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def inspect\n to_table.render :ascii\n end",
"def OLDview_data db, sql, options\n outputfile = options[:output_to]\n formatting = options[:formatting]\n headers = options[:headers]\n #str = db.get_data sql\n rs = db.execute_query sql\n str = rs.content\n columns = rs.columns\n #puts \"SQL: #{sql}.\\nstr: #{str.size}\"\n data = []\n if headers\n data << columns.join(\"\\t\")\n end\n str.each {|line| data << line.join(\"\\t\"); }\n #puts \"Rows: #{data.size}\"\n require 'tempfile'\n tmpfile = Tempfile.new('SQL.XXXXXX')\n filename = tmpfile.path\n filename = Shellwords.escape(filename)\n #puts \"Writing to #{filename}\"\n tmpfile.write(data.join(\"\\n\"))\n tmpfile.close # need to flush, otherwise write is buffered\n headerstr=nil\n if formatting\n headerstr = \"-H\" unless headers\n # sometimes this can be slow, and it can fault on UTF-8 chars\n system(\"cat #{filename} | term-table.rb #{headerstr} | sponge #{filename}\")\n end\n if outputfile\n #puts \"comes here\"\n system(\"cp #{filename} #{outputfile}\")\n filename = outputfile\n end\n system \"wc -l #{filename}\" if $opt_debug\n \n #system \"$EDITOR #{filename}\"\n system \"vim -c ':set nowrap' #{filename}\"\n tmpfile.close\n tmpfile.unlink\nend",
"def inspect\n \"#<#{self.class}: #{sql.inspect}>\"\n end",
"def raw_query\n @raw_query\n end",
"def analyze\n format_results\n end",
"def execute(query, name = 'ANSR-NOSQL')\n end",
"def display_search_results(objects)\n objects.limit(results_limit).reduce(String.new) do |string, object|\n string << (tag.tr display_search_results_row(object))\n end\n end",
"def inspect\n params = {\n :error => @error,\n :warning => @warning,\n :connect_error => @connerror,\n :servers => @servers,\n :connect_timeout => { :timeout => @timeout, :retries => @retries },\n :request_timeout => { :timeout => @reqtimeout, :retries => @reqretries },\n :retries => { :count => @retrycount, :delay => @retrydelay },\n :limits => { :offset => @offset, :limit => @limit, :max => @maxmatches, :cutoff => @cutoff },\n :max_query_time => @maxquerytime,\n :overrides => @overrides,\n :select => @select,\n :match_mode => @mode,\n :ranking_mode => @ranker,\n :sort_mode => { :mode => @sort, :sortby => @sortby },\n :weights => @weights,\n :field_weights => @fieldweights,\n :index_weights => @indexweights,\n :id_range => { :min => @min_id, :max => @max_id },\n :filters => @filters,\n :geo_anchor => @anchor,\n :group_by => { :attribute => @groupby, :func => @groupfunc, :sort => @groupsort },\n :group_distinct => @groupdistinct\n }\n\n \"<Sphinx::Client: %d servers, params: %s>\" %\n [@servers.length, params.inspect]\n end",
"def query_string\n \"#{base_url}dmQuery/#{collection_alias}/#{searchstrings}/#{fields}/\" +\n \"#{sortby}/#{max_recs}/#{start}/#{suppress_pages}/#{docptr}/#{suggest}/\" +\n \"#{facets}/#{showunpub}/#{denormalize_facets}/#{response_format}\"\n end",
"def query_string\n \"#{base_url}dmQuery/#{collection_alias}/#{searchstrings}/#{fields}/\" +\n \"#{sortby}/#{max_recs}/#{start}/#{suppress_pages}/#{docptr}/#{suggest}/\" +\n \"#{facets}/#{showunpub}/#{denormalize_facets}/#{response_format}\"\n end",
"def format_query(query)\n query.map{ |k, v| \"#{k}: #{color(v, BOLD, true)}\" if v.present? }.compact.join(', ')\n end",
"def explain msg\n if false\n puts msg\n end\n end",
"def compile_query\r\n str = []\r\n\r\n str << \"controller|#{q[:controller]}|\" if q[:controller].present?\r\n str << \"action|#{q[:action]}|\" if q[:action].present?\r\n str << \"format|#{q[:format]}|\" if q[:format].present?\r\n str << \"status|#{q[:status]}|\" if q[:status].present?\r\n\r\n str << \"datetime|#{q[:on].strftime('%Y%m%d')}*|\" if q[:on].present?\r\n\r\n str << \"method|#{q[:method]}|\" if q[:method].present?\r\n str << \"path|#{q[:path]}|\" if q[:path].present?\r\n\r\n str.join(\"*\")\r\n end",
"def sql_command_string(query, database, ctrl, grep_for = nil)\n raw_query = query.is_a?(String) ? query : query.join(\";\\n\")\n Chef::Log.debug(\"Control Hash: [#{ctrl.to_json}]\\n\")\n cmd = \"/usr/bin/mysql -B -e \\\"#{raw_query}\\\"\"\n cmd << \" --user=#{ctrl[:user]}\" if ctrl && ctrl.key?(:user) && !ctrl[:user].nil?\n cmd << \" -p#{ctrl[:password]}\" if ctrl && ctrl.key?(:password) && !ctrl[:password].nil?\n cmd << \" -h #{ctrl[:host]}\" if ctrl && ctrl.key?(:host) && !ctrl[:host].nil? && ctrl[:host] != 'localhost'\n cmd << \" -P #{ctrl[:port]}\" if ctrl && ctrl.key?(:port) && !ctrl[:port].nil? && ctrl[:host] != 'localhost'\n cmd << \" -S #{default_socket}\" if ctrl && ctrl.key?(:host) && !ctrl[:host].nil? && ctrl[:host] == 'localhost'\n cmd << \" #{database}\" unless database.nil?\n cmd << \" | grep #{grep_for}\" if grep_for\n Chef::Log.debug(\"Executing this command: [#{cmd}]\\n\")\n cmd\n end",
"def raw_query( view_name, opts={} )\n opts = Gnash.new( opts ) unless opts.empty?\n doc_class = opts[:document_class]\n\n params = []\n params << 'include_docs=true' unless (opts[:select] && opts[:select] != 'all' || opts[:reduce])\n\n\n # TODO: this is according to couchdb really inefficent with large sets of data.\n # A better way would involve, using start and end keys with limit. But this\n # is a really hard one to figure with jumping around to different pages\n params << \"skip=#{opts[:offset]}\" if opts[:offset]\n params << \"limit=#{opts[:limit]}\" if opts[:limit]\n params << \"key=#{opts[:equals]}\" if opts[:equals]\n if opts[:order].to_s == 'desc' || opts[:order].to_s == 'descending'\n desc = true\n params << \"descending=true\"\n end\n if opts[:range] && opts[:range].size == 2\n params << \"startkey=#{opts[:range][desc == true ? 1 : 0 ]}\"\n params << \"endkey=#{opts[:range][desc == true ? 0 : 1]}\"\n end\n\n query_uri = \"#{uri}/_view/#{CGI.escape(view_name.to_s)}?\"\n query_uri << params.join('&')\n\n result = CouchSpring.get( query_uri )\n ResultSet.new( result, doc_class )\n end",
"def assert_segment_has_explain_plan( segment, msg=nil )\n msg = build_message( msg, \"Expected ? to have an explain plan\", segment )\n assert_block( msg ) { segment.params[:explain_plan].join =~ SQLITE_EXPLAIN_PLAN_COLUMNS_RE }\n end",
"def query(sql, name = nil) #:nodoc:\n log(sql, name) do\n @connection.execute(sql).rows\n end\n end"
] |
[
"0.62896997",
"0.61405456",
"0.6137911",
"0.61045426",
"0.5910226",
"0.5868275",
"0.58225495",
"0.5781028",
"0.5762791",
"0.5762791",
"0.5680385",
"0.55741733",
"0.55477244",
"0.55167836",
"0.54632753",
"0.54311174",
"0.5333425",
"0.53226453",
"0.53095716",
"0.52411693",
"0.52021",
"0.51958406",
"0.5165624",
"0.5143204",
"0.50678587",
"0.5051373",
"0.50479656",
"0.50425607",
"0.5019555",
"0.49690506",
"0.4946411",
"0.49348465",
"0.49229932",
"0.49135724",
"0.48975343",
"0.48958683",
"0.4862149",
"0.48451507",
"0.4842071",
"0.4833006",
"0.48324978",
"0.4828573",
"0.48056886",
"0.48040342",
"0.4790591",
"0.47687855",
"0.47683057",
"0.47594327",
"0.4755665",
"0.474853",
"0.47450855",
"0.47079402",
"0.4704965",
"0.46934307",
"0.46930555",
"0.4685671",
"0.46807653",
"0.46725437",
"0.46691275",
"0.4655683",
"0.46475112",
"0.4645741",
"0.4635365",
"0.46339104",
"0.46297964",
"0.46196634",
"0.4611234",
"0.4606815",
"0.45861208",
"0.45770466",
"0.45731077",
"0.45698437",
"0.45688078",
"0.45681435",
"0.4565962",
"0.4562134",
"0.45617554",
"0.45605206",
"0.45599923",
"0.45582184",
"0.45465863",
"0.45457515",
"0.45457515",
"0.4543603",
"0.45418227",
"0.45410517",
"0.45358586",
"0.45327586",
"0.45277387",
"0.4524029",
"0.45148107",
"0.45143098",
"0.45143098",
"0.45138246",
"0.45119196",
"0.4506255",
"0.44999868",
"0.4492547",
"0.4492308",
"0.4479375"
] |
0.68310505
|
0
|
Return a cloned dataset which will use FOR SHARE to lock returned rows.
|
def for_share
lock_style(:share)
end
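
A minimal usage sketch, assuming DB is a Sequel PostgreSQL Database with a hypothetical accounts table; FOR SHARE only takes effect inside a transaction:

DB.transaction do
  # SELECT * FROM accounts WHERE (id = 1) FOR SHARE
  row = DB[:accounts].where(:id => 1).for_share.first
end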
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def fresh_dataset\n @dataset.clone\n end",
"def sync\n cached_dataset(:_sync) do\n clone(:async=>false)\n end\n end",
"def clone\n\t\t\tCData.new self\n\t\tend",
"def clone_with(data)\n clone = self.clone\n clone.instance_variable_set(:@data, data)\n clone\n end",
"def copy\n DataFrame.new(@rows.copy, rownames: @rownames.copy, colnames: @colnames.copy)\n end",
"def clone\n copy(false)\n end",
"def clone\n other = dup\n other.freeze if self.frozen?\n other\n end",
"def clone\n o = dup\n o.freeze if frozen?\n o\n end",
"def clone\n return self.class.new(RAtlas::clone(@storage))\n end",
"def skip_locked\n cached_dataset(:_skip_locked_ds) do\n raise(Error, 'This dataset does not support skipping locked rows') unless supports_skip_locked?\n clone(:skip_locked=>true)\n end\n end",
"def clone\n other = dup\n other.freeze if frozen?\n other\n end",
"def _associated_dataset\n associated_class.dataset.clone\n end",
"def clone\n self.copy\n end",
"def clone\n self.copy\n end",
"def nolock\n clone(:table_options => \"(NOLOCK)\")\n end",
"def nolock\n clone(:with => \"(NOLOCK)\")\n end",
"def clone\n dup\n end",
"def clone(opts = nil || (return self))\n # return self used above because clone is called by almost all\n # other query methods, and it is the fastest approach\n c = super(:freeze=>false)\n c.opts.merge!(opts)\n unless opts.each_key{|o| break if COLUMN_CHANGE_OPTS.include?(o)}\n c.clear_columns_cache\n end\n c.freeze\n end",
"def nowait\n cached_dataset(:_nowait_ds) do\n raise(Error, 'This dataset does not support raises errors instead of waiting for locked rows') unless supports_nowait?\n clone(:nowait=>true)\n end\n end",
"def clone\n @clone = true\n end",
"def clone() end",
"def clone\n memcached = super\n struct = Lib.memcached_clone(nil, @struct)\n memcached.instance_variable_set('@struct', struct)\n memcached\n end",
"def clone\n self\n end",
"def clone\n return (Matrix.new(@x,@y) do |i,j| self[i][j].clone end)\n end",
"def clone\n @clone ||= super\n end",
"def copy\n ret = self.class.new\n channel_data.each do |datum|\n ret.channel_data << datum.dup\n end\n ret\n end",
"def clone; end",
"def clone; end",
"def clone; end",
"def async\n cached_dataset(:_async) do\n clone(:async=>true)\n end\n end",
"def compound_clone(type, dataset, opts)\n if dataset.is_a?(Dataset) && dataset.opts[:with] && !supports_cte_in_compounds?\n s, ds = hoist_cte(dataset)\n return s.compound_clone(type, ds, opts)\n end\n ds = compound_from_self.clone(:compounds=>(Array(@opts[:compounds]).map(&:dup) + [[type, dataset.compound_from_self, opts[:all]].freeze]).freeze)\n opts[:from_self] == false ? ds : ds.from_self(opts)\n end",
"def clone\n copy = super\n transaction do\n copy.save!\n\n %w[\n registration_information support information_source advance_directive\n ].each do |assoc|\n copy.send(\"#{assoc}\\=\", send(assoc).clone) if send(assoc)\n end\n\n %w[\n patient_identifiers languages providers medications allergies conditions\n all_results immunizations encounters procedures medical_equipments social_history insurance_providers\n ].each do |assoc|\n send(assoc).each do |item|\n copy.send(assoc) << item.clone\n end\n end\n\n end\n copy\n end",
"def _clone(*_)\n new_inst = _klass_new(_fn_name, *_fn_args)\n new_inst._data.replace(__hashish[\n @table.map { |_key, _value|\n if _key.is_a?(::AttributeStruct)\n _key = _key._clone\n else\n _key = _do_dup(_key)\n end\n if _value.is_a?(::AttributeStruct)\n _value = _value._clone\n else\n _value = _do_dup(_value)\n end\n [_key, _value]\n }\n ])\n new_inst\n end",
"def clone\n newDelegate = @delegate.clone\n return SparseMatrix.new(newDelegate)\n end",
"def clone\n Matrix.rows(@rows)\n end",
"def copy\n\t\t\treturn self.dup\n\t\tend",
"def clone\n self\n end",
"def clone(opts = OPTS)\n c = super(:freeze=>false)\n c.opts.merge!(opts)\n unless opts.each_key{|o| break if COLUMN_CHANGE_OPTS.include?(o)}\n c.clear_columns_cache\n end\n c.freeze if frozen?\n c\n end",
"def clone\n end",
"def dup; return self.class.new(RAtlas::dup(@storage)); end",
"def clone_temp\n clone.tap do |c|\n def c.save(*a)\n true\n end\n end\n end",
"def clone\n return Marshal.load(Marshal.dump(self))\n end",
"def frozen_copy\n if frozen?\n self\n else\n dup.freeze\n end\n end",
"def clone\n end",
"def clone\n end",
"def clone\n end",
"def sync\n # TODO stop forcing a sync every time.\n @cache.sync\n\n if cloned?\n fetch\n else\n clone\n end\n reset\n end",
"def clone\n raise NotImplementedError\n end",
"def clone(*) end",
"def clone(*) end",
"def copy\n dup\n end",
"def clone_pool\n super\n end",
"def clone\n clause_cluster = ClauseCluster.new\n clause_cluster.lhs = self.lhs.clone\n clause_cluster.rhs = self.rhs.clone\n clause_cluster.operator = self.operator\n return clause_cluster\n end",
"def for_update\n cached_dataset(:_for_update_ds){lock_style(:update)}\n end",
"def clone()\n clone = self.class.new\n attributes.each do |k, v|\n next if k == :id || k == :uuid\n clone.attribute_set(k, DataMapper::Ext.try_dup(v))\n end\n oproperties.each do |p|\n clone.oproperty_set(p.name, DataMapper::Ext.try_dup(p.value))\n end\n\n clone.uuid = UUIDTools::UUID.random_create\n return clone\n end",
"def dup\n clone\n end",
"def dup\n Data.new( sheet, @data.map(&:dup) )\n end",
"def clone_conn(args = {})\r\n conn = Knj::Db.new(@opts.clone.merge(args))\r\n \r\n if block_given?\r\n begin\r\n yield(conn)\r\n ensure\r\n conn.close\r\n end\r\n \r\n return nil\r\n else\r\n return conn\r\n end\r\n end",
"def clone\n # Do not implement in terms of dup. It breaks rails.\n #\n cls = Rubinius.invoke_primitive :object_class, self\n copy = cls.allocate\n\n Rubinius.invoke_primitive :object_copy_object, copy, self\n Rubinius.invoke_primitive :object_copy_singleton_class, copy, self\n\n Rubinius.privately do\n copy.initialize_copy self\n end\n\n copy.freeze if frozen?\n copy\n end",
"def clone\n self.class.new(setup.map(&:dup))\n end",
"def _clone\n self.class.new(self)\n end",
"def clone\n attr = Hash.new\n\n self.attribute_names.each do |name|\n begin\n attr[name] = read_attribute(name).clone\n rescue TypeError\n attr[name] = read_attribute(name)\n end\n end\n\n cloned_record = self.class.new(attr)\n cloned_record.instance_variable_set \"@new_record\", true\n cloned_record.id = nil\n cloned_record\n end",
"def cloned_conn(args = nil, &block)\r\n clone_conn_args = {\r\n :threadsafe => false\r\n }\r\n \r\n clone_conn_args.merge!(args[:clone_args]) if args and args[:clone_args]\r\n dbconn = self.clone_conn(clone_conn_args)\r\n \r\n begin\r\n yield(dbconn)\r\n ensure\r\n dbconn.close\r\n end\r\n end",
"def copy\n copy = self.clone\n copy.delete_all\n @collection.each do |obj|\n copy.add_param(obj.key,obj.value,obj.type)\n end\n return copy\n end",
"def clone(source)\n copy(source, true)\n end",
"def dup\n duped_rows = rows.map(&:dup)\n self.class.new(duped_rows)\n end",
"def clone\n Marshal.load(Marshal.dump self)\n end",
"def dup\n copy(false)\n end",
"def unlocked_copy\n copy_ = Structure.new\n @indexes.each{ |ai_| copy_.add(ai_.axis_object, ai_.axis_name) }\n copy_\n end",
"def recache_source_dataset!\n if source_dataset_cache?\n if source_dataset.opts[:limit] == 1\n source_dataset.cache_set([self])\n else\n source_dataset.cache_del\n end\n end\n end",
"def dup\n gridcpy = Grid.new(@num_rows,@num_columns)\n for r in 0...@num_rows\n for c in 0...@num_columns\n cur_cell = self.getCell(r,c)\n gridcpy.getCell(r,c).living = cur_cell.living\n gridcpy.getCell(r,c).neighbors = cur_cell.neighbors \n end # end col loop\n end # end row loop\n return gridcpy\n end",
"def freeze_data!\n @data = @data.to_smash(:freeze).freeze\n end",
"def copy_object_set\n @objects.clone\n end",
"def clone()\n\t\ttemp = LinkedList.new()\n\t\tcurrent = @head\n\t\twhile (current)\n\t\t\ttemp.append(current.get_item())\n\t\t\tcurrent = current.get_next()\n\t\tend\n\t\treturn temp;\n\tend",
"def matrix_clone\n @matrix_clone ||= matrix.map { |row| row.clone }\n end",
"def copyFrozen\n\t\tnewG = Grid.new(@rows.size, @cols.size)\n\t\t@rows.zip(newG.rows).each {|oldRow, newRow|\n\t\t\toldRow.zip(newRow).each { |oldCell, newCell|\n\t\t\t\tnewCell.frozenOf(oldCell)\n\t\t\t}\n\t\t}\n\t\treturn newG\n\tend",
"def clone\n q = super\n q.attrs = self.attrs.clone\n return q\n end",
"def clone\n Marshal.load(Marshal.dump(self))\n end",
"def clone\n Marshal.load(Marshal.dump(self))\n end",
"def clone\n Marshal.load(Marshal.dump(self))\n end",
"def clone\n Marshal.load(Marshal.dump(self))\n end",
"def clone\n Marshal.load(Marshal.dump(self))\n end",
"def copy\n self.class.new.tap do |obj|\n self.each_pair do |key, val|\n obj.set(key => (val.is_a?(Array) or val.is_a?(Hash)) ? val.clone : val)\n end\n end\n end",
"def copy\n self.class.new.tap do |obj|\n self.each_pair do |key, val|\n obj.set(key => (val.is_a?(Array) or val.is_a?(Hash)) ? val.clone : val)\n end\n end\n end",
"def shared_data\n if !block_given?\n if @cached_shared_data != nil\n return @cached_shared_data\n end\n return @cached_shared_data = ::Vertx::Util::Utils.safe_create(@j_del.java_method(:sharedData, []).call(),::Vertx::SharedData)\n end\n raise ArgumentError, \"Invalid arguments when calling shared_data()\"\n end",
"def clone\n self.class.new(@attributes.except(:_id).except(:versions).dup)\n end",
"def with_fetch(fetch)\n clone(:fetch=>fetch)\n end",
"def immutable!\n @data = data.to_smash(:freeze)\n end",
"def clone\n newobj = super\n newobj.instance_eval do\n __getobj__.each_pair do |k, v|\n __getobj__[k] = v.clone\n end\n end\n newobj\n end",
"def copy \n t = @tab.clone ;\n for i in 0..3\n t[i] = t[i].clone\n end\n Table.new(t) ;\n end",
"def copy\n result = HashTablet.new(@table.size)\n each {|element| result.insert(element) }\n return result\n end",
"def dataset\n ds = Sequel::Dataset.new(self)\n end",
"def dataset\n ds = Sequel::Dataset.new(self)\n end",
"def load_static_cache_rows\n if rows = Sequel.synchronize{@static_cache_cache[name]}\n rows.map{|row| call(row)}.freeze\n else\n rows = dataset.all.freeze\n raw_rows = rows.map(&:values)\n Sequel.synchronize{@static_cache_cache[name] = raw_rows}\n rows\n end\n end",
"def clone_without_user\n a = self.clone\n a.user = nil\n a\n end",
"def sync\n if not cloned?\n clone\n else\n update\n end\n end",
"def deep_copy\n return Matrix.new(\n @matrix.map{|row| row.map{|e| e.dup}},\n @rownames.map{|e| e.dup},\n @colnames.map{|e| e.dup}\n )\n end",
"def shallow_clone\n cloned = self.clone\n cloned.remove_relations\n return cloned\n end",
"def cache\n clone.tap { |crit| crit.options.merge!(:cache => true) }\n end",
"def clone\n newobj = Marshal.load(Marshal.dump(self))\n props = newobj.instance_variable_get(:@props)\n props[:id] = Engine.instance.db.getid\n put_object(newobj)\n newobj\n rescue\n log.error \"Clone failed\"\n nil\n end",
"def dup\n repo = RDF::N3::Repository.new {|r| r << @mutable}\n self.class.new(repo) do |reasoner|\n reasoner.instance_variable_set(:@options, @options.dup)\n reasoner.instance_variable_set(:@formula, @formula.dup) if @formula\n end\n end"
] |
[
"0.70513123",
"0.664276",
"0.65596753",
"0.6316082",
"0.623675",
"0.62346923",
"0.6225567",
"0.6163938",
"0.6147911",
"0.6105954",
"0.60953987",
"0.60944986",
"0.60446715",
"0.60446715",
"0.601016",
"0.59808946",
"0.5952227",
"0.59251606",
"0.591666",
"0.5907834",
"0.5882934",
"0.58789766",
"0.5870373",
"0.5869546",
"0.58631575",
"0.5861579",
"0.5830807",
"0.5830807",
"0.5830807",
"0.5818296",
"0.5802909",
"0.5793611",
"0.578622",
"0.57698095",
"0.5759539",
"0.57550955",
"0.5751556",
"0.5742815",
"0.5737654",
"0.57189584",
"0.5706725",
"0.5704785",
"0.5672112",
"0.5662949",
"0.5662949",
"0.5662949",
"0.5656155",
"0.5640354",
"0.5631544",
"0.5631544",
"0.56263125",
"0.5600027",
"0.55823064",
"0.5567006",
"0.55617505",
"0.5560716",
"0.55545646",
"0.5548408",
"0.5533464",
"0.55328894",
"0.5503109",
"0.5493508",
"0.5492428",
"0.5483375",
"0.547813",
"0.5476914",
"0.54747564",
"0.5459434",
"0.5455055",
"0.5445572",
"0.5440931",
"0.5440082",
"0.54387397",
"0.54356873",
"0.5424413",
"0.5414448",
"0.539717",
"0.53755516",
"0.53755516",
"0.53755516",
"0.53755516",
"0.53755516",
"0.5371406",
"0.5371406",
"0.5352226",
"0.53484356",
"0.5347087",
"0.5340557",
"0.53025997",
"0.5295315",
"0.5275172",
"0.52738434",
"0.52738434",
"0.52691036",
"0.5255481",
"0.52421176",
"0.52409863",
"0.5215049",
"0.5213582",
"0.5212091",
"0.52062684"
] |
0.0
|
-1
|
Run a full text search on PostgreSQL. By default, this searches for the inclusion of any of the terms in any of the cols. Options:
:headline :: Append an expression to the selected columns, aliased to headline, that contains an extract of the matched text.
:language :: The language to use for the search (default: 'simple').
:plain :: Whether a plain search should be used (default: false). In this case, terms should be a single string, and the search requires that cols contains all of the words in terms. This ignores search operators in terms.
:phrase :: Similar to :plain, but also adds an ILIKE filter to ensure that returned rows also include the exact phrase used.
:rank :: Set to true to order by the rank, so that closer matches are returned first.
:to_tsquery :: Can be set to :plain or :phrase to specify the function used to convert the terms to a tsquery.
:tsquery :: Specifies that the terms argument is already a valid SQL expression returning a tsquery, and can be used directly in the query.
:tsvector :: Specifies that the cols argument is already a valid SQL expression returning a tsvector, and can be used directly in the query.
|
def full_text_search(cols, terms, opts = OPTS)
lang = Sequel.cast(opts[:language] || 'simple', :regconfig)
unless opts[:tsvector]
phrase_cols = full_text_string_join(cols)
cols = Sequel.function(:to_tsvector, lang, phrase_cols)
end
unless opts[:tsquery]
phrase_terms = terms.is_a?(Array) ? terms.join(' | ') : terms
query_func = case to_tsquery = opts[:to_tsquery]
when :phrase, :plain
:"#{to_tsquery}to_tsquery"
else
(opts[:phrase] || opts[:plain]) ? :plainto_tsquery : :to_tsquery
end
terms = Sequel.function(query_func, lang, phrase_terms)
end
ds = where(Sequel.lit(["", " @@ ", ""], cols, terms))
if opts[:phrase]
raise Error, "can't use :phrase with either :tsvector or :tsquery arguments to full_text_search together" if opts[:tsvector] || opts[:tsquery]
ds = ds.grep(phrase_cols, "%#{escape_like(phrase_terms)}%", :case_insensitive=>true)
end
if opts[:rank]
ds = ds.reverse{ts_rank_cd(cols, terms)}
end
if opts[:headline]
ds = ds.select_append{ts_headline(lang, phrase_cols, terms).as(:headline)}
end
ds
end
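
A minimal usage sketch, assuming a Sequel PostgreSQL connection DB and a hypothetical posts table with :title and :body text columns (all names here are illustrative, not from the source):

  # Match rows containing any of the given terms, best matches first.
  DB[:posts].full_text_search([:title, :body], %w[ruby postgres], rank: true)

  # Plain search for all words in a single string, selecting a headline extract.
  DB[:posts].full_text_search(:body, 'full text search', plain: true, headline: true)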
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def full_text_search(cols, terms, opts = {})\n filter(\"CONTAINS (#{literal(cols)}, #{literal(terms)})\")\n end",
"def fulltext_search(search_string)\n\n # Strip out !, (), &, and |, because these are search vector control characters\n # Remove extra spaces\n search_string = search_string.to_s.delete(\"()!&|\").squish\n\n tsquery = sanitize_sql_array([\n %{replace(plainto_tsquery('%s')::text,' & ',' | ')::tsquery}, search_string\n ])\n\n relation = self.select(%{\n #{self.table_name}.*,\n ts_rank(#{self.table_name}.search_vector, #{tsquery}) AS search_ranking\n })\n\n return relation.where(\"#{tsquery} @@ #{self.table_name}.search_vector\")\n\n end",
"def fulltext(keywords, options = {}, &block)\n return if not keywords or keywords.to_s =~ /^\\s*$/\n\n field_names = Util.Array(options.delete(:fields)).compact\n\n add_fulltext(keywords, field_names) do |query, fields|\n query.minimum_match = options.delete(:minimum_match).to_i if options.key?(:minimum_match)\n query.tie = options.delete(:tie).to_f if options.key?(:tie)\n query.query_phrase_slop = options.delete(:query_phrase_slop).to_i if options.key?(:query_phrase_slop)\n\n if highlight_field_names = options.delete(:highlight)\n if highlight_field_names == true\n query.add_highlight\n else\n highlight_fields = []\n Util.Array(highlight_field_names).each do |field_name|\n highlight_fields.concat(@setup.text_fields(field_name))\n end\n query.add_highlight(highlight_fields)\n end\n end\n\n if block && query\n fulltext_dsl = Fulltext.new(query, @setup)\n Util.instance_eval_or_call(fulltext_dsl, &block)\n else\n fulltext_dsl = nil\n end\n\n if fields.empty? && (!fulltext_dsl || !fulltext_dsl.fields_added?)\n @setup.all_text_fields.each do |field|\n unless query.has_fulltext_field?(field)\n unless fulltext_dsl && fulltext_dsl.exclude_fields.include?(field.name)\n query.add_fulltext_field(field, field.default_boost)\n end\n end\n end\n end\n end\n end",
"def search_search_text\n query\n .where(localized_search_text_in(:title), text: \"%#{search_text}%\")\n .or(query.where(localized_search_text_in(:description), text: \"%#{search_text}%\"))\n end",
"def search(search_term)\n conditions = []\n SEARCHABLE_COLUMNS.each do|column|\n conditions << {column=>/#{search_term}/i}\n end\n any_of(conditions)\n end",
"def by_fulltext_index(query, options = {})\n if self == ExtendedDocument\n client = Riddle::Client.new\n else\n client = Riddle::Client.new(fulltext_opts[:server],\n fulltext_opts[:port])\n\n query = query + \" @couchrest-type #{self}\"\n end\n\n client.match_mode = options[:match_mode] || :extended\n\n if (limit = options[:limit])\n client.limit = limit\n end\n\n if (max_matches = options[:max_matches])\n client.max_matches = matches\n end\n\n if (sort_by = options[:sort_by])\n client.sort_mode = :extended\n client.sort_by = sort_by\n end\n\n result = client.query(query)\n\n if result and result[:status] == 0 and (matches = result[:matches])\n keys = matches.collect { |row| (CouchSphinx::MultiAttribute.decode(\n row[:attributes]['csphinx-class']) +\n '-' + row[:doc].to_s) rescue nil }.compact\n\n return keys if options[:raw]\n return multi_get(keys)\n else\n return []\n end\n end",
"def find_fulltext(query, options={}, with_mdate_desc_order=true)\n fulltext_option = {}\n fulltext_option[:order] = :updated_at if with_mdate_desc_order\n ids = matched_ids(query, fulltext_option)\n find_by_ids_scope(ids, options)\n end",
"def index name = nil, dictionary = 'english', &block\n search_name = ['search', name].compact.join('_')\n\n class_eval do\n named_scope search_name.to_sym, lambda { |term|\n # Let's extract the individual terms to allow for quoted terms.\n term = term.scan(/\"([^\"]+)\"|(\\S+)/).flatten.compact.map {|lex| \"'#{lex}'\"}.join(' & ')\n {\n :select => \"#{table_name}.*, ts_rank_cd((#{full_text_indexes.first.to_s}),\n to_tsquery(#{connection.quote(term)})) as rank\",\n :conditions =>\n [\"#{full_text_indexes.first.to_s} @@ to_tsquery(?)\", term],\n :order => 'rank DESC'\n }\n }\n end\n index_name = [table_name, name, 'fts_idx'].compact.join('_')\n (self.full_text_indexes ||= []) <<\n FullTextIndex.new(index_name, dictionary, self, &block)\n end",
"def text_match(column, search)\n column = \"#{column.to_s}_search_index\".to_sym\n self.where(':column @@ plainto_tsquery(:search)', :column => column, :search => search)\n .order_append(Sequel.function(:ts_rank, column, Sequel.function(:plainto_tsquery, search)).desc)\n end",
"def fts_json_query\n \"to_tsvector('english', json_scrape) @@ to_tsquery('#{default_terms}')\"\n end",
"def full_search(keywords, options = {})\n latitude = nil\n longitude = nil\n bounds = options[:bounds]\n where = nil\n \n if options[:alternate].blank? && options[:specific].blank? && options[:skip_where].blank?\n where_results = Atlas::Geography.where_am_i(keywords, options.dup)\n \n keywords = where_results[:query]\n where = where_results[:where].models\n \n unless where.blank?\n selected = where.first\n bounds = selected.bounds\n end\n \n options.delete :fq\n \n # The user has indicated that our \"where\" guess was incorrect, and selected another.\n elsif options[:alternate].present?\n selected = Atlas::Geography.find(options[:alternate])\n bounds = selected.bounds \n \n # Record when a user selects an alternate where result, i.e. we got it wrong!\n # Atlas::GeographicQueryLog.create :status => 'alternate', :query => query, :session_id => options[:session_id]\n\n # The user has sent in a specific bounding box in which to search. Presumably the user is looking\n # at a map, dragging it around, and re-performing searches.\n elsif options[:specific].present?\n bounds = options[:specific]\n end\n\n keywords = '*' if keywords.blank?\n \n if bounds.present?\n results = solr_server.find(\"#{spatial_query(bounds)}#{keywords}\", \n options.merge(:qt => 'geographic', :results => Atlas::Extensions::Place::PlaceResults))\n \n if results.documents.empty? && where.present?\n where[1..-1].each do |geography|\n selected = geography\n envelope = geography.read_attribute(:bounds).envelope\n center = envelope.center\n top_center = center.y + (center.y - envelope.lower_corner.y)\n radius = center.spherical_distance(Point.from_x_y(center.x, top_center)) / 1000.0\n\n results = solr_server.find(\"{!spatial lat=#{center.lat} long=#{center.lng} radius=#{radius} unit=km calc=arc threadCount=2}#{keywords}\", \n options.merge(:qt => 'geographic', :results => Atlas::Extensions::Place::PlaceResults))\n \n break unless results.documents.empty?\n end\n end\n else\n results = solr_server.find(keywords, options.merge(:qt => 'full', :results => Atlas::Extensions::Place::PlaceResults))\n end\n \n { :places => results, :selected => selected, :where => where, :query => (keywords == '*' && '' || keywords) }\n end",
"def faceted_search(*args)\n search(*args)\n end",
"def text_search(*args)\n options = args.length > 1 && args.last.is_a?(::Hash) ? args.pop : {}\n fields = Array(options.delete(:fields) || @text_indexes.keys)\n finder = options.delete(:finder)\n unless finder\n unless defined?(:text_search_find)\n raise NoFinderMethod, \"Could not detect how to find records; you must def text_search_find()\"\n end\n finder = :text_search_find\n end\n\n #\n # Assemble set names for our intersection.\n # Accept two ways of doing search: either {:field => ['value','value'], :field => 'value'},\n # or 'value','value', :fields => [:field, :field]. The first is an AND, the latter an OR.\n #\n ids = []\n if args.empty?\n raise ArgumentError, \"Must specify search string(s) to #{self.name}.text_search\"\n elsif args.first.is_a?(::Hash)\n sets = []\n args.first.each do |f,v|\n sets += text_search_sets_for(f,v)\n end\n # Execute single intersection (AND)\n ids = redis.sinter(*sets)\n else\n fields.each do |f|\n sets = text_search_sets_for(f,args)\n # Execute intersection per loop (OR)\n ids += redis.sinter(*sets)\n end\n end\n\n # Assemble our options for our finder conditions (destructive for speed)\n recalculate_count = options.has_key?(:conditions)\n\n # Calculate pagination if applicable. Presence of :page indicates we want pagination.\n # Adapted from will_paginate/finder.rb\n if options.has_key?(:page)\n page = options.delete(:page) || 1\n per_page = options.delete(:per_page) || self.per_page\n\n Redis::TextSearch::Collection.create(page, per_page, nil) do |pager|\n # Convert page/per_page to limit/offset\n options.merge!(:offset => pager.offset, :limit => pager.per_page)\n if ids.empty?\n pager.replace([])\n pager.total_entries = 0\n else\n pager.replace(send(finder, ids, options){ |*a| yield(*a) if block_given? })\n pager.total_entries = recalculate_count ? wp_count(options, [], finder.to_s) : ids.length # hacked into will_paginate for compat\n end\n end\n else\n # Execute finder directly\n ids.empty? ? [] : send(finder, ids, options)\n end\n end",
"def or_text_match(column, search)\n column = \"#{column.to_s}_search_index\".to_sym\n self.or(':column @@ plainto_tsquery(:search)', :column => column, :search => search)\n .order_append(Sequel.function(:ts_rank, column, Sequel.function(:plainto_tsquery, search)).desc)\n end",
"def grep(cols, terms)\n filter(SQL::BooleanExpression.new(:OR, *Array(cols).collect{|c| SQL::StringExpression.like(c, *terms)}))\n end",
"def search_by_name(query, full_text_search = false)\n query_word_regexps = query.split.map { |word| /#{word}/i }\n if full_text_search\n query_word_results_hash = {}\n updated_search_index.each_value do |word_spec_hash|\n word_spec_hash.each_pair do |word, spec_names|\n query_word_regexps.each do |query_word_regexp|\n set = (query_word_results_hash[query_word_regexp] ||= Set.new)\n set.merge(spec_names) if word =~ query_word_regexp\n end\n end\n end\n found_set_names = query_word_results_hash.values.reduce(:&)\n found_set_names ||= []\n sets = found_set_names.map do |name|\n aggregate.representative_set(name)\n end\n # Remove nil values because representative_set return nil if no pod is found in any of the sources.\n sets.compact!\n else\n sets = aggregate.search_by_name(query, false)\n end\n if sets.empty?\n extra = ', author, summary, or description' if full_text_search\n raise Informative, \"Unable to find a pod with name#{extra} \" \\\n \"matching `#{query}`\"\n end\n sorted_sets(sets, query_word_regexps)\n end",
"def search query\n @content = @reader.read if @content.nil?\n @content.select do |doc|\n rs = []\n query.terms.each do |term|\n if term.compare(doc.send(term.field))\n rs << true\n end\n end\n if query.relation == :and\n rs.count == query.terms.count\n else\n !rs.empty?\n end\n end\n end",
"def default_search_statements\n column_names = @relation.columns.reject do |column|\n ![:string, :text].include?(column.type) || column.array\n end.map do |column|\n \"#{@relation.table_name}.#{column.name}\"\n end\n [SearchStatement.new(column_names, search_statement_default_options([:fulltext]).merge(engines: [:fulltext]))]\n end",
"def search(keywords, *fields)\n return all\n end",
"def search_text(query, text)\n text = pattern(text)\n query.where { title.ilike(text) | description.ilike(text) }\n end",
"def search(term, opts = {})\n raise \"Index not found at path #{@index_path}\" unless File.exists? @index_path\n\n results = []\n\n query = make_query(term, opts[:exact])\n\n @index.execute(\"SELECT sequence_number, kanji, kana, senses, bm25(search) as score FROM search WHERE search MATCH ? LIMIT ?\", query, opts[:max_results]) do |row|\n entry = Entry.from_sql(row)\n score = 0.0\n\n is_exact_match = entry.kanji.include?(term) || entry.kana.include?(term)\n score = 1.0 if is_exact_match\n\n should_add = !opts[:exact] || (opts[:exact] && is_exact_match)\n\n # add the result\n results << [score, entry] if should_add\n end\n\n # Sort the results by first column (score) and return only the second column (entry)\n results.sort_by { |entry| -entry[0] }.map { |entry| entry[1] }\n end",
"def search(params)\n filter_name, keywords, field_queries = extract params\n scope = filtered_by filter_name\n query = text_search keywords\n query = field_search field_queries, query\n scope.where query\n end",
"def fulltextsearch(query, text, words=2)\n result = \"\"\n if text =~ /.*#{query}.*/i\n text_words = text.scan(/\\w*/)\n indexes = []\n text_words.each_with_index do |word,index|\n if word =~ /.*#{query}.*/i\n i = []\n i << index - words unless words == 0 || index - words < 0\n i << index\n i << index + words unless words == 0 || index + words > text_words.length\n indexes << i\n end\n end\n indexes.each do |i|\n result += \"... \" unless i.length == 1\n i.each {|j| result += \"#{text_words[j]} \"}\n result += \" ...\" unless i.length == 1\n end\n end\n result\n end",
"def search_for(keywords, options = {})\n bounds = options[:specific] || options[:bounds]\n keywords = '*:*' if keywords.blank?\n logger.debug \"Search Query: #{spatial_query(bounds)}#{keywords}\"\n solr_server.find(\"#{spatial_query(bounds)}#{keywords}\", options.merge(:results => Atlas::Extensions::Place::PlaceResults))\n end",
"def index!\n set_search_text!\n set_query_attributes!\n update_columns text: self.text, query: self.query\n end",
"def query_string\n _f = @params.fields.include?(:full_text) ? [:full_text] : fields\n # byebug\n a = query.gsub('/', '').scan( /\"[^\"]+\"|[^ ]+/ ).map do |word|\n if word[0] === '\"'\n m = word.match( /^\"(.*)\"$/ );\n word = m ? m[1] : word;\n end\n Unicode.downcase(word.gsub('\"', ''))\n end\n _q = '(' + a.join('* AND ') + '*)'\n # _q = '/(?=.*?'+a.join( ')(?=.*?' )+').*/';\n #byebug\n index.filter{ ~q(query_string: {fields: _f, query: \"#{_q}\", default_operator: 'or'}) } if _q.present? && _f.present?\n\n #index.query(multi_match: {query: \"#{_q}*\", fields: _f}) if _q.present? && _f.present\n end",
"def sql_match(*terms, join: :and, connector: join, **opt)\n json = (opt[:type] == :json)\n connector &&= connector.to_s.strip.upcase\n opt[:columns] &&= Array.wrap(opt[:columns]).compact.map(&:to_sym).presence\n opt[:columns] ||= field_names\n merge_match_terms(*terms, **opt).flat_map { |field, matches|\n matches.map do |value|\n json ? sql_match_json(field, value) : sql_match_pattern(field, value)\n end\n }.then { |result|\n connector ? result.join(\" #{connector} \") : result\n }\n end",
"def search(query, options = {}); end",
"def full_text_search\n @attributes[:full_text_search]\n end",
"def search(*args)\n options = args.extract_options!\n options[:class] = self\n args << options\n ThinkingSphinx::Search.search(*args)\n end",
"def search\n terms = params[:query].split\n query = terms.map { |term| \"title like '%#{term}%' OR body like '%#{term}%' OR tags like '%#{term}%'\" }.join(\" OR \")\n \n @posts = Post.where(query).order(\"created_at DESC\").first(10)\n end",
"def matching(*terms, sort: nil, **opt)\n # noinspection RubyMismatchedReturnType\n where(sql_match(*terms, **opt)).tap do |relation|\n relation.order!(sort) if sort\n end\n end",
"def search\n @documents = api.form(\"everything\")\n .query(%([[:d = fulltext(document, \"#{params[:q]}\")]]))\n .page(params[:page] ? params[:page] : \"1\")\n .page_size(params[:page_size] ? params[:page_size] : \"20\")\n .submit(ref)\n end",
"def fullsearch params = {}\n request(params.merge(:report_type => :phrase_fullsearch))\n end",
"def full_text_search(options, ar_options ={})\n klass = use_invitation(options) ? Invitation : Meeting\n ar_options[:include] ||= DEFAULT_INCLUDES\n\t\t build_query_string(options)\n\n\t\t options[:sort] = @sort_options unless @sort_options.nil?\n\t\t options[:page] ||= 1\n logger.debug \"SEARCH STRING: #{@query_string} OPTIONS: #{options.to_yaml}\"\n\n\t\t begin\n\t\t results = klass.paginate_search(@query_string, options, ar_options) \n\t\t rescue\n\t\t logger.error(\"Error while searching with search string: #{@query_string}\")\n\t\t logger.error(\"Error was: #{$!}.\")\n\t\t logger.error($!.backtrace.join(\"\\n\"))\n\t\t return WillPaginate::Collection.new(0, 20, 0) # This is wrong actually. We need to return an empty paginator... Might be somethign like this WillPaginate::Collection.new(page, per_page, total_hits) ?? \n\t\t end\n results.each do |result|\n result.extend(TTB::CoerceInvitation)\n result.filtered_date = self.start_date\n end\n \n \n #~ real_results = results.sort_by{|inv| inv.start_time_local }\n #~ real_results.extend(TTB::CoerceResults)\n #~ real_results.real_results = results\n\t\t return results\n\t\tend",
"def search_query(search_terms)\n resource_handler.searchable_attributes.map do |attribute|\n if relation = attribute[:relation]\n \"LOWER(#{relation[:model_association].klass.table_name}.#{relation[:attr_method]}) LIKE #{search_terms}\"\n else\n \"LOWER(#{resource_handler.model.table_name}.#{attribute[:name]}) LIKE #{search_terms}\"\n end\n end.join(\" OR \")\n end",
"def search!(\n query, case_sensitive: false, whole_sentence: true,\n limit: 10, skip: 0, sentence_limit: 80\n )\n results = search(\n query,\n case_sensitive: case_sensitive,\n whole_sentence: whole_sentence,\n limit: limit,\n skip: skip\n )\n\n results.each do |doc|\n doc.search!(\n query,\n case_sensitive: case_sensitive,\n whole_sentence: whole_sentence,\n sentence_limit: sentence_limit\n )\n yield(doc) if block_given?\n end\n\n results\n end",
"def search_search_text\n query\n .where(\"decidim_opinions_opinions.title ILIKE ?\", \"%#{search_text}%\")\n .or(query.where(\"decidim_opinions_opinions.body ILIKE ?\", \"%#{search_text}%\"))\n end",
"def search_with_index query\n docs = []\n return docs if query.terms.empty?\n load if @content.nil?\n return docs if @content.nil?\n index = {}\n query.terms.each do |term|\n if term.operator == :eq && term.value.class != Regexp\n set = @attribute_storage[term.field][term.value]\n else\n set = @content.select do |doc|\n term.compare(doc.send(term.field))\n end\n end\n\n if !set.nil? && !set.empty?\n if docs.empty?\n docs = set\n if query.relation == :and\n docs.each do |value|\n index[value] = nil\n end\n end\n else\n if query.relation == :or\n docs += set\n else\n set.each do |value|\n if !index.has_key? value\n docs << value\n index[value] = nil\n end\n end\n end\n end\n end\n end\n docs\n end",
"def search(term)\n # pattern = Regexp.new(pattern, case_insensitive=true)\n # pattern = Regexp.new(pattern, Regexp::EXTENDED | Regexp::IGNORECASE)\n # pattern = Regexp.new(pattern)\n pattern = Regexp.new(term)\n select do |tweet|\n tweet.full_text =~ pattern\n end\n end",
"def search(*args)\n search_internal([\"SEARCH\"], *args)\n end",
"def search_series(terms, base = nil)\n return search_series_strict(terms, base) if terms.is_a?(Hash)\n\n terms = [terms] unless terms.is_a?(Enumerable)\n\n series = base || Series.all\n terms.each do |term|\n escaped = escape_like(term)\n res = series.where('jpn LIKE ? OR eng LIKE ? OR raw LIKE ?',\n escaped, escaped, escaped)\n return res if res.empty?\n series = res\n end\n series\n end",
"def search_TF(query)\n terms = query.split(' ')\n puts terms\n articles_bool = search_bool(query) # first get a simple boolean search of articles\n\n\n # create the corpus of articles \n corpus = []\n articles_bool.each do |article|\n summary = article.summary\n split_summary = summary.split(' ')\n corpus << split_summary\n end\n\n analyzed = nil\n\n # make a hash of TF for each of the articles \n score_map = {}\n articles_bool.each do |article|\n summary = article.summary\n split_summary = summary.split(' ')\n article_tf = TfIdf.new([split_summary])\n score_map[article] = score(article_tf.tf[0],terms,analyzed)\n end\n\n # rank this hash by TF and return the articles\n\n @articles = articles_bool.sort_by { |article|\n -score_map[article]\n }\n\n return @articles\n \n end",
"def text_search(search_text, limit, offset)\n query_strategy.text_search(search_text, limit, offset)\n end",
"def specific_search(**args)\n params = parameters(args) do\n required_params :term, :field_type, :field_key\n optional_params :term, :exact_match, :field_type, :field_key, :return_field_key, :return_item_ids, :start, :limit\n end\n request(:get, 'searchResults/field', params)\n end",
"def search(query, opts = {})\n results_to_hash(call(ft_search(query, opts)), opts)\n end",
"def search(params)\n terms = params[:terms]\n # SELECT id, question FROM questions WHERE searchtext @@ 'lorem' AND NOT EXISTS( SELECT question_id FROM tests_questions WHERE test_id=1);\n # SELECT q.id, q.question FROM questions AS q WHERE q.searchtext @@ 'lorem' AND NOT EXISTS(SELECT q1.id FROM test_questions AS tq, questions AS q1 WHERE tq.question_id=q1.id AND tq.test_id=1);\n sanitized = ActiveRecord::Base.send(:sanitize_sql_array, [\"to_tsquery('english', ?)\", terms.gsub(/\\s/,\"+\")])\n Question.where(\"searchtext @@ #{sanitized} AND id NOT IN(SELECT question_id AS id FROM test_questions WHERE test_id=#{id})\").paginate(page: params[:page], per_page: params[:per_page]).select(:id, :question, :explanation, :hint, :tags, :qtype)\n end",
"def merge_fulltext(params)\n return nil if @fulltexts.empty?\n return Sunspot::Util.deep_merge!(params, @fulltexts.first.to_params) if @fulltexts.length == 1\n subqueries = @fulltexts.map {|fulltext| fulltext.to_subquery }.join(' ')\n Sunspot::Util.deep_merge!(params, {:q => subqueries})\n end",
"def query_search(query, options={})\n run_query query, options\n end",
"def searchable_on(*fields)\n # Make sure that the table to be searched actually exists\n if self.table_exists?\n if fields.first.class.to_s == 'Hash'\n if fields.first.has_key?(:only)\n fields = fields.first[:only]\n elsif fields.first.has_key?(:except)\n fields = self.column_names.collect { |column| \n fields.first[:except].include?(column.to_sym) ? nil : column.to_sym }.compact\n end\n end\n \n assoc_models = self.reflections.collect { |m| m[0] }\n assoc_fields = fields - self.column_names.collect { |column| column.to_sym }\n fields -= assoc_fields\n \n assoc_groupings = {}\n assoc_models.each do |assoc_model|\n assoc_groupings[assoc_model] = []\n \tassoc_fields.each do |assoc_field|\n \t unless assoc_field.to_s.match(/^#{assoc_model.to_s}_/).nil?\n assoc_groupings[assoc_model] << assoc_field.to_s.sub(/^#{assoc_model.to_s}_/, '').to_sym \n end\n end\n end\n \n assoc_groupings = assoc_groupings.delete_if {|group, field_group| field_group.empty?}\n \n self.cattr_accessor :scoped_search_fields, :scoped_search_assoc_groupings\n self.scoped_search_fields = fields\n self.scoped_search_assoc_groupings = assoc_groupings\n self.named_scope :search_for, lambda { |keywords| self.build_scoped_search_conditions(keywords) }\n end\n end",
"def search_by_keyword(query, o={})\n #debugger\n #debug \"[search_by_keyword] query = #{query}\"\n result = Sunspot.search(Item) do\n keywords query\n if o[:doc_only]\n without :itype_str, Item::ITYPE_CONCEPT#['query','concept','tag']\n end\n #debugger\n o.find_all{|k,v|k.to_s =~ /^facet\\_/}.each do |e|\n #debugger\n with (e[0].to_s.split('_')[1..-1].join('_')).to_sym, e[1] if [e[1]].flatten.first != '-1'\n end\n #debugger\n order_by(:basetime, :desc) if o[:order] == \"recency\" || query == TEXT_DUMMY\n paginate(:page => o[:page], :per_page => o[:per_page]) if o[:page]\n facet(o[:facet]) if o[:facet]\n without :hidden_flag, '1'\n end\n #debugger\n if o[:facet]\n result.facet(o[:facet]).rows\n elsif o[:raw]\n result\n else\n result_items = result.hits.map_with_index{|e,i|{:id=>e.instance.id, :rank=>(i+1), :score=>e.score}}\n @cv.add(:type=>'kwd', :query=>query, :created_at=>(o[:created_at] || Time.now), :history_id=>o[:history_id], :result=>result_items) if o[:add_context]\n result_items\n end\n end",
"def fulltext_feature(search_str)\n features = Feature.where(\"lower(value) = ?\", search_str.downcase)\n ret = nil\n if features.count == 1\n feature = features.first\n ret = []\n ret[0] = \", which is a feature.\"\n ret[1] = \"You'll get better results by searching by the features:<br />\"\n ret[1] += raw link_to(\"Search for #{feature.value}\", programs_path(q: {function: feature.id}))\n end\n return ret\n end",
"def search(\n query, case_sensitive: false, whole_sentence: true, limit: 10, skip: 0\n )\n query = query.to_s.strip\n query.replace('\"' + query + '\"') if whole_sentence\n\n # Sort based on the most search hits (aka \"textScore\").\n # We use the sort_proj hash as both a sort and a projection below.\n sort_proj = { score: { :$meta => 'textScore' } }\n query = { :$text => {\n :$search => query,\n :$caseSensitive => case_sensitive\n } }\n\n results = retrieve(:documents, query,\n sort: sort_proj, projection: sort_proj,\n limit: limit, skip: skip)\n return [] if results.count < 1 # respond_to? :empty? == false\n\n # results.respond_to? :map! is false so we use map and overwrite the var.\n results = results.map { |mongo_doc| Wgit::Document.new(mongo_doc) }\n results.each { |doc| yield(doc) } if block_given?\n\n results\n end",
"def grep(columns, patterns, opts=OPTS)\n column_op = opts[:all_columns] ? :AND : :OR\n if opts[:all_patterns]\n conds = Array(patterns).map do |pat|\n SQL::BooleanExpression.new(column_op, *Array(columns).map{|c| SQL::StringExpression.like(c, pat, opts)})\n end\n where(SQL::BooleanExpression.new(:AND, *conds))\n else\n conds = Array(columns).map do |c|\n SQL::BooleanExpression.new(:OR, *Array(patterns).map{|pat| SQL::StringExpression.like(c, pat, opts)})\n end\n where(SQL::BooleanExpression.new(column_op, *conds))\n end\n end",
"def search_process\n @search_text =params[:q].to_s\n all =params[:all].to_s\n exact =params[:exact].to_s\n any =params[:any].to_s\n none =params[:none].to_s\n advanced_query=\"\"\n\n if all != \"\"\n all =all.split(' ')\n all_like =all.map { |x| \"keyword like \" + \"'%\" + x + \"%'\" }\n all_like =all_like.join(' and ')\n advanced_query=all_like\n end\n\n if exact != \"\" && all != \"\"\n exact =\"'%\"+exact+\"%'\"\n advanced_query = advanced_query + \" and keyword like \" + exact\n end\n\n if exact != \"\" && all == \"\"\n exact =\"'%\"+exact+\"%'\"\n advanced_query = \"keyword like \" + exact\n end\n\n if any != \"\" and (all != \"\" or exact != \"\")\n any =any.split(' ')\n any_like =any.map { |x| \"keyword like \" + \"'%\" + x + \"%'\" }\n any_like =any_like.join(' or ')\n advanced_query = advanced_query + \" and (\" + any_like + \")\"\n end\n\n if any != \"\" and all == \"\" and exact == \"\"\n any =any.split(' ')\n any_like =any.map { |x| \"keyword like \" + \"'%\" + x + \"%'\" }\n any_like =any_like.join(' or ')\n advanced_query = \"(\" + any_like + \")\"\n end\n\n if none != \"\" and (all != \"\" or exact != \"\" or any != \"\")\n none =none.split(' ')\n none_not_like=none.map { |x| \"keyword not like \" + \"'%\" + x + \"%'\" }\n\n none_not_like=none_not_like.join(' and ')\n\n advanced_query=advanced_query + \" and \" + none_not_like\n\n end\n\n if none != \"\" and all == \"\" and exact == \"\" and any == \"\"\n none =none.split(' ')\n none_not_like=none.map { |x| \"keyword not like \" + \"'%\" + x + \"%'\" }\n\n none_not_like=none_not_like.join(' and ')\n\n advanced_query= none_not_like\n end\n\n\n advanced_query = \"SELECT Model_ID FROM keyword_symbol_tables WHERE \"+advanced_query\n\n parameter_search_text=@search_text.split.join(\" \")\n keyword_array =parameter_search_text.split(' ')\n keyword_count =keyword_array.size\n\n connection = ActiveRecord::Base.connection\n\n if all != \"\" or exact != \"\" or any != \"\" or none != \"\"\n @resultset = connection.execute(\"#{advanced_query}\");\n else\n @resultset = connection.execute(\"call keyword_search('#{parameter_search_text}',#{keyword_count})\");\n end\n\n ActiveRecord::Base.clear_active_connections!\n\n @resultset_strings = @resultset.map { |result| result.to_s.gsub(/[^0-9A-Za-z]/, '') }\n\n @model_ids =Array.new\n @model_names =Array.new\n @model_types =Array.new\n\n @resultset_strings.each do |result|\n\n substring=result[0..4]\n\n if substring == \"NMLCL\"\n cell=Cell.find_by_Cell_ID(result.to_s)\n name=cell.Cell_Name\n type=\"Cell\"\n end\n\n if substring == \"NMLCH\"\n channel=Channel.find_by_Channel_ID(result.to_s)\n name =channel.Channel_Name\n type =\"Channel\"\n end\n\n\n if substring == \"NMLNT\"\n network=Network.find_by_Network_ID(result.to_s)\n name =network.Network_Name\n type =\"Network\"\n end\n\n if substring == \"NMLSY\"\n synapse=Synapse.find_by_Synapse_ID(result.to_s)\n name =synapse.Synapse_Name\n type =\"Synapse\"\n end\n\n @model_ids.push(result)\n @model_names.push(name)\n @model_types.push(type)\n\n end\n\n if @model_ids.count != 0\n\n render :partial => 'keyword_results_list',\n :locals => {\n :model_ids => @model_ids,\n :model_names => @model_names,\n :model_types => @model_types\n }\n\n else\n\n render :partial => 'no_results'\n\n end\n\n end",
"def search(terms)\n return search_strict(terms) if terms.is_a?(Hash)\n\n terms = [terms] unless terms.is_a?(Enumerable)\n\n series = Series.all\n tracks = Track.all\n terms.each do |term|\n unless series.empty?\n res = search_series(term, series)\n series = res.empty? ? [] : res\n end\n\n unless tracks.empty?\n res = search_tracks(term, tracks)\n tracks = res.empty? ? [] : res\n end\n end\n\n series.each do |s|\n tracks << s.tracks unless s.tracks.empty?\n end\n\n tracks.flatten\n end",
"def search\n if params[:query]\n respond_with Event\n .fulltext_search(params[:query])\n .includes(:categories, :organization, :location)\n else\n Event\n .order(\"updated_at DESC\")\n .includes(:categories, :organization, :location)\n end\n end",
"def add_full_text_context(params)\n return unless blacklight_params[:q].present?\n\n params['hl'] = true\n params['hl.fl'] ||= []\n params['hl.fl'] << 'extracted_text_tsimv'\n end",
"def has_fulltext_search(plan:{A:nil, B:nil, C:nil, D:nil})\n self.fulltext_search_plan = plan\n end",
"def custom_search\n\n # if we’ve no searchable fields, then just fail out\n return [] if @searchable_fields.empty?\n\n # otherwise, construct our query\n fields = []\n values = []\n @searchable_fields.each do |f|\n\n fields << \"#{f} LIKE ?\"\n values << \"%#{params[:search]}%\"\n\n end\n\n # run the query\n @model_class.where( fields.join( ' OR '), *values )\n\n\n end",
"def search(*args)\n search_provider.search(*args)\n end",
"def index\n if params[:search]\n @tutorials = Tutorial.search(params[:search]).order(\"created_at DESC\")\n else\n @tutorials = Tutorial.all.order('created_at DESC')\n end\n=begin\n #for sunspot\n @search = Tutorial.search do\n fulltext params[:search]\n end\n @tutorials = @search.results\n=end\n end",
"def search_conditions(query, fields=nil)\n return nil if query.blank?\n fields ||= @search_columns\n\n # split the query by commas as well as spaces, just in case\n words = query.split(\",\").map(&:split).flatten\n\n binds = {} # bind symbols\n or_frags = [] # OR fragments\n count = 1 # to keep count on the symbols and OR fragments\n\n words.each do |word|\n like_frags = [fields].flatten.map { |f| \"LOWER(#{f}) LIKE :word#{count}\" }\n or_frags << \"(#{like_frags.join(\" OR \")})\"\n binds[\"word#{count}\".to_sym] = \"%#{word.to_s.downcase}%\"\n count += 1\n end\n [or_frags.join(\" AND \"), binds]\n end",
"def search(\n query, case_sensitive: false, whole_sentence: true, limit: 10, skip: 0\n )\n query = query.to_s.strip\n query.replace('\"' + query + '\"') if whole_sentence\n\n # Sort based on the most search hits (aka \"textScore\").\n # We use the sort_proj hash as both a sort and a projection below.\n sort_proj = { score: { :$meta => 'textScore' } }\n query = { :$text => {\n :$search => query,\n :$caseSensitive => case_sensitive\n } }\n\n results = retrieve(DOCUMENTS_COLLECTION, query,\n sort: sort_proj, projection: sort_proj,\n limit: limit, skip: skip)\n\n results.map do |mongo_doc|\n doc = Wgit::Document.new(mongo_doc)\n yield(doc) if block_given?\n doc\n end\n end",
"def text_search_multiple(search_text, limit, offset)\n query_strategy.text_search(search_text, limit, offset)\n end",
"def search(search_terms)\n\n db = Sequel.sqlite(dbfilename)\n dataset = db[:pdfmd_documents].where(\"UPPER(keywords) LIKE UPPER('%#{search_terms[0]}%')\")\n result_files = ''\n dataset.all.each do |match_file|\n match_file.each do |key,value|\n if key == :keywords\n\n # Split the keywords\n keywords = value.downcase.split(/\\s*,\\s*/)\n # Search for matches in the keywords.\n if keywords.find{ |e| /#{search_terms.join(' ').downcase}/ =~ e }\n result_files += match_file[:filename] + \"\\n\"\n end\n end\n\n end\n end\n\n # Ouput result filenames\n result_files\n\n end",
"def query_for_one_keyword\n return @query_for_one_keyword if @query_for_one_keyword\n\n query = fields.map { |field| \"lower(#{field}) LIKE ?\" }\n .join(' OR ')\n @query_for_one_keyword = \"(#{query})\"\n end",
"def search(args = {})\n search_args = {}\n search_args[:body] = self::ElasticsearchQuery.new(args).to_hash\n search_args[:index] = configuration.index\n search_args[:type] = type\n\n result = client.search search_args\n ids = result[\"hits\"][\"hits\"].map {|item| item[\"_source\"][\"id\"] } # use to get the records and sort them in that order\n result_list = includes(:translations, [:master => [:prices, :images]]).where(id: ids).index_by(&:id).slice(*ids).values\n\n # Convert all facets to facet objects\n facet_list = result[\"facets\"].map do |tuple|\n name = tuple[0]\n hash = tuple[1]\n type = hash[\"_type\"]\n body = hash.except!(\"_type\")\n Spree::Search::Elasticsearch::Facet.new(name: name, search_name: name, type: type, body: body)\n end\n\n ResultList.new(\n results: result_list,\n from: Integer(args.fetch(:from, 0)),\n total: result[\"hits\"][\"total\"],\n facets: facet_list\n )\n end",
"def field_matching_terms(terms)\n match = Regexp.new(terms.map { |t| \"(#{Regexp.escape t})\" }.join(\"|\"), Regexp::IGNORECASE)\n SEARCHABLE_COLUMNS.each do |attr|\n attr_val = self.send(attr)\n return attr_val if attr_val =~ match\n end\n \"\"\n end",
"def search(index, query, options = {})\n raise NotImplementedError, 'Search has not been implemented'\n end",
"def index\n @topics = Topic.all\n \n if !Rails.env.production?\n @search = Topic.search do\n fulltext params[:search]\n end\n end\n end",
"def query_fulltext_regexp( query )\n read_db do |dbm|\n dbm.each_value do |raw_val|\n val = RDictCcEntry.format_str(raw_val)\n match_line_found = false\n val.each_line do |line|\n if line =~ /^\\s+/\n if match_line_found\n puts line\n else\n # Skip lines starting with blanks, because these are already\n # translations and they don't belong to the matching line.\n next\n end\n else\n match_line_found = false\n end\n if line.downcase =~ /#{query}/\n puts line\n match_line_found = true\n end\n end\n end\n end\n end",
"def search(query); end",
"def search_text(\n query, case_sensitive: false, whole_sentence: true,\n limit: 10, skip: 0, sentence_limit: 80, top_result_only: false\n )\n results = search(\n query,\n case_sensitive: case_sensitive,\n whole_sentence: whole_sentence,\n limit: limit,\n skip: skip\n )\n\n results\n .map do |doc|\n yield(doc) if block_given?\n\n results = doc.search(\n query,\n case_sensitive: case_sensitive,\n whole_sentence: whole_sentence,\n sentence_limit: sentence_limit\n )\n\n # Only return result if its text has a match - compact is called below.\n next nil if results.empty?\n\n [doc.url, (top_result_only ? results.first : results)]\n end\n .compact\n .to_h\n end",
"def search\n\t\t@articles = Article.where(\"text = ?\",params[:q])\n \n #Article.find_by_text(params[:q])\n \n #debug\n @articles.each do |article|\n puts article.title\n end\n \n \n\t\t#@articles = Article.where(:text => params[:q]) ' 1=1 -- '\n\n\t\t#@articles = Article.where(\"text = ?\", params[:q] )\n \n \n #TODO\n # add filter for other fields\n # Article.where(\"text = ? and title = ?\",params[:text],params[:title])\n \n # to add LIKE filter SQL : name like %aa%\n # \"name LIKE ? OR postal_code like ?\", \"%#{search}%\", \"%#{search}%\"\n \n end",
"def partial_search(query, options = {})\n bounds = options[:bounds] || nil\n if bounds.present?\n results = solr_server.find(\"#{spatial_query(bounds)}name:#{query}*\", options.merge(:qt => 'standard', \n :results => Atlas::Extensions::Place::PlaceResults))\n else\n results = solr_server.find(\"name:#{query}*\", options.merge(:qt => 'standard', \n :results => Atlas::Extensions::Place::PlaceResults))\n end\n results\n end",
"def index\n # @search = Shelter.search do\n # fulltext params[:search]\n # end\n # @shelters = @search.results\n @shelters = Shelter.all\nend",
"def search(*args)\n options = args.extract_options!\n query = args[0].to_s\n\n options.reverse_merge!({\n :classes => [NewsStory, ShowSegment, BlogEntry, ContentShell],\n :page => 1,\n :order => \"public_datetime #{DESCENDING}\",\n :retry_stale => true,\n :populate => true\n })\n\n # We'll want to search only among live content 99% of the\n # time. For the times when we want unpublished stuff,\n # we can pass in `with: { is_live: [true, false] }`, for\n # example.\n options[:with] ||= {}\n options[:with].reverse_merge!(is_live: true)\n\n begin\n ThinkingSphinx.search(Riddle::Query.escape(query), options)\n rescue Riddle::ConnectionError,\n Riddle::ResponseError,\n ThinkingSphinx::SphinxError => e\n # In this one scenario, we need to fail gracefully from a Sphinx error,\n # because otherwise the entire website will be down if media isn't\n # available, or if we need to stop the searchd daemon for some reason,\n # like a rebuild.\n warn \"Caught error in ContentBase.search: #{e}\"\n Kaminari.paginate_array([]).page(0).per(0)\n end\n end",
"def handling_fulltext\n options = yield\n add_sort_by_relevance options if fulltext_name_filtering?\n options\n end",
"def search_for(keywords, options = {})\n solr_server.find(keywords, options.merge(:results => PublicEarth::Search::CollectionResults))\n end",
"def search_terms(*words)\n terms = []\n words.each { |word| terms.push \"'%#{word.gsub(/[^a-z]/i, '').strip}%'\" }\n return nil if terms.empty?\n return 'description ILIKE ' + terms.join(' AND description ILIKE ')\nend",
"def search_text_fields\n #self.content_columns.select {|c| [:string,:text].include?(c.type) }.map {|c| c.name }\n \n end",
"def concatenate\n cols = self.class.columns_hash.reject do |k, v|\n v.name.in?(['search_text', 'locale', 'slug']) || (v.name.split('_').first == 'concatenated') || !v.type.in?([:string, :text])\n end\n self.search_text = cols.map { |k, v| send(v.name).to_s }.reject(&:blank?).join(' ').hanize\nend",
"def search(query)\n\t\t query = \"%#{query}%\"\n\t\t name = arel_table[:name].matches(query)\n\t\t aliases = arel_table[:aliases].matches(query)\n\t\t where(name.or(aliases))\n\t\tend",
"def search_any_term\n render json: Article.with_any_terms(params[:query]).map(&:title)\n end",
"def search\n puts params[:terms]\n query = 'FALSE'\n params[:terms].split(',').each do |term|\n query += \" OR item.name iLIKE ? OR ingredients.unit iLIKE ? OR ingredients.quantity iLIKE ?\"\n end\n @items = Item.where(query, *params[:terms].split(',').map { |i| \"%\"+i+\"%\"}, *params[:terms].split(',').map { |i| \"%\"+i+\"%\"}, *params[:terms].split(',').map { |i| \"%\"+i+\"%\"}).eager_load(:ingredients)\n if @items.any?\n @recipes = Recipe.joins(:ingredients).where('ingredients.item_id IN (?)', @items.pluck(:id)).uniq\n end\n end",
"def search_relevant(namespace, query, options={})\n raise \"Namespace not found\" unless namespace\n return criteria.all if query.blank? && self.allow_empty_search[namespace]\n \n keywords = Util.keywords(query, stem_keywords[namespace], ignore_list[namespace])\n \n map = <<-EOS\n function() {\n var entries = 0\n for(i in keywords)\n for(j in this._keywords[namespace]) {\n if(this._keywords[namespace][j] == keywords[i])\n entries++\n }\n if(entries > 0)\n emit(this._id, entries)\n }\n EOS\n reduce = <<-EOS\n function(key, values) {\n return(values[0])\n }\n EOS\n\n #raise [self.class, self.inspect].inspect\n \n \n \n kw_conditions = keywords.map do |kw|\n {:_keywords => kw}\n end\n\n criteria = (criteria || self).any_of(*kw_conditions)\n\n query = criteria.selector\n\n options.delete(:limit)\n options.delete(:skip)\n options.merge! :scope => {:keywords => keywords, :namespace => namespace}, :query => query\n\n # res = collection.map_reduce(map, reduce, options)\n # res.find.sort(['value', -1]) # Cursor\n \n puts options.inspect\n \n collection.map_reduce(map, reduce, options)\n end",
"def search_for(keywords, options = {})\n solr_server.find(keywords, options.merge(:results => PublicEarth::Search::PlaceResults))\n end",
"def text_search_find(ids, options)\n if defined?(ActiveModel)\n # guess that we're on Rails 3\n raise \"text_search_find not implemented for Rails 3 (yet) - patches welcome\"\n elsif defined?(ActiveRecord::Base) and ancestors.include?(ActiveRecord::Base)\n merge_text_search_conditions!(ids, options)\n all(options)\n elsif defined?(Sequel::Model) and ancestors.include?(Sequel::Model)\n self[primary_key.to_sym => ids].filter(options)\n elsif defined?(DataMapper::Resource) and included_modules.include?(DataMapper::Resource) \n get(options.merge(primary_key.to_sym => ids))\n end\n end",
"def search(term)#select\n my_array = @@conn.exec_params(\"SELECT * FROM contacts WHERE name LIKE $1 OR email LIKE $1;\", [term])\n end",
"def search(query)\n\n\t\t@vquery = VectorData.new\n\t\tquery_terms = query.split\n\t\tt_i = 0\n\n\t\t# For each term in the dictionary.\n\t\t@index.each do |term, data|\n\n\t\t\t# Calculate the weight for the query vector\n\t\t\tif(query_terms.include?(term))\n\n\t\t\t\tweight = (1 + Math.log(query_terms.count(term)))\n\n\t\t\t\t@vquery.norm = weight*weight\n\t\t\t\t@vquery.t_id.push(t_i)\n\t\t\t\t@vquery.w.push(weight)\n\t\t\tend\n\n\t\t\t\tt_i += 1\n\t\tend\n\n\t\t@vquery.norm = Math.sqrt(@vquery.norm)\n\n\t\t# Returns empty if their are no terms matched to the index\n\t\treturn [] if (@vquery.norm == 0)\n\n\t\t# The array to collect the relevance scores\n\t\tscores = []\n\n\t\t# Calculates the similarity between each document and query\n\t\t@vdocs.each do |d_id, doc|\n\n\t\t\tweights_sum = 0\n\n\t\t\t# Multiplies their weights to find the corresponding weights\n\t\t\t# This is using the cosine similarity formula\n\t\t\t@vquery.t_id.each_with_index do |termid, i|\n\t\t\t\tif(doc.t_id.include?(termid))\n\t\t\t\t\tk = doc.t_id.index(termid)\n\t\t\t\t\tweights_sum += @vquery.w[i] * doc.w[k]\n\t\t\t\tend\n\t\t\tend\n\n\t\t\t# Skip if the weights_sum is equal to 0\n\t\t\tnext if(weights_sum == 0)\n\n\t\t\t# Applying the cosine similarity formula\n\t\t\tsim = (weights_sum) / (doc.norm * @vquery.norm)\n\n\t\t\tscores.push([d_id, sim])\n\t\tend\n\n\t\t# Sorts the array by the highest relevance and returns the array\n\t\treturn scores.sort{ |x,y| y[1] <=> x[1]}\n\n\tend",
"def search(term = nil, similarity = 0.3)\n search_term = term.try(:strip)\n if search_term.present?\n ActiveRecord::Base.connection.execute(\"SELECT set_limit(#{similarity});\") if similarity.present?\n fuzzy_search(search_term)\n else\n default_scoped\n end\n end",
"def searchtermForSql(term, sql_column_name)\n if term !~ /[^\\w\\.\\s\\-\\_\\+]/\n # split given values into an array and add some chars needed in the sql query\n term.gsub!(\"+\", \" \")\n values = term.split(\" \").join(\"%' '%\")\n values = (\"'%\" + values + \"%'\").split(\" \")\n if term == \"\" or term =~ /^\\s$/\n values = [\"'%'\"]\n end\n\n #combine the searchstring and do the search\n searchstring = sql_column_name + \" LIKE \" + values.first\n values.each_index do |i|\n next if i == 0\n searchstring += \" OR \" + sql_column_name + \" LIKE \" + values[i]\n end\n return searchstring\n end\n end",
"def search_columns\n return @_search_columns if @_search_columns\n columns.map { |column| column.name if [:string, :text].include?(column.type) }.compact\n end",
"def search(**args)\n params = parameters(args) do\n required_params :term\n optional_params :term, :item_type, :start, :limit, :exact_match\n end\n request(:get, 'searchResults', params)\n end",
"def terms(attributes = nil)\r\n field = attributes.keys.first\r\n query = { field => attributes.values.flatten }\r\n\r\n body = { query: { terms: query } }\r\n result = client.search index: index, type: type, body: body\r\n\r\n result_instance(result)\r\n end",
"def apply_search(results:)\n return results unless search_params.present?\n\n terms = search_params[:search_words] || ''\n return results unless terms.present?\n\n results.search(term: terms)\n end",
"def termfreq_query(ocr_search_terms)\n search_terms = if ocr_search_terms.match?(/\\A\"[\\s\\S]*\"\\z/) # phrase search\n [ocr_search_terms.delete('\"')]\n else\n ocr_search_terms.delete('\"').split(' ')\n end\n if search_terms.length == 1\n \"term_freq:termfreq(#{blacklight_config.ocr_search_field},\\\"#{search_terms.first}\\\")\"\n else\n termfreq_array = search_terms.map { |v| \"termfreq(#{blacklight_config.ocr_search_field},\\\"#{v}\\\")\" }\n \"term_freq:sum(#{termfreq_array.join(',')})\"\n end\n end",
"def search\n @query = params[:q]\n @ads = Ad.within(@city, 30)\n @ads = @ads.fulltext_search(@query).group_by { |x| x.created_at.to_date }\n end",
"def search(options)\n Fotolia::SearchResultSet.new(self, options)\n end"
] |
[
"0.8264869",
"0.6610546",
"0.6217637",
"0.60479075",
"0.6005078",
"0.59993774",
"0.59954834",
"0.5823916",
"0.5810335",
"0.57882714",
"0.57503736",
"0.57445025",
"0.5727415",
"0.5721591",
"0.5706118",
"0.5620432",
"0.5587604",
"0.55812776",
"0.5575823",
"0.5558891",
"0.5557226",
"0.5556316",
"0.5539862",
"0.552441",
"0.5521997",
"0.5500646",
"0.54880774",
"0.5487212",
"0.5480236",
"0.546978",
"0.54450035",
"0.5415002",
"0.54145414",
"0.53959966",
"0.53902596",
"0.5386743",
"0.5386712",
"0.53859234",
"0.53783315",
"0.53772676",
"0.5371656",
"0.5368839",
"0.5345985",
"0.5304637",
"0.5300821",
"0.5299401",
"0.5296915",
"0.5292351",
"0.5272242",
"0.5250392",
"0.52418625",
"0.52294934",
"0.52221",
"0.52207726",
"0.5197958",
"0.518196",
"0.5166904",
"0.516546",
"0.5152708",
"0.515094",
"0.5144053",
"0.5140266",
"0.51219445",
"0.5121863",
"0.5121702",
"0.51068556",
"0.5099907",
"0.50886756",
"0.50875926",
"0.5082913",
"0.50679",
"0.5059319",
"0.50572133",
"0.50517344",
"0.5047579",
"0.5046301",
"0.5046109",
"0.50335085",
"0.5031331",
"0.5026555",
"0.5016441",
"0.5015627",
"0.5013026",
"0.5006974",
"0.500466",
"0.49981672",
"0.49976653",
"0.49927244",
"0.49849147",
"0.49801078",
"0.49694186",
"0.4965772",
"0.49639186",
"0.49608752",
"0.49541333",
"0.49531272",
"0.49508375",
"0.49259943",
"0.49215338",
"0.49159104"
] |
0.88017637
|
0
|
Insert given values into the database.
|
def insert(*values)
if @opts[:returning]
# Already know which columns to return, let the standard code handle it
super
elsif @opts[:sql] || @opts[:disable_insert_returning]
# Raw SQL used or RETURNING disabled, just use the default behavior
# and return nil since sequence is not known.
super
nil
else
# Force the use of RETURNING with the primary key value,
# unless it has been disabled.
returning(insert_pk).insert(*values){|r| return r.values.first}
end
end
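
A minimal usage sketch (table and column names are hypothetical): on PostgreSQL this insert uses RETURNING to hand back the new primary key value, unless RETURNING is disabled or raw SQL is in play:

  id = DB[:items].insert(name: 'widget', price: 10) # => primary key, e.g. 1

  # With RETURNING disabled, nil is returned since the sequence value is unknown.
  DB[:items].disable_insert_returning.insert(name: 'gadget') # => nil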
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def insert values\n im = InsertManager.new @engine\n im.insert values\n @engine.connection.insert im.to_sql\n end",
"def insert values\n if $VERBOSE\n warn <<-eowarn\ninsert (#{caller.first}) is deprecated and will be removed in ARel 3.0.0. Please\nswitch to `compile_insert`\n eowarn\n end\n @engine.connection.insert compile_insert(values).to_sql\n end",
"def insert_record(table, values)\n execute table_insert_query(table, values)\n end",
"def insert_sql(*values)\n if values.empty?\n insert_default_values_sql\n else\n values = values[0] if values.size == 1\n \n # if hash or array with keys we need to transform the values\n if @transform && (values.is_a?(Hash) || (values.is_a?(Array) && values.keys))\n values = transform_save(values)\n end\n from = source_list(@opts[:from])\n\n case values\n when Array\n if values.empty?\n insert_default_values_sql\n else\n \"INSERT INTO #{from} VALUES #{literal(values)}\"\n end\n when Hash\n if values.empty?\n insert_default_values_sql\n else\n fl, vl = [], []\n values.each {|k, v| fl << literal(k.is_a?(String) ? k.to_sym : k); vl << literal(v)}\n \"INSERT INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})\"\n end\n when Dataset\n \"INSERT INTO #{from} #{literal(values)}\"\n else\n if values.respond_to?(:values)\n insert_sql(values.values)\n else\n \"INSERT INTO #{from} VALUES (#{literal(values)})\"\n end\n end\n end\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert(*values)\n execute_dui(insert_sql(*values)){|c| return c.last_id}\n end",
"def multi_insert_sql(columns, values)\n table = quote_identifier(@opts[:from].first)\n columns = literal(columns)\n values.map do |r|\n \"INSERT INTO #{table} #{columns} VALUES #{literal(r)}\"\n end\n end",
"def dbinsert(table, variables, variable_names)\n i = 1\n marks = \"?\"\n while i < variables.length\n marks += \",?\"\n i += 1\n end\n\n v = \"\"\n i = 0\n while i < variables.length\n v += variables[i].to_s \n i += 1\n if i < variables.length\n v += \", \"\n end\n end\n\n return db.execute(\"INSERT INTO #{table}(#{v}) VALUES (#{marks})\", variable_names)\nend",
"def insert(*values)\n raise NotImplementedError, NOTIMPL_MSG\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def insert_many( sql, values, *args ) # :nodoc:\n # the number of inserts default\n number_of_inserts = 0\n \n base_sql,post_sql = if sql.is_a?( String )\n [ sql, '' ]\n elsif sql.is_a?( Array )\n [ sql.shift, sql.join( ' ' ) ]\n end\n \n sql_size = QUERY_OVERHEAD + base_sql.size + post_sql.size \n\n # the number of bytes the requested insert statement values will take up\n values_in_bytes = self.class.sum_sizes( *values )\n \n # the number of bytes (commas) it will take to comma separate our values\n comma_separated_bytes = values.size-1\n \n # the total number of bytes required if this statement is one statement\n total_bytes = sql_size + values_in_bytes + comma_separated_bytes\n \n max = max_allowed_packet\n \n # if we can insert it all as one statement\n if NO_MAX_PACKET == max or total_bytes < max\n number_of_inserts += 1\n sql2insert = base_sql + values.join( ',' ) + post_sql\n insert( sql2insert, *args )\n else\n value_sets = self.class.get_insert_value_sets( values, sql_size, max )\n value_sets.each do |values|\n number_of_inserts += 1\n sql2insert = base_sql + values.join( ',' ) + post_sql\n insert( sql2insert, *args )\n end\n end \n\n number_of_inserts\n end",
"def insert(*objects)\n objects = objects.first if objects.size == 1 && objects.first.is_a?(Array)\n res = @db.insert_into_db(@name, objects)\n res.size > 1 ? res : res.first\n end",
"def insert_into(table, data)\n\t\tkeys = \"(#{data.keys.join(', ')})\"\n\t\tvalues = \"(#{data.values.map{ |value| \"'#{value}'\" }.join(', ')})\"\n\t\texecute_with_retry \"INSERT INTO #{table} #{keys} VALUES #{values}; \"\n\tend",
"def insert\n attributes = []\n instance_variables.each { |i|\n attributes << i.to_s.delete(\"@\")\n }\n\n query_components_array = []\n attributes.each { |x|\n value = self.send(x)\n\n if value.is_a?(Integer)\n query_components_array << \"#{value}\"\n else\n query_components_array << \"'#{value}'\"\n end\n }\n\n query_string = query_components_array.join(\", \")\n puts query_string\n\n DATABASE.execute(\"INSERT INTO items (name, category, location, quantity, cost, description) VALUES (#{query_string})\")\n\n puts \"Inserted successfully!\"\n end",
"def insert()\n\n # Kräver att det finns ett \"set_table(\"Table\")\" i klassen\n @insertable_vars_full = self.instance_variables # Ta med namnen user.username osv\n @insertable_vars_full.shift(1) # Kinda frisky\n @insertable_vars = []\n @insertable_values = []\n @insertable_vars_full.each do |var|\n @insertable_vars << var[1..-1]\n @insertable_values << self.instance_variable_get(var)\n end\n\n\n @insertable_vars_str = @insertable_vars.join(\", \")\n\n @question_marks = \"\"\n @insertable_vars.each do |key|\n @question_marks.concat(\"?,\")\n end\n @question_marks = @question_marks[0..-2]\n\n DB.execute(\"INSERT INTO #{@table} (#{@insertable_vars_str})\n VALUES (#{@question_marks})\", @insertable_values)\n\n end",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def insert\n \n attributes = []\n instance_variables.each do |i|\n attributes << i.to_s.delete(\"@\") if (i != :@id && i != :@table)\n end\n \n values = []\n attributes.each do |a|\n value = self.send(a)\n \n if value.is_a?(Integer)\n values << \"#{value}\"\n else values << \"'#{value}'\"\n end\n end\n \n DATABASE.execute(\"INSERT INTO students (#{attributes.join(\", \")}) \n VALUES (#{values.join(\", \")})\")\n @id = DATABASE.last_insert_row_id\n\n end",
"def insert(template, *data) # :nodoc:\n chk_conn\n conn = @hibernate_session.connection\n stmt = conn.prepare_statement(template)\n data.each do |d|\n d.each_with_index do |item, index|\n if item.kind_of?(Array)\n set_prepared_statement(stmt, item[0], index+1, item[1])\n else\n set_prepared_statement(stmt, item, index+1, nil)\n end\n end\n stmt.execute_update\n end\n conn.commit\n ensure\n stmt.close rescue nil\n end",
"def insert_many( sql, values, _options = {}, *args ) # :nodoc:\n number_of_inserts = 0\n\n base_sql, post_sql = case sql\n when String\n [sql, '']\n when Array\n [sql.shift, sql.join( ' ' )]\n end\n\n value_sets = ::ActiveRecord::Import::ValueSetsRecordsParser.parse(values,\n max_records: SQLITE_LIMIT_COMPOUND_SELECT)\n\n transaction(requires_new: true) do\n value_sets.each do |value_set|\n number_of_inserts += 1\n sql2insert = base_sql + value_set.join( ',' ) + post_sql\n insert( sql2insert, *args )\n end\n end\n\n ActiveRecord::Import::Result.new([], number_of_inserts, [], [])\n end",
"def insert(values)\n primary_key_value = nil\n\n if primary_key && Hash === values\n primary_key_value = values[values.keys.find { |k|\n k.name == primary_key\n }]\n\n if !primary_key_value && connection.prefetch_primary_key?(klass.table_name)\n primary_key_value = connection.next_sequence_value(klass.sequence_name)\n values[klass.arel_table[klass.primary_key]] = primary_key_value\n end\n end\n\n im = arel.create_insert\n\n # ****** BEGIN PARTITIONED PATCH ******\n actual_arel_table = @klass.dynamic_arel_table(Hash[*values.map{|k,v| [k.name,v]}.flatten]) if @klass.respond_to?(:dynamic_arel_table)\n actual_arel_table = @table unless actual_arel_table\n # Original line:\n # im.into @table\n im.into actual_arel_table\n # ****** END PARTITIONED PATCH ******\n\n conn = @klass.connection\n\n substitutes = values.sort_by { |arel_attr,_| arel_attr.name }\n binds = substitutes.map do |arel_attr, value|\n [@klass.columns_hash[arel_attr.name], value]\n end\n\n substitutes.each_with_index do |tuple, i|\n tuple[1] = conn.substitute_at(binds[i][0], i)\n end\n\n if values.empty? # empty insert\n im.values = Arel.sql(connection.empty_insert_statement_value)\n else\n im.insert substitutes\n end\n\n conn.insert(\n im,\n 'SQL',\n primary_key,\n primary_key_value,\n nil,\n binds)\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def insert(statements)\n raise TriplestoreAdapter::TriplestoreException.new(\"#{@provider.class.name} missing insert method.\") unless @provider.respond_to?(:insert)\n @provider.insert(statements)\n end",
"def insert_sql(*values)\n if values.size == 1 && (v = values[0]).is_a?(Sequel::Model) && !v.respond_to?(:sql_literal_append)\n Sequel::Deprecation.deprecate(\"Passing Sequel::Model instance argument to Sequel::Dataset#insert\", \"Pass model_instance.values or model_instance.to_hash as the argument instead\")\n super(v.to_hash)\n else\n super\n end\n end",
"def create(values)\n a = new(values)\n a.insert\n a\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def insert!(*rows)\n Mao.sql(with_options(:insert => rows.flatten).sql) do |pg_result|\n if @options[:returning]\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n else\n pg_result.cmd_tuples\n end\n end\n end",
"def insert\n\n DBConnection.execute2(<<-SQL, attribute_values)\n INSERT INTO\n #{class_obj.table_name} #{sql_columns}\n VALUES\n #{sql_question_marks}\n SQL\n\n self.id = DBConnection.last_insert_row_id\n end",
"def insert_into_table(db, product, exp_date, prod_type)\n\tdb.execute(\"INSERT INTO products (name, expiration_date, type_id) VALUES (?, ?, ?)\",[product, exp_date, prod_type])\nend",
"def insert(*args)\n dataset.insert(*args)\n self\n end",
"def exec_insert(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"insert\", binds) { query(sql, binds) }\r\n end",
"def insert\n col_names = self.class.columns.join(\", \")\n question_marks = ([\"?\"] * self.class.columns.length).join(\", \")\n DBConnection.execute(<<-SQL, *attribute_values)\n INSERT INTO\n #{self.class.table_name} (#{col_names})\n VALUES\n (#{question_marks})\n SQL\n\n self.id = DBConnection.last_insert_row_id\n end",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end",
"def make_insert(table, columns, fields, row)\n statement = \"INSERT INTO #{table['name']} (#{fields.join(',')}) VALUES (\"\n values = []\n fields.each do |field|\n values << make_val(row[field], columns[field])\n end\n statement << \"#{values.join(',')});\\n\"\n statement\n end",
"def insert\n # Preparing for the query...\n cols = self.class.columns\n col_names = cols.map(&:to_s).join(\", \")\n question_marks = ([\"?\"] * cols.count).join(\", \")\n \n # The actual query\n DBConnection.execute(<<-SQL, *attribute_values)\n INSERT INTO\n #{ self.class.table_name } (#{ col_names })\n VALUES\n (#{ question_marks })\n SQL\n \n # Add an id number for the record\n self.id = DBConnection.last_insert_row_id\n end",
"def insert\n array = [[@name, @tagline, @github, @twitter, @blog_url, @image_url, @biography]]\n ins = DB[:conn].prepare(\"INSERT INTO students (name, tagline, github, twitter, blog_url, image_url, biography) VALUES (?, ?, ?, ?, ?, ?, ?);\")\n array.each { |s| ins.execute(s)}\n self.id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM students;\")[0][0]\n #ask steven re. index figures\n #inserting data into an instance\n end",
"def sqlite3_insert(table_name, values, mem_db)\n @assert.check_table_name(table_name, @dbh)\n\n table_ast = Hash.new\n table_ast = @dbm.get_table_ast(table_name)\n # TODO: Each value element type. All types are acceptable.\n # If hash, {key:value} needs to be {column_name:value}.\n\n # This hash holds a map of column position in the table\n index_to_column = Hash.new\n # Since the position of column in a table is static on\n # creation, we can keep a track of each column position\n # simple by assigning an index to each (A.K.A. indexing).\n tb_index = 0\n table_ast.each {|col_name, col_info|\n index_to_column[tb_index] = [col_name, col_info]\n tb_index += 1\n }\n\n @assert.check_class(values.class, Array, @dbh)\n vl_index = 0\n column_to_value = Hash.new\n values.each {|value|\n if value.class == Hash\n status = value.size == 1\n error_msg = \"Error: Size of hash for insert value cannot exeed 1.\\n\"\n error_msg += \"#{value} has size #{value.size}\"\n @assert.default_error_check(status, error_msg, @dbh)\n \n # Extract user input value\n col_name = value.keys[0]\n status = table_ast.has_key?(col_name.to_s)\n error_msg = \"Column #{col_name.to_s} does not exist in table #{table_name}.\\n\"\n error_msg += @dbm.get_table_schema(table_name)\n @assert.default_error_check(status, error_msg, @dbh)\n column_to_value[col_name.to_s] = value[col_name]\n else\n # Compare input value type with table column type\n cur_column_in_table = index_to_column[vl_index]\n @assert.check_column_value(cur_column_in_table, value, mem_db[table_name], @dbh)\n col_name = cur_column_in_table[0]\n column_to_value[col_name] = value\n end\n\n vl_index += 1\n }\n\n insert_query = \"INSERT INTO #{table_name} (\"\n column_to_value.each_key {|col_name|\n insert_query += \"#{col_name},\"\n }\n insert_query.chomp!(',')\n insert_query += \") VALUES (\"\n column_to_value.each_value {|value|\n if value.class == String\n insert_query += \"'#{value}',\"\n else\n insert_query += \"#{value},\"\n end\n }\n insert_query.chomp!(',')\n insert_query += ');'\n\n @dbh.execute(insert_query)\n return insert_query + \"\\n\"\n end",
"def insert(table, id, attributes) # abstract\n end",
"def table_insert_query(table, values)\n query = \"insert into #{quote_table_name(table)}\"\n query << '(' << values.keys.map do |column_name|\n quote_column_name(column_name)\n end.join(', ') << ') '\n query << 'values(' << values.map do |column_name, value|\n quote_value(table, column_name, value)\n end.join(', ') << ')'\n query\n end",
"def insert(key, values, opt={})\n do_op(:insert, column_family, key, values, opt)\n end",
"def insert(key, values, opt={})\n do_op(:insert, column_family, key, values, opt)\n end",
"def insert()\n\t\t@db.execute(\"insert into ausgaben (jahr, monat, name, betrag, gemeinsam, tags) values(:jahr, :monat, :name, :betrag, :gemeinsam, :tags)\", @options)\n\tend",
"def insert_in_database\n Fetch.new(insertion_query).array\n end",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n insert_sql(sql, name, pk, id_value, sequence_name)\n end",
"def insert_by_data data, table\n sql = \"insert into #{table} \"\n case data\n when Array\n data.each do |d|\n insert_by_data(d, table)\n end\n when Hash\n columns = data.keys.to_s.gsub('[','(').gsub(']',')').gsub('\"','')\n values = data.values.to_s.gsub('[','(').gsub(']',')').gsub('nil','NULL')\n sql = sql + columns + \" values \" + values\n query(sql)\n end\n end",
"def build_insert(data)\n fields = \"\"\n values = \"\"\n data.each do |k,v|\n fields += \"`#{escape_str_field(k)}`, \"\n values += escape_value(v)+\", \"\n end\n \"(\"+fields.chomp(', ')+\") VALUES (\"+values.chomp(', ')+\")\"\n end",
"def insert_select(*values)\n with_sql_first(insert_select_sql(*values))\n end",
"def add(values_hash = {}) #values_hash = {}\n table_name = self.to_s.pluralize.underscore\n\n columns = values_hash.keys\n values = values_hash.values\n binding.pry\n CONNECTION.execute(\"INSERT INTO #{table_name} (#{columns.join \", \"}) VALUES (#{values.to_s[1...-1]});\")\n\n id = CONNECTION.last_insert_row_id\n values_hash[\"id\"] = id\n \n self.new(values_hash)\n end",
"def insert key, value\n begin\n db.putnr(key, value)\n rescue StandardError => e ; handle_error(\"Insert #{[key, value].inspect}\", e); end\n end",
"def execute(tuples)\n insert_tuples = with_input_tuples(tuples) do |tuple|\n attributes = input[tuple]\n attributes.to_h\n end\n\n if insert_tuples.length > 1\n multi_insert(insert_tuples)\n else\n insert(insert_tuples)\n end\n end",
"def add_price(asin, title, price_s, price_i, day, day_i)\r\n $db.execute(\"INSERT INTO prices (asin, title, price_s, price_i, day, day_i) VALUES (?,?,?,?,?,?)\", [asin, title, price_s, price_i, day, day_i])\r\nend",
"def execute_insert(sql, opts={})\n synchronize(opts[:server]) do |conn|\n conn.execute(sql)\n insert_result(conn, opts[:table], opts[:values])\n end\n end",
"def insert(first_name, last_name)\n\t$db.execute(\n\t\t\t\t\"INSERT INTO contacts\n\t\t\t\t(first_name, last_name, company, phone, email, created_at, updated_at)\n\t\t\t\tVALUES\n\t\t\t\t(:first_name, :last_name, NULL, NULL, NULL,DATETIME('now'),DATETIME('now'))\", \n\t\t\t\t\"first_name\" => \"#{first_name}\", \"last_name\" => \"#{last_name}\"\n\t\t\t\t)\nend",
"def execute(*values)\n IBM_DB.execute(@stmt, values)\n end",
"def insert(**opts)\n add(**opts)\n save!\n end",
"def insert\n # the array of ::columns of the class joined with commas, drop id\n col_names = self.class.columns[1..-1].join(\", \") \n # an array of question marks\n question_marks = ([\"?\"] * col_names.split.size).join(\", \")\n\n DBConnection.execute(<<-SQL, *attribute_values[1..-1])\n INSERT INTO\n #{self.class.table_name} (#{col_names})\n VALUES\n (#{question_marks})\n SQL\n\n self.id = DBConnection.last_insert_row_id\n end",
"def db_insert table, fields= {}\n #client = Mysql2::Client.new(:host => \"localhost\", :username => \"root\", :password => \"toor\", :database => \"filesorter\")\n @query= \"INSERT INTO `#{table}` (`#{fields.keys.join('`, `')}`) VALUES ('\"+fields.values.join(\"', '\")+\"');\"\n do_query\n end",
"def ins table, col, val\n pst = @con.prepare 'INSERT INTO ' + s(table) + '(' + s(col) + ') VALUES(?)'\n pst.execute s(val)\n #puts 'INSERT INTO ' + s(table) + '(' + s(col) + ') VALUES(' + s(val) + ')'\n end",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def run_insert\n raise DataFactory::NoInsertStatement unless @insert_statement\n\n stmt = db_interface.execute_sql(@insert_statement, *@binds)\n stmt.close\n end",
"def insert(tuples)\n pks = tuples.map { |tuple| relation.insert(tuple) }\n relation.where(relation.primary_key => pks).to_a\n end",
"def add_student(first_name, last_name, birthdate)\n @conn.exec(\"INSERT INTO students_db (first_name, last_name, birthday) VALUES ('#{first_name}', '#{last_name}', '#{last_name}', '#{birthdate}');\")\nend",
"def insert(row, values)\n values = Array(values)\n unless values.empty?\n @data.expand(row, values.count)\n @data.values.insert row, Array.new([@data.columns_count, values.count].max) { |index| values[index] }\n end\n self\n end",
"def insert_statement(statement)\n self.inserts << statement\n end",
"def insert_statement(statement)\n self.inserts << statement\n end",
"def create_students (db, name, essay_1, essay_2, midterm_1, midterm_2)\n\tdb.execute(\"INSERT INTO students (name, essay_1, essay_2, midterm_1, midterm_2) VALUES (?, ?, ?, ?, ?)\", [name, essay_1, essay_2, midterm_1, midterm_2])\nend",
"def save()\n #connect to database\n db = PG.connect({ dbname: 'bounty_hunters', host: 'localhost' })\n #write big long SQL string\n sql = \"INSERT INTO bounties\n (\n name,\n species,\n bounty_value,\n favourite_weapon\n )\n VALUES\n ($1, $2, $3, $4)\"\n\n #make array of values for prepared statement\n values = [@name, @species, @bounty_value, @favourite_weapon]\n #prepare statement\n db.prepare(\"save\", sql)\n #exec statement\n db.exec_prepared(\"save\", values)\n #close link to db\n db.close()\n end",
"def db_insert a, b, c\n if a == \"\" or b == \"\" \n return 'blank'\n end\n $db.execute \"INSERT INTO produtos (cod, prod, prec) VALUES(?, ?, ?)\", a, b, c\nend",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def save_post(params)\n sql= 'INSERT INTO posts (title, description, url, created_at)\n VALUES($1,$2,$3,NOW())'\n db_connection do |conn|\n conn.exec_params(sql,[params[:title],params[:description],params[:url]])\n end\nend",
"def sql_insert(record)\n flds, vals = parse_fldsvalues(record)\n ph = vals.map{|x| placeholder }\n\n sql = %Q|insert into #{quoted_table}\n ( #{flds.join ','} )\n output inserted.#{quote_field id_fld}\n values( #{ph.join ','} );|\n\n [sql, vals]\n end",
"def prepared_insert(cols)\n cached_prepared_statement(:insert, prepared_columns(cols)){prepare_statement(dataset, :insert, prepared_statement_key_hash(cols))}\n end",
"def insert(objects)\n # Make sure the prepared statements are ready\n prepare unless $has_prepared\n\n objects.map do |o|\n params = {\n plant_id: o['id'],\n value: o['value'],\n }\n case o['type']\n when 'temp'\n DB.call(:insert_temp, params)\n when 'moisture'\n DB.call(:insert_moisture, params)\n when 'light'\n DB.call(:insert_light, params)\n else\n {\n error: \"Unknown metric type: #{o['type']}\",\n params: params,\n }\n end\n end\nend",
"def insert(column, values)\n values = Array(values)\n unless values.nil? or values.empty?\n @data.expand(values.count, column)\n [@data.rows_count, values.count].max.times { |index| (@data.values[index] ||= []).insert column, values[index] }\n end\n self\n end",
"def insert\n \n attributes = []\n instance_variables.each do |i|\n attributes << i.to_s.delete(\"@\") if (i != :@id && i != :@table)\n end\n \n values = []\n attributes.each do |a|\n value = self.send(a)\n \n if value.is_a?(Integer)\n values << \"#{value}\"\n else values << \"'#{value}'\"\n end\n end\n \n DATABASE.execute(\"INSERT INTO slides (#{attributes.join(\", \")}) \n VALUES (#{values.join(\", \")})\")\n @id = DATABASE.last_insert_row_id\n return self\n end",
"def add_to_database\n hash = self.attr_hash\n columns = hash.keys\n values = hash.values\n if self.valid?\n CONNECTION.execute(\"INSERT INTO #{tablename} (#{columns.join \", \"}) VALUES (#{values.to_s[1...-1]});\")\n @id = CONNECTION.last_insert_row_id\n else\n false\n end\n end",
"def add_student(first_name, last_name, birthdate)\n @conn.exec(\"INSERT INTO students (first_name, last_name, birthdate) VALUES ('#{first_name}', '#{last_name}', '#{birthdate}');\")\nend",
"def add_student(first_name, last_name, birthdate)\n @conn.exec(\"INSERT INTO students (first_name, last_name, birthdate) VALUES ('#{first_name}', '#{last_name}', '#{birthdate}');\")\nend",
"def create_user(db,name,age,rating,email)\n\ndb.execute( \"INSERT INTO users(user_name,age,user_rating,user_email)\n VALUES(?,?,?,?)\",[name,age,rating,email])\nend",
"def to_inserts(args={})\n args[:table] ||= Pathname.new(@filename).basename.to_s.downcase.gsub(/\\W/, '_')\n args[:before] ||= @@defaults[:before]\n args[:after] ||= @@defaults[:after]\n insert_sql = args[:ignore] ? 'insert ignore' : 'insert'\n if args[:bulk]\n args[:before] += \"#{insert_sql} into #{args[:table]} values\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \" (%s)\"\n args[:row_glue] ||= \",\\n\"\n else\n args[:before] ||= \"\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \"#{insert_sql} into #{args[:table]} values(%s)\"\n args[:row_glue] ||= \";\\n\"\n end\n to_any args\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def multiple_value_sets_insert_sql(table_name, column_names, options) # :nodoc:\n \"INSERT #{options[:ignore] ? 'IGNORE ':''}INTO #{table_name} (#{column_names.join(',')}) VALUES \"\n end",
"def store_users(db, name, age, weight)\n\tdb.execute(\"INSERT INTO users (name, age, weight) VALUES (?, ?, ?)\", [name, age, weight])\nend",
"def add(options={})\n column_names = options.keys\n values = options.values\n\n individual_values = []\n\n values.each do |value|\n if value.is_a?(String)\n individual_values << \"'#{value}'\"\n else\n individual_values << value\n end\n end\n\n column_names_for_sql = column_names.join(\", \")\n individual_values_for_sql = individual_values.join(\", \")\n\n CONNECTION.execute(\"INSERT INTO #{get_table_name} (#{column_names_for_sql}) VALUES (#{individual_values_for_sql});\")\n\n options[\"id\"] = CONNECTION.last_insert_row_id\n\n self.new(options)\n end",
"def add(options={})\n column_names = options.keys\n values = options.values\n \n CONNECTION.execute(\"INSERT INTO #{self.table_name} (#{column_names.join(\", \")}) VALUES (#{values.to_s[1...-1]});\")\n \n id = CONNECTION.last_insert_row_id\n options[\"id\"] = id\n \n self.new(options)\n end",
"def save\r\n # raise error if schema is not same between from and to.\r\n # so , create sql from record map\r\n #keys = get_schema.map{|v| v[0]}\r\n keys = @__stored_map__.keys\r\n sql = \"insert into #{self.class.table_name} (#{keys.join(',')}) values( \"\r\n\r\n keys.each_with_index do |key , index|\r\n v = @__stored_map__[key]\r\n sql << ',' if index != 0\r\n sql << (v.kind_of?(String) ? \"'#{v}'\" : v.to_s)\r\n end\r\n sql << \")\"\r\n puts sql if $DEBUG\r\n self.class.execute_sql(sql)\r\n end",
"def insert(db, volume_id, page_number, access_date, ip_token)\n command = \"insert into results values (?, ?, datetime(?), ?);\"\n db.execute(command, volume_id, page_number, access_date, ip_token)\n end",
"def insert(name)\n CONN.execute(\"INSERT INTO artits(name) VALUES(?)\", name)\nend",
"def create_employee(database, first_name, last_name, wage, title, last_four_ssn, business_id)\n database.execute(\"INSERT INTO employees (first_name, last_name, wage, title, last_four_ssn, business_id) VALUES (?, ?, ?, ?, ?, ?)\", [first_name, last_name, wage, title, last_four_ssn, business_id])\nend",
"def add_student(first_name, last_name,birthdate)\n @conn.exec(\"INSERT INTO students (first_name, last_name, birthdate) VALUES ('#{first_name}', '#{last_name}', '#{birthdate}');\")\nend",
"def new_entry (total_cost, tip_percentage, people, total_cost_with_tip, final_per_person)\n DATABASE.execute(\"INSERT INTO split_checks (total_cost, tip_percentage, people, total_cost_with_tip, final_per_person) VALUES (#{total_cost}, #{tip_percentage}, #{people}, #{total_cost_with_tip}, #{final_per_person});\")\nend",
"def add(options = {})\n\n columns = options.keys\n values = options.values\n\n columns_for_sql = columns.join(\", \")\n individual_values_for_sql = []\n\n values.each do |value|\n if value.is_a?(String)\n individual_values_for_sql << \"'#{value}'\"\n else\n individual_values_for_sql << value\n end\n end\n\n values_for_sql = individual_values_for_sql.join(\", \")\n\n table = self.to_s.pluralize.underscore\n\n DATABASE.execute(\"INSERT INTO #{table} (#{columns_for_sql}) VALUES (#{values_for_sql});\")\n\n id = DATABASE.last_insert_row_id\n\n options[\"id\"] = id\n\n self.new(options)\n end",
"def create\n values = attribute_hash\n DBConnection.execute(<<-SQL, values)\n INSERT INTO\n #{self.class.table_name} (#{values.keys.join(', ')})\n VALUES\n (:#{values.keys.join(', :')})\n SQL\n self.id = DBConnection.last_insert_row_id\n end",
"def create_insert(headers, values, model, upsert_fields, ret_vals, ig_cols)\r\n\t\t\tputs \"Creating insert query:\"\r\n\t\t\tputs \"There are #{values.length} rows to insert.\"\r\n\r\n\t\t\tp \"HEADERS:\"\r\n\t\t\tp headers\r\n\r\n\t\t\tp \"IGNORED COLUMNS BEFORE HACK:\"\r\n\t\t\tp ig_cols\r\n\r\n\t\t\tig_cols = [] if ig_cols == nil\t# Weird hack because of an error ruby was throwing\r\n\t\t\treturn_results = []\r\n\r\n\t\t\tp \"IGNORED COLUMNS:\"\r\n\t\t\tp ig_cols\r\n\r\n\t\t\t# Loop through the array of arrays of values to insert\r\n\t\t\tvalues.each do |values_array|\r\n\t\t\t\tupsert_attributes = {}\r\n\t\t\t\tinner_array = []\r\n\t\t\t\t# Now loop through the single array of values\r\n\t\t\t\tp \"VALUES ARRAY:\"\r\n\t\t\t\tp values_array\r\n\r\n\t\t\t\tvalues_array.each_with_index do |val, index|\r\n\t\t\t\t\t# puts \"INDEX: #{index}\"\r\n\r\n\t\t\t\t\tnext if ig_cols.include?(index) # IMPORTANT: Need to ignore the indices of the columns in the CSV that the user specifies\r\n\t\t\t\t\tassociated_column_name = headers.at(index).to_sym\t# Get the header name for the row - need it to match in return values\r\n\r\n\t\t\t\t\t# Store the attributes we want to do the upsert on to pass into find_or_create_by method\r\n\t\t\t\t\tupsert_attributes[associated_column_name] = val if upsert_fields.include?(associated_column_name)\r\n\r\n\t\t\t\t\t# puts \"Line 282: #{upsert_attributes}\"\r\n\t\t\t\tend\r\n\r\n\t\t\t\t# Use ActiveRecord's method to return the updated or inserted row\r\n\t\t\t\t# Workaround - do a select and then insert since I can't figure out how to dynamically add the values to the class\r\n\t\t\t\t# select_result = model.find_by(upsert_attributes)\r\n\t\t\t\tinsert_attributes = {}\r\n\t\t\t\tvalues_array.each_with_index do |val, i|\r\n\t\t\t\t\tif !(upsert_attributes.has_key?(headers[i]))\r\n\t\t\t\t\t\t# puts \"VALUE: #{val}\"\r\n\t\t\t\t\t\tinsert_attributes[headers[i].to_sym] = val\r\n\t\t\t\t\tend\r\n\t\t\t\tend\r\n\t\t\t\tinsert_attributes = insert_attributes.merge upsert_attributes\r\n\r\n\t\t\t\t# if select_result == nil\r\n\t\t\t\t# \tinsert_result = model.create(insert_attributes)\r\n\t\t\t\t# else\r\n\t\t\t\t# \tinsert_result = model.update(insert_attributes)\r\n\t\t\t\t# end\r\n\r\n\t\t\t\t# upsert_result = model.find_or_create_by(upsert_attributes) do |klass|\r\n\t\t\t\t# \t# Check to see that we haven't already included the column and value in the upsert_attributes\r\n\t\t\t\t# \t# and if we haven't, include it as a field we need to add to the database along with the value\r\n\t\t\t\t# \t# puts \"#{klass.instance_variables}\"\r\n\t\t\t\t# \tvalues_array.each_with_index do |val, i|\r\n\t\t\t\t# \t\tif !(upsert_attributes.has_key?(headers[i]))\r\n\t\t\t\t# \t\t\t# puts \"VALUE: #{val}\"\r\n\t\t\t\t# \t\t\tklass.send :write_attribute, headers[i].to_sym, val\r\n\t\t\t\t# \t\tend\r\n\t\t\t\t# \tend\r\n\t\t\t\t# end\r\n\r\n\t\t\t\tp upsert_attributes\r\n\r\n\t\t\t\tupsert_result = model.find_or_initialize_by(upsert_attributes)\r\n\t\t\t\tupsert_result.update_attributes(insert_attributes)\r\n\r\n\t\t\t\t# Return what the user asked for\r\n\t\t\t\t#ret_vals.each { |val| inner_array.push(insert_result[val]) }\r\n\t\t\t\tret_vals.each { |val| inner_array.push(upsert_result[val]) }\r\n\r\n\t\t\t\t# Concatenate the arrays of information the user wants back\r\n\t\t\t\treturn_results.push(inner_array)\r\n\r\n\t\t\tend\r\n\t\t\t# p return_results\r\n\t\t\treturn return_results\r\n\t\tend",
"def insert(object, table)\n sql = object.to_sql(table)\n execute(sql)\n end",
"def add_interest(db, interest)\n db.execute(\"INSERT INTO interests (interest) VALUES (?)\", [interest])\nend",
"def db_insert_events(db, events)\n begin\n db.transaction do\n events.each do |event|\n flatPayload=flatten_event_payload(event)\n db[\n \"INSERT INTO events (\n id, type, actor, org, repo, public, created_at, payload\n )\n VALUES ( ?, ?, ?, ?, ?, ?, ?, ?)\",\n event.id, event.type, event.actor.login, event.org.login, event.repo.name,\n event.public.to_s, event.created_at.to_s, flatPayload].insert\n #puts \" Inserted: #{event.id}\"\n end\n end\n rescue => e\n puts \"Error during processing: #{$!}\"\n end\n end"
] |
[
"0.8384631",
"0.77768993",
"0.7730328",
"0.74888915",
"0.7317124",
"0.7303931",
"0.72238153",
"0.7149737",
"0.7133511",
"0.6991141",
"0.69711584",
"0.6948114",
"0.69270784",
"0.6870819",
"0.6832507",
"0.6806778",
"0.67923105",
"0.67868817",
"0.67507815",
"0.67170304",
"0.670044",
"0.6694283",
"0.66539043",
"0.66406196",
"0.66341376",
"0.66210884",
"0.66181594",
"0.6615819",
"0.66123205",
"0.66042536",
"0.6602709",
"0.6597821",
"0.6563689",
"0.6562909",
"0.65284014",
"0.65280074",
"0.65144944",
"0.6511451",
"0.65040463",
"0.65040463",
"0.6503388",
"0.64902514",
"0.6487557",
"0.6475747",
"0.64539754",
"0.6449773",
"0.64433545",
"0.64390653",
"0.64065135",
"0.63914704",
"0.6374028",
"0.6372894",
"0.6371579",
"0.6352783",
"0.6348197",
"0.6334914",
"0.63170326",
"0.63040847",
"0.63025564",
"0.63025564",
"0.6289996",
"0.6283709",
"0.6259926",
"0.62572664",
"0.6251079",
"0.6251079",
"0.6234176",
"0.62243587",
"0.62229925",
"0.621835",
"0.62013733",
"0.61964697",
"0.61910003",
"0.6188876",
"0.6178256",
"0.6170517",
"0.61659926",
"0.6156679",
"0.6156679",
"0.61500305",
"0.61386186",
"0.61354065",
"0.61354065",
"0.61354065",
"0.6130576",
"0.611344",
"0.61108863",
"0.61062354",
"0.6098845",
"0.60951996",
"0.6093959",
"0.6092671",
"0.60856813",
"0.60700804",
"0.60659915",
"0.60656565",
"0.6063871",
"0.60609",
"0.6055336",
"0.6047662"
] |
0.62298536
|
67
|
Ignore uniqueness/exclusion violations when inserting, using ON CONFLICT DO NOTHING. Exists mostly for compatibility with MySQL's insert_ignore. Example: DB[:table].insert_ignore.insert(a: 1, b: 2) # INSERT INTO TABLE (a, b) VALUES (1, 2) ON CONFLICT DO NOTHING
|
def insert_ignore
insert_conflict
end
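
A minimal usage sketch of the method above, assuming a Sequel dataset on PostgreSQL 9.5+ (the items table and its unique name column are hypothetical, not from the source):

DB.create_table?(:items) do
  primary_key :id
  String :name, unique: true   # hypothetical unique column to trigger a conflict
end

DB[:items].insert_ignore.insert(name: "widget")  # row inserted, new id returned
DB[:items].insert_ignore.insert(name: "widget")  # duplicate silently skipped via ON CONFLICT DO NOTHING; typically returns nil since no row comes back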
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def sql_for_on_duplicate_key_ignore( *args ) # :nodoc:\n arg = args.first\n conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash )\n \" ON CONFLICT #{conflict_target}DO NOTHING\"\n end",
"def sql_for_on_duplicate_key_ignore( table_name, *args ) # :nodoc:\n arg = args.first\n conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash )\n \" ON CONFLICT #{conflict_target}DO NOTHING\"\n end",
"def sneaky_save(avoid_insert_conflict: nil)\n begin\n sneaky_create_or_update(avoid_insert_conflict: avoid_insert_conflict)\n rescue ActiveRecord::StatementInvalid\n false\n end\n end",
"def sneaky_save!(avoid_insert_conflict: nil)\n sneaky_create_or_update(avoid_insert_conflict: avoid_insert_conflict)\n end",
"def _insert_dataset\n if upsert_plugin_upserting\n if postgres?\n super.insert_conflict(update: values_to_update, target: self.class.upsert_plugin_identifying_columns)\n elsif mysql?\n columns_to_update = values_to_update.keys - self.class.upsert_plugin_identifying_columns\n super.on_duplicate_key_update(*columns_to_update)\n else\n super\n end\n else\n super\n end\n end",
"def sneaky_create(avoid_insert_conflict: nil)\n sneaky_attributes_without_id = sneaky_attributes_values\n .except { |key| key.name == \"id\" }\n\n column_keys = sneaky_attributes_without_id.keys\n .map { |key| \"\\\"#{key.name}\\\"\" } # to avoid conflicts with column names\n .join(\", \")\n\n dynamic_keys = sneaky_attributes_without_id.keys\n .map { |key| \":#{key.name}\" }\n .join(\", \")\n\n constraint = generate_constraint(\n avoid_insert_conflict,\n column_keys,\n dynamic_keys\n )\n\n sql = <<~SQL\n INSERT INTO #{self.class.table_name} ( #{column_keys} )\n VALUES (#{dynamic_keys})\n #{constraint}\n RETURNING *\n SQL\n\n mapping = generate_insert_mapping(sneaky_attributes_without_id)\n data = self.class.unscoped.find_by_sql([sql.squish, mapping.to_h]).first\n\n # To trigger generation of @mutations_from_database variable\n # which is necessary for id_in_database\n data.send(:mutations_from_database)\n\n copy_internal(data, \"@attributes\")\n copy_internal(data, \"@mutations_from_database\")\n copy_internal(data, \"@changed_attributes\")\n copy_internal(data, \"@new_record\")\n copy_internal(data, \"@destroyed\")\n\n !!id\n end",
"def raises_uniqueness_violation?(&block)\n transaction(:savepoint=>:only, &block)\n false\n rescue unique_constraint_violation_class => e\n e\n end",
"def insert_conflict_sql(sql)\n if opts = @opts[:insert_conflict]\n sql << \" ON CONFLICT\"\n\n if target = opts[:constraint] \n sql << \" ON CONSTRAINT \"\n identifier_append(sql, target)\n elsif target = opts[:target]\n sql << ' '\n identifier_append(sql, Array(target))\n if conflict_where = opts[:conflict_where]\n sql << \" WHERE \"\n literal_append(sql, conflict_where)\n end\n end\n\n if values = opts[:update]\n sql << \" DO UPDATE SET \"\n update_sql_values_hash(sql, values)\n if update_where = opts[:update_where]\n sql << \" WHERE \"\n literal_append(sql, update_where)\n end\n else\n sql << \" DO NOTHING\"\n end\n end\n end",
"def non_sql_option?(key)\n super || key == :cursor || key == :insert_conflict\n end",
"def ignore!\n @should_ignore = true\n end",
"def disable_insert_returning\n clone(:disable_insert_returning=>true)\n end",
"def ignore\n @ignore = true\n end",
"def ignore_if(&block)\n @@ignores << block\n end",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def async_multi_insert_ignore_hash(hashes)\n if (hashes.empty?)\n yield if (block_given?)\n\n return\n end\n\n columns = hashes.first.keys\n\n insertions = hashes.collect do |row|\n columns.collect { |c| row[c] }\n end\n\n async_multi_insert_ignore(columns, insertions) do |n|\n yield(n) if (block_given?)\n end\n end",
"def ignore!\n self.ignored = true\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( table_name, primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target or :constraint_name to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def check_rules_on_insert\n logger.debug \"Running INSERT checks for #{name}\"\n\n db.query(\"SELECT * FROM #{audit} WHERE `_copied_at` IS NULL\").each do |audit_row|\n pkey = audit_row.select { |k,v| primary_key.include?(k) }\n logger.debug \"Checking row: #{pkey}\"\n\n fields.each do |f|\n violations = f.on_insert(audit_row)\n violations.compact.each { |v| record_violation(v[:description], v[:item], v[:rule_name], audit_row['_last_version'], f.name) }\n end\n\n rules[:insert].each do |rule|\n v = rule.execute(audit_row, self)\n record_violation(v, audit_row, rule.name, audit_row['_last_version']) if v\n end\n end\n end",
"def save_detecting_duplicate_entry_constraint_violation\n begin\n save\n rescue ActiveRecord::StatementInvalid => e\n # Would that rails gave us the nested exception to check...\n if e.message =~ /.*[Dd]uplicate/\n errors.add_to_base(translate_with_theme('duplicate_entry_please_try_again'))\n false\n else\n raise e\n end\n end\n end",
"def ignore\n @ignore = true\n end",
"def multiple_value_sets_insert_sql(table_name, column_names, options) # :nodoc:\n \"INSERT #{options[:ignore] ? 'IGNORE ':''}INTO #{table_name} (#{column_names.join(',')}) VALUES \"\n end",
"def insert_nonexist(table, unique, input)\n id = get_id_existing(table, unique)\n\n if id == nil\n # Insert the information\n id = insert(table, input)\n end\n return id\n end",
"def ignore(*args); end",
"def save_on_insert?\n false\n end",
"def ignore &block\n begin; block.call; rescue; end\n end",
"def ignored_keys\n [id_column, :created_at, :updated_at]\n end",
"def handle_ignored\n if @note.ignore == true\n raise ActiveRecord::RecordNotFound\n end\n end",
"def ignore; end",
"def use_insert?\n !use_copy?\n end",
"def set_timestamp_to_now\n puts 'set_timestamp_to_now'\n db_conn.prepare 'set_timestamp_to_now', \"INSERT INTO #{TABLE} (id, updated_at) VALUES (#{ROW_KEY}, $1)\n ON CONFLICT(id) DO UPDATE SET updated_at = excluded.updated_at\"\n db_conn.exec_prepared 'set_timestamp_to_now', [Time.now]\nend",
"def without_persisting(record)\n if record.class.respond_to? :suppress\n record.class.suppress { yield }\n else\n yield\n end\n end",
"def insert(ignore_associations: false)\n unless new?\n msg = \"#{__FILE__}[#{__LINE__}] : #{self.class} : should be new (not loaded from db) - cannot insert\"\n Robe.logger.error(msg)\n raise DBError, msg\n end\n self.id = uuid unless id && !id.empty?\n if cache && cache.includes?(self.class, id)\n msg = \"#{__FILE__}[#{__LINE__}] : #{self.class} : with id #{id} already in cache - cannot insert\"\n Robe.logger.error(msg)\n raise DBError, msg\n else\n # TODO: unwind associations if insert fails\n result = (ignore_associations ? nil : save_associations).to_promise\n result.to_promise_then do\n self.class.db.insert_one(self.class.collection_name, to_db_hash)\n end.to_promise_then do\n @from_db = true\n cache.insert(self) if cache # no filter\n self.to_promise\n end\n end\n end",
"def without_persisting(record)\n if record.class.respond_to? :suppress\n record.class.suppress { yield }\n else\n yield\n end\n end",
"def use_insert!\n @use_copy = false\n end",
"def on_conflict(column = nil)\n ::MultiInsert::Query::OnConflict.new(self, column)\n end",
"def build_insert_sql(insert) # :nodoc:\n if insert.skip_duplicates? || insert.update_duplicates?\n raise NotImplementedError, \"#{self.class} should define `build_insert_sql` to implement adapter-specific logic for handling duplicates during INSERT\"\n end\n\n \"INSERT #{insert.into} #{insert.values_list}\"\n end",
"def ignore_column(*args)\n args.each { |a| columns[a].ignore if a }\n end",
"def ignore(value = true)\n @ignore = value\n end",
"def ignore(value = true)\n @ignore = value\n end",
"def on_upsert\n #\n end",
"def ignore_key?(key, value)\n id?(key) || # IDs are known to be replaced during imports\n key == 'updated_at' || # these get changed frequently during imports\n key == 'next_run_at' || # these values change based on wall clock\n key == 'notes' # the importer attaches an extra \"by user XYZ\" at the end of a note\n end",
"def ignore(*ids); end",
"def ignore\n @ignored = true\n self\n end",
"def silent_save\n if self.class.find(self.id)\n update\n else\n insert\n end\n end",
"def add_exercise(name)\n #want to check for duplicates, will attempt later\n #exercises = db.execute(\"SELECT name FROM Exercise\")\n #exercises.each do |ex|\n #if name != ex[0]\n $db.execute(\"INSERT OR IGNORE INTO Exercise (name) VALUES (?)\", [name])\nend",
"def ignore_transaction\n builder.ignore_transaction if builder\n end",
"def add_ignore(*rules)\n data['Ignore'] ||= []\n rules.each { |rule| data['Ignore'] << rule.title }\n data['Ignore'].sort!.uniq!\n end",
"def ignore_if_not_exists\n attributes.fetch(:ignoreIfNotExists)\n end",
"def _insert(*)\n fail NotImplementedError\n end",
"def save_if_unique(column)\n save\n rescue ActiveRecord::RecordNotUnique => e\n self.errors.add(column, :taken)\n false\n end",
"def _update_without_checking(columns)\n ds = _update_dataset\n lc = model.lock_column\n rows = ds.clone(ds.send(:default_server_opts, :sql=>ds.output(nil, [Sequel[:inserted][lc]]).update_sql(columns))).all\n values[lc] = rows.first[lc] unless rows.empty?\n rows.length\n end",
"def ignore_unknown= new_ignore_unknown\n frozen_check!\n @gapi.ignore_unknown_values = new_ignore_unknown\n end",
"def insert_or_update(uniq_keys, values_hash, tbl_name='main_table', opts={})\n all_field_names = values_hash.keys\n field_names_as_symbol_string = all_field_names.map{ |k| \":#{k}\" }.join(',') # need to appear as symbols\n sql_statement = \"INSERT INTO #{tbl_name} (#{format_field_names_as_string(all_field_names)}) VALUES (#{field_names_as_symbol_string})\"\n database.execute(sql_statement, values_hash)\n rescue SQLite3::ConstraintException => e\n unique_key_constraint = uniq_keys.map { |k| \"'#{k}'=:#{k}\" }.join(' AND ')\n update_keys = values_hash.keys\n update_keys -= uniq_keys if !opts[:update_unique_keys]\n update_sql = update_keys.map { |k| \"'#{k}'=:#{k}\" }.join(', ')\n sql_statement = \"UPDATE #{tbl_name} SET #{update_sql} WHERE #{unique_key_constraint}\"\n database.execute sql_statement, values_hash\n rescue SQLite3::SQLException => e\n puts \"Exception (#{e.inspect}) raised\" if verbose?\n case e.message\n when /no such table/\n create_table(tbl_name, all_field_names, uniq_keys)\n retry\n when /has no column/\n add_columns(tbl_name, all_field_names)\n retry\n else\n raise e\n end\n end",
"def retain_except_on_create\n data[:retain_except_on_create]\n end",
"def ignore!\n\t\t\t\tSignal.trap(@name, \"IGNORE\")\n\t\t\tend",
"def ignore_unknown\n @gapi.ignore_unknown_values\n end",
"def with_pk!(pk)\n with_pk(pk) || raise(NoMatchingRow.new(dataset))\n end",
"def with_pk!(pk)\n with_pk(pk) || raise(NoMatchingRow.new(dataset))\n end",
"def supports_insert_conflict?\n server_version >= 90500\n end",
"def supports_multi_insert?\n true\n end",
"def ignore?\n @should_ignore\n end",
"def ignore!\n\t\t\t\tSignal.trap(@name, :IGNORE)\n\t\t\tend",
"def set_ignore_input_events(ignore:)\n {\n method: \"Input.setIgnoreInputEvents\",\n params: { ignore: ignore }.compact\n }\n end",
"def postgresql_not_unique_error_class\n /(PG::UniqueViolation)|(ActiveRecord::RecordNotUnique)|(ActiveRecord::JDBCError)/\n end",
"def ignore?\n !!@ignore\n end",
"def insert_sql(*values)\n if values.size == 1 && (v = values[0]).is_a?(Sequel::Model) && !v.respond_to?(:sql_literal_append)\n Sequel::Deprecation.deprecate(\"Passing Sequel::Model instance argument to Sequel::Dataset#insert\", \"Pass model_instance.values or model_instance.to_hash as the argument instead\")\n super(v.to_hash)\n else\n super\n end\n end",
"def supports_insert_select?\n !@opts[:disable_insert_returning]\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def save_without_validation\n self.run_hook :before_save\n document = {}\n\n self.class.public_fields.each do |field|\n document[field] = self.send(field)\n end\n\n if self._id\n result = Driver.client[self.class.coll_name]\n .find({'_id' => self._id}).update_one(document, {:upsert => true})\n else\n document['_id'] = BSON::ObjectId.new\n Driver.client[self.class.coll_name].insert_one(document)\n self._id = document['_id']\n end\n\n self.run_hook :after_save\n set_old_values\n\n result ? true : false\n end",
"def prepare_upsert(options = {})\n raise Errors::ReadonlyDocument.new(self.class) if readonly? && !Mongoid.legacy_readonly\n return false if performing_validations?(options) && invalid?(:upsert)\n result = run_callbacks(:upsert) do\n yield(self)\n true\n end\n self.new_record = false\n post_process_persist(result, options) and result\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @logger.unknown(\"@iiTable=#{@iiTable}\") if @trace\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}\") if @trace\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n# rescue Exception => e\n# raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def to_inserts(args={})\n args[:table] ||= Pathname.new(@filename).basename.to_s.downcase.gsub(/\\W/, '_')\n args[:before] ||= @@defaults[:before]\n args[:after] ||= @@defaults[:after]\n insert_sql = args[:ignore] ? 'insert ignore' : 'insert'\n if args[:bulk]\n args[:before] += \"#{insert_sql} into #{args[:table]} values\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \" (%s)\"\n args[:row_glue] ||= \",\\n\"\n else\n args[:before] ||= \"\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \"#{insert_sql} into #{args[:table]} values(%s)\"\n args[:row_glue] ||= \";\\n\"\n end\n to_any args\n end",
"def ignore\n @ignore ||= []\n end",
"def ignore_associations(*associations)\n ignored_associations.concat associations.flatten.compact.collect(&:to_sym)\n ignored_associations.uniq!\n end",
"def ignore_columns(*columns)\n self.ignored_columns ||= []\n self.ignored_columns += columns.map(&:to_s)\n reset_column_information\n descendants.each(&:reset_column_information)\n self.ignored_columns.tap(&:uniq!)\n end",
"def conflicting_or_created_record\n conflict || create\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}>\") if @trace\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def allows_unique?\n true\n end",
"def _import(columns, values, opts=OPTS)\n if @opts[:returning]\n # no transaction: our multi_insert_sql_strategy should guarantee\n # that there's only ever a single statement.\n sql = multi_insert_sql(columns, values)[0]\n returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}\n elsif opts[:return] == :primary_key\n returning(insert_pk)._import(columns, values, opts)\n else\n super\n end\n end",
"def insert_without_hooks( index, *objects )\n\n @without_hooks = true\n\n insert( index, *objects )\n \n @without_hooks = false\n\n return objects\n\n end",
"def enforce_explicit_ignoring\n # Create an empty .semgrepignore to prevent\n # the scanner from implicitly ignoring files or folders\n semgrepignore_path = \"#{@repository.path_to_repo}/.semgrepignore\"\n File.open(semgrepignore_path, \"w\") {} if !File.exist?(semgrepignore_path)\n end",
"def build_insert_set_cols(key)\n \"#{quote_column_name(key)} = EXCLUDED.#{quote_column_name(key)}\"\n end",
"def ignore_me\nend",
"def add_unchecked(tx, on_disconnect=false)\n if on_disconnect\n # we may be disconnecting a blockchain tx so handle that like so\n Toshi::Models::Transaction.where(hsh: tx.hash)\n .update(pool: Toshi::Models::Transaction::BLOCK_POOL)\n end\n\n t = Toshi::Models::UnconfirmedTransaction.from_hsh(tx.hash)\n if t\n raise \"BUG: should only be true for orphan transactions\" if !t.is_orphan?\n t.update(pool: Toshi::Models::UnconfirmedTransaction::MEMORY_POOL)\n else\n t = Toshi::Models::UnconfirmedTransaction.create_from_tx(tx)\n end\n t.mark_spent_outputs\n t.update_unconfirmed_ledger_for_inputs(tx, @output_cache)\n t\n end",
"def multi_insert_sql_strategy\n :values\n end",
"def duplicate_primary_key(duplicate_row:, key:, node_id:)\n # nothing\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def parse_insert_statement(line)\n if regex_match = insert_regex.match(line)\n {\n :ignore => !regex_match[1].nil?,\n :table_name => regex_match[2].to_sym,\n :column_names => regex_match[3].split(/`\\s*,\\s*`/).map { |col| col.gsub('`', \"\").to_sym }\n }\n end\n end",
"def upsert_model(model)\n model_hash = model.to_hash\n columns_to_update = model_hash.keys.reject do |k|\n matching_attributes.include?(k) || skip_updating.include?(k)\n end\n upsert_options = { target: matching_attributes }\n unless columns_to_update.empty?\n update_clause = columns_to_update.map { |key| [ key.to_sym, \"excluded__#{key}\".to_sym ] }.to_h\n timestamps = update_timestamps(columns_to_update)\n upsert_options[:update] = update_clause.merge(timestamps) { |key, oldval, newval| oldval }\n end\n model_insert_clause = model_hash.merge(insert_timestamps) { |key, oldval, newval| oldval }\n\n inserted_id = model_class.dataset.insert_conflict(upsert_options).insert(model_insert_clause)\n # If model was not inserted, the above returns nil\n if inserted_id\n model.id = inserted_id\n end\n model.instance_variable_set(:@new, false)\n end",
"def ignores; end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n @connection.do(enable_identity_insert(@iiTable, true))\n @iiEnabled = true\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end"
] |
[
"0.7616945",
"0.75842595",
"0.62032014",
"0.60689807",
"0.5857675",
"0.5805015",
"0.57581186",
"0.57069135",
"0.56307864",
"0.55492955",
"0.5548208",
"0.5485306",
"0.5469368",
"0.5457529",
"0.54266727",
"0.54110026",
"0.53848934",
"0.53830725",
"0.5364832",
"0.53096676",
"0.5296873",
"0.5279065",
"0.5241467",
"0.5235729",
"0.52249444",
"0.52228534",
"0.52109975",
"0.5187942",
"0.5180904",
"0.5158843",
"0.5136536",
"0.513515",
"0.5133612",
"0.51268905",
"0.5106091",
"0.5097993",
"0.5080991",
"0.50597924",
"0.50576735",
"0.50576735",
"0.5057431",
"0.50512815",
"0.5038889",
"0.5029042",
"0.50057286",
"0.4998779",
"0.49346948",
"0.49335393",
"0.49113804",
"0.49044013",
"0.4897007",
"0.48821422",
"0.48594365",
"0.48576283",
"0.48470065",
"0.4836321",
"0.48208252",
"0.48051852",
"0.48051852",
"0.4798135",
"0.479728",
"0.47912326",
"0.4780034",
"0.47659308",
"0.4749866",
"0.47411108",
"0.4739993",
"0.472508",
"0.47235847",
"0.4714295",
"0.4714295",
"0.47100773",
"0.47021794",
"0.46999118",
"0.46990758",
"0.46893126",
"0.46893126",
"0.4683686",
"0.46804157",
"0.4672889",
"0.4668675",
"0.46670127",
"0.4661214",
"0.46598387",
"0.4659664",
"0.46586624",
"0.46473086",
"0.4641506",
"0.46350664",
"0.46335328",
"0.46317402",
"0.46316105",
"0.46311545",
"0.46293306",
"0.46177578",
"0.46121824",
"0.46035376",
"0.46026364",
"0.46000415",
"0.45955688"
] |
0.7311335
|
2
|
Insert a record, returning the record inserted, using RETURNING. Always returns nil without running an INSERT statement if disable_insert_returning is used. If the query runs but returns no values, returns false.
|
def insert_select(*values)
return unless supports_insert_select?
# Handle case where query does not return a row
server?(:default).with_sql_first(insert_select_sql(*values)) || false
end
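
A minimal usage sketch for insert_select, again on a hypothetical Sequel PostgreSQL dataset (table and column names are assumptions):

row = DB[:items].insert_select(name: "widget")
# => {:id=>1, :name=>"widget"} (the full inserted row, fetched via RETURNING)
DB[:items].disable_insert_returning.insert_select(name: "widget")
# => nil; no INSERT statement is issued once insert RETURNING support is disabled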
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def run_insert\n raise DataFactory::NoInsertStatement unless @insert_statement\n\n stmt = db_interface.execute_sql(@insert_statement, *@binds)\n stmt.close\n end",
"def insert_select(*values)\n unless @opts[:disable_insert_returning]\n ds = opts[:returning] ? self : returning\n ds.insert(*values){|r| return r}\n end\n end",
"def insert_one(record, constraint_delegate: nil)\n return false unless record.valid?\n\n yield ->(result, conn) {\n assert_result_size(1, result)\n record.set_attributes(result.first) and record.inserted!\n }\n true\n rescue Constraint::ConstraintError => e\n constraint_delegate ||= record.constraint_delegate\n constraint_delegate.constraint_error(e)\n false\n end",
"def disable_insert_returning\n clone(:disable_insert_returning=>true)\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_record(record, force = true, validate = true)\n if record.new_record?\n if force\n record.save!\n else\n return false unless record.save(:validate => validate)\n end\n end\n\n if @reflection.options[:insert_sql]\n @owner.connection.insert(interpolate_and_sanitize_sql(@reflection.options[:insert_sql], record))\n else\n relation = Arel::Table.new(@reflection.options[:join_table], arel_engine)\n timestamps = record_timestamp_columns(record)\n timezone = record.send(:current_time_from_proper_timezone) if timestamps.any?\n\n attributes = Hash[columns.map do |column|\n name = column.name\n value = case name.to_s\n when @reflection.primary_key_name.to_s\n @owner.id\n when @reflection.association_foreign_key.to_s\n record.id\n when *timestamps\n timezone\n else\n @owner.send(:quote_value, record[name], column) if record.has_attribute?(name)\n end\n [relation[name], value] unless value.nil?\n end]\n\n relation.insert(attributes)\n end\n\n return true\n end",
"def insert_in_database\n Fetch.new(insertion_query).array\n end",
"def supports_insert_select?\n !@opts[:disable_insert_returning]\n end",
"def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n exec_insert(to_sql(arel), name, binds)\n retval = last_inserted_id(nil)\n retval = id_value if retval == 0\n return retval\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Try an insert with 'returning id' if available (PG >= 8.2)\n if supports_insert_with_returning? && id_value.nil?\n pk, sequence_name = *pk_and_sequence_for(table) unless pk\n if pk\n sql = substitute_binds(sql, binds)\n id_value = select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n clear_query_cache #FIXME: Why now?\n return id_value\n end\n end\n\n # Otherwise, plain insert\n execute(sql, name, binds)\n\n # Don't need to look up id_value if we already have it.\n # (and can't in case of non-sequence PK)\n unless id_value\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n id_value = last_insert_id(table, sequence_name)\n end\n end\n id_value\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert(*args)\n r = super\n if s = opts[:sequence]\n with_sql(\"SELECT #{literal(s)}.currval FROM dual\").single_value.to_i\n else\n r\n end\n end",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def insert_product(product)\n db_connection do |conn|\n result = conn.exec(\"SELECT id FROM products WHERE product = $1\", [product[:product]])\n if result.to_a.empty?\n sql = \"INSERT INTO products (product) VALUES ($1) RETURNING id\"\n result = conn.exec(sql, [product[:product]])\n end\n result.first[\"id\"]\n end\nend",
"def insert(data)\n query = \"INSERT INTO `#{@table_name}` \"+build_insert(data)\n\n begin\n queryresult = @mysql.query(query)\n rescue Exception => e\n @log.error(\"#{e}\")\n return nil\n end\n\n expire_table_cache(get_all_related_tables)\n\n if @auto_primary_key\n get_one({@primary_key => get_last_id})\n else\n get_one({@primary_key => data[@primary_key]})\n end\n end",
"def run\n if @prepared_type == :insert\n fetch_rows(prepared_sql){|r| return r.values.first}\n else\n super\n end\n end",
"def insert()\n query = \"INSERT INTO artists (art_name, art_photo) VALUES ($1, $2) RETURNING art_id\"\n @art_id = DbHelper.run_sql_return_first_row_column_value(query, [@art_name, @art_photo], 'art_id').to_i;\n end",
"def save()\n sql = \"INSERT INTO transactions (amount, merchant_id, tag_id)\n VALUES ($1, $2, $3)\n RETURNING id\"\n values = [@amount, @merchant_id, @tag_id]\n @id = SqlRunner.run(sql, values)[0]['id'].to_i\n end",
"def insert_record_if_new(name, type, ttl, content)\n record = records_table.filter(:domain_id => domain.id, :name => name, :type => type, :content => content).first\n insert_record(name, type, ttl, content) unless record\n end",
"def insert_record(record, validate = true, raise = false, &block)\n if raise\n record.save!(validate: validate, &block)\n else\n record.save(validate: validate, &block)\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:\r\n execute(sql, name)\r\n identity = last_inserted_id(nil)\r\n retval = id_value if retval == 0\r\n return retval\r\n end",
"def execute_insert(sql, opts={})\n synchronize(opts[:server]) do |conn|\n conn.execute(sql)\n insert_result(conn, opts[:table], opts[:values])\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def execute_insert(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}; conn.last_insert_rowid}\n end",
"def execute_insert(sql, opts=OPTS)\n execute(sql, opts.merge(:meth=>:autoid))\n end",
"def insert_record(table, values)\n execute table_insert_query(table, values)\n end",
"def save_record\n if !saved_already?\n if valid?\n run_sql(\"INSERT INTO #{table} (#{string_field_names}) VALUES (#{stringify_self});\")\n @id = CONNECTION.last_insert_row_id\n else\n false\n end\n else\n update_record\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n insert_sql(sql, name, pk, id_value, sequence_name)\n end",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def returning_clause(serial)\n \" RETURNING #{quote_name(serial.field)} INTO :insert_id\"\n end",
"def execute_insert(sql, opts=OPTS)\n synchronize(opts[:server]) do |c|\n if sql.is_a?(Symbol)\n execute_prepared_statement(sql, opts)\n else\n _execute(c, sql, opts)\n end\n _execute(c, \"SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM.SYSDUMMY1\", opts){|stmt| i = stmt.fetch_array.first.to_i; i}\n end\n rescue Connection::Error => e\n raise_error(e)\n end",
"def insert\n\n DBConnection.execute2(<<-SQL, attribute_values)\n INSERT INTO\n #{class_obj.table_name} #{sql_columns}\n VALUES\n #{sql_question_marks}\n SQL\n\n self.id = DBConnection.last_insert_row_id\n end",
"def insert()\n query = \"INSERT INTO sale_items (sli_qty, sli_unit_price, sli_alb_id, sli_slo_id)\n VALUES ($1, $2, $3, $4) RETURNING sli_id\"\n @sli_id = DbHelper.run_sql_return_first_row_column_value(query,\n [@sli_qty, @sli_unit_price, @sli_alb_id, @sli_slo_id],\n 'sli_id').to_i\n end",
"def reaktor_insert(row)\n insert_id = 0\n unless row.idstore.nil?\n Log.write_log($import_log, \"reaktor_insert: Table: #{row.table_name} args: #{row.idstore.args * ', '}\")\n else\n Log.write_log($import_log, \"reaktor_insert: Table: #{row.table_name} No IdStore object\")\n end\n query = \"INSERT INTO #{row.table_name} (#{row.get_column_name_string})\\n VALUES (#{(['?']*row.size).join(', ')})\"\n sth = $dbh_ms.prepare(query)\n begin\n sth.execute(*row.get_column_values)\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not insert data. Message: #{$!}. query: \\\"#{get_query_string(sth)}\\\"\")\n raise\n exit\n end\n begin\n insert_id = $dbh_ms.func(:insert_id) unless row.idstore.nil?\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not get insert id. Message: #{$!}.\")\n raise\n exit\n end\n if insert_id > 0\n row.store_id(insert_id)\n Log.write_log($import_log, \"Insert id store to table: #{row.table_name} id_store parameters: (#{row.idstore.args * ', '}) id: #{insert_id}\")\n else\n unless row.idstore.nil?\n Log.write_log($import_log, \"No id stored for table: #{row.table_name} id_store parameters: (#{row.idstore.args * ', '})\")\n else\n Log.write_log($import_log, \"No id stored for table: #{row.table_name} No IdStore object\")\n end\n \n end\nend",
"def save()\n sql = \"INSERT INTO transactions (merchant, tag_id, value, datestore) VALUES ('#{@merchant}', #{@tag_id}, #{@value}, '#{@datestore}') RETURNING *;\"\n transaction = SqlRunner.run( sql ).first\n @id = transaction['id'].to_i\nend",
"def insert!(*rows)\n Mao.sql(with_options(:insert => rows.flatten).sql) do |pg_result|\n if @options[:returning]\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n else\n pg_result.cmd_tuples\n end\n end\n end",
"def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n sql, binds = sql_for_insert(to_sql(arel, binds), pk, id_value, sequence_name, binds)\n value = exec_insert(sql, name, binds)\n id_value\n end",
"def insert!(attributes, returning: nil, record_timestamps: nil)\n insert_all!([ attributes ], returning: returning, record_timestamps: record_timestamps)\n end",
"def save()\n sql = \"INSERT INTO albums (artist_name, album_title, genre) VALUES ($1, $2, $3) RETURNING id;\"\n values = [@artist_name, @album_title, @genre]\n result = SqlRunner.run(sql, values)\n @id = result[0]['id'].to_i\nend",
"def insert_sales_dates(sale_date)\n db_connection do |conn|\n result = conn.exec_params(\"SELECT id FROM sales_dates WHERE sale_date = $1\", [sale_date[:sale_date]])\n if result.to_a.empty?\n result = conn.exec_params(\"INSERT INTO sales_dates (sale_date) VALUES ($1) RETURNING id\", [sale_date[:sale_date]])\n end\n result.first[\"id\"]\n end\nend",
"def _import(columns, values, opts=OPTS)\n if @opts[:returning]\n # no transaction: our multi_insert_sql_strategy should guarantee\n # that there's only ever a single statement.\n sql = multi_insert_sql(columns, values)[0]\n returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}\n elsif opts[:return] == :primary_key\n returning(insert_pk)._import(columns, values, opts)\n else\n super\n end\n end",
"def save()\n sql = \"\n INSERT INTO players\n ( name, team )\n VALUES\n ( '#{@name}', '#{@team}' )\n RETURNING *;\n \"\n @id = SqlRunner.run( sql )[0]['id'].to_i()\n end",
"def insert\n # Preparing for the query...\n cols = self.class.columns\n col_names = cols.map(&:to_s).join(\", \")\n question_marks = ([\"?\"] * cols.count).join(\", \")\n \n # The actual query\n DBConnection.execute(<<-SQL, *attribute_values)\n INSERT INTO\n #{ self.class.table_name } (#{ col_names })\n VALUES\n (#{ question_marks })\n SQL\n \n # Add an id number for the record\n self.id = DBConnection.last_insert_row_id\n end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def insert\n DATABASE.execute(\"INSERT INTO students (name, age, github) VALUES (?, ?, ?)\", @name, @age, @github)\n @id = DATABASE.last_insert_row_id\n end",
"def _insert\n return super if model.cti_tables.length == 1\n model.cti_models.each do |m|\n v = {}\n m.cti_table_columns.each{|c| v[c] = @values[c] if @values.include?(c)}\n ds = use_server(m.cti_instance_dataset)\n if ds.supports_insert_select? && (h = ds.insert_select(v))\n @values.merge!(h)\n else\n nid = ds.insert(v)\n @values[primary_key] ||= nid\n end\n end\n db.dataset.supports_insert_select? ? nil : @values[primary_key]\n end",
"def insert(attributes, returning: nil, unique_by: nil, record_timestamps: nil)\n insert_all([ attributes ], returning: returning, unique_by: unique_by, record_timestamps: record_timestamps)\n end",
"def insert\n DATABASE.execute(\"INSERT INTO locations (city) VALUES ('#{@city}')\")\n @id = DATABASE.last_insert_row_id # will return the value of the row id\n end",
"def insert(*values)\n execute_dui(insert_sql(*values)){|c| return c.last_id}\n end",
"def insert(options = {})\n prepare_insert(options) do\n if embedded?\n insert_as_embedded\n else\n insert_as_root\n end\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Otherwise, insert then grab last_insert_id.\n if insert_id = super\n insert_id\n else\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n last_insert_id(table, sequence_name)\n end\n end\n end",
"def save()\n sql = \"\n INSERT INTO daily_entries\n (datetime, progress, reputation, feeling_supported, supporting_others, time_management, problem_solving, long_term_priorities, personal_life, happiness)\n VALUES\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)\n RETURNING *\n \"\n values = [\n @datetime,\n @progress,\n @reputation,\n @feeling_supported,\n @supporting_others,\n @time_management,\n @problem_solving,\n @long_term_priorities,\n @personal_life,\n @happiness\n ]\n daily_entry = SqlRunner.run(sql, values).first\n @id = daily_entry['id'].to_i\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def save()\n sql = \"INSERT INTO customers (name) VALUES ($1) RETURNING id\"\n values = [@name]\n @id = SqlRunner.run(sql, values)[0][\"id\"].to_i()\n end",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def returning_id\n @sql_returning = ::MultiInsert::QueryBuilder.returning([:id])\n @returning_flat = true\n self\n end",
"def save\n sql = <<-SQL\n INSERT INTO #{table_name_for_insert} (#{col_names_for_insert})\n VALUES (#{values_for_insert})\n SQL\n\n DB[:conn].execute(sql)\n # need to assign .id here because does not have id when instantiated, but now has one via its database\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def insert(ignore_associations: false)\n unless new?\n msg = \"#{__FILE__}[#{__LINE__}] : #{self.class} : should be new (not loaded from db) - cannot insert\"\n Robe.logger.error(msg)\n raise DBError, msg\n end\n self.id = uuid unless id && !id.empty?\n if cache && cache.includes?(self.class, id)\n msg = \"#{__FILE__}[#{__LINE__}] : #{self.class} : with id #{id} already in cache - cannot insert\"\n Robe.logger.error(msg)\n raise DBError, msg\n else\n # TODO: unwind associations if insert fails\n result = (ignore_associations ? nil : save_associations).to_promise\n result.to_promise_then do\n self.class.db.insert_one(self.class.collection_name, to_db_hash)\n end.to_promise_then do\n @from_db = true\n cache.insert(self) if cache # no filter\n self.to_promise\n end\n end\n end",
"def insert_one(document, options = nil)\n native.insert_one(document, options || {}).inserted_id\n end",
"def exec_insert(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"insert\", binds) { query(sql, binds) }\r\n end",
"def insert(insert_val)\n\t\t\tcase value <=> insert_val\n\t\t\twhen 1 \n\t\t\t\tinsert_left insert_val\n\t\t\twhen -1\n\t\t\t insert_right insert_val\n\t\t\twhen 0\n\t\t\t false\n\t\t\tend\n\t\tend",
"def insert_select(params = {})\n db_str = GenInsert.insert_select(params)\n #@log.debug db_str\n @client.run db_str\n\n return true\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n execute(sql, name)\n id_value\n end",
"def record_transaction\n if self.save\n return true\n else\n return false\n end \n end",
"def insert_sequenced(row)\n sql = row.type.insert_sql_minus_key\n vals = row.field_values_minus_key\n#$stderr.puts sql\n#$stderr.puts vals.inspect\n\n db.do(sql, *vals)\n insert_id = db.select_one(row.type.get_insert_id_sql)[0]\n row.send(row.type.primary_key.setter_name, insert_id)\n row.reset_changed\n end",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end",
"def add_to_database\n hash = self.attr_hash\n columns = hash.keys\n values = hash.values\n if self.valid?\n CONNECTION.execute(\"INSERT INTO #{tablename} (#{columns.join \", \"}) VALUES (#{values.to_s[1...-1]});\")\n @id = CONNECTION.last_insert_row_id\n else\n false\n end\n end",
"def save()\n sql = \"INSERT INTO transactions (merchant, amount, tag, tran_date) VALUES ('#{@merchant}', #{@amount}, '#{@tag}', '#{@tran_date}') RETURNING *;\"\n data = SqlRunner.run(sql)\n @id = data.first()['id'].to_i\n end",
"def execute\n @result = session.execute(statement)\n execution_successful?\n end",
"def insert_statement(model, properties, identity_field)\n statement = \"INSERT INTO #{quote_name(model.storage_name(name))} \"\n\n if supports_default_values? && properties.empty?\n statement << 'DEFAULT VALUES'\n else\n statement << <<-SQL.compress_lines\n (#{properties.map { |property| quote_name(property.field) }.join(', ')})\n VALUES\n (#{(['?'] * properties.size).join(', ')})\n SQL\n end\n\n if supports_returning? && identity_field\n statement << \" RETURNING #{quote_name(identity_field.field)}\"\n end\n\n statement\n end",
"def insert(**opts)\n add(**opts)\n save!\n end",
"def save()\n sql = \"INSERT INTO customers\n (name, funds) VALUES ($1, $2) RETURNING id\"\n values = [@name,@funds]\n customer = SqlRunner.run(sql,values).first\n @id = customer['id'].to_i\n end",
"def returning(*values)\n if values.empty?\n cached_dataset(:_returning_ds) do\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>EMPTY_ARRAY)\n end\n else\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>values.freeze)\n end\n end",
"def save()\n sql = \"INSERT INTO albums (title, genre, artist_id)\n VALUES ($1, $2, $3) RETURNING id\"\n values = [@title, @genre, @artist_id]\n results = SqlRunner.run(sql, values)\n @id = results[0]['id'].to_i()\n end",
"def save\n # binding.pry\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n # binding.pry\n DB[:conn].execute(sql)\n # binding.pry\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n # binding.pry\n end",
"def insert(value)\n # YOUR WORK HERE\n end",
"def save\n result = DB.exec(\"INSERT INTO books (name, author) VALUES ('#{@name}', '#{@author}') RETURNING id;\")\n @id = result.first().fetch(\"id\").to_i\nend",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n\n DB[:conn].execute(sql)\n\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def insert(value)\n #YOUR WORK HERE\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end",
"def save\n sql = \"INSERT INTO #{table_name_for_insert} (#{col_names_for_insert}) VALUES (#{values_for_insert})\"\n DB[:conn].execute(sql)\n @id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM #{table_name_for_insert}\")[0][0]\n end"
] |
[
"0.7542973",
"0.7532102",
"0.68789726",
"0.65688646",
"0.65688646",
"0.62109786",
"0.6128794",
"0.612806",
"0.6119359",
"0.6069529",
"0.6069529",
"0.5990838",
"0.597326",
"0.5886588",
"0.5826648",
"0.58002967",
"0.5799923",
"0.5799923",
"0.5799923",
"0.5736858",
"0.573113",
"0.5717431",
"0.57166624",
"0.5714197",
"0.5709112",
"0.56853175",
"0.5627879",
"0.55945355",
"0.5591199",
"0.5590015",
"0.5584038",
"0.5582871",
"0.5577109",
"0.5556539",
"0.5553898",
"0.5540197",
"0.553331",
"0.5521691",
"0.5499354",
"0.5483538",
"0.5481214",
"0.54804325",
"0.54760915",
"0.5471513",
"0.54625756",
"0.54606956",
"0.5458306",
"0.543825",
"0.54229665",
"0.5411282",
"0.5395009",
"0.5331561",
"0.5322803",
"0.5322803",
"0.53099823",
"0.53076583",
"0.5306727",
"0.53030026",
"0.52984697",
"0.52968",
"0.52949077",
"0.5281777",
"0.527129",
"0.527129",
"0.5260016",
"0.52457666",
"0.52411294",
"0.5240547",
"0.52339226",
"0.5224077",
"0.52176774",
"0.52116156",
"0.52110106",
"0.5203697",
"0.5200726",
"0.51995164",
"0.51810634",
"0.5176858",
"0.5169355",
"0.51641506",
"0.51418424",
"0.5140624",
"0.51366675",
"0.51230323",
"0.51179945",
"0.51155704",
"0.51150745",
"0.51035553",
"0.50947285",
"0.5089557",
"0.5076641",
"0.5072071",
"0.5072071",
"0.5072071",
"0.5072071",
"0.5072071",
"0.5072071",
"0.5072071",
"0.5072071",
"0.5072071"
] |
0.6084432
|
9
|
The SQL to use for an insert_select; adds a RETURNING clause to the insert unless a RETURNING clause is already present.
|
def insert_select_sql(*values)
ds = opts[:returning] ? self : returning
ds.insert_sql(*values)
end
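
A minimal usage sketch (assuming a Sequel dataset on a database that supports RETURNING, such as PostgreSQL; the table and column names below are illustrative, not taken from the source):

  ds = DB[:items]                    # assumed dataset on a RETURNING-capable adapter
  ds.insert_select_sql(name: 'a')
  # => INSERT INTO "items" ("name") VALUES ('a') RETURNING *
  ds.returning(:id).insert_select_sql(name: 'a')
  # existing RETURNING clause is kept as-is:
  # => INSERT INTO "items" ("name") VALUES ('a') RETURNING "id"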
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def returning_clause(serial)\n \" RETURNING #{quote_name(serial.field)} INTO :insert_id\"\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert_select(*values)\n return unless supports_insert_select?\n # Handle case where query does not return a row\n server?(:default).with_sql_first(insert_select_sql(*values)) || false\n end",
"def insert_select(*values)\n unless @opts[:disable_insert_returning]\n ds = opts[:returning] ? self : returning\n ds.insert(*values){|r| return r}\n end\n end",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def sql_with_returning(sql)\n table_ref = extract_table_ref_from_update_sql(sql)\n\n returning_columns = quote_returning_column_names(table_ref, nil, :update)\n\n return sql if returning_columns.blank?\n\n \"#{sql} RETURNING #{returning_columns.join(', ')}\"\n end",
"def insert_select(*values)\n with_sql_first(insert_select_sql(*values))\n end",
"def _insert_select_raw(ds)\n if use_prepared_statements_for?(:insert_select)\n if ps = model.send(:prepared_insert_select, @values.keys)\n _set_prepared_statement_server(ps).call(@values)\n end\n else\n super\n end\n end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def prepared_insert_select(cols)\n if dataset.supports_insert_select?\n cached_prepared_statement(:insert_select, prepared_columns(cols)){prepare_explicit_statement(naked.clone(:server=>dataset.opts.fetch(:server, :default)), :insert_select, prepared_statement_key_hash(cols))}\n end\n end",
"def supports_insert_select?\n !@opts[:disable_insert_returning]\n end",
"def insert_returning_columns(ds)\n return unless ds.supports_returning?(:insert)\n return unless values = ds.opts[:select]\n\n values = values.map{|v| ds.unqualified_column_for(v)}\n if values.all?\n values\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Try an insert with 'returning id' if available (PG >= 8.2)\n if supports_insert_with_returning? && id_value.nil?\n pk, sequence_name = *pk_and_sequence_for(table) unless pk\n if pk\n sql = substitute_binds(sql, binds)\n id_value = select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n clear_query_cache #FIXME: Why now?\n return id_value\n end\n end\n\n # Otherwise, plain insert\n execute(sql, name, binds)\n\n # Don't need to look up id_value if we already have it.\n # (and can't in case of non-sequence PK)\n unless id_value\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n id_value = last_insert_id(table, sequence_name)\n end\n end\n id_value\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def sql_insert_into_select(insertion_table, selection, options = {})\n InsertIntoSelect.new insertion_table, selection, options\n end",
"def _insert_select_raw(ds)\n ds.insert_select(_insert_values)\n end",
"def _insert_select_raw(ds)\n ds.insert_select(_insert_values)\n end",
"def to_sql\n [@sql_insert, @sql_on_conflict, @sql_returning].reject(&:nil?).join(' ')\n end",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def execute_insert(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}; conn.last_insert_rowid}\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:\r\n execute(sql, name)\r\n identity = last_inserted_id(nil)\r\n retval = id_value if retval == 0\r\n return retval\r\n end",
"def _merge_insert_sql(sql, data)\n sql << \" THEN INSERT \"\n columns, values = _parse_insert_sql_args(data[:values])\n _insert_columns_sql(sql, columns)\n if override = data[:override]\n sql << override\n end\n _insert_values_sql(sql, values)\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n execute(sql, name)\n id_value\n end",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def exec_insert(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"insert\", binds) { query(sql, binds) }\r\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n super\n\n exec_query('SELECT @@IDENTITY AS id')\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def build_insert_sql(insert) # :nodoc:\n if insert.skip_duplicates? || insert.update_duplicates?\n raise NotImplementedError, \"#{self.class} should define `build_insert_sql` to implement adapter-specific logic for handling duplicates during INSERT\"\n end\n\n \"INSERT #{insert.into} #{insert.values_list}\"\n end",
"def execute_insert(sql, opts={})\n synchronize(opts[:server]) do |conn|\n conn.execute(sql)\n insert_result(conn, opts[:table], opts[:values])\n end\n end",
"def disable_insert_returning\n clone(:disable_insert_returning=>true)\n end",
"def sql_insert(record)\n flds, vals = parse_fldsvalues(record)\n ph = vals.map{|x| placeholder }\n\n sql = %Q|insert into #{quoted_table}\n ( #{flds.join ','} )\n output inserted.#{quote_field id_fld}\n values( #{ph.join ','} );|\n\n [sql, vals]\n end",
"def raw_sql(record)\n record.class.arel_table.create_insert.tap do |insert_manager|\n insert_manager.insert(insert_values(record))\n end.to_sql\n end",
"def _import(columns, values, opts=OPTS)\n if @opts[:returning]\n # no transaction: our multi_insert_sql_strategy should guarantee\n # that there's only ever a single statement.\n sql = multi_insert_sql(columns, values)[0]\n returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}\n elsif opts[:return] == :primary_key\n returning(insert_pk)._import(columns, values, opts)\n else\n super\n end\n end",
"def prepare_explicit_statement(ds, type, vals=OPTS)\n f = ds.opts[:from]\n meth = type == :insert_select ? :returning : :select\n s = ds.opts[meth]\n if f && f.length == 1 && !ds.opts[:join] && (!s || s.empty?)\n ds = ds.send(meth, *columns.map{|c| Sequel.identifier(c)})\n end \n \n prepare_statement(ds, type, vals)\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def run_insert\n raise DataFactory::NoInsertStatement unless @insert_statement\n\n stmt = db_interface.execute_sql(@insert_statement, *@binds)\n stmt.close\n end",
"def returning_id\n @sql_returning = ::MultiInsert::QueryBuilder.returning([:id])\n @returning_flat = true\n self\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n table = sql.split(\" \", 4)[2]\n super || last_insert_id(table, sequence_name || default_sequence_name(table, pk))\n end",
"def insert_select(params = {})\n db_str = GenInsert.insert_select(params)\n #@log.debug db_str\n @client.run db_str\n\n return true\n end",
"def execute_insert(sql, opts=OPTS)\n execute(sql, opts.merge(:meth=>:autoid))\n end",
"def statement\n [\n \"insert into\",\n @table,\n column_list,\n query_expression,\n ].compact.join(' ')\n end",
"def prepared_sql\n case prepared_type\n when :select, :all, :each\n # Most common scenario, so listed first.\n select_sql\n when :first\n clone(:limit=>1).select_sql\n when :insert_select\n insert_select_sql(*prepared_modify_values)\n when :insert, :insert_pk\n insert_sql(*prepared_modify_values)\n when :update\n update_sql(*prepared_modify_values)\n when :delete\n delete_sql\n else\n select_sql\n end\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Otherwise, insert then grab last_insert_id.\n if insert_id = super\n insert_id\n else\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n last_insert_id(table, sequence_name)\n end\n end\n end",
"def insert!(*rows)\n Mao.sql(with_options(:insert => rows.flatten).sql) do |pg_result|\n if @options[:returning]\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n else\n pg_result.cmd_tuples\n end\n end\n end",
"def execute(sql, opts={}, &block)\n super(sql, {:type=>:select}.merge(opts), &block)\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def run\n if @prepared_type == :insert\n fetch_rows(prepared_sql){|r| return r.values.first}\n else\n super\n end\n end",
"def insert_conflict_sql(sql)\n if opts = @opts[:insert_conflict]\n sql << \" ON CONFLICT\"\n\n if target = opts[:constraint] \n sql << \" ON CONSTRAINT \"\n identifier_append(sql, target)\n elsif target = opts[:target]\n sql << ' '\n identifier_append(sql, Array(target))\n if conflict_where = opts[:conflict_where]\n sql << \" WHERE \"\n literal_append(sql, conflict_where)\n end\n end\n\n if values = opts[:update]\n sql << \" DO UPDATE SET \"\n update_sql_values_hash(sql, values)\n if update_where = opts[:update_where]\n sql << \" WHERE \"\n literal_append(sql, update_where)\n end\n else\n sql << \" DO NOTHING\"\n end\n end\n end",
"def insert_sql(c, insert)\n\n\t\ttime = Time.now.to_s(:db)\n\n\t\tfirstname = remove_apostrophe(c.first_name)\n\t\tlastname = remove_apostrophe(c.last_name)\n\t\tcompany = remove_apostrophe(c.company)\n\t\tnotes = remove_apostrophe(c.notes)\n\t\temail = remove_apostrophe(c.email)\n\t\tdate_created = map_date(c.date_created)\n\t\tdate_modified = map_date(c.date_modified)\n\t\tphone = c.phone.gsub(/\\s+/, \"\")\n\n\t\tsql = cust = \"\"\n\n\t\tif insert == 1\n\n\t\t\tunallocated_staff_id = 34\n\n\t\t\tcust = \"('#{c.id}', '#{firstname}', '#{lastname}', '#{company}',\\\n\t\t\t'#{email}', '#{phone}', '#{c.store_credit}', '#{c.registration_ip_address}',\\\n\t\t\t'#{notes}', '#{date_created}', '#{date_modified}', '#{time}', '#{time}', '#{c.customer_group_id}', '#{unallocated_staff_id}')\"\n\n\t\t\tsql = \"INSERT INTO customers(id, firstname, lastname, company, email, phone,\\\n\t\t\tstore_credit, registration_ip_address, notes, date_created, date_modified,\\\n\t\t\tcreated_at, updated_at, cust_type_id, staff_id) VALUES #{cust}\"\n\t\telse\n\n\t\t\tsql = \"UPDATE customers SET firstname = '#{firstname}', lastname = '#{lastname}', company = '#{company}',\\\n\t\t\temail = '#{email}', phone = '#{phone}', store_credit = '#{c.store_credit}',\\\n\t\t\tregistration_ip_address = '#{c.registration_ip_address}', notes = '#{notes}', date_created = '#{date_created}',\\\n\t\t\tdate_modified = '#{date_modified}', updated_at = '#{time}', cust_type_id = '#{c.customer_group_id}' WHERE id = '#{c.id}'\"\n\n\n\t\tend\n\n ActiveRecord::Base.connection.execute(sql) \n\n\tend",
"def generate_insert\n @binds = Array.new\n @insert_statement = \"insert into #{fully_qualified_table_name} (\"\n @insert_statement << column_details.keys.sort.map { |k| column_detail(k).column_name }.join(',')\n @insert_statement << ') values ('\n @insert_statement << column_details.keys.sort.map { |k|\n \":#{k}\"\n }.join(',')\n column_details.keys.sort.each { |k|\n if @column_values[k] == nil\n @binds.push [column_type_to_ruby_type(column_details[k]), nil]\n else\n @binds.push @column_values[k]\n end\n }\n @insert_statement << ')'\n @insert_statement\n end",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end",
"def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n exec_insert(to_sql(arel), name, binds)\n retval = last_inserted_id(nil)\n retval = id_value if retval == 0\n return retval\n end",
"def execute_insert(sql, opts=OPTS)\n synchronize(opts[:server]) do |c|\n if sql.is_a?(Symbol)\n execute_prepared_statement(sql, opts)\n else\n _execute(c, sql, opts)\n end\n _execute(c, \"SELECT IDENTITY_VAL_LOCAL() FROM SYSIBM.SYSDUMMY1\", opts){|stmt| i = stmt.fetch_array.first.to_i; i}\n end\n rescue Connection::Error => e\n raise_error(e)\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n insert_sql(sql, name, pk, id_value, sequence_name)\n end",
"def insert()\n query = \"INSERT INTO sale_items (sli_qty, sli_unit_price, sli_alb_id, sli_slo_id)\n VALUES ($1, $2, $3, $4) RETURNING sli_id\"\n @sli_id = DbHelper.run_sql_return_first_row_column_value(query,\n [@sli_qty, @sli_unit_price, @sli_alb_id, @sli_slo_id],\n 'sli_id').to_i\n end",
"def returning(columns)\n @sql_returning = ::MultiInsert::QueryBuilder.returning(columns)\n @returning_flat = false\n self\n end",
"def generate_pg_insert_query(table_name, keys, rows)\n \"INSERT INTO #{table_name}(#{keys.map { |i| \"\\\"#{i}\\\"\" }.join(',')}) VALUES(#{keys.map { |i| rows[i] == nil ? 'NULL' : \"'\" + pg_conn.escape_string(rows[i]) + \"'\" }.join(',')});\\n\"\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def _insert\n return super if model.cti_tables.length == 1\n model.cti_models.each do |m|\n v = {}\n m.cti_table_columns.each{|c| v[c] = @values[c] if @values.include?(c)}\n ds = use_server(m.cti_instance_dataset)\n if ds.supports_insert_select? && (h = ds.insert_select(v))\n @values.merge!(h)\n else\n nid = ds.insert(v)\n @values[primary_key] ||= nid\n end\n end\n db.dataset.supports_insert_select? ? nil : @values[primary_key]\n end",
"def run(&block)\n case prepared_type\n when :select, :all\n # Most common scenario, so listed first\n all(&block)\n when :each\n each(&block)\n when :insert_select\n with_sql(prepared_sql).first\n when :first\n first\n when :insert, :update, :delete\n if opts[:returning] && supports_returning?(prepared_type)\n returning_fetch_rows(prepared_sql)\n elsif prepared_type == :delete\n delete\n else\n send(prepared_type, *prepared_modify_values)\n end\n when :insert_pk\n fetch_rows(prepared_sql){|r| return r.values.first}\n when Array\n case prepared_type[0]\n when :map, :as_hash, :to_hash, :to_hash_groups\n send(*prepared_type, &block) \n end\n else\n Sequel::Deprecation.deprecate(\"Using an unsupported prepared statement type (#{prepared_type.inspect})\", 'Switch to using :select as the prepared statement type')\n all(&block)\n end\n end",
"def sql\n s = \"\"\n options = @options.dup\n\n if update = options.delete(:update)\n s = \"UPDATE \"\n s << Mao.quote_ident(@table)\n s << \" SET \"\n\n if update.length == 0\n raise ArgumentError, \"invalid update: nothing to set\"\n end\n\n s << update.map do |column, value|\n check_column(column, @table, @col_types)\n\n \"#{Mao.quote_ident(column)} = #{Mao.escape_literal(value)}\"\n end.join(\", \")\n\n if where = options.delete(:where)\n s << \" WHERE \"\n s << Mao::Filter.sql(where)\n end\n elsif insert = options.delete(:insert)\n s = \"INSERT INTO \"\n s << Mao.quote_ident(@table)\n s << \" (\"\n\n keys = insert.map(&:keys).flatten.uniq.sort\n s << keys.map do |column|\n check_column(column, @table, @col_types)\n Mao.quote_ident(column)\n end.join(\", \")\n s << \") VALUES \"\n\n first = true\n insert.each do |row|\n if first\n first = false\n else\n s << \", \"\n end\n\n s << \"(\"\n s << keys.map {|k|\n if row.include?(k)\n Mao.escape_literal(row[k])\n else\n \"DEFAULT\"\n end\n }.join(\", \")\n s << \")\"\n end\n\n if returning = options.delete(:returning)\n s << \" RETURNING \"\n s << returning.map {|c| Mao.quote_ident(c)}.join(\", \")\n end\n else\n s = \"SELECT \"\n\n join = options.delete(:join)\n only = options.delete(:only)\n\n if join\n n = 0\n s << (@col_types.keys.sort.map {|c|\n n += 1\n if !only or (only[@table] and only[@table].include?(c))\n \"#{Mao.quote_ident(@table)}.#{Mao.quote_ident(c)} \" +\n \"#{Mao.quote_ident(\"c#{n}\")}\"\n end\n } + Mao.query(join[0]).col_types.keys.sort.map {|c|\n n += 1\n if !only or (only[join[0]] and only[join[0]].include?(c))\n \"#{Mao.quote_ident(join[0])}.#{Mao.quote_ident(c)} \" +\n \"#{Mao.quote_ident(\"c#{n}\")}\"\n end\n }).reject(&:nil?).join(\", \")\n elsif only\n s << only.map {|c| Mao.quote_ident(c)}.join(\", \")\n else\n s << \"*\"\n end\n\n s << \" FROM #{Mao.quote_ident(@table)}\"\n\n if join\n s << \" INNER JOIN #{Mao.quote_ident(join[0])} ON \"\n s << Mao::Filter.sql(join[1])\n end\n\n if where = options.delete(:where)\n s << \" WHERE \"\n s << Mao::Filter.sql(where)\n end\n\n if order = options.delete(:order)\n s << \" ORDER BY \"\n s << Mao.quote_ident(order[0])\n s << \" \"\n s << order[1]\n end\n\n if limit = options.delete(:limit)\n s << \" LIMIT #{limit}\"\n end\n end\n\n if options.length > 0\n raise ArgumentError,\n \"invalid options in #sql: #{options.inspect}. \" \\\n \"SQL constructed: #{s}\"\n end\n\n s\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def insert_sequenced(row)\n sql = row.type.insert_sql_minus_key\n vals = row.field_values_minus_key\n#$stderr.puts sql\n#$stderr.puts vals.inspect\n\n db.do(sql, *vals)\n insert_id = db.select_one(row.type.get_insert_id_sql)[0]\n row.send(row.type.primary_key.setter_name, insert_id)\n row.reset_changed\n end",
"def multi_insert_sql(columns, values)\n values = values.map {|r| \"SELECT #{expression_list(r)}\" }.join(\" UNION ALL \")\n [\"#{insert_sql_base}#{source_list(@opts[:from])} (#{identifier_list(columns)}) #{values}\"]\n end",
"def visit_Arel_Nodes_InsertStatement o, *a\n [\n \"INSERT INTO #{visit(o.relation).gsub(/\"/, '')}\",\n \"(#{o.columns.map { |x| x.name }.join ', '})\",\n \" VALUES (#{o.values.left.map { |value| value }.join ', '})\"\n ].compact.join ' '\n end",
"def insert_sql_each\n return enum_for(__method__) unless block_given?\n each_row do |row|\n yield table_dataset.insert_sql( row )\n end\n end",
"def insert_into_sql(sql)\n sql << \" INTO \"\n if (f = @opts[:from]) && f.length == 1\n identifier_append(sql, server_version >= 90500 ? f.first : unaliased_identifier(f.first))\n else\n source_list_append(sql, f)\n end\n end",
"def insert_statement(model, properties, identity_field)\n statement = \"INSERT INTO #{quote_name(model.storage_name(name))} \"\n\n if supports_default_values? && properties.empty?\n statement << 'DEFAULT VALUES'\n else\n statement << <<-SQL.compress_lines\n (#{properties.map { |property| quote_name(property.field) }.join(', ')})\n VALUES\n (#{(['?'] * properties.size).join(', ')})\n SQL\n end\n\n if supports_returning? && identity_field\n statement << \" RETURNING #{quote_name(identity_field.field)}\"\n end\n\n statement\n end",
"def insert(*args)\n r = super\n if s = opts[:sequence]\n with_sql(\"SELECT #{literal(s)}.currval FROM dual\").single_value.to_i\n else\n r\n end\n end",
"def subselect_sql_append(sql, ds)\n ds.clone(:append_sql=>sql, :prepared_args=>prepared_args, :bind_vars=>@opts[:bind_vars]).\n send(:to_prepared_statement, :select, nil, :extend=>prepared_statement_modules).\n prepared_sql\n end",
"def insert_sql(*values)\n if values.empty?\n insert_default_values_sql\n else\n values = values[0] if values.size == 1\n \n # if hash or array with keys we need to transform the values\n if @transform && (values.is_a?(Hash) || (values.is_a?(Array) && values.keys))\n values = transform_save(values)\n end\n from = source_list(@opts[:from])\n\n case values\n when Array\n if values.empty?\n insert_default_values_sql\n else\n \"INSERT INTO #{from} VALUES #{literal(values)}\"\n end\n when Hash\n if values.empty?\n insert_default_values_sql\n else\n fl, vl = [], []\n values.each {|k, v| fl << literal(k.is_a?(String) ? k.to_sym : k); vl << literal(v)}\n \"INSERT INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})\"\n end\n when Dataset\n \"INSERT INTO #{from} #{literal(values)}\"\n else\n if values.respond_to?(:values)\n insert_sql(values.values)\n else\n \"INSERT INTO #{from} VALUES (#{literal(values)})\"\n end\n end\n end\n end",
"def insert_default_values_sql\n \"INSERT INTO #{source_list(@opts[:from])} DEFAULT VALUES\"\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @logger.unknown(\"@iiTable=#{@iiTable}\") if @trace\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}\") if @trace\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n# rescue Exception => e\n# raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def insert_statement(statement)\n self.inserts << statement\n end",
"def insert_statement(statement)\n self.inserts << statement\n end",
"def row_sql_insert(table_name, table_struct)\n fields = get_fields(table_struct)\n\n sql = <<-EOF\n INSERT INTO `#{DBNAME}`.`#{table_name}` (\n #{fields.collect { |f| \"`#{f}`\" }.join(\", \")}\n )\n VALUES (\n #{fields.collect { |f| \"'%s'\" }.join(\", \")}\n );\n EOF\n\n sql\nend",
"def insert_in_database\n Fetch.new(insertion_query).array\n end",
"def last_insert_id(conn, opts={})\n stmt = conn.createStatement\n begin\n sql = opts[:prepared] ? 'SELECT @@IDENTITY' : 'SELECT SCOPE_IDENTITY()'\n rs = log_yield(sql){stmt.executeQuery(sql)}\n rs.next\n rs.getInt(1)\n ensure\n stmt.close\n end\n end",
"def insert(*values)\n execute_dui(insert_sql(*values)){|c| return c.last_id}\n end",
"def insert()\n query = \"INSERT INTO artists (art_name, art_photo) VALUES ($1, $2) RETURNING art_id\"\n @art_id = DbHelper.run_sql_return_first_row_column_value(query, [@art_name, @art_photo], 'art_id').to_i;\n end",
"def raw_select(sql, name = 'SQL', binds = [], options = {})\n log(sql, name, binds) { _raw_select(sql, options) }\n end",
"def reaktor_insert(row)\n insert_id = 0\n unless row.idstore.nil?\n Log.write_log($import_log, \"reaktor_insert: Table: #{row.table_name} args: #{row.idstore.args * ', '}\")\n else\n Log.write_log($import_log, \"reaktor_insert: Table: #{row.table_name} No IdStore object\")\n end\n query = \"INSERT INTO #{row.table_name} (#{row.get_column_name_string})\\n VALUES (#{(['?']*row.size).join(', ')})\"\n sth = $dbh_ms.prepare(query)\n begin\n sth.execute(*row.get_column_values)\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not insert data. Message: #{$!}. query: \\\"#{get_query_string(sth)}\\\"\")\n raise\n exit\n end\n begin\n insert_id = $dbh_ms.func(:insert_id) unless row.idstore.nil?\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not get insert id. Message: #{$!}.\")\n raise\n exit\n end\n if insert_id > 0\n row.store_id(insert_id)\n Log.write_log($import_log, \"Insert id store to table: #{row.table_name} id_store parameters: (#{row.idstore.args * ', '}) id: #{insert_id}\")\n else\n unless row.idstore.nil?\n Log.write_log($import_log, \"No id stored for table: #{row.table_name} id_store parameters: (#{row.idstore.args * ', '})\")\n else\n Log.write_log($import_log, \"No id stored for table: #{row.table_name} No IdStore object\")\n end\n \n end\nend",
"def execute\n result = nil\n ActiveRecord::Base.connection_pool.with_connection do |con|\n result = con.execute(to_sql)\n end\n if @sql_returning.nil?\n nil\n else\n if @returning_flat\n result.values.map{|r| r.first}\n else\n result\n end\n end\n end",
"def select(select)\n @query = select.statement\n end",
"def multi_insert_sql_strategy\n :values\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n @connection.do(enable_identity_insert(@iiTable, true))\n @iiEnabled = true\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def insert_sql(*values)\n if values.size == 1 && (v = values[0]).is_a?(Sequel::Model) && !v.respond_to?(:sql_literal_append)\n Sequel::Deprecation.deprecate(\"Passing Sequel::Model instance argument to Sequel::Dataset#insert\", \"Pass model_instance.values or model_instance.to_hash as the argument instead\")\n super(v.to_hash)\n else\n super\n end\n end",
"def resolve_ids_habtm_sql(source, klass, param, param_class, insert_table)\n\n\t\ttemptable = resolve_ids_temptable_sql(source, klass, param, param_class)\n\n\t\tsource.connection.execute \"\n\t\t\tINSERT INTO #{insert_table} (#{klass.name.underscore + \"_id\"}, #{param}) \n\t\t\t\tSELECT object_id, param_id FROM #{temptable};\"\n\n\t\tresolve_ids_cleanup_sql(source, temptable)\n\n\t\treturn true\n\tend"
] |
[
"0.7520621",
"0.7520621",
"0.7286327",
"0.72612214",
"0.7190447",
"0.71479136",
"0.71042174",
"0.70045507",
"0.6908703",
"0.6906702",
"0.68156385",
"0.6802465",
"0.65613794",
"0.65613794",
"0.65486777",
"0.6533989",
"0.6515687",
"0.64841336",
"0.6463996",
"0.6424976",
"0.63692147",
"0.63692147",
"0.63295496",
"0.63166636",
"0.6241945",
"0.6213891",
"0.6173519",
"0.6161134",
"0.61204755",
"0.6084535",
"0.6068225",
"0.6037315",
"0.60318977",
"0.60288477",
"0.60137385",
"0.60007584",
"0.59937954",
"0.5989957",
"0.59752345",
"0.59270287",
"0.59117395",
"0.59117395",
"0.59079826",
"0.58821666",
"0.5871882",
"0.5871529",
"0.5845957",
"0.58233094",
"0.58005625",
"0.5783425",
"0.5750963",
"0.5698371",
"0.5686983",
"0.56742513",
"0.56637466",
"0.5634714",
"0.56108165",
"0.5597115",
"0.5593921",
"0.55907553",
"0.5588036",
"0.5579049",
"0.5555382",
"0.55433923",
"0.5504868",
"0.5496443",
"0.5496443",
"0.5490849",
"0.5462628",
"0.5422583",
"0.54130393",
"0.54130393",
"0.5403579",
"0.53969383",
"0.5390676",
"0.53452146",
"0.53245544",
"0.5303113",
"0.52912784",
"0.5290992",
"0.52787185",
"0.526426",
"0.5241761",
"0.5224294",
"0.5224294",
"0.52121025",
"0.52039516",
"0.51941025",
"0.5192124",
"0.5182251",
"0.5170707",
"0.5165073",
"0.51631975",
"0.51472026",
"0.5129839",
"0.51243526",
"0.51220715",
"0.5083406"
] |
0.7737149
|
2
|
Support SQL::AliasedExpression as expr to set up a USING join with a table alias for the USING columns.
|
def join_table(type, table, expr=nil, options=OPTS, &block)
if expr.is_a?(SQL::AliasedExpression) && expr.expression.is_a?(Array) && !expr.expression.empty? && expr.expression.all?
options = options.merge(:join_using=>true)
end
super
end
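
A hypothetical usage sketch (Sequel against PostgreSQL 14+, which accepts an alias after the USING column list; the table and column names are illustrative):

  # Passing an aliased column array as the join expression triggers the
  # :join_using option set above, producing USING (...) AS alias.
  DB[:a].join(:b, Sequel.as([:c], :t))
  # => SELECT * FROM "a" INNER JOIN "b" USING ("c") AS "t"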
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def join_using_clause_using_sql_append(sql, using_columns)\n if using_columns.is_a?(SQL::AliasedExpression)\n super(sql, using_columns.expression)\n sql << ' AS '\n identifier_append(sql, using_columns.alias)\n else\n super\n end\n end",
"def table_alias\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.alias\n end\n end",
"def join_table(type, table, expr=nil, table_alias=nil)\n raise(Error::InvalidJoinType, \"Invalid join type: #{type}\") unless join_type = JOIN_TYPES[type || :inner]\n\n table = if Dataset === table\n if table_alias.nil?\n table_alias_num = (@opts[:num_dataset_sources] || 0) + 1\n table_alias = \"t#{table_alias_num}\"\n end\n table.to_table_reference\n else\n table = table.table_name if table.respond_to?(:table_name)\n table_alias ||= table\n table_ref(table)\n end\n\n if Hash === expr or (Array === expr and expr.all_two_pairs?)\n expr = expr.collect do |k, v|\n k = qualified_column_name(k, table_alias) if k.is_a?(Symbol)\n v = qualified_column_name(v, @opts[:last_joined_table] || first_source) if v.is_a?(Symbol)\n [k,v]\n end\n end\n\n quoted_table_alias = quote_identifier(table_alias) \n clause = \"#{@opts[:join]} #{join_type} #{table}#{\" #{quoted_table_alias}\" if quoted_table_alias != table} ON #{literal(filter_expr(expr))}\"\n opts = {:join => clause, :last_joined_table => table_alias}\n opts[:num_dataset_sources] = table_alias_num if table_alias_num\n clone(opts)\n end",
"def using(key, &block)\n if @type == :natural then cmd = 'JOIN '\n elsif @type == :left then cmd = 'LEFT JOIN '\n elsif @type == :right then cmd = 'RIGHT JOIN '\n end\n key = key.to_s\n using_string = \"#{@base_klass.table_name}.#{key} = \"\n using_string << \"#{@join_klass.table_name}.#{key}\"\n @string = \"\\n\" << cmd << @join_klass.table_name << ' ON (' << using_string << ') '\n @clause_parser.append_join(self)\n yield @clause_parser # use extended clause parser for inner block argument\n end",
"def join_table_alias\n cached_fetch(:join_table_alias) do\n s, a = split_join_table_alias\n a || s\n end\n end",
"def assign_join association = nil\n @table_alias = association ? \"#{association.aliased_table_name}.\" : \"\"\n end",
"def to_sql\n @join.association_join.gsub(/::ts_join_alias::/,\n \"#{@reflection.klass.connection.quote_table_name(@join.parent.aliased_table_name)}\"\n )\n end",
"def join_table_alias\n final_reverse_edge[:alias]\n end",
"def aliased_expression_sql(ae)\n \"#{literal(ae.expression)} AS #{quote_identifier(ae.aliaz)}\"\n end",
"def v(o)\n case o\n when Symbol\n t, column, aliaz = Sequel.split_symbol(o)\n if t\n o\n elsif aliaz\n SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(@table, SQL::Identifier.new(column)), aliaz)\n else\n SQL::QualifiedIdentifier.new(@table, o)\n end\n when SQL::Identifier\n SQL::QualifiedIdentifier.new(@table, o)\n when SQL::QualifiedIdentifier, SQL::JoinClause\n # Return these directly, so we don't accidentally qualify symbols in them.\n o\n else\n super\n end\n end",
"def v(o)\n case o\n when Symbol\n t, column, aliaz = Sequel.split_symbol(o)\n if t\n o\n elsif aliaz\n SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(@table, SQL::Identifier.new(column)), aliaz)\n else\n SQL::QualifiedIdentifier.new(@table, o)\n end\n when SQL::Identifier\n SQL::QualifiedIdentifier.new(@table, o)\n when SQL::QualifiedIdentifier, SQL::JoinClause\n # Return these directly, so we don't accidentally qualify symbols in them.\n o\n else\n super\n end\n end",
"def delete_using_sql(sql)\n join_from_sql(:USING, sql)\n end",
"def delete_using_sql(sql)\n join_from_sql(:USING, sql)\n end",
"def table_aliases_from_join_fragment(sql)\r\n return [] if sql.blank?\r\n return sql.scan(/JOIN\\s+(`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+)(?:\\s+(?:AS\\s+)?(`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+))?/i).collect do |name|\r\n ((name[1] =~ /^ON$/i) ? name[0] : (name[1] || name[0])).gsub(/^[`\"\\[]?(.*)[`\"\\]]?$/, '\\1')\r\n end\r\n end",
"def split_join_table_alias\n associated_class.dataset.split_alias(self[:join_table])\n end",
"def to_table_reference(table_alias=nil)\n \"(#{sql})#{\" #{quote_identifier(table_alias)}\" if table_alias}\"\n end",
"def join(table, field1, field2, join_type = 'INNER JOIN')\n @join = \" #{join_type} #{table} ON #{@from}.#{field1}=#{table}.#{field2}\"\n\n self\n end",
"def table_alias\r\n @table_alias || from_table_name\r\n end",
"def inner_polymorphic_join(target, options = {})\n options[:on] ||= table_name\n options[:on_table_name] ||= connection.quote_table_name(options[:on])\n options[:target_table] ||= connection.quote_table_name(target.to_s.pluralize)\n options[:as] ||= \"owner\"\n postgres = ::ActiveRecord::Base.connection.adapter_name == \"PostgreSQL\"\n \"INNER JOIN #{options[:target_table]} ON #{options[:target_table]}.id = #{options[:on_table_name]}.#{options[:as]}_id AND \" +\n \"#{options[:on_table_name]}.#{options[:as]}_type = #{postgres ? \"E\" : \"\"}'#{target.to_s.camelize}'\"\n end",
"def extend_sql_avoiding_table_naming_clashes!(sql, addition)\r\n used_table_aliases = table_aliases_from_join_fragment(addition)\r\n old_table_aliases = table_aliases_from_join_fragment(sql)\r\n (used_table_aliases & old_table_aliases).each do |join_table_alias|\r\n i = 0\r\n begin\r\n i += 1\r\n new_alias = \"renamed_join_table_#{i}\"\r\n end until !used_table_aliases.include?(new_alias)\r\n convert_table_name_to_new_alias!(sql, join_table_alias, new_alias)\r\n end\r\n sql << \" #{addition} \"\r\n end",
"def construct_simple_join_sql(num)\n connection = klass.connection\n key_value_table = klass.table_name\n\n main_table = definition.klass.table_name\n main_table_pk, value_table_fk_main = reflection_keys(definition.reflection_by_name(definition.klass, relation))\n\n join_sql = \"\\n INNER JOIN #{connection.quote_table_name(key_value_table)} #{key_value_table}_#{num} ON (#{connection.quote_table_name(main_table)}.#{connection.quote_column_name(main_table_pk)} = #{key_value_table}_#{num}.#{connection.quote_column_name(value_table_fk_main)})\"\n return join_sql\n end",
"def build_join_expression(exps, other, type)\n return ['true', []] if type == :cross\n\n a_heads = headers\n b_heads = other.headers\n common_heads = a_heads & b_heads\n b_common_heads = []\n if exps.empty?\n if common_heads.empty?\n msg = \"#{type}-join with no common column names needs join expression\"\n raise UserError, msg\n else\n # A Natural join on all common heads\n common_heads.each do |h|\n ensure_common_types!(self_h: h, other_h: h, other: other)\n end\n nat_exp = common_heads.map { |h| \"(#{h}_a == #{h}_b)\" }.join(' && ')\n [nat_exp, common_heads]\n end\n else\n # We have join expressions to evaluate\n and_conds = []\n partial_result = nil\n last_sym = nil\n exps.each do |exp|\n case exp\n when Symbol\n case exp.to_s.clean\n when /\\A(?<sy>.*)_a\\z/\n a_head = Regexp.last_match[:sy].to_sym\n unless a_heads.include?(a_head)\n raise UserError, \"no column '#{a_head}' in table\"\n end\n\n if partial_result\n # Second of a pair\n ensure_common_types!(self_h: a_head,\n other_h: last_sym,\n other: other)\n partial_result << \"#{a_head}_a)\"\n and_conds << partial_result\n partial_result = nil\n else\n # First of a pair of _a or _b\n partial_result = +\"(#{a_head}_a == \"\n end\n last_sym = a_head\n when /\\A(?<sy>.*)_b\\z/\n b_head = Regexp.last_match[:sy].to_sym\n unless b_heads.include?(b_head)\n raise UserError, \"no column '#{b_head}' in second table\"\n end\n\n if partial_result\n # Second of a pair\n ensure_common_types!(self_h: last_sym,\n other_h: b_head,\n other: other)\n partial_result << \"#{b_head}_b)\"\n and_conds << partial_result\n partial_result = nil\n else\n # First of a pair of _a or _b\n partial_result = +\"(#{b_head}_b == \"\n end\n b_common_heads << b_head\n last_sym = b_head\n else\n # No modifier, so must be one of the common columns\n unless partial_result.nil?\n # We were expecting the second of a modified pair, but got an\n # unmodified symbol instead.\n msg =\n \"follow '#{last_sym}' by qualified exp from the other table\"\n raise UserError, msg\n end\n # We have an unqualified symbol that must appear in both tables\n unless common_heads.include?(exp)\n msg = \"unqualified column '#{exp}' must occur in both tables\"\n raise UserError, msg\n end\n ensure_common_types!(self_h: exp, other_h: exp, other: other)\n and_conds << \"(#{exp}_a == #{exp}_b)\"\n b_common_heads << exp\n end\n when String\n # We have a string expression in which all column references must be\n # qualified.\n and_conds << \"(#{exp})\"\n else\n msg = \"invalid join expression '#{exp}' of class #{exp.class}\"\n raise UserError, msg\n end\n end\n [and_conds.join(' && '), b_common_heads]\n end\n end",
"def convert_table_name_to_new_alias!(sql, old_table_name, new_alias)\r\n regex = Regexp.new(\"(?:(?:JOIN|AS)?\\\\s+|\\\\()[`\\\"\\\\[]?#{old_table_name}[`\\\"\\\\]]?(?:\\\\s+(?:AS\\\\s+)?(?:`[^`]+`|\\\"[^\\\"]+\\\"|\\\\[[^\\\\]]+\\\\]|\\\\S+)|\\\\.|\\\\s)\", Regexp::IGNORECASE)\r\n sql.gsub!(regex) do |match|\r\n prefix = (match =~ /^\\(/) ? '(' : ''\r\n suffix = match.gsub(/^.*?(\\s+ON|.)$/i, '\\1')\r\n if test = match.match(/^JOIN\\s+(?:`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+)(\\s+(?:AS\\s+)?(?:`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+))/i) and !(test.captures.first =~ /^ ON$/i)\r\n # If the table name is already aliased within this match, don't replace it\r\n result = match\r\n else\r\n replacement = \"JOIN #{old_table_name} AS #{new_alias}\" if match =~ /^JOIN\\s/i\r\n replacement = \"AS #{new_alias}\" if match =~ /^AS\\s/i\r\n replacement = \" #{new_alias}\" unless match =~ /^(JOIN|AS)\\s/i\r\n result = \"#{prefix}#{replacement}#{suffix}\"\r\n end\r\n result\r\n end\r\n end",
"def join_dependency\n @join_dependency ||= (\n build_join_dependency(\n Arel::SelectManager.new(table.engine, table),\n joins_values\n ) && @join_dependency\n )\n end",
"def calc_from_clause(our_join = join, our_tables = tables)\n implicits = [model.table_name] + our_tables\n result = implicits.uniq.map { |x| \"`#{x}`\" }.join(\", \")\n if our_join\n result += \" \"\n result += calc_join_conditions(model.table_name, our_join).join(\" \")\n end\n result\n end",
"def needs_join_table(table_name1, type, table_name2, clause, join_name = nil)\n join_name ||= \"#{table_name1}=#{type}=#{table_name2}\"\n @needed_join_tables[join_name] ||= {}\n @needed_join_tables[join_name][table] ||= begin\n # define join for this part ('table' = unique for each part)\n\n # don't add to list of tables, just get unique alias name\n second_table = get_alias(table_name2)\n\n # create join\n first_table = table(table_name1)\n\n @join_tables[first_table] ||= []\n @join_tables[first_table] << \"#{type} JOIN #{second_table} ON #{clause.gsub('TABLE1',first_table).gsub('TABLE2',second_table)}\"\n second_table\n end\n end",
"def default_join_table_qualification\n :symbol\n end",
"def add_joins!(sql, options, scope = :auto)\r\n scope = scope(:find) if :auto == scope\r\n join = (scope && scope[:joins]) || options[:joins]\r\n return if join.blank?\r\n extend_sql_avoiding_table_naming_clashes!(sql, scope && scope[:joins])\r\n extend_sql_avoiding_table_naming_clashes!(sql, options[:joins])\r\n end",
"def join_from_sql(type, sql)\n if(from = @opts[:from][1..-1]).empty?\n raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join]\n else\n sql << SPACE << type.to_s << SPACE\n source_list_append(sql, from)\n select_join_sql(sql)\n end\n end",
"def table_alias\n @target_alias\n end",
"def table\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.expression\n else\n @table_expr\n end\n end",
"def join(other, *exps, join_type: :inner)\n unless other.is_a?(Table)\n raise UserError, 'need other table as first argument to join'\n end\n unless JOIN_TYPES.include?(join_type)\n raise UserError, \"join_type may only be: #{JOIN_TYPES.join(', ')}\"\n end\n\n # These may be needed for outer joins.\n self_row_nils = headers.map { |h| [h, nil] }.to_h\n other_row_nils = other.headers.map { |h| [h, nil] }.to_h\n join_exp, other_common_heads =\n build_join_expression(exps, other, join_type)\n ev = Evaluator.new\n result = empty_dup\n other_rows = other.rows\n other_row_matches = Array.new(other_rows.size, false)\n rows.each do |self_row|\n self_row_matched = false\n other_rows.each_with_index do |other_row, k|\n # Same as other_row, but with keys that are common with self and equal\n # in value, removed, so the output table need not repeat them.\n locals = build_locals_hash(row_a: self_row, row_b: other_row)\n matches = ev.evaluate(join_exp, locals: locals)\n next unless matches\n\n self_row_matched = other_row_matches[k] = true\n out_row = build_out_row(row_a: self_row, row_b: other_row,\n common_heads: other_common_heads,\n type: join_type)\n result << out_row\n end\n next unless [:left, :full].include?(join_type)\n next if self_row_matched\n\n result << build_out_row(row_a: self_row,\n row_b: other_row_nils,\n type: join_type)\n end\n if [:right, :full].include?(join_type)\n other_rows.each_with_index do |other_row, k|\n next if other_row_matches[k]\n\n result << build_out_row(row_a: self_row_nils,\n row_b: other_row,\n type: join_type)\n end\n end\n result.normalize_boundaries\n result\n end",
"def join_association_4_2(table, association, join_type, options = {})\n aliases = options.fetch(:aliases, []).index_by(&:table_name)\n associations = association.is_a?(Array) ? association : [association]\n join_dependency = ActiveRecord::Associations::JoinDependency.new(table, associations, [])\n\n constraints = join_dependency.join_constraints([])\n\n binds = constraints.flat_map do |info|\n info.binds.map { |bv| table.connection.quote(*bv.reverse) }\n end\n\n joins = constraints.flat_map do |constraint|\n constraint.joins.map do |join|\n right = if block_given?\n yield join.left.name.to_sym, join.right\n else\n join.right\n end\n\n join.left.table_alias = aliases[join.left.name].name if aliases.key?(join.left.name)\n\n join_type.new(join.left, right)\n end\n end\n\n join_strings = joins.map do |join|\n to_sql(join, table, binds)\n end\n\n join_strings.join(' ')\n end",
"def join_from_sql(type, sql)\n if(from = @opts[:from][1..-1]).empty?\n raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join]\n else\n sql << ' ' << type.to_s << ' '\n source_list_append(sql, from)\n select_join_sql(sql)\n end\n end",
"def find_alias(associations)\n if BabySqueel::ActiveRecord::VersionHelper.at_least_6_1?\n # construct_tables! got removed by rails\n # https://github.com/rails/rails/commit/590b045ee2c0906ff162e6658a184afb201865d7\n #\n # construct_tables_for_association! is a method from the polyamorous (ransack) gem\n join_root = join_dependency.send(:join_root)\n join_root.each_children do |parent, child|\n join_dependency.construct_tables_for_association!(parent, child)\n end\n else\n # If we tell join_dependency to construct its tables, Active Record\n # handles building the correct aliases and attaching them to its\n # JoinDepenencies.\n join_dependency.send(:construct_tables!, join_dependency.send(:join_root))\n end\n\n join_association = find_join_association(associations)\n join_association.table\n end",
"def join(other, join_definition, relation_aliases = {})\n joined = index.join(other.index, join_definition, relation_aliases)\n new(joined, other.index.aliases(joined))\n end",
"def inner_join(sym, **props)\n join(sym, **props.merge(type: :inner))\n end",
"def join_table_source\n cached_fetch(:join_table_source){split_join_table_alias[0]}\n end",
"def find_compatible_table_alias( clause, parent_query )\n alias_ = false\n\n # Sanity check. Only IN queries use the JOIN syntax .\n return alias_ if parent_query['operator'].blank? || 'IN' != parent_query['operator']\n\n # Since we're only checking IN queries, we're only concerned with OR relations.\n return alias_ if parent_query['relation'].blank? || 'OR' != parent_query['relation']\n\n compatible_operators = [ 'IN' ]\n parent_query.each do |sibling|\n next if !sibling.is_a?(Array) || !is_first_order_clause(sibling)\n next if sibling['alias'].blank? || sibling['operator'].blank?\n # The sibling must both have compatible operator to share its alias.\n if compatible_operators.include? sibling['operator'].upcase\n alias_ = sibling['alias']\n break\n end\n end\n\n alias_\n end",
"def aliased_table_name_for_with_sqlserver_support(name,suffix=nil)\n if !parent.table_joins.blank? && parent.table_joins.to_s.downcase =~ %r{join(\\s+\\w+)?\\s+#{Regexp.escape(active_record.connection.quote_table_name(name.downcase))}\\son}i\n @join_dependency.table_aliases[name] += 1\n end\n unless @join_dependency.table_aliases[name].zero?\n # if the table name has been used, then use an alias\n name = active_record.connection.table_alias_for \"#{pluralize(reflection.name)}_#{parent_table_name}#{suffix}\"\n table_index = @join_dependency.table_aliases[name]\n @join_dependency.table_aliases[name] += 1\n name = name[0..active_record.connection.table_alias_length-3] + \"_#{table_index+1}\" if table_index > 0\n else\n @join_dependency.table_aliases[name] += 1\n end\n name\n end",
"def get_alias(use_name, table_name = nil, avoid_alias = true)\n table_name ||= use_name\n\n base = use_name[0..1]\n list = (@unique_alias[base] ||= [])\n list2 = @table_alias[use_name] ||= []\n if avoid_alias && !@tables.include?(table_name)\n alias_name = use_name\n elsif @tables.include?(use_name)\n # links, li1, li2, li3\n alias_name = \"#{base}#{list.size}\"\n else\n # ob1, obj2, objects\n alias_name = \"#{base}#{list.size + 1}\"\n end\n\n # We add to both because @table_alias[use_name] is used in table(use_name)\n # and @table_alias[use_name]\n list << alias_name\n list2 << alias_name\n alias_name\n end",
"def add_join(join)\n @clause[:final_join] = join.implicit_joins\n end",
"def append_join(join)\n @clause[:join] << join.string \n @clause[:join] << join.implicit_joins\n end",
"def nested_alias_escape(query, alias_name)\n sql_query = generate_grouping(query)\n Arel::Nodes::As.new(sql_query, to_arel_sql(double_quote(alias_name)))\n end",
"def merge_using(source, join_condition)\n clone(:merge_using => [source, join_condition].freeze)\n end",
"def _join_table_dataset(opts)\n ds = model.db.from(opts.join_table_source)\n opts[:join_table_block] ? opts[:join_table_block].call(ds) : ds\n end",
"def left_joins_by_alias\n {}\n end",
"def with_query\n Arel::Nodes::As.new(recursive_table, union_term.arel)\n end",
"def eager_graph_association(ds, model, ta, requirements, r, *associations)\n if r.is_a?(SQL::AliasedExpression)\n alias_base = r.alias\n r = r.expression\n else\n alias_base = r[:graph_alias_base]\n end\n assoc_table_alias = ds.unused_table_alias(alias_base)\n loader = r[:eager_grapher]\n if !associations.empty?\n if associations.first.respond_to?(:call)\n callback = associations.first\n associations = {}\n elsif associations.length == 1 && (assocs = associations.first).is_a?(Hash) && assocs.length == 1 && (pr_assoc = assocs.to_a.first) && pr_assoc.first.respond_to?(:call)\n callback, assoc = pr_assoc\n associations = assoc.is_a?(Array) ? assoc : [assoc]\n end\n end\n local_opts = ds.opts[:eager_graph][:local]\n limit_strategy = r.eager_graph_limit_strategy(local_opts[:limit_strategy])\n\n if r[:conditions] && !Sequel.condition_specifier?(r[:conditions]) && !r[:orig_opts].has_key?(:graph_conditions) && !r[:orig_opts].has_key?(:graph_only_conditions) && !r.has_key?(:graph_block)\n Sequel::Deprecation.deprecate(\"Ignoring :conditions for #{r[:model]} #{r[:name]} association during eager_graph/association_join, consider specifying :graph_block\") unless r[:ignore_conditions_warning]\n end\n\n ds = loader.call(:self=>ds, :table_alias=>assoc_table_alias, :implicit_qualifier=>(ta == ds.opts[:eager_graph][:master]) ? first_source : qualifier_from_alias_symbol(ta, first_source), :callback=>callback, :join_type=>local_opts[:join_type], :join_only=>local_opts[:join_only], :limit_strategy=>limit_strategy, :from_self_alias=>ds.opts[:eager_graph][:master])\n if r[:order_eager_graph] && (order = r.fetch(:graph_order, r[:order]))\n ds = ds.order_append(*qualified_expression(order, assoc_table_alias))\n end\n eager_graph = ds.opts[:eager_graph]\n eager_graph[:requirements][assoc_table_alias] = requirements.dup\n eager_graph[:reflections][assoc_table_alias] = r\n if limit_strategy == :ruby\n eager_graph[:limits][assoc_table_alias] = r.limit_and_offset \n end\n eager_graph[:cartesian_product_number] += r[:cartesian_product_number] || 2\n ds = ds.eager_graph_associations(ds, r.associated_class, assoc_table_alias, requirements + [assoc_table_alias], *associations) unless associations.empty?\n ds\n end",
"def join(table,options={})\n # defaults\n options[:type] ||= \"inner\"\n options[:alias] ||= table\n options[:from] ||= @from_table\n\n case options[:type]\n when \"left\"\n @roots[table] = @alias[options[:from]].join(table, Java::javax.persistence.criteria.JoinType::LEFT)\n else\n # default to inner join\n @roots[table] = @alias[options[:from]].join(table)\n end\n\n # set alias\n # @alias[options[:from]] = @roots[table]\n @alias[options[:alias]] = @roots[table]\n @alias[table] = @roots[table]\n\n self\n end",
"def natjoin2_outer(tbl2, missing_value=nil, retain_left=true, retain_right=true)\n natjoin2(tbl2, missing_value, retain_left, retain_right)\n end",
"def add_join_table( id_left, table_left, id_right, table_right, name = nil, &block )\n name ||= [ table_left, table_right ].sort.join( '_' )\n add_table name do\n foreign_key id_left, table_left\n foreign_key id_right, table_right\n primary_key [ id_left, id_right ]\n unique [ id_right, id_left ]\n instance_eval &block if block\n end\n end",
"def join_alias(join)\r\n table_name = join.model_class.table_name\r\n new_alias = table_name\r\n if @join_aliases[table_name]\r\n new_alias = \"#{pluralize(join.reflection)}_#{join.parent.model_class.table_name}\"\r\n if @join_aliases[table_name].include? new_alias\r\n new_alias += '1'\r\n while @join_aliases[table_name].include? new_alias\r\n new_alias = new_alias.succ\r\n end\r\n end\r\n end\r\n (@join_aliases[table_name] ||= []) << new_alias\r\n return new_alias\r\n end",
"def joins(tables,options={})\n # now check for dot notiation\n dot_notation = tables.split \".\"\n parent_table = nil\n options = {}\n dot_notation.each do |j_table|\n options[:from] = parent_table unless parent_table.nil?\n options[:alias] = j_table unless parent_table.nil?\n join(j_table,options)\n parent_table = j_table\n end\n\n self\n end",
"def wrap_with_agg_array(arel_or_rel_query, alias_name, order_by: false, distinct: false)\n distinct = !(!distinct)\n order_exp = distinct ? nil : order_by # Can't order a distinct agg\n query = group_when_needed(arel_or_rel_query)\n query =\n Arel::Nodes::AggregateFunctionName\n .new(\"ARRAY_AGG\", to_sql_array(query), distinct)\n .order_by(order_exp)\n\n nested_alias_escape(query, alias_name)\n end",
"def table_alias_name(value)\n data.table_alias_name = value\n end",
"def construct_join_sql(key_relation, num)\n join_sql = \"\"\n connection = klass.connection\n key = key_relation.to_s.singularize.to_sym\n\n key_table = definition.reflection_by_name(klass, key).table_name\n value_table = klass.table_name.to_s\n\n value_table_fk_key, key_table_pk = reflection_keys(definition.reflection_by_name(klass, key))\n\n main_reflection = definition.reflection_by_name(definition.klass, relation)\n if main_reflection\n main_table = definition.klass.table_name\n main_table_pk, value_table_fk_main = reflection_keys(definition.reflection_by_name(definition.klass, relation))\n\n join_sql = \"\\n INNER JOIN #{connection.quote_table_name(value_table)} #{value_table}_#{num} ON (#{main_table}.#{main_table_pk} = #{value_table}_#{num}.#{value_table_fk_main})\"\n value_table = \" #{value_table}_#{num}\"\n end\n join_sql += \"\\n INNER JOIN #{connection.quote_table_name(key_table)} #{key_table}_#{num} ON (#{key_table}_#{num}.#{key_table_pk} = #{value_table}.#{value_table_fk_key}) \"\n\n return join_sql\n end",
"def addTableAlias(theAlias)\n @metadata.addTableAlias(theAlias)\n end",
"def graph_alias_columns(graph_aliases)\n gas = {}\n identifiers = graph_aliases.map do |col_alias, tc| \n table, column, value = Array(tc)\n column ||= col_alias\n gas[col_alias] = [table, column].freeze\n identifier = value || SQL::QualifiedIdentifier.new(table, column)\n identifier = SQL::AliasedExpression.new(identifier, col_alias) if value || column != col_alias\n identifier\n end\n [identifiers, gas]\n end",
"def as(aliaz)\n ::Sequel::SQL::AliasedExpression.new(self, aliaz)\n end",
"def arel_table\n @arel_table ||= begin\n t= Arel::Table.new(table)\n t.table_alias = alias_name if alias_name != table\n t\n end\n end",
"def schema_ds_join(table_name, opts)\n [:information_schema__columns, {:table_catalog=>:table_catalog,\n :table_schema => :table_schema, :table_name => :table_name} , :c]\n end",
"def association_join_with_paranoid\n return @join if @join\n result = association_join_without_paranoid\n if reflection.klass.paranoid?\n aliased_table = Arel::Table.new(table_name, :as => @aliased_table_name, :engine => arel_engine)\n pb = ActiveRecord::PredicateBuilder.new(arel_engine)\n result.concat(pb.build_from_hash(reflection.klass.paranoid_condition, aliased_table))\n end\n result\n end",
"def column_aliases\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.columns\n end\n end",
"def create_join_excluded_tbl(preserve_null_pk = true)\n if @excluded_join_tbl.nil?\n join_list =join_list()\n cross_join_from = ''\n full_join_from = ''\n satisfied_tbl = create_satisfied_tbl()\n 0.upto(join_list.count-1) do |i|\n join = join_list.find{|j| j['id'] ==i }\n l_rel_list = join['l_rel_list']\n quals = join['quals']\n q = ReverseParseTree.whereClauseConst(quals)\n has_quals = (not join['quals'].nil?)\n join_type = ReverseParseTree.joinTypeConvert(join['jointype'].to_s, has_quals)\n\n r_rel = join['r_rel_list'][0]\n l_arg = (i==0 ? \"#{l_rel_list[0].relname} #{l_rel_list[0].relalias}\" : \"\")\n # for efficiency only change the last join to cross join\n if i == join_list.count-1\n cross_join_from = cross_join_from + \"#{l_arg} CROSS JOIN #{r_rel.relname} #{r_rel.relalias} \"\n else\n cross_join_from = cross_join_from + \"#{l_arg} #{join_type} #{r_rel.relname} #{r_rel.relalias} on #{q} \"\n end\n # full_join_from = full_join_from + \"#{l_arg} CROSS JOIN #{r_rel.relname} #{r_rel.relalias} on #{q}\"\n end\n @excluded_join_tbl = \"#{@table}_join_excluded\"\n # renamed_pk_col = @pk_full_list.map { |pk| \"#{pk['col']} as #{pk['alias']}_pk\" }.join(', ')\n \n if preserve_null_pk\n renamed_pk_col = @pk_full_list.map { |pk| \"#{pk['col']} as #{pk['alias']}_pk\" }.join(', ')\n else\n renamed_pk_col = @pk_full_list.map do |pk|\n pkcol = @all_cols.find{|col| col.colname == pk['colname'] and col.relname==pk['relname']}\n \"COALESCE(#{pkcol.select_name},#{pkcol.null_replacement}) as #{pkcol.colalias}_pk\"\n end.join(',')\n end\n targetListReplacement = \"#{renamed_pk_col},#{@all_cols_select}\"\n query = ReverseParseTree.reverseAndreplace(@parseTree, targetListReplacement, '')\n old_from = from_query()\n # cross join\n all_cols_renamed()\n cross_join_query = query.gsub(/#{old_from}/i,cross_join_from)\n # pk_join_satisfied_tbl = @pk_full_list.map { |pk| \"t.#{pk['alias']}_pk = s.#{pk['alias']}_pk\" }.join(' AND ')\n # pk_not_in_satisfied_tbl = @pk_full_list.map { |pk| \"s.#{pk['alias']}_pk is null\" }.join(' OR ')\n\n create_tbl_query = \"select * from #{satisfied_tbl} where 1=2\"\n create_tbl_query = QueryBuilder.create_tbl(@excluded_join_tbl, '', create_tbl_query)\n DBConn.exec(create_tbl_query)\n # limit to 1000 rows due to resource limitation\n cross_join_query = \"with cross_join as (#{cross_join_query} limit 1000) INSERT INTO #{@excluded_join_tbl} select * from (select t.* from cross_join as t except select * from #{satisfied_tbl}) as tmp\"\n puts cross_join_query\n DBConn.exec(cross_join_query)\n\n # unless preserve_null_pk\n # pk = @pk_full_list.map { |pk| \"#{pk['alias']}_pk\" }.join(',')\n # DBConn.update_null_columns(@excluded_join_tbl,pk)\n # end\n # # full join\n # full_join_query = query.gsub(old_from,full_join_from)\n # full_join_query = \"(#{full_join_query} except select #{@all_cols_renamed} from #{satisfied_tbl})\"\n # full_join_query = \"INSERT INTO #{@excluded_tbl} #{full_join_query}\"\n # DBConn.exec(query)\n end\n return @excluded_join_tbl\n end",
"def prepend_join(join)\n @clause[:join] = join.string << @clause[:join]\n @clause[:join] << join.implicit_joins\n end",
"def combine_expression_with_rule(squeel, left_expression, joins, rule)\n right_expression, right_expression_joins = build_expression_from_rule(squeel, rule)\n\n operator = rule.base_behavior ? :| : :&\n combine_squeel_expressions(left_expression, joins, operator, right_expression,\n right_expression_joins)\n end",
"def initialize(join_type, table_expr)\n @join_type = join_type\n @table_expr = table_expr\n freeze\n end",
"def test_003\n\n target_sql = \"select d.id as id,\nd.taxnumber as taxnumber,\nd.social_security_type as social_security_type,\nd.taxnumber_exemption as taxnumber_exemption\nfrom distributors d\nleft join distributor_addons da on (d.id = da.distributor_id)\nwhere d.id = (11,12,13,14,15,16)\norder by d.id\"\n \n @sql.select do\n d :id, :taxnumber, :social_security_type, :taxnumber_exemption\n end\n\n @sql.from(distributors: 'd') do\n left_join distributor_addons: 'da', on: 'd.id = da.distributor_id'\n end\n\n dist_ids = [11,12,13,14,15,16]\n \n @sql.where do\n d id: dist_ids\n end\n\n @sql.order('d.id')\n\n assert_equal @sql.to_s, target_sql\n end",
"def translation_join_str(language, options = {})\n main_tname = options[:table_alias] || self.table_name\n tname = options[:translation_table_alias] || self.translation_table_name\n \"JOIN #{tname} ON #{main_tname}.#{self.primary_key}=#{tname}.#{self.translation_foreign_key} and #{tname}.#{self.language_column}='#{language.to_s}'\"\n end",
"def quote_identifier_append(sql, name)\n name = (table_mappings[name.to_sym] || name) if name.respond_to?(:to_sym)\n super(sql, name)\n end",
"def table_alias_for(table_name)\n table_name.gsub(/\\./, '_')\n end",
"def symbol_to_column_ref(sym)\n c_table, column, c_alias = split_symbol(sym)\n \"#{\"#{quote_identifier(c_table)}.\" if c_table}#{quote_identifier(column)}#{\" AS #{quote_identifier(c_alias)}\" if c_alias}\"\n end",
"def join(*args)\n\t\tif args.count > 1\n\t\t\tjoins = args.map { |arg| \"INNER JOIN #{arg} ON #{arg}.#{table}_id = #{table}.id\"}.join(\" \")\n\t\t\trows = connection.execute <<-SQL \n\t\t\t\tSELECT * FROM #{table} #{joins};\n\t\t\tSQL\n\t\telse\n\t\t\tcase args.first\n\t\t\twhen String\n\t\t\t\trows = connection.execute <<-SQL\n\t\t\t\t\tSELECT * FROM #{table} #{BlocRecord::Utility.sql_strings(args.first)};\n\t\t\t\tSQL\n\t\t\twhen Symbol\n\t\t\t\trows = connection.execute <<-SQL \n\t\t\t\t\tSELECT * FROM #{table}\n\t\t\t\t\tINNER JOIN #{args.first} ON #{arg.first}.#{table}_id = #{table}.id;\n\t\t\t\tSQL\n\t\t\twhen Hash \n\t\t\t\t#extract the options from the hash\n\t\t\t\tsecond_table = args[0].keys.first \n\t\t\t\tthird_table = args[0].keys.first\n\t\t\t\trows = connection.execute <<-SQL \n\t\t\t\t\tSELECT * FROM #{table}\n\t\t\t\t\tINNER JOIN #{second_table} ON #{second_table}.#{table}_id = #{table}.id\n\t\t\t\t\tINNER JOIN #{third_table} ON #{third_table}.#{second_table}_id = #{second_table}.id;\n\t\t\t\tSQL\n\n\t\t\tend \n\t\tend\n\t\trows_to_array(rows)\n\tend",
"def apply_lookup(scope, cast, relationship, property)\n remote_table_alias = \"lookup#{next_lookup_id}___#{relationship.foreign_key}\"\n remote_id = \"#{remote_table_alias}.id\"\n scope = scope.joins(\n <<-SQL\n left join #{relationship.klass.table_name} as #{remote_table_alias}\n on (#{scope.table_name}.#{relationship.foreign_key}::text) = (#{remote_id}::text)\n SQL\n )\n return scope, \"(#{column_name(scope, remote_table_alias, property)}::#{cast})\"\n end",
"def build_query(table_name, where_expression)\n query = %Q{SELECT *\\n FROM \"#{table_name}\"}\n unless where_expression.nil?\n query << \"\\n WHERE #{where_expression}\"\n end\n unless (pk = find_primary_key(table_name)).empty?\n query << \"\\n ORDER BY \" << pk.join(', ')\n end\n return query\nend",
"def alias_decls; end",
"def evaluate_joins(scope, &block)\n dependency = evaluate!(scope, &block)._arel\n join_arel = Nodes.unwrap(dependency._arel)\n [join_arel, dependency.bind_values]\n end",
"def build_table_aliases(from)\n # for the targets\n returning({}) do |aliases|\n from.map(&:to_s).sort.map(&:to_sym).each_with_index do |plural, t_index|\n table = plural._as_class.table_name\n plural._as_class.columns.map(&:name).each_with_index do |field, f_index|\n aliases[\"#{table}.#{field}\"] = \"t#{t_index}_r#{f_index}\"\n end\n end\n end\n end",
"def link association_name, src_end, src, trg_end, trg\n trg_setter= \"#{trg_end}=\".to_sym\n if src_end\n src_setter= \"#{src_end}=\".to_sym\n trg.each do |t| \n old_src= t.send src_end\n old_src= old_src - src if old_src\n t.send src_setter, src\n end\n end\n src.each do |s|\n old_trg= s.send trg_end\n old_trg= old_trg - trg if old_trg\n s.send trg_setter, trg\n end\n end",
"def add_joins_to_relation(rel, joins)\n return rel.joins(Report::Join.list_to_sql(joins))\n end",
"def join_query(*queries, *tables, join_type=nil, *conditions)\n\tquery = \"select \"\n\tqueries.each do |statement|\n\t\tquery += \" #{statement}\"\n\tend\n\n\tquery += \" from\"\n\n\ttables.each do |table|\n\t\tquery += \"#{table}\"\n\tend\n\n\tquery += \"#{join_type}\" if join_type != nil\n\n\tif join_type == \"INNER\"\n\t\tquery += \"INNER JOIN\"\n\t\tconditions.each do |condition|\n\t\t\tquery += \"ON\"\n\t\t\tquery += \" #{condition}\"\n\t\tend\n\tend\n\n\tif join_type == \"CROSS\"\n\t\tquery += \"CROSS JOIN\"\n\t\tquery += conditions[0]\n\tend\n\n\treturn query\n\nend",
"def aliased_subquery(subquery)\n \"#{subquery.to_subquery} AS #{visit_identifier(subquery.name)}\"\n ensure\n reset_query_state\n end",
"def target_name\n return alias_name || expression || aggregation_type.to_s\n end",
"def add_table(use_name, table_name = nil, avoid_alias = true, type = nil, &block)\n alias_name = get_alias(use_name, table_name, avoid_alias)\n add_alias_to_tables(table_name || use_name, alias_name, type, &block)\n end",
"def addColumnAlias(theAlias)\n @metadata.addColumnAlias(theAlias)\n end",
"def _arel\n if _on\n [_join.new(_table, _on)]\n else\n @associations.each.with_index.inject([]) do |joins, (assoc, i)|\n construct @associations[0..i], joins, assoc._join\n end\n end\n end",
"def target_alias\n @model.table_name\n end",
"def column_alias_for(field)\n column_alias = +field\n column_alias.gsub!(/\\*/, \"all\")\n column_alias.gsub!(/\\W+/, \" \")\n column_alias.strip!\n column_alias.gsub!(/ +/, \"_\")\n @connection.table_alias_for(column_alias)\n end",
"def alias(name)\n Column.new(jcolumn.as(name))\n end",
"def link!\n base = ::ActiveRecord::Associations::ClassMethods::JoinDependency.new(\n @model, [], nil\n )\n \n @fields.each { |field|\n field.model ||= @model\n field.columns.each { |col|\n field.associations[col] = associations(col.__stack.clone)\n field.associations[col].each { |assoc| assoc.join_to(base) }\n }\n }\n \n @attributes.each { |attribute|\n attribute.model ||= @model\n attribute.columns.each { |col|\n attribute.associations[col] = associations(col.__stack.clone)\n attribute.associations[col].each { |assoc| assoc.join_to(base) }\n }\n }\n end",
"def join(args, joiner=nil)\n raise Error, 'argument to Sequel.join must be an array' unless args.is_a?(Array)\n if joiner\n args = args.zip([joiner]*args.length).flatten\n args.pop\n end\n\n return SQL::StringExpression.new(:NOOP, '') if args.empty?\n\n args = args.map do |a|\n case a\n when Symbol, ::Sequel::SQL::Expression, ::Sequel::LiteralString, TrueClass, FalseClass, NilClass\n a\n else\n a.to_s\n end\n end\n SQL::StringExpression.new(:'||', *args)\n end",
"def add_lock!(sql, options)\n if (lock = options[:lock]) && sql =~ /\\A\\s*SELECT/mi\n # Check for and extract the :limit/:offset sub-query\n if sql =~ /\\A(\\s*SELECT t\\.\\* FROM \\()(.*)(\\) AS t WHERE t._row_num BETWEEN \\d+ AND \\d+\\s*)\\Z/m\n prefix, subselect, suffix = [$1, $2, $3]\n add_lock!(subselect, options)\n return sql.replace(prefix + subselect + suffix)\n end\n unless sql =~ SELECT_FROM_WHERE_RE\n # If you get this error, this driver probably needs to be fixed.\n raise NotImplementedError, \"Don't know how to add_lock! to SQL statement: #{sql.inspect}\"\n end\n select_clause, from_word, from_tables, where_clause = $1, $2, $3, $4\n with_clause = lock.is_a?(String) ? \" #{lock} \" : \" WITH(ROWLOCK,UPDLOCK) \"\n\n # Split the FROM clause into its constituent tables, and add the with clause after each one.\n new_from_tables = []\n scanner = StringScanner.new(from_tables)\n until scanner.eos?\n prev_pos = scanner.pos\n if scanner.scan_until(/,|(INNER\\s+JOIN|CROSS\\s+JOIN|(LEFT|RIGHT|FULL)(\\s+OUTER)?\\s+JOIN)\\s+/mi)\n join_operand = scanner.pre_match[prev_pos..-1]\n join_operator = scanner.matched\n else\n join_operand = scanner.rest\n join_operator = \"\"\n scanner.terminate\n end\n\n # At this point, we have something like:\n # join_operand == \"appointments \"\n # join_operator == \"INNER JOIN \"\n # or:\n # join_operand == \"appointment_details AS d1 ON appointments.[id] = d1.[appointment_id]\"\n # join_operator == \"\"\n if join_operand =~ /\\A(.*)(\\s+ON\\s+.*)\\Z/mi\n table_spec, on_clause = $1, $2\n else\n table_spec = join_operand\n on_clause = \"\"\n end\n\n # Add the \"WITH(ROWLOCK,UPDLOCK)\" option to the table specification\n table_spec << with_clause unless table_spec =~ /\\A\\(\\s*SELECT\\s+/mi # HACK - this parser isn't so great\n join_operand = table_spec + on_clause\n\n # So now we have something like:\n # join_operand == \"appointments WITH(ROWLOCK,UPDLOCK) \"\n # join_operator == \"INNER JOIN \"\n # or:\n # join_operand == \"appointment_details AS d1 WITH(ROWLOCK,UPDLOCK) ON appointments.[id] = d1.[appointment_id]\"\n # join_operator == \"\"\n\n new_from_tables << join_operand\n new_from_tables << join_operator\n end\n sql.replace( select_clause.to_s << from_word.to_s << new_from_tables.join << where_clause.to_s )\n end\n sql\n end",
"def alias_node_query\n Category.left_outer_joins(:articles)\n .join_recursive do |query|\n query\n .connect_by(id: :parent_id)\n end\n end",
"def wrap_with_array(arel_or_rel_query, alias_name, order_by: false)\n if order_by && arel_or_rel_query.is_a?(ActiveRecord::Relation)\n arel_or_rel_query = arel_or_rel_query.order(order_by)\n end\n\n query = Arel::Nodes::Array.new(to_sql_array(arel_or_rel_query))\n nested_alias_escape(query, alias_name)\n end",
"def join_table_name(hash, options)\n entries = hash.values\n raise Error, \"must have 2 entries in hash given to (create|drop)_join_table\" unless entries.length == 2\n if options[:name]\n options[:name]\n else\n table_names = entries.map{|e| join_table_name_extract(e)}\n table_names.map(&:to_s).sort.join('_')\n end\n end",
"def summarize_per_relation(generator)\n @from = \"#{generator.to_subquery} AS #{visit_identifier(generator.name)} NATURAL LEFT JOIN #{@from}\"\n end",
"def graph(dataset, join_conditions = nil, options = OPTS, &block)\n # Allow the use of a dataset or symbol as the first argument\n # Find the table name/dataset based on the argument\n table_alias = options[:table_alias]\n table = dataset\n create_dataset = true\n\n case dataset\n when Symbol\n # let alias be the same as the table name (sans any optional schema)\n # unless alias explicitly given in the symbol using ___ notation\n table_alias ||= split_symbol(table).compact.last\n when Dataset\n if dataset.simple_select_all?\n table = dataset.opts[:from].first\n table_alias ||= table\n else\n table_alias ||= dataset_alias((@opts[:num_dataset_sources] || 0)+1)\n end\n create_dataset = false\n when SQL::Identifier\n table_alias ||= table.value\n when SQL::QualifiedIdentifier\n table_alias ||= split_qualifiers(table).last\n when SQL::AliasedExpression\n return graph(table.expression, join_conditions, {:table_alias=>table.alias}.merge!(options), &block)\n else\n raise Error, \"The dataset argument should be a symbol or dataset\"\n end\n table_alias = table_alias.to_sym\n\n if create_dataset\n dataset = db.from(table)\n end\n\n # Raise Sequel::Error with explanation that the table alias has been used\n raise_alias_error = lambda do\n raise(Error, \"this #{options[:table_alias] ? 'alias' : 'table'} has already been been used, please specify \" \\\n \"#{options[:table_alias] ? 'a different alias' : 'an alias via the :table_alias option'}\") \n end\n\n # Only allow table aliases that haven't been used\n raise_alias_error.call if @opts[:graph] && @opts[:graph][:table_aliases] && @opts[:graph][:table_aliases].include?(table_alias)\n \n table_alias_qualifier = qualifier_from_alias_symbol(table_alias, table)\n implicit_qualifier = options[:implicit_qualifier]\n ds = self\n\n # Use a from_self if this is already a joined table (or from_self specifically disabled for graphs)\n if (@opts[:graph_from_self] != false && !@opts[:graph] && joined_dataset?)\n from_selfed = true\n implicit_qualifier = options[:from_self_alias] || first_source\n ds = ds.from_self(:alias=>implicit_qualifier)\n end\n \n # Join the table early in order to avoid cloning the dataset twice\n ds = ds.join_table(options[:join_type] || :left_outer, table, join_conditions, :table_alias=>table_alias_qualifier, :implicit_qualifier=>implicit_qualifier, :qualify=>options[:qualify], &block)\n\n return ds if options[:join_only]\n\n opts = ds.opts\n\n # Whether to include the table in the result set\n add_table = options[:select] == false ? 
false : true\n # Whether to add the columns to the list of column aliases\n add_columns = !ds.opts.include?(:graph_aliases) # SEQUEL5: Remove graph_aliases support\n\n if graph = opts[:graph]\n graph = graph.dup\n select = opts[:select].dup\n [:column_aliases, :table_aliases, :column_alias_num].each{|k| graph[k] = graph[k].dup}\n else\n # Setup the initial graph data structure if it doesn't exist\n qualifier = ds.first_source_alias\n master = alias_symbol(qualifier)\n raise_alias_error.call if master == table_alias\n\n # Master hash storing all .graph related information\n graph = {}\n\n # Associates column aliases back to tables and columns\n column_aliases = graph[:column_aliases] = {}\n\n # Associates table alias (the master is never aliased)\n table_aliases = graph[:table_aliases] = {master=>self}\n\n # Keep track of the alias numbers used\n ca_num = graph[:column_alias_num] = Hash.new(0)\n\n # All columns in the master table are never\n # aliased, but are not included if set_graph_aliases\n # has been used.\n if add_columns\n if (select = @opts[:select]) && !select.empty? && !(select.length == 1 && (select.first.is_a?(SQL::ColumnAll)))\n select = select.map do |sel|\n raise Error, \"can't figure out alias to use for graphing for #{sel.inspect}\" unless column = _hash_key_symbol(sel)\n column_aliases[column] = [master, column]\n if from_selfed\n # Initial dataset was wrapped in subselect, selected all\n # columns in the subselect, qualified by the subselect alias.\n Sequel.qualify(qualifier, Sequel.identifier(column))\n else\n # Initial dataset not wrapped in subslect, just make\n # sure columns are qualified in some way.\n qualified_expression(sel, qualifier)\n end\n end\n else\n select = columns.map do |column|\n column_aliases[column] = [master, column]\n SQL::QualifiedIdentifier.new(qualifier, column)\n end\n end\n end\n end\n\n # Add the table alias to the list of aliases\n # Even if it isn't been used in the result set,\n # we add a key for it with a nil value so we can check if it\n # is used more than once\n table_aliases = graph[:table_aliases]\n table_aliases[table_alias] = add_table ? dataset : nil\n\n # Add the columns to the selection unless we are ignoring them\n if add_table && add_columns\n column_aliases = graph[:column_aliases]\n ca_num = graph[:column_alias_num]\n # Which columns to add to the result set\n cols = options[:select] || dataset.columns\n # If the column hasn't been used yet, don't alias it.\n # If it has been used, try table_column.\n # If that has been used, try table_column_N \n # using the next value of N that we know hasn't been\n # used\n cols.each do |column|\n col_alias, identifier = if column_aliases[column]\n column_alias = :\"#{table_alias}_#{column}\"\n if column_aliases[column_alias]\n column_alias_num = ca_num[column_alias]\n column_alias = :\"#{column_alias}_#{column_alias_num}\" \n ca_num[column_alias] += 1\n end\n [column_alias, SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(table_alias_qualifier, column), column_alias)]\n else\n ident = SQL::QualifiedIdentifier.new(table_alias_qualifier, column)\n [column, ident]\n end\n column_aliases[col_alias] = [table_alias, column].freeze\n select.push(identifier)\n end\n end\n [:column_aliases, :table_aliases, :column_alias_num].each{|k| graph[k].freeze}\n ds = ds.clone(:graph=>graph.freeze)\n add_columns ? ds.select(*select) : ds\n end",
"def tables_and_joins\n sql = \"#{fact_class.table_name}\"\n cube_class.dimensions_hierarchies.each do |dimension_name, hierarchy_name|\n dimension_table_name = fact_class.dimension_class(dimension_name).table_name\n sql += \" LEFT JOIN #{dimension_table_name} as #{dimension_name}\"\n sql += \" ON #{fact_class.table_name}.\"\n sql += \"#{fact_class.dimension_relationships[dimension_name].foreign_key}\"\n sql += \" = #{dimension_name}.id\\n\"\n end\n sql\n end",
"def arel_table\n if associations.none?\n model.arel_table\n else\n # if the target attribute is in the same table as the model (the base table),\n # alias the table to avoid conflicting table from clauses\n # seems AR should do this for us...\n ref = reflections.last\n if ref.klass.table_name == model.table_name\n ref.klass.arel_table.alias(ref.alias_candidate(model.table_name))\n else\n ref.klass.arel_table\n end\n end\n end"
] |
[
"0.7116942",
"0.6341112",
"0.6315534",
"0.61744845",
"0.61428946",
"0.5945142",
"0.5900595",
"0.5848707",
"0.5788944",
"0.5637029",
"0.5637029",
"0.55842906",
"0.5584003",
"0.5542869",
"0.55291176",
"0.5422773",
"0.539866",
"0.53588843",
"0.53473353",
"0.5325972",
"0.53084874",
"0.5306905",
"0.5259713",
"0.52464867",
"0.5217253",
"0.5194031",
"0.5143476",
"0.5143128",
"0.5078558",
"0.5073857",
"0.5063211",
"0.50606614",
"0.5059165",
"0.5046",
"0.5042353",
"0.502955",
"0.50216544",
"0.50079006",
"0.49859616",
"0.49556345",
"0.49340135",
"0.4932711",
"0.4929694",
"0.4922505",
"0.4911753",
"0.49013588",
"0.4891794",
"0.48908433",
"0.4884825",
"0.48828593",
"0.48525402",
"0.4846749",
"0.48244143",
"0.48166424",
"0.4776696",
"0.4775598",
"0.47568992",
"0.4748076",
"0.4724482",
"0.47211713",
"0.47196662",
"0.47167033",
"0.4702413",
"0.4683233",
"0.4665494",
"0.4643023",
"0.4639452",
"0.45955577",
"0.45868427",
"0.45828173",
"0.4580936",
"0.45782247",
"0.4572204",
"0.45671844",
"0.45490417",
"0.45399344",
"0.45390865",
"0.4534767",
"0.4518287",
"0.45120886",
"0.45087087",
"0.45050058",
"0.44927445",
"0.4490469",
"0.4486891",
"0.4480653",
"0.44763193",
"0.44690067",
"0.44684473",
"0.44483468",
"0.44456494",
"0.44421342",
"0.44400436",
"0.4436168",
"0.44341853",
"0.44155112",
"0.44033283",
"0.43923926",
"0.43879056",
"0.43831953"
] |
0.694539
|
1
|
Locks all tables in the dataset's FROM clause (but not in JOINs) with the specified mode (e.g. 'EXCLUSIVE'). If a block is given, starts a new transaction, locks the tables, and yields. If no block is given, just locks the tables; note that PostgreSQL will raise an error if you lock a table outside of an existing transaction. Returns nil.
|
def lock(mode, opts=OPTS)
if defined?(yield) # perform locking inside a transaction and yield to block
@db.transaction(opts){lock(mode, opts); yield}
else
sql = 'LOCK TABLE '.dup
source_list_append(sql, @opts[:from])
mode = mode.to_s.upcase.strip
unless LOCK_MODES.include?(mode)
raise Error, "Unsupported lock mode: #{mode}"
end
sql << " IN #{mode} MODE"
@db.execute(sql, opts)
end
nil
end
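
A minimal usage sketch of the method above, assuming a Sequel PostgreSQL connection; the database URL and the :items table are illustrative, not from the source:

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical connection URL

# With a block, lock opens its own transaction, issues
# LOCK TABLE "items" IN EXCLUSIVE MODE, and then yields:
DB[:items].lock('EXCLUSIVE') do
  DB[:items].where(:id => 1).update(:price => 100)
end

# Without a block, take the lock inside a transaction you manage
# yourself, since PostgreSQL rejects LOCK TABLE outside of one:
DB.transaction do
  DB[:items].lock('SHARE') # returns nil after issuing the statement
end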
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def lock(mode, &block)\n sql = LOCK % [source_list(@opts[:from]), mode]\n @db.synchronize do\n if block # perform locking inside a transaction and yield to block\n @db.transaction {@db.execute(sql); yield}\n else\n @db.execute(sql) # lock without a transaction\n self\n end\n end\n end",
"def lock(mode, &block)\n sql = LOCK % [@opts[:from], mode]\n @db.synchronize do\n if block # perform locking inside a transaction and yield to block\n @db.transaction {@db.execute(sql); yield}\n else\n @db.execute(sql) # lock without a transaction\n self\n end\n end\n end",
"def with_database_exclusive_table_lock_sqlite(&block)\n block.call\n end",
"def with_database_exclusive_table_lock_postgresql(&block)\n # If we just use the regular :sanitize_sql support, we get:\n # LOCK TABLE 'foo'\n # ...which, for whatever reason, PostgreSQL doesn't like. Escaping it this way works fine.\n escaped = @low_card_model.connection.quote_table_name(@low_card_model.table_name)\n run_sql(\"LOCK TABLE #{escaped}\", { })\n block.call\n end",
"def with_database_exclusive_table_lock(&block)\n case @low_card_model.connection.class.name\n when /postgresql/i then with_database_exclusive_table_lock_postgresql(&block)\n when /mysql/i then with_database_exclusive_table_lock_mysql(&block)\n when /sqlite/i then with_database_exclusive_table_lock_sqlite(&block)\n else\n raise LowCardTables::Errors::LowCardUnsupportedDatabaseError, %{You asked for low-card IDs for one or more hashes specifying rows that didn't exist,\nbut, when we went to create them, we discovered that we don't know how to exclusively\nlock tables in your database. (This is very important so that we don't accidentally\ncreate duplicate rows.)\n\nYour database adapter's class name is '#{@low_card_model.connection.class.name}'; please submit at least\na bug report, or, even better, a patch. :) Adding support is quite easy, as long as you know the\nequivalent of 'LOCK TABLE'(s) in your database.}\n end\n end",
"def locked_transaction(&block)\n unless connection.open_transactions.zero?\n raise \"locked_transaction cannot be called from within another transaction!\"\n end\n unsafe_locked_transaction do\n yield if block_given?\n end\n end",
"def with_database_exclusive_table_lock_mysql(&block)\n begin\n escaped = @low_card_model.connection.quote_table_name(@low_card_model.table_name)\n run_sql(\"LOCK TABLES #{escaped} WRITE\", { })\n block.call\n ensure\n begin\n run_sql(\"UNLOCK TABLES\", { })\n rescue ::ActiveRecord::StatementInvalid => si\n # we tried our best!\n end\n end\n end",
"def lock(name, mode)\n yield\n end",
"def visit_Arel_Nodes_Lock(o, a = nil)\n # SQL Layer does not support row locks\n end",
"def locks\n sql = %q(\n select\n pg_stat_activity.procpid,\n pg_class.relname,\n pg_locks.transactionid,\n pg_locks.granted,\n substr(pg_stat_activity.current_query,1,30) as query_snippet,\n age(now(),pg_stat_activity.query_start) as \"age\"\n from pg_stat_activity,pg_locks left\n outer join pg_class on (pg_locks.relation = pg_class.oid)\n where pg_stat_activity.current_query <> '<insufficient privilege>' and\n pg_locks.pid=pg_stat_activity.procpid and pg_locks.mode = 'ExclusiveLock' order by query_start)\n\n exec_sql(sql, find_uri)\n end",
"def locks\n dataset.from(:pg_class).join(:pg_locks, :relation=>:relfilenode).select{[pg_class[:relname], Sequel::SQL::ColumnAll.new(:pg_locks)]}\n end",
"def lock_table(db, table)\n begin\n db.query(\"LOCK #{table}\")\n rescue Mysql::Error => e\n $stderr.puts \"Error code: #{e.errno}\"\n $stderr.puts \"Error message: #{e.error}\"\n $stderr.puts \"Error SQLSTATE: #{e.sqlstate}\" if e.respond_to?(\"sqlstate\")\n exit\n end\n end",
"def with_flock(mode)\n return yield if @locked\n begin\n loop do\n # HACK: JRuby returns false if the process is already hold by the same process\n # see https://github.com/jruby/jruby/issues/496\n Thread.pass until @fd.flock(mode)\n # Check if database was replaced (cleared or compactified) in the meantime\n # break if not\n stat = @fd.stat\n break if stat.nlink > 0 && stat.ino == @inode\n open\n end\n @locked = true\n yield\n ensure\n @fd.flock(File::LOCK_UN)\n @locked = false\n end\n end",
"def with_lock_retries(*args, **kwargs, &block)\n if transaction_open?\n if enable_lock_retries?\n Gitlab::AppLogger.warn 'Lock retries already enabled, executing the block directly'\n yield\n else\n raise <<~EOF\n #{__callee__} can not be run inside an already open transaction\n\n Use migration-level lock retries instead, see https://docs.gitlab.com/ee/development/migration_style_guide.html#retry-mechanism-when-acquiring-database-locks\n EOF\n end\n else\n super(*args, **kwargs.merge(allow_savepoints: false), &block)\n end\n end",
"def select_lock_sql(sql)\n lock = @opts[:lock]\n if lock == :share\n sql << ' FOR SHARE'\n else\n super\n end\n\n if lock\n if @opts[:skip_locked]\n sql << \" SKIP LOCKED\"\n elsif @opts[:nowait]\n sql << \" NOWAIT\"\n end\n end\n end",
"def transaction\n File.open(lock_file_path, File::RDWR | File::CREAT, 0644) do |file|\n # Get lock\n file.sync = true\n file.flock(File::LOCK_EX)\n\n # Execute block\n yield\n\n # Push all changes\n apply\n\n # Release lock\n file.flock(File::LOCK_UN)\n end\n end",
"def lock(&block)\n # TODO: only use replace strategy when server is executing the lock\n return call_strategy unless (locked_token = locksmith.lock(&block))\n\n locked_token\n end",
"def transaction(mode = :deferred, &block)\n @db.transaction(mode, &block)\n end",
"def without_locking(&block)\n current = ActiveRecord::Base.lock_optimistically\n ActiveRecord::Base.lock_optimistically = false if current\n begin\n block.call\n ensure\n ActiveRecord::Base.lock_optimistically = true if current\n end\n end",
"def select_lock_sql(sql)\n @opts[:lock] == :update ? sql : super\n end",
"def try_await_lock(table, i); end",
"def with_app_lock( &block )\n # acquire lock_expiration\n ok = with_connection_lock do |locked_self|\n if locked_self.lock_expiration.nil? then\n row.update_all lock_expiration: Time.now + DELTA\n true\n end\n end\n # use and release lock_expiration outside of the connection_lock\n if ok then\n begin\n block.call\n ensure\n row.update_all lock_expiration: nil\n end\n end\n end",
"def transaction(mode = READ_CACHING)\n @default_caching_mode = mode\n open(mode)\n\n outer = @current_transaction\n if outer\n # freeze old cache\n outer.read_cache = @read_cache.dup\n outer.write_cache = @write_cache.dup\n outer.delete_cache = @delete_cache.dup\n end\n\n if iterator?\n if @transaction_mode == DYNAMIC_TRANSACTION_MODE\n\tODBM.Fail ErrMixedTransaction \n end\n @transaction_mode = STATIC_TRANSACTION_MODE\n @current_transaction = StaticTransaction.new(self, mode, outer)\n# @current_transaction.transaction do\n#\tyield @current_transaction\n# end\n @current_transaction.transaction do |txn|\n\tyield txn\n end\n else\n if @transaction_mode == STATIC_TRANSACTION_MODE\n\tODBM.Fail ErrMixedTransaction \n end\n \n @transaction_mode = DYNAMIC_TRANSACTION_MODE\n\n @current_transaction = DynamicTransaction.new(self, mode, outer)\n @current_transaction.start\n return @current_transaction\n end\n\n end",
"def exclusive_schema_lock()\n check_return_code(PureHailDB.ib_schema_lock_exclusive(@trx_ptr))\n end",
"def nolock\n clone(:table_options => \"(NOLOCK)\")\n end",
"def in_lock consistency_spec\n locker = Locker.new Snapscatter.parse_spec(consistency_spec)\n locker.lock\n begin\n yield\n ensure\n locker.unlock\n end\n end",
"def add_lock!(sql, options)\n case lock = options[:lock]\n when true; sql << ' FOR UPDATE'\n when String; sql << \" #{lock}\"\n end\n end",
"def with_advisory_lock_connection\n pool = ::ActiveRecord::ConnectionAdapters::ConnectionHandler.new.establish_connection(\n ::ActiveRecord::Base.connection_db_config.configuration_hash.except(:prefer_secondary)\n )\n\n pool.with_connection { |connection| yield(connection) } # rubocop:disable Style/ExplicitBlockArgument\n ensure\n pool&.disconnect!\n end",
"def blocking\n sql = %q(\n select bl.pid as blocked_pid,\n ka.current_query as blocking_statement,\n now() - ka.query_start as blocking_duration,\n kl.pid as blocking_pid,\n a.current_query as blocked_statement,\n now() - a.query_start as blocked_duration\n from pg_catalog.pg_locks bl\n join pg_catalog.pg_stat_activity a\n on bl.pid = a.procpid\n join pg_catalog.pg_locks kl\n join pg_catalog.pg_stat_activity ka\n on kl.pid = ka.procpid\n on bl.transactionid = kl.transactionid and bl.pid != kl.pid\n where not bl.granted)\n\n exec_sql(sql, find_uri)\n end",
"def with_lock(name, opts={}, &b)\n mode = opts[:mode] || :exclusive\n\n raise ArgumentError, \":mode option must be either :shared or :exclusive, not #{mode.inspect}\" unless [:shared, :exclusive].include?(mode)\n\n if mode == :shared\n shared_locker(name).with_lock(&b)\n else\n locker(name).with_lock(&b)\n end\n end",
"def transaction\n @pool.hold do |conn|\n @transactions ||= []\n if @transactions.include? Thread.current\n return yield(conn)\n end\n conn.execute(SQL_BEGIN)\n begin\n @transactions << Thread.current\n result = yield(conn)\n conn.execute(SQL_COMMIT)\n result\n rescue => e\n conn.execute(SQL_ROLLBACK)\n raise e unless Error::Rollback === e\n ensure\n @transactions.delete(Thread.current)\n end\n end\n end",
"def acquire_lock\n\t\t@@logger.info { \"Acquiring a lock in the database.\" } if have_logger?\n\t\tTournament.dataset.filter(:id => self.id, :locked => false).update(:locked => true) != 0\n\tend",
"def set_locking_strategy\n if @config.enable_locking\n @lock = Aws::SessionStore::DynamoDB::Locking::Pessimistic.new(@config)\n else\n @lock = Aws::SessionStore::DynamoDB::Locking::Null.new(@config)\n end\n end",
"def with_lock\n lock\n begin\n yield if block_given?\n ensure\n unlock\n end\n end",
"def lockable(options={})\n unlock_strategy = options[:unlock_strategy]\n unlock_strategy ||= self.unlock_strategy if respond_to?(:unlock_strategy)\n unlock_strategy ||= :both\n\n lock_strategy = options[:lock_strategy]\n lock_strategy ||= self.lock_strategy if respond_to?(:lock_strategy)\n lock_strategy ||= :failed_attempts\n\n if lock_strategy == :failed_attempts\n apply_schema :failed_attempts, Integer, :default => 0\n end\n\n if [:both, :email].include?(unlock_strategy)\n apply_schema :unlock_token, String\n end\n\n apply_schema :locked_at, DateTime\n end",
"def open( tran: false, mode: :immediate ) # tran = true transaction\n #DBlog::stoD( parse_caller( caller.first ) ) if $debug == true\n @db = SQLite3::Database.new( @DBfile )\n @db.busy_timeout(1000)\n ecount = 0\n roll = false\n begin\n roll = false\n if tran == true\n @db.transaction( mode ) do\n roll = true\n yield self\n end\n else\n yield self\n end\n rescue SQLite3::BusyException => e\n DBlog::sto(\"SQLite3::BusyException tran = #{tran.to_s} #{ecount}\")\n begin\n @db.rollback() if roll == true\n rescue\n DBlog::sto(\"rollback fail #{$!}\")\n end\n if ecount > 59\n Commlib::errPrint( \"SQLite3::BusyException exit\", $!, e )\n return\n else\n #Commlib::errPrint( \"SQLite3::BusyException retry\", $!, e )\n ecount += 1\n sleep( 1 )\n DBlog::sto(\"retry\")\n retry\n end\n rescue => e\n Commlib::errPrint( \"SQLite3::another error\", $!, e )\n begin\n @db.rollback() if roll == true\n rescue\n DBlog::sto(\"rollback fail #{$!}\")\n end\n return\n ensure\n close()\n end\n end",
"def add_lock!(sql, options)\n if (lock = options[:lock]) && sql =~ /\\A\\s*SELECT/mi\n # Check for and extract the :limit/:offset sub-query\n if sql =~ /\\A(\\s*SELECT t\\.\\* FROM \\()(.*)(\\) AS t WHERE t._row_num BETWEEN \\d+ AND \\d+\\s*)\\Z/m\n prefix, subselect, suffix = [$1, $2, $3]\n add_lock!(subselect, options)\n return sql.replace(prefix + subselect + suffix)\n end\n unless sql =~ SELECT_FROM_WHERE_RE\n # If you get this error, this driver probably needs to be fixed.\n raise NotImplementedError, \"Don't know how to add_lock! to SQL statement: #{sql.inspect}\"\n end\n select_clause, from_word, from_tables, where_clause = $1, $2, $3, $4\n with_clause = lock.is_a?(String) ? \" #{lock} \" : \" WITH(ROWLOCK,UPDLOCK) \"\n\n # Split the FROM clause into its constituent tables, and add the with clause after each one.\n new_from_tables = []\n scanner = StringScanner.new(from_tables)\n until scanner.eos?\n prev_pos = scanner.pos\n if scanner.scan_until(/,|(INNER\\s+JOIN|CROSS\\s+JOIN|(LEFT|RIGHT|FULL)(\\s+OUTER)?\\s+JOIN)\\s+/mi)\n join_operand = scanner.pre_match[prev_pos..-1]\n join_operator = scanner.matched\n else\n join_operand = scanner.rest\n join_operator = \"\"\n scanner.terminate\n end\n\n # At this point, we have something like:\n # join_operand == \"appointments \"\n # join_operator == \"INNER JOIN \"\n # or:\n # join_operand == \"appointment_details AS d1 ON appointments.[id] = d1.[appointment_id]\"\n # join_operator == \"\"\n if join_operand =~ /\\A(.*)(\\s+ON\\s+.*)\\Z/mi\n table_spec, on_clause = $1, $2\n else\n table_spec = join_operand\n on_clause = \"\"\n end\n\n # Add the \"WITH(ROWLOCK,UPDLOCK)\" option to the table specification\n table_spec << with_clause unless table_spec =~ /\\A\\(\\s*SELECT\\s+/mi # HACK - this parser isn't so great\n join_operand = table_spec + on_clause\n\n # So now we have something like:\n # join_operand == \"appointments WITH(ROWLOCK,UPDLOCK) \"\n # join_operator == \"INNER JOIN \"\n # or:\n # join_operand == \"appointment_details AS d1 WITH(ROWLOCK,UPDLOCK) ON appointments.[id] = d1.[appointment_id]\"\n # join_operator == \"\"\n\n new_from_tables << join_operand\n new_from_tables << join_operator\n end\n sql.replace( select_clause.to_s << from_word.to_s << new_from_tables.join << where_clause.to_s )\n end\n sql\n end",
"def db_lock(id, duration = 5)\n start = Time.now\n locks = @database.from(:locks)\n while true\n if start < Time.now - duration\n @logger.error(\"Could not acquire a lock for #{id}\")\n # TO-DO: Safeguard to prune old locks\n # This will be necessary if deadlocks are encountered\n # locks.where { created_at < (Time.now - duration) }.delete\n break\n end\n begin\n locks.insert(id: id, created_at: Time.now)\n @database.transaction do\n yield\n end\n break\n rescue Sequel::UniqueConstraintViolation\n sleep(0.1)\n ensure\n locks.where(id: id).delete\n end\n end\n end",
"def with_lock\n lock!(true)\n yield\n ensure\n unlock!\n end",
"def transaction\n @pool.hold do |conn|\n @transactions ||= []\n if @transactions.include? Thread.current\n return yield(conn)\n end\n log_info(SQL_BEGIN)\n conn.execute(SQL_BEGIN)\n begin\n @transactions << Thread.current\n yield(conn)\n rescue Exception => e\n log_info(SQL_ROLLBACK)\n conn.execute(SQL_ROLLBACK)\n raise e unless Error::Rollback === e\n ensure\n unless e\n log_info(SQL_COMMIT)\n conn.execute(SQL_COMMIT)\n end\n @transactions.delete(Thread.current)\n end\n end\n end",
"def add_lock!( sql, options )\n sql\n end",
"def acquire_lock!(lock_name = table_name, wait_timeout = 0, &block)\n acquire_lock(lock_name, table_name, &block) or raise LockTimeout, 'Timeout waiting for lock'\n end",
"def select_and_lock(relation, limit:)\n relation = upcoming(relation)\n\n # FOR UPDATE SKIP LOCKED selects and locks entries, but skips those that\n # are already locked - preventing this transaction from being locked.\n sql = relation.to_sql + \" FOR UPDATE SKIP LOCKED\"\n sql += \" LIMIT #{limit}\" if limit\n\n item_class.find_by_sql(sql)\n end",
"def lock_type\n @lock_type ||= :mysql\n end",
"def perform(options = {}, &block)\n @record.transaction do\n @record.class.lock('FOR UPDATE NOWAIT').find(@record.id)\n yield\n end\n rescue\n nil\n end",
"def transaction(&block)\n @@semaphore.synchronize{\n block.call\n }\n end",
"def with_multilock(keys)\n # We're going to remove nil values from the passed in keys, since we can't lock on nil.\n # Cast the passed-in object to an array so we can handle a Set.\n # Make a copy of the keys array so we don't modify the passed-in object.\n keys = keys.nil? ? [] : keys.to_a.dup.compact\n\n # If no keys have been passed in, just yield and return.\n # This simplifies things for any calling code that wants to pass in a\n # variable number of dependent lock-needing resources when there's a\n # possibility that certain situations may not require any locks at all.\n if keys.blank?\n yield Hash.new\n return\n end\n\n raise ArgumentError, \"Duplicate object id found in given keys: #{keys.join(', ')}\" if keys.uniq.length != keys.length\n lock_objects = {}\n already_locked_ids = []\n\n keys.each do |key|\n lock_objects[key] = lock(key)\n rescue Hyacinth::Exceptions::LockError\n already_locked_ids << key\n end\n\n if already_locked_ids.present?\n # unlock any locks we just established\n lock_objects.each do |_key, lock_object|\n lock_object.unlock\n end\n # and then raise an exception\n raise Hyacinth::Exceptions::LockError, already_locked_ids.length == 1 ?\n \"Lock on #{already_locked_ids.first} is currently held by another process.\" :\n \"Locks on #{already_locked_ids.join(', ')} are currently held by other processes.\"\n end\n\n # TODO: Write a test to ensure that the locks are unlocked if the given block raises an exception.\n begin\n # yield lock_objects so that given block can extend the locks if necessary\n yield lock_objects\n ensure\n # Unlock lock_objects now that we're done with them\n lock_objects.each do |_key, lock_object|\n lock_object.unlock\n end\n end\n end",
"def synchronize_resultset\n # make it reentrant\n return yield if defined?(@resultset_locked) && @resultset_locked\n\n begin\n @resultset_locked = true\n File.open(resultset_writelock, \"w+\") do |f|\n f.flock(File::LOCK_EX)\n yield\n end\n ensure\n @resultset_locked = false\n end\n end",
"def transaction(session, access_mode = :write)\n if !block_given?\n tx = self.class.transaction_class.new(session)\n tx.access_mode = access_mode\n tx.begin\n return tx\n end\n\n begin\n tx = transaction(session, access_mode)\n yield tx\n rescue => e\n tx.mark_failed if tx\n raise e\n ensure\n tx.close if tx\n end\n end",
"def mlock!(modes)\n @session.chanserv.set(self.name, :mlock, modes)\n end",
"def transaction(&block)\n @cache.transaction do\n yield(@cache)\n end\n end",
"def lock\n # Flush everything to start with a clean state\n # and to protect the @locked variable\n flush\n\n with_flock(File::LOCK_EX) do\n replay\n result = yield\n flush\n result\n end\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end",
"def run_in_transaction isolation\n if isolation\n Base.transaction isolation: isolation do\n yield\n end\n else\n yield\n end\n end",
"def with_locked_file(filename)\n puts \"locking #{filename}\"\n if dataset_marked?(filename, 'locked') \n raise \"already marked as locked\"\n end\n # actual locking\n mark_dataset(filename, 'locked')\n begin\n yield\n ensure\n puts \"unlocking #{filename}\"\n # actual unlocking\n unmark_dataset(filename, 'locked')\n end\n end",
"def transaction( &block )\n connect do | conn |\n conn.transaction do | conn |\n yield SqlRunner.new(SingleConnectionPool.new( conn ))\n end\n end\n end",
"def synchronize(scope, &block)\n Rails.cache.dalli.with do |dalli|\n RemoteLock.new(RemoteLock::Adapters::Dalli.new(dalli))\\\n .synchronize(scope, initial_wait: 0.1, retries: 100, &block)\n end\n end",
"def add_lock!(sql, options)\n sql\n end",
"def lock(options)\n allocation = nil\n attempt = 0\n begin\n if options[:debug_callback]\n options[:debug_callback].call('Trying to acquire lock')\n end\n \n allocation = acquire(\n options[:name],\n :location => options[:location],\n :timeout => options[:timeout]\n )\n rescue ResourceNotFound\n if options[:create_resource]\n if options[:debug_callback]\n options[:debug_callback].call('Trying to create resource')\n end\n \n resource = Resource.soft_create(\n :name => options[:name],\n :location => options[:location],\n :capacity => options[:capacity]\n )\n retry\n else\n raise\n end\n rescue ResourceBusy\n if options[:wait]\n if attempt >= options[:wait_retries]\n raise\n else\n if options[:debug_callback]\n options[:debug_callback].call('Waiting for busy lock')\n end\n \n sleep options[:wait_sleep]\n attempt += 1\n retry\n end\n else\n raise\n end\n end\n \n begin\n rv = yield\n ensure\n if options[:debug_callback]\n options[:debug_callback].call('Releasing lock')\n end\n \n release(allocation)\n end\n rv\n end",
"def lock_to!(user)\n update_columns(locked_at: Time.current, locked_by: user.id)\n end",
"def lock!(resource, uuid, *lock_names)\n build_locks(resource, lock_names, uuid).each(&:save!)\n end",
"def synchronize(*)\n if ActiveRecord.version >= Gem::Version.new(\"5.1.0\")\n activerecord_connection.lock.synchronize do\n yield activerecord_raw_connection\n end\n else\n yield activerecord_raw_connection\n end\n end",
"def data_lock(item, properties, &blk)\n locked = false\n lock_path = \"plan_store/%s\" % item\n config = {\"plan_store\" => properties}\n\n playbook.data_stores.from_hash(config)\n playbook.data_stores.prepare\n\n playbook.data_stores.lock(lock_path)\n locked = true\n\n yield\n ensure\n playbook.data_stores.release(lock_path) if locked\n end",
"def unlock_all!\n locks.active.each(&:unlock!)\n end",
"def lock(circuit, state)\n raise NotImplementedError\n end",
"def lock\n post :lock\n end",
"def transaction(&block)\n raise InvalidDbError if @stale\n\n return transaction_in_staging(true, &block) if self.staging?\n\n begin\n transaction_in_staging(false, &block)\n ensure\n self.unstage\n end\n end",
"def each(mode = nil)\n mode = @default_caching_mode unless mode\n\n if mode != SCAN_DB_ONLY\n for key, value in @read_cache\n\t@write_cache[key] = value if mode == UPDATE\n\tyield key, value\n end\n end\n\n if mode != SCAN_CACHE_ONLY\n @db.each do |key, obj|\n\tnext unless mode == SCAN_DB_ONLY or @read_cache[key] == NULL\n\t@read_cache[key] = obj if mode == READ_CACHING\n\t@write_cache[key] = obj if mode == UPDATE\n\tyield key, obj\n end\n end\n end",
"def smart_transaction\n result = nil\n ensure_in_transaction do\n begin\n handle = connection.lo_open(oid)\n result = yield handle\n connection.lo_close(handle)\n end\n end\n result\n end",
"def with_locked_xml(filename)\n puts \"locking #{filename}\"\n if dataset_marked?(filename, 'locked') \n raise \"already marked as locked\"\n end\n # actual locking\n mark_dataset(filename, 'locked')\n begin\n yield\n ensure\n puts \"unlocking #{filename}\"\n # actual unlocking\n unmark_dataset(filename, 'locked')\n end\n end",
"def transaction(&block)\n ActiveRecord::Base.transaction(&block)\n end",
"def visit_Arel_Nodes_Lock o, a = nil\n do_visit o.expr, a\n end",
"def lock_database\n lock_command = <<-EOS.gsub(/^ +/, ' ')\n echo 'use admin\n db.runCommand({\"fsync\" : 1, \"lock\" : 1})' | #{ \"#{ mongo_utility } #{ mongo_uri }\" }\n EOS\n\n run(lock_command)\n end",
"def lock_database\n lock_command = <<-EOS\n echo 'use admin\n db.runCommand({\"fsync\" : 1, \"lock\" : 1})' | #{mongo_shell}\n EOS\n\n run(lock_command)\n end",
"def transaction\n use do |connection|\n connection.transaction do |conn|\n begin\n yield conn\n rescue Rollback\n return\n end\n end\n end\n end",
"def transaction(&block)\n begin\n @store.transaction\n block.call(@store)\n @store.commit\n rescue SQLite3::Exception => exception\n raise \"SQLite exception: #{exception}\"\n end\n end",
"def lock!(key, key2 = nil, timeout: nil)\n unless in_transaction?\n raise \"You cannot use lock! outside of a transaction\"\n end\n\n if key.is_a?(String)\n # calculate a 31 bit checksum < 0\n key = Digest::CRC32.hexdigest(key).to_i(16) # get a 32-bit stable checksum\n key &= ~0x80000000 # reset bit 31\n key = -key # make it negative\n end\n\n # shorten key, key2 to the allowed number of bits\n if key2\n key = apply_bitmask(key, MASK_31_BITS)\n key2 = apply_bitmask(key2, MASK_31_BITS)\n else\n key = apply_bitmask(key, MASK_63_BITS)\n end\n\n if timeout\n lock_w_timeout(key, key2, timeout)\n else\n lock_wo_timeout(key, key2)\n end\n end",
"def lock!\n freeze!\n @locked = true\n self\n end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def select_and_lock_batch(op:, max_batch_size:)\n relation = item_class.all\n relation = relation.where(op: op) if op\n\n match = select_and_lock(relation, limit: 1).first\n return [] unless match\n\n batch_size = calculate_batch_size(op: match.op, max_batch_size: max_batch_size)\n return [match] if batch_size <= 1\n\n batch_relation = relation.where(op: match.op)\n select_and_lock(batch_relation, limit: batch_size)\n end",
"def request_lock lock\n\n\tif @intent_locks[lock.baseset].empty?\n\t\t@intent_locks[lock.baseset] << lock.lock_type\n\t\tputs @intent_locks\n\telse\n\t\tcloned_locks = @intent_locks[lock.baseset].clone\n\t\tcloned_locks.each do |element|\n\t\t\tputs @lock_matrix[element][lock.lock_type]\n\t\t\tif @lock_matrix[element][lock.lock_type]\n\t\t\t\t@intent_locks[lock.baseset] << lock.lock_type\n\t\t\telse\n\t\t\t\treturn false\n\t\t\tend\n\t\tend\n\tend\n\n\treturn true\nend",
"def call(*, &block)\n lock_instance.execute(&block)\n end",
"def lock(key)\n returned_from_block = nil\n client.lock(key, @ttl) do |locked|\n raise UnableToAcquireLockError unless locked\n returned_from_block = yield\n end\n returned_from_block\n end",
"def lock(args)\n raise NotImplemented unless @lock_class\n raise Conflict unless parent_exists?\n\n lock_check(args[:scope])\n lock = @lock_class.explicit_locks(@path).find{|l| l.scope == args[:scope] && l.kind == args[:type] && l.user == @user}\n unless(lock)\n token = UUIDTools::UUID.random_create.to_s\n lock = @lock_class.generate(@path, @user, token)\n lock.scope = args[:scope]\n lock.kind = args[:type]\n lock.owner = args[:owner]\n lock.depth = args[:depth].is_a?(Symbol) ? args[:depth] : args[:depth].to_i\n if(args[:timeout])\n lock.timeout = args[:timeout] <= @max_timeout && args[:timeout] > 0 ? args[:timeout] : @max_timeout\n else\n lock.timeout = @default_timeout\n end\n lock.save if lock.respond_to? :save\n end\n begin\n lock_check(args[:type])\n rescue DAV4Rack::LockFailure => lock_failure\n lock.destroy\n raise lock_failure\n rescue HTTPStatus::Status => status\n status\n end\n [lock.remaining_timeout, lock.token]\n end",
"def lock\n if block_given?\n raise 'Race condition' if locking?\n\n @locking = true\n yield\n return @locking = false\n end\n @locking = true\n end",
"def sync_with_flock(&block)\n fail ArgumentError, \"No block specified\" if block.nil?\n\n file = File.open(LOCK_FILE_PATH)\n file.flock(File::LOCK_EX)\n block.call\n ensure\n file.flock(File::LOCK_UN)\n file.close\n end",
"def lock_changes\n begin\n @lock_count += 1\n yield\n ensure\n @lock_count -= 1\n end\n end",
"def execute(&block)\n TempTableContext.with_context(db) {|context| execute_in_context(context, &block)}\n end",
"def transaction(force_sync = false, &block)\n # Ruby 1.9.3 does not support @mutex.owned?\n if @mutex.respond_to?(:owned?)\n force_sync = false if @mutex.locked? && @mutex.owned?\n else\n # If we allowed this in Ruby 1.9.3, it might possibly cause recursive\n # locking within the same thread.\n force_sync = false\n end\n if !force_sync && (@in_transaction || options[:without_mutex])\n block.call self\n else\n @mutex.synchronize do\n @in_transaction = true\n result = block.call\n @in_transaction = false\n result\n end\n end\n end",
"def with_transaction(read_only: false, &block)\n @env.transaction(read_only, &block)\n end",
"def perform_atomic_update(&block)\n Edition.connection.execute \"set transaction isolation level serializable\"\n Edition.connection.transaction do\n yield\n end\n end",
"def transaction(start_db_transaction=true)\n yield\n end",
"def skip_locked\n cached_dataset(:_skip_locked_ds) do\n raise(Error, 'This dataset does not support skipping locked rows') unless supports_skip_locked?\n clone(:skip_locked=>true)\n end\n end",
"def with_connection\n ActiveRecord::Base.connection_pool.with_connection do |connection|\n connection.transaction do\n if connection.adapter_name == \"PostgreSQL\"\n connection.execute \"SET TRANSACTION READ ONLY\"\n connection.execute \"SET LOCAL statement_timeout = 100\"\n # TODO support equivalent options for other adapters (such as mysql)\n end\n\n yield connection\n end\n end\n end",
"def to_lock\n raise AbstractFunction,\n \"#to_lock must be implemented on #{self.class.name}!\"\n end",
"def to_lock\n raise AbstractFunction,\n \"#to_lock must be implemented on #{self.class.name}!\"\n end",
"def resource_locks\n :all\n end",
"def transaction\n raise Mysql2::Error, 2002 if @my.nil?\n\n if block_given?\n begin\n @my.query('START TRANSACTION WITH CONSISTENT SNAPSHOT')\n yield # Start executing the query black.\n @my.query('COMMIT')\n rescue Mysql2::Error => e\n @my.query('ROLLBACK')\n raise e\n end\n end\n end"
] |
[
"0.80996597",
"0.79946315",
"0.6768143",
"0.6706339",
"0.6692717",
"0.64592165",
"0.63696915",
"0.6278538",
"0.61654204",
"0.6085429",
"0.5980977",
"0.5813835",
"0.5738062",
"0.5736289",
"0.5650982",
"0.5603849",
"0.5572628",
"0.54926866",
"0.5452597",
"0.54325897",
"0.539995",
"0.5383124",
"0.5377131",
"0.53759545",
"0.53542376",
"0.5349773",
"0.534302",
"0.5336483",
"0.53153616",
"0.5293356",
"0.527001",
"0.52682036",
"0.5266558",
"0.52288765",
"0.5218966",
"0.5207879",
"0.52074873",
"0.5202774",
"0.520209",
"0.5171949",
"0.5170673",
"0.5166415",
"0.51625174",
"0.51596206",
"0.5140082",
"0.5129738",
"0.5129068",
"0.51288074",
"0.5124095",
"0.5100536",
"0.509808",
"0.5096684",
"0.5089663",
"0.5089663",
"0.50809616",
"0.5076967",
"0.50709534",
"0.50688833",
"0.50516474",
"0.5039974",
"0.5022188",
"0.50060165",
"0.4990403",
"0.4985401",
"0.49690104",
"0.4945785",
"0.49440625",
"0.49396846",
"0.49331757",
"0.4904093",
"0.4903995",
"0.48987046",
"0.48878375",
"0.48670703",
"0.48656008",
"0.48597986",
"0.48505384",
"0.48493078",
"0.48054123",
"0.48054123",
"0.48054123",
"0.4790876",
"0.4784859",
"0.47838137",
"0.47761804",
"0.47711036",
"0.47650382",
"0.47564688",
"0.47549614",
"0.4754953",
"0.47536427",
"0.47401637",
"0.47268027",
"0.47240546",
"0.47208378",
"0.47172537",
"0.4713711",
"0.4713711",
"0.47041285",
"0.47029483"
] |
0.8043198
|
1
|
Support OVERRIDING USER|SYSTEM VALUE for MERGE INSERT.
|
def merge_insert(*values, &block)
h = {:type=>:insert, :values=>values}
if override = @opts[:override]
h[:override] = insert_override_sql(String.new)
end
_merge_when(h, &block)
end
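
A minimal usage sketch of the pairing above (not part of the record; the `DB` handle and table names `:m1`/`:m2` are assumed): the override flag set by overriding_system_value or overriding_user_value is what merge_insert reads from @opts[:override], and insert_override_sql renders it between the INSERT column list and VALUES.

# Hypothetical example against a PostgreSQL-backed Sequel database:
DB[:m1]
  .merge_using(:m2, i1: :i2)
  .overriding_system_value           # sets @opts[:override] = :system
  .merge_insert(i1: 1, a: 2)         # stores " OVERRIDING SYSTEM VALUE" in h[:override]
  .merge
# Roughly: MERGE INTO "m1" USING "m2" ON ("i1" = "i2")
#   WHEN NOT MATCHED THEN INSERT ("i1", "a") OVERRIDING SYSTEM VALUE VALUES (1, 2)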
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def insert_override_sql(sql)\n case opts[:override]\n when :system\n sql << \" OVERRIDING SYSTEM VALUE\"\n when :user\n sql << \" OVERRIDING USER VALUE\"\n end\n end",
"def user(value)\n merge(grcuser: value.to_s)\n end",
"def user(value)\n merge(gadruser: value.to_s)\n end",
"def user_platform_params\n merged_params_one = { updated_by: @user.id }\n merged_params_two = { created_by: @user.id } if params[:action] == 'create'\n merged_params = { user_id: @user.id }\n\n params.require(:user_platform).permit(:platform_id, :overview, :strategy, :login_user, :login_pass,\n :internal_id, :notes, :rating)\n .merge(merged_params, merged_params_one, merged_params_two)\n end",
"def before_merge_rpx_data( from_user, to_user )\n\t\t\t\n\t\t\tend",
"def user(value)\n merge(rvuser: value.to_s)\n end",
"def user(value)\n merge(leuser: value.to_s)\n end",
"def overriding_user_value\n clone(:override=>:user)\n end",
"def define_user\n case new_resource.im_install_mode\n when 'admin'\n user = if new_resource.user.nil?\n 'root'\n else\n unless im_user_exists_unix?(new_resource.user)\n Chef::Log.fatal \"User Name provided #{new_resource.user}, does not exist\"\n raise \"User Verification 1: User Name provided #{new_resource.user}, does not exist\"\n end\n new_resource.user\n end\n user\n when 'nonAdmin', 'group'\n user = if new_resource.user.nil?\n Chef::Log.fatal \"User Name not provided! Please provide the user that should be used to install your product\"\n raise \"User Name not provided! Please provide the user that should be used to install your product\"\n else\n unless im_user_exists_unix?(new_resource.user)\n Chef::Log.fatal \"User Name provided #{new_resource.user}, does not exist\"\n raise \"User Verification 1: User Name provided #{new_resource.user}, does not exist\"\n end\n new_resource.user\n end\n user\n end\nend",
"def define_user\n case new_resource.im_install_mode\n when 'admin'\n user = if new_resource.user.nil?\n 'root'\n else\n unless im_user_exists_unix?(new_resource.user)\n Chef::Log.fatal \"User Name provided #{new_resource.user}, does not exist\"\n raise \"User Verification 1: User Name provided #{new_resource.user}, does not exist\"\n end\n new_resource.user\n end\n user\n when 'nonAdmin', 'group'\n user = if new_resource.user.nil?\n Chef::Log.fatal \"User Name not provided! Please provide the user that should be used to install your product\"\n raise \"User Name not provided! Please provide the user that should be used to install your product\"\n else\n unless im_user_exists_unix?(new_resource.user)\n Chef::Log.fatal \"User Name provided #{new_resource.user}, does not exist\"\n raise \"User Verification 1: User Name provided #{new_resource.user}, does not exist\"\n end\n new_resource.user\n end\n user\n end\nend",
"def merge_user!(other_user:)\n # these methods do not invoke callbacks, since not really needed for taking ownership\n CurationActivity.where(user_id: other_user.id).update_all(user_id: id)\n ResourceState.where(user_id: other_user.id).update_all(user_id: id)\n Resource.where(user_id: other_user.id).update_all(user_id: id)\n Resource.where(current_editor_id: other_user.id).update_all(current_editor_id: id)\n\n # merge in any special things updated in other user and prefer these details from other_user over self.user\n out_hash = {}\n %i[first_name last_name email tenant_id last_login orcid].each do |i|\n out_hash[i] = other_user.send(i) unless other_user.send(i).blank?\n end\n update(out_hash)\n end",
"def merge_user!(user_id_or_screen_name)\n case user_id_or_screen_name\n when Integer\n self[:user_id] = user_id_or_screen_name\n when String\n self[:screen_name] = user_id_or_screen_name\n end\n self\n end",
"def user(value)\n merge(user: value.to_s)\n end",
"def merge_owner!(owner_id_or_owner_screen_name)\n case owner_id_or_owner_screen_name\n when Integer\n self[:owner_id] = owner_id_or_owner_screen_name\n when String\n self[:owner_screen_name] = owner_id_or_owner_screen_name\n end\n self\n end",
"def merge_owner!(owner_id_or_owner_screen_name)\n case owner_id_or_owner_screen_name\n when Integer\n self[:owner_id] = owner_id_or_owner_screen_name\n when String\n self[:owner_screen_name] = owner_id_or_owner_screen_name\n end\n self\n end",
"def os_user\n @os_user\n end",
"def overriding_system_value\n clone(:override=>:system)\n end",
"def after_merge_rpx_data( from_user, to_user )\n\t\t\t\n\t\t\tend",
"def canonical_user_identities(opennebula_user)\n fail 'User object not provided!' unless opennebula_user\n identities = []\n\n identities << opennebula_user['TEMPLATE/KRB_PRINCIPAL']\n identities << opennebula_user['TEMPLATE/X509_DN'].split('|') if opennebula_user['TEMPLATE/X509_DN']\n identities << opennebula_user['NAME']\n identities << opennebula_user['ID'].to_s\n identities.flatten!\n identities.compact!\n\n Egi::Fedcloud::Vmhound::Log.debug \"[#{self.class}] Assigning identities #{identities.inspect} \" \\\n \"to user #{opennebula_user['ID'].inspect}\"\n identities\n end",
"def merge\n return if old_user.nil? || new_user.nil? || old_user.id == new_user.id\n\n copy_missing_one_to_ones\n replace_old_with_new_in_groups\n set_replacement_references\n deactivate_old_user\n faye_publisher.broadcast_to_contacts\n old_faye_publisher.broadcast_to_contacts\n end",
"def create_user(resource)\n session = Puppet::NetDev::CE::Device.session\n\n set_user_xml = '<rpc><edit-config><target><running/></target><default-operation>merge</default-operation><error-option>rollback-on-error</error-option><config><aaa xmlns=\"http://www.huawei.com/netconf/vrp\" content-version=\"1.0\" format-version=\"1.0\"><lam><users><user operation=\"merge\"><userName>' + (resource[:name]).to_s + '</userName>'\n\n if resource[:password]\n set_user_xml += '<password>' + (resource[:password]).to_s + '</password>'\n end\n\n set_user_xml += '</user></users></lam></aaa></config></edit-config></rpc>'\n\n session.rpc.do_config(set_user_xml)\n end",
"def set_or_return(symbol, arg, options={})\n if options && options[:default] == 'root'\n if USER_PROPERTIES.include?(symbol) && node.platform_family?('windows')\n options = options.dup\n options[:default] = Poise::Utils::Win32.admin_user\n elsif GROUP_PROPERTIES.include?(symbol)\n options = options.dup\n options[:default] = node['root_group']\n end\n end\n super(symbol, arg, options)\n end",
"def merge_owner_into_options!(owner_id_or_owner_username, options={})\n case owner_id_or_owner_username\n when Fixnum\n options[:owner_id] = owner_id_or_owner_username\n when String\n options[:owner_username] = owner_id_or_owner_username\n end\n options\n end",
"def sysuser_\n RequestStore[:current_user] = User.system_user\nend",
"def merge_user_id( from_user )\n\t\t self.rpx_identifier = from_user.rpx_identifier\n\t\t from_user.rpx_identifier = nil\n\t\t from_user.save\n\t\t from_user.reload\t\t\n\t\tend",
"def set_sys_user\n @sys_user = Sys::User.find(params[:id])\n end",
"def vital_source_login\n\t\t@vs = \"vitalsource\"\n\tend",
"def set_sys_user\n @sys_user = Sys::User.find(params[:id])\n end",
"def determine_duplicate_error(existing)\n if existing.user == User.current\n errors.add(:key, l(:error_key_in_use_by_you, name: existing.title))\n elsif User.current.admin?\n errors.add(:key, l(:error_key_in_use_by_other, login: existing.user.login, name: existing.title))\n else\n errors.add(:key, l(:error_key_in_use_by_someone))\n end\n end",
"def merge(other_user)\n return unless other_user.instance_of? User\n return if !self.auth_token.blank? and other_user.auth_token.blank? and self.auth_token != other_user.auth_token\n\n attribute_set('auth_token', auth_token || other_user.auth_token)\n\n reservations_to_move = other_user.reservations\n reservations_to_move.update({:user_id => id})\n reservations.concat(reservations_to_move).save\n reservations.reload\n reservations_to_move.reload\n\n participations_to_move = other_user.participations\n participations_to_move.update({:user_id => id})\n participations.concat(participations_to_move).save\n participations.reload\n participations_to_move.reload\n self\n end",
"def ssh_user(val=nil)\n from_setting_or_image_info :ssh_user, val, 'root'\n end",
"def system?\n id == User::SYSTEM_USER_ID\n end",
"def system?\n id == User::SYSTEM_USER_ID\n end",
"def true_user\n @impersonated_user || current_user\n end",
"def true_user\n @impersonated_user || current_user\n end",
"def user_assign(value)\n forget_value(\"user\")\n assign value,\"user\"\n end",
"def check_create_consistency(new, user)\n if new.changeset.nil?\n raise OSM::APIChangesetMissingError\n elsif new.changeset.user_id != user.id\n raise OSM::APIUserChangesetMismatchError\n elsif !new.changeset.is_open?\n raise OSM::APIChangesetAlreadyClosedError, new.changeset\n end\n end",
"def set_system_user\n @system_user = SystemUser.find(params[:id])\n end",
"def create(location_params, user)\n status = false\n\n # generate random tag\n self.tag = (('A'..'Z').to_a + ('a'..'z').to_a + (0..9).to_a).shuffle[0..7].join\n\n if self.save\n self.owner << user\n status = true\n end\n return status\n end",
"def system_user\n User.find_by('first_name = ? AND last_name = ?', 'system', 'user')\n end",
"def tag_vm_user(_pool_name, _vm_name)\n # noop by design. If the provider does not implement this method, this base method is called (because inherited)\n # and should basically do nothing.\n true\n end",
"def user_assign(value)\n forget_value(\"user\")\n assign(value,\"user\")\n end",
"def login_failure(user=nil)\n blank(\n la_ip: ::GDO::Net::GDT_IP.current,\n la_user: user == nil ? nil : user.get_id,\n ).insert\n end",
"def correlation_id\n ENV['MHV_SM_USER_ID']\n end",
"def create\n @systemuser = Systemuser.new(params[:systemuser])\n @systemuser.name = params[:systemuser][:name]\n \n if params[:systemuser][:group_id] == 4\n @systemuser.ro = 1\n else\n @systemuser.ro = 0\n end\n \n @systemuser.enabled = 1\n\n respond_to do |format|\n if @systemuser.save\n format.html { redirect_to systemusers_path, notice: 'Wizard user was successfully created.' }\n format.json { render json: @systemuser, status: :created, location: @systemuser }\n else\n format.html { render action: \"new\" }\n format.json { render json: @systemuser.errors, status: :unprocessable_entity }\n end\n end\n end",
"def current_user\n \tsuper || guest_user\n \t#Super quiere decir que exactamente con los valores \n \t#del metodo original sin cambiar nada\n end",
"def ensure_different_creator_and_user\n errors.add(:user_id, \"can't be same as creator\") if user == creator\n end",
"def ro_user\n #raise \"You need to specify a SELECT user!\" if @ro_user.nil?\n @ro_user || {}\n end",
"def user_or_org\n self.is_user? ? 'user' : 'organization'\n end",
"def impersonate_sql_user(service_instance,verbose)\n\n # Print the current user\n blah = session.sys.config.getuid if verbose == \"true\"\n print_status(\"Current user: #{blah}\") if verbose == \"true\"\n\n # Define target user/pid\n targetuser = \"\"\n targetpid = \"\"\n\n # Identify SQL Server service processes\n print_status(\"Searching for sqlservr.exe processes not running as SYSTEM...\")\n session.sys.process.get_processes().each do |x|\n\n # Search for all sqlservr.exe processes\n if ( x['name'] == \"sqlservr.exe\" and x['user'] != \"NT AUTHORITY\\\\SYSTEM\")\n\n # Found one\n print_good(\"Found \\\"#{x['user']}\\\" running sqlservr.exe process #{x['pid']}\")\n\n # Define target pid / user\n if x['user'] =~ /NT SERVICE/ then\n if x['user'] == \"NT SERVICE\\\\MSSQL$#{service_instance}\" then\n targetuser = \"NT SERVICE\\\\MSSQL$#{service_instance}\"\n targetpid = x['pid']\n end\n else\n targetuser = x['user']\n targetpid = x['pid']\n end\n end\n end\n\n # Attempt to migrate to target sqlservr.exe process\n if targetuser == \"\" then\n print_error(\"Unable to find sqlservr.exe process not running as SYSTEM\")\n return 0\n else\n begin\n # Migrating works, but I can't rev2self after its complete\n print_status(\"Attempting to migrate to process #{targetpid}...\")\n session.core.migrate(targetpid.to_i)\n\n # Statusing\n blah = session.sys.config.getuid if verbose == \"true\"\n print_status(\"Current user: #{blah}\") if verbose == \"true\"\n print_good(\"Successfully migrated to sqlservr.exe process #{targetpid}\")\n return 1\n rescue\n print_error(\"Unable to migrate to sqlservr.exe process #{targetpid}\")\n return 0\n end\n end\n end",
"def set_user\n\t\tif study_subject\n\t\t\t#\tbecause it is possible to create the first, then the second\n\t\t\t#\tand then delete the first, and create another, first and\n\t\t\t#\tsecond kinda lose their meaning until the merge, so set them\n\t\t\t#\tboth as the same until the merge\n\t\t\tcase study_subject.abstracts.count\n\t\t\t\twhen 0 \n\t\t\t\t\tself.entry_1_by_uid = current_user.try(:uid)||0\n\t\t\t\t\tself.entry_2_by_uid = current_user.try(:uid)||0\n\t\t\t\twhen 1 \n\t\t\t\t\tself.entry_1_by_uid = current_user.try(:uid)||0\n\t\t\t\t\tself.entry_2_by_uid = current_user.try(:uid)||0\n\t\t\t\twhen 2\n\t\t\t\t\tabs = study_subject.abstracts\n\t\t\t\t\t#\tcompact just in case a nil crept in\n\t\t\t\t\tself.entry_1_by_uid = [abs[0].entry_1_by_uid,abs[0].entry_2_by_uid].compact.first\n\t\t\t\t\tself.entry_2_by_uid = [abs[1].entry_1_by_uid,abs[1].entry_2_by_uid].compact.first\n\t\t\t\t\tself.merged_by_uid = current_user.try(:uid)||0\n\t\t\tend\n\t\tend\n\tend",
"def default_value_for_author\n git_global_config_for(\"user.name\") do |author_val|\n warn(\"Using author from git as: #{author_val}\")\n end\n end",
"def merge_identities(user, auth)\n if auth.info[:first_name] && !user.first_name.present?\n user.first_name = auth.info[:first_name]\n end\n\n if auth.info[:last_name] && !user.last_name.present?\n user.last_name = auth.info[:last_name]\n end\n\n if auth.info[:email] && !user.email.present?\n user.email = auth.info[:email]\n end\n\n create_new_identity(user,auth)\n end",
"def merge_owner_into_options!(owner_id_or_owner_screen_name, options={})\n case owner_id_or_owner_screen_name\n when Fixnum\n options[:owner_id] = owner_id_or_owner_screen_name\n when String\n options[:owner_screen_name] = owner_id_or_owner_screen_name\n end\n options\n end",
"def created_by; User.get(self.created_by_id) || Lead.get(self.created_by_id) || Account.get(self.created_by_id) || Contact.get(self.created_by_id) || Opportunity.get(self.created_by_id); end",
"def created_by; User.get(self.created_by_id) || Lead.get(self.created_by_id) || Account.get(self.created_by_id) || Contact.get(self.created_by_id) || Opportunity.get(self.created_by_id); end",
"def created_by; User.get(self.created_by_id) || Lead.get(self.created_by_id) || Account.get(self.created_by_id) || Contact.get(self.created_by_id) || Opportunity.get(self.created_by_id); end",
"def created_by; User.get(self.created_by_id) || Lead.get(self.created_by_id) || Account.get(self.created_by_id) || Contact.get(self.created_by_id) || Opportunity.get(self.created_by_id); end",
"def created_by; User.get(self.created_by_id) || Lead.get(self.created_by_id) || Account.get(self.created_by_id) || Contact.get(self.created_by_id) || Opportunity.get(self.created_by_id); end",
"def user_on_same_console\n if user.nil? || trade.nil? || user.console != trade.user.console\n errors.add(:trade_id, \"Trade must be for the same console\")\n end\n end",
"def merge_user_id( from_user )\n\t\t\tself.rpx_identifiers << from_user.rpx_identifiers\t\n\t\t\tfrom_user.reload\n\t\tend",
"def user_provider=(_arg0); end",
"def merge_target_into_options!(user_id_or_username, options={})\n case user_id_or_username\n when Fixnum\n options[:target] = user_id_or_username\n when String\n options[:\"target:username\"] = user_id_or_username\n end\n options\n end",
"def check_for_localhome(user,system_user)\n host = 'ovid02.u.washington.edu'\n Net::SSH.start(host,system_user, {auth_methods: %w( publickey )}) do |ssh|\n output = ssh.exec!(\"cpw -poh #{user}\")\n if output =~ /Unknown/\n return \"No MySQL Localhome Set for #{user}\".red\n else\n return \"Localhome for #{user}: #{output}\"\n end\n end\n end",
"def retuser\n @retain_user_connection_parameters.retuser\n end",
"def system_user\n # By convention, the first user is always the system user.\n User.find_by_id(1)\n end",
"def get_system_user\n User.where('first_name = ? AND last_name = ?', 'system', 'user').first\n end",
"def org_swap?\n @user.can_super_admin?\n end",
"def login_user(user)\n case user\n when /ubuntu/\n 'ubuntu'\nwhen /debian/\n 'root'\nwhen /amazon/\n 'ec2-user'\nwhen /rhel/,/centos/,/redhat/\n 'centos'\nend\nend",
"def ssh_user(arg=nil)\n set_or_return(:ssh_user, arg, :kind_of => String)\n end",
"def package_user(val = NULL)\n if null?(val)\n @package_user || 'root'\n else\n @package_user = val\n end\n end",
"def merge_user_into_options!(user_id_or_screen_name, options={})\n case user_id_or_screen_name\n when Fixnum\n options[:user_id] = user_id_or_screen_name\n when String\n options[:screen_name] = user_id_or_screen_name\n end\n options\n end",
"def unix_uid=(_arg0); end",
"def user_id\n raise \"Implement in Client or Advocate\"\n end",
"def should_merge_accounts\n @into_user = current_user\n redirect_to sign_out_user_path unless @other_user = User.find_by_id(session[:user_to_merge_in_id])\n end",
"def package_user(val = NULL)\n if null?(val)\n @package_user || \"root\"\n else\n @package_user = val\n end\n end",
"def sys_user_params\n params.require(:sys_user).permit(:login_name, :name)\n end",
"def autonomous_system_organization; end",
"def autonomous_system_organization; end",
"def autonomous_system_organization; end",
"def add user, pin = nil\n command = aqhbci <<-CMD\n adduser \\\n --tokentype=#{user.tokentype} \\\n --context=#{user.context} \\\n --bank=#{user.bank} \\\n --user=#{user.userid} \\\n --server=#{user.server} \\\n --username=#{user.name} \\\n --hbciversion=#{user.hbciversion}\n CMD\n stdin, stdout, stderr, wait_thr = Open3.popen3(command.strip)\n success = wait_thr.value.success?\n\n if pin && success\n with_secure_pin user, pin do |f|\n sysid_command = aqhbci(\"getsysid --user=#{user.userid}\", \"--pinfile=#{f.path.strip}\").strip\n stdin, stdout, stderr, wait_thr = Open3.popen3(sysid_command)\n wait_thr.join\n success = success && wait_thr.value.success?\n end\n end\n return success\n end",
"def merge_conflict?; end",
"def edit_or_create_user(struct)\n struct.remapkeys!\n if struct.has_key? :user and struct.has_key? :pass\n rt = RT_Client.new(:user => struct[:user], :pass => struct[:pass])\n struct.delete(:user)\n struct.delete(:pass)\n else\n rt = RT_Client.new\n end\n val = rt.edit_or_create_user(struct)\n rt = nil\n val\n end",
"def merge_user_into_options!(user_id_or_username, options={})\n case user_id_or_username\n when Fixnum\n options[:user] = user_id_or_username\n when String\n options[:\"user:username\"] = user_id_or_username\n end\n options\n end",
"def current_user_for_segmentation\n User.first_or_create!(first_name: \"John\", last_name: \"Doe\")\n end",
"def add_user_rights_row\n @organization = Organization.find(params[:organization_id])\n @new_ur_identity = Identity.find_or_create(params[:new_ur_identity_id])\n @user_rights = user_rights(@organization.id)\n end",
"def user_id=(value)\n if value == @defaults['userId']\n @values.delete 'userId' if @values.key? 'userId'\n else\n @values['userId'] = value\n end\n end",
"def new_user?(seminar,user)\n user = $app_ids[seminar][user].to_s\n user[0] == 48\nend",
"def user_check(resource)\n return true unless self[:name] == \"user\"\n return true unless self[:unless_system_user]\n\n resource[:audit] = :uid\n\n return false if system_users.include?(resource[:name])\n\n current_values = resource.retrieve_resource\n current_values[resource.property(:uid)] > self[:unless_system_user]\n end",
"def system_user\n User.find_by_name(\"DanbooruBot\") || User.admins.first\n end",
"def user\n super || create_user\n end",
"def running_as_normaluser?\n\tKitchenplan::Log.debug \"#{self.class} : Running as superuser? UID = #{Process.uid} != 0?\"\n\tProcess.uid != 0\n end",
"def running_as_normaluser?\n\tKitchenplan::Log.debug \"#{self.class} : Running as superuser? UID = #{Process.uid} != 0?\"\n\tProcess.uid != 0\n end",
"def other_user_param\n \t\tparams.permit(:other_user_id)\n \tend",
"def set_UserID2(value)\n set_input(\"UserID2\", value)\n end",
"def created_by=(user)\n write_attribute(:created_by, user.id) unless user.nil? or user.id.nil?\n end",
"def ensure_user_group\n if new_resource.gid.is_a?(String)\n group_name = new_resource.gid\n Etc.getgrnam(new_resource.gid).gid\n else\n group_name = new_resource.username\n Etc.getgrgid(new_resource.gid).gid\n end\nrescue ArgumentError\n Chef::Log.info(\n \"user_account[#{new_resource.username}] creating group #{group_name}\")\n group group_name do\n gid new_resource.gid if new_resource.gid.is_a?(Integer)\n end.run_action(:create)\n Etc.getgrnam(group_name).gid\nend",
"def merge\n @user = User.find(params[:id])\n authorize @user\n\n if params[:id] == params[:merge_id]\n flash.now[:alert] = _(\"You attempted to merge 2 accounts with the same email address.\n Please merge with a different email address.\")\n else\n merge_accounts\n end\n\n # After merge attempt get departments and plans\n @departments = @user.org.departments.order(:name)\n @plans = Plan.active(@user).page(1)\n\n render :edit\n end",
"def auth_as_owner\n unless current_user == @vm.lab_user.user or @admin\n respond_to do |format|\n #You don't belong here. Go away.\n format.html { redirect_to root_path , :notice=> 'Sorry, this machine does not belong to you!' }\n format.json { render :json=> {:success => false , :message=> 'Sorry, this machine does not belong to you!'} }\n end\n end\n end",
"def set_Upsert(value)\n set_input(\"Upsert\", value)\n end",
"def modified_by(user)\n #none by default\n end"
] |
[
"0.61191076",
"0.5459569",
"0.5366964",
"0.53619355",
"0.5327483",
"0.52746403",
"0.5234136",
"0.5216786",
"0.5106798",
"0.5106798",
"0.5089168",
"0.5080714",
"0.507201",
"0.5056244",
"0.5054719",
"0.50507313",
"0.49865487",
"0.49532232",
"0.48932272",
"0.48855317",
"0.48846143",
"0.48753554",
"0.4863012",
"0.4856775",
"0.485627",
"0.48529914",
"0.48371693",
"0.483289",
"0.48235035",
"0.48142764",
"0.48050192",
"0.47959396",
"0.47959396",
"0.47624448",
"0.47611856",
"0.47599313",
"0.47485206",
"0.47152218",
"0.47143224",
"0.47078925",
"0.47058606",
"0.46985102",
"0.46954352",
"0.46928883",
"0.468573",
"0.46834162",
"0.46729374",
"0.46646985",
"0.46640807",
"0.46609682",
"0.46600065",
"0.4652163",
"0.46468994",
"0.4643553",
"0.4642169",
"0.4642169",
"0.4642169",
"0.4642169",
"0.4642169",
"0.46352813",
"0.4624548",
"0.46228734",
"0.46138653",
"0.46079585",
"0.4600624",
"0.4598207",
"0.45969644",
"0.45947048",
"0.45932496",
"0.45928556",
"0.45879215",
"0.45870382",
"0.45807043",
"0.45806825",
"0.45784664",
"0.45732638",
"0.4569636",
"0.45696348",
"0.45696348",
"0.45696348",
"0.45663896",
"0.45662627",
"0.45628655",
"0.45603642",
"0.45596272",
"0.4554353",
"0.45509067",
"0.4547683",
"0.45445684",
"0.45440167",
"0.45433465",
"0.45428973",
"0.45428973",
"0.45410475",
"0.45360935",
"0.45332167",
"0.45310628",
"0.45250338",
"0.4516734",
"0.45161018",
"0.45104977"
] |
0.0
|
-1
|
Use OVERRIDING SYSTEM VALUE for INSERT statements, so that identity columns always use the user supplied value, and an error is not raised for identity columns that are GENERATED ALWAYS.
|
def overriding_system_value
clone(:override=>:system)
end
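
A minimal sketch of the behavior the query describes (not part of the record), assuming a hypothetical `items` table whose `id` column is GENERATED ALWAYS AS IDENTITY and a Sequel `DB` handle:

# Without the clause, PostgreSQL rejects an explicit value for the column:
DB[:items].insert(id: 100, name: 'a')
# => Sequel::DatabaseError: cannot insert a non-DEFAULT value into column "id"

# With it, the user supplied value is accepted:
DB[:items].overriding_system_value.insert(id: 100, name: 'a')
# INSERT INTO "items" ("id", "name") OVERRIDING SYSTEM VALUE VALUES (100, 'a')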
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n @connection.do(enable_identity_insert(@iiTable, true))\n @iiEnabled = true\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n super\n\n exec_query('SELECT @@IDENTITY AS id')\n end",
"def insert_override_sql(sql)\n case opts[:override]\n when :system\n sql << \" OVERRIDING SYSTEM VALUE\"\n when :user\n sql << \" OVERRIDING USER VALUE\"\n end\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}>\") if @trace\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @logger.unknown(\"@iiTable=#{@iiTable}\") if @trace\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}\") if @trace\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n# rescue Exception => e\n# raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def generate_user_id\n # ap(generate_user_id: {})\n @user_data.insert({}).to_s\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n execute(sql, name)\n id_value\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n table = sql.split(\" \", 4)[2]\n super || last_insert_id(table, sequence_name || default_sequence_name(table, pk))\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:\r\n execute(sql, name)\r\n identity = last_inserted_id(nil)\r\n retval = id_value if retval == 0\r\n return retval\r\n end",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def user_id_not_changed\n if user_id_changed? && self.persisted?\n errors.add(:user_id, \"Change of user_id not allowed!\")\n end\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def before_create_save(record)\n do_save_logic(record)\n record.usr_id = session[:usr_id]\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def before_create_save(record)\n record.usr_id = session[:usr_id]\n end",
"def before_create_save(record)\n record.usr_id = session[:usr_id]\n end",
"def before_create_save(record)\n record.usr_id = session[:usr_id]\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def column_definition_default_sql(sql, column)\n super\n if !column[:serial] && !['smallserial', 'serial', 'bigserial'].include?(column[:type].to_s) && !column[:default]\n if (identity = column[:identity])\n sql << \" GENERATED \"\n sql << (identity == :always ? \"ALWAYS\" : \"BY DEFAULT\")\n sql << \" AS IDENTITY\"\n elsif (generated = column[:generated_always_as])\n sql << \" GENERATED ALWAYS AS (#{literal(generated)}) STORED\"\n end\n end\n end",
"def execute_insert(sql, opts=OPTS)\n execute(sql, opts.merge(:meth=>:autoid))\n end",
"def user_id=(value)\n if value == @defaults['userId']\n @values.delete 'userId' if @values.key? 'userId'\n else\n @values['userId'] = value\n end\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n identity = select_value(\"SELECT scope_identity()\")\n if identity.class == System::DBNull\n nil\n else\n System::Convert.to_int32(identity)\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def with_identity_insert_enabled(table_name)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def set_default_exuid\n self.exuid ||=\n Base32.encode(SecureRandom.random_bytes(16)).downcase.sub(/=*$/, '')\n end",
"def with_identity_insert_enabled(table_name, &block)\n set_identity_insert(table_name, true)\n yield\n ensure\n set_identity_insert(table_name, false)\n end",
"def user_column\n IdMethods::USER_COLUMN\n end",
"def insert_statement(model, properties, serial)\n statement = \"\"\n # Check if there is a serial property being set directly\n require_identity_insert = !properties.empty? && properties.any? { |property| property.serial? }\n set_identity_insert(model, statement, true) if require_identity_insert\n statement << super\n set_identity_insert(model, statement, false) if require_identity_insert\n statement\n end",
"def set_creator_id_if_appropriate\n if @record.respond_to? :creator_id=\n if current_user.nil?\n log \"Warning: @#{@record.base_model}.creator_id isn't being set, since current_user was nil.\"\n else\n @record.creator_id = current_user.id\n end\n end\n end",
"def identity_create\n # Potential threat of overlap\n identity = Identity.create(uid:rand(100000000..9999999999), provider: 'registration')\n identity.user_id = resource.id\n identity.name = params['user']['name'] #Looks very ugly\n identity.email = resource.email\n identity.save\n end",
"def default_value_noninteractive\n case @generate_option\n when :never_generate, :no_generate_as_default, :generate_as_default\n value = nil\n when :generate_no_query\n password = Simp::Cli::Utils.generate_password\n value = encrypt(password)\n end\n value\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n insert_sql(sql, name, pk, id_value, sequence_name)\n end",
"def identity=(v)\n @identity = v\n end",
"def generate_identity\n handle_name ||= username if respond_to?(:username)\n handle_name ||= short_name if respond_to?(:short_name)\n handle_name ||= name.parameterize(\"_\").gsub(\"-\",\"_\") if respond_to?(:name)\n\n self.handle = Handle.build_unique(self, handle_name)\n self.handle.identifiable = self\n\n self[identity_field] = handle.name\n instance_variable_set(:\"@#{identity_field}\", handle.name)\n end",
"def created_by=(user)\n write_attribute(:created_by, user.id) unless user.nil? or user.id.nil?\n end",
"def insert_sequenced(row)\n sql = row.type.insert_sql_minus_key\n vals = row.field_values_minus_key\n#$stderr.puts sql\n#$stderr.puts vals.inspect\n\n db.do(sql, *vals)\n insert_id = db.select_one(row.type.get_insert_id_sql)[0]\n row.send(row.type.primary_key.setter_name, insert_id)\n row.reset_changed\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n exec_insert(to_sql(arel), name, binds)\n retval = last_inserted_id(nil)\n retval = id_value if retval == 0\n return retval\n end",
"def user_id; 1; end",
"def fill_mandatory_attributes(record, new_record)\n record[:usr_id] = $DB[\"SELECT nextval('users_usr_id_seq') as id\"].first[:id] if new_record\n record[:usr_screen_name] ||= \"userX#{$cntr+=1}\"\n\n if new_record\n [:usr_firstname, :usr_lastname].each do |col|\n record[col] = \"tmp_placeholder\"\n LOGGER.warn \"No #{col} for existing record: #{record[:usr_id]}\"\n end\n end\nend",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Otherwise, insert then grab last_insert_id.\n if insert_id = super\n insert_id\n else\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n last_insert_id(table, sequence_name)\n end\n end\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def last_insert_id(conn, opts={})\n nil\n end",
"def last_insert_id(conn, opts={})\n nil\n end",
"def insert_statement(model, properties, identity_field)\n statement = \"INSERT INTO #{quote_name(model.storage_name(name))} \"\n\n if supports_default_values? && properties.empty?\n statement << 'DEFAULT VALUES'\n else\n statement << <<-SQL.compress_lines\n (#{properties.map { |property| quote_name(property.field) }.join(', ')})\n VALUES\n (#{(['?'] * properties.size).join(', ')})\n SQL\n end\n\n if supports_returning? && identity_field\n statement << \" RETURNING #{quote_name(identity_field.field)}\"\n end\n\n statement\n end",
"def insert_user(con, email_address)\n result = con.exec \"SELECT MAX(id) FROM users\"\n p result.values\n id = result.values[0][0].to_i + 1\n con.exec \"INSERT INTO users VALUES(#{id},'#{email_address}', 0)\"\nend",
"def last_insert_id\n @connection.identity_val_local\n end",
"def identity=(value)\n @identity = value\n end",
"def quote_default_value(value, column)\n if column.type == :uuid && value =~ /\\(\\)/\n value\n else\n quote(value)\n end\n end",
"def insert(*args)\n r = super\n if s = opts[:sequence]\n with_sql(\"SELECT #{literal(s)}.currval FROM dual\").single_value.to_i\n else\n r\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n # Extract the table from the insert sql. Yuck.\n table = sql.split(\" \", 4)[2].gsub('\"', '')\n\n # Try an insert with 'returning id' if available (PG >= 8.2)\n if supports_insert_with_returning? && id_value.nil?\n pk, sequence_name = *pk_and_sequence_for(table) unless pk\n if pk\n sql = substitute_binds(sql, binds)\n id_value = select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n clear_query_cache #FIXME: Why now?\n return id_value\n end\n end\n\n # Otherwise, plain insert\n execute(sql, name, binds)\n\n # Don't need to look up id_value if we already have it.\n # (and can't in case of non-sequence PK)\n unless id_value\n # If neither pk nor sequence name is given, look them up.\n unless pk || sequence_name\n pk, sequence_name = *pk_and_sequence_for(table)\n end\n\n # If a pk is given, fallback to default sequence name.\n # Don't fetch last insert id for a table without a pk.\n if pk && sequence_name ||= default_sequence_name(table, pk)\n id_value = last_insert_id(table, sequence_name)\n end\n end\n id_value\n end",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def user_id\n @values.fetch('userId') { \n @values['userId'] = nil\n }\n end",
"def set_default_exuid\n self.exuid ||=\n SecureRandom.random_bytes(8).unpack('Q').first & 0x7fffffffffffffff\n end",
"def null_or_value(value)\n return \"NULL\" if value.empty?\n # Escape any single quotes to encure values returned do not not cause\n # issues with the SQL insert statement\n return \"'#{value.gsub(\"'\", \"\\\\\\\\'\")}'\"\nend",
"def insert_id\n @insert_id\n end",
"def create_record(attr = nil, force_id: false, no_raise: false, **)\n # noinspection RubyScope, RubyMismatchedReturnType\n super do |attr|\n unless administrator?\n org = current_org&.id or raise \"no org for #{current_user}\"\n attr[:org_id] = org\n end\n attr[:org_id] = nil if attr.key?(:org_id) && (attr[:org_id].to_i == 0)\n end\n end",
"def user_uid=(uid=\"\")\n if uid != \"\"\n write_attribute(:user_uid, uid)\n end\n end",
"def user_side_create(user)\n user_data = user.for_db\n row_data = map_to_row!(user_data)\n execute_sql(:create, :user) { table.insert(row_data) }\n end",
"def store_user_id(user)\n\t\t@id = user\n\tend",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end",
"def insert_pk\n if (f = opts[:from]) && !f.empty?\n case t = f.first\n when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier\n if pk = db.primary_key(t)\n pk\n end\n end\n end\n end",
"def insert_default_values_sql\n \"INSERT INTO #{source_list(@opts[:from])} DEFAULT VALUES\"\n end",
"def property_schema_statement(schema)\n statement = super\n\n if schema.has_key?(:sequence_name)\n statement << \" DEFAULT nextval('#{schema[:sequence_name]}') NOT NULL\"\n end\n\n statement\n end",
"def _insert\n return super if model.cti_tables.length == 1\n model.cti_models.each do |m|\n v = {}\n m.cti_table_columns.each{|c| v[c] = @values[c] if @values.include?(c)}\n ds = use_server(m.cti_instance_dataset)\n if ds.supports_insert_select? && (h = ds.insert_select(v))\n @values.merge!(h)\n else\n nid = ds.insert(v)\n @values[primary_key] ||= nid\n end\n end\n db.dataset.supports_insert_select? ? nil : @values[primary_key]\n end",
"def new_record(attr = nil, force_id: false, **)\n # noinspection RubyScope, RubyMismatchedReturnType\n super do |attr|\n unless administrator?\n org = current_org&.id or raise \"no org for #{current_user}\"\n attr[:org_id] = org\n end\n attr[:org_id] = 0 if attr.key?(:org_id) && attr[:org_id].nil?\n end\n end",
"def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n sql, binds = sql_for_insert(to_sql(arel, binds), pk, id_value, sequence_name, binds)\n value = exec_insert(sql, name, binds)\n id_value\n end",
"def create_new_identity(user, auth)\n Identity.create! do |id|\n id.provider = auth['provider']\n id.uid = auth['uid']\n id.user = user\n end\n user\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def genrateuid\n\n res1=@con.query(\"select uid from user order by uid desc limit 1\")\n row10=res1.fetch_row\n if row10.nil?\n @id=100\n \n else\n no=row10[0].to_i\n @id=no+1\n end\n\n res2=@con.prepare(\"insert into user values(?,?)\")\n res2.execute(@id,@name)\n end",
"def save\n super if self.user_id.present?\n # Else do nothing (dont call save)\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n Integer(select_value(\"SELECT currval('#{sequence_name}')\"))\n end",
"def missing_primary_key(source_row:, node_id:)\n # nothing\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end"
] |
[
"0.6686499",
"0.6655386",
"0.6655386",
"0.63866425",
"0.60911673",
"0.6082206",
"0.5972562",
"0.58565605",
"0.58565605",
"0.5833101",
"0.58189833",
"0.5700909",
"0.5526677",
"0.5524249",
"0.5490326",
"0.5410405",
"0.53914815",
"0.53832036",
"0.53686273",
"0.53593254",
"0.53593254",
"0.53593254",
"0.53498924",
"0.53457886",
"0.5322408",
"0.5322114",
"0.52796453",
"0.52762103",
"0.5271585",
"0.5271585",
"0.5270409",
"0.5265128",
"0.525859",
"0.52510446",
"0.5226385",
"0.522211",
"0.52028006",
"0.5192179",
"0.5187836",
"0.51816815",
"0.5176191",
"0.5174001",
"0.51669383",
"0.51669383",
"0.5163403",
"0.5151025",
"0.51268506",
"0.5107382",
"0.5107022",
"0.5107022",
"0.50998497",
"0.50998497",
"0.50938314",
"0.50919914",
"0.5038953",
"0.50359595",
"0.50329316",
"0.5026983",
"0.5009135",
"0.49999624",
"0.49957374",
"0.4973039",
"0.49676535",
"0.495413",
"0.49415326",
"0.49320474",
"0.4927804",
"0.4918157",
"0.4915112",
"0.49132648",
"0.49012414",
"0.4900196",
"0.48961735",
"0.4895579",
"0.48872587",
"0.48802224",
"0.48737982",
"0.48737982",
"0.48737982",
"0.48737982",
"0.48737982",
"0.48737982",
"0.4848876",
"0.48415232",
"0.48267177",
"0.4811053",
"0.47910732",
"0.47910732",
"0.47910732",
"0.47910732",
"0.47910732",
"0.47910732",
"0.4790744",
"0.4790744",
"0.4790744",
"0.4790744",
"0.4790744",
"0.4790744",
"0.4790744",
"0.4790744",
"0.4790744"
] |
0.0
|
-1
|
Use OVERRIDING USER VALUE for INSERT statements, so that identity columns always use the sequence value instead of the user supplied value.
|
def overriding_user_value
clone(:override=>:user)
end
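# Hedged usage sketch, not part of the original method source; the
# `items` table and its columns are hypothetical:
#
#   DB[:items].overriding_user_value.insert(:id=>1, :name=>'a')
#   # INSERT INTO "items" ("id", "name") OVERRIDING USER VALUE VALUES (1, 'a')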
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n super\n\n exec_query('SELECT @@IDENTITY AS id')\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n @connection.do(enable_identity_insert(@iiTable, true))\n @iiEnabled = true\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def generate_user_id\n # ap(generate_user_id: {})\n @user_data.insert({}).to_s\n end",
"def before_create_save(record)\n record.usr_id = session[:usr_id]\n end",
"def before_create_save(record)\n record.usr_id = session[:usr_id]\n end",
"def before_create_save(record)\n record.usr_id = session[:usr_id]\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}>\") if @trace\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @logger.unknown(\"@iiTable=#{@iiTable}\") if @trace\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}\") if @trace\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n# rescue Exception => e\n# raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n table = sql.split(\" \", 4)[2]\n super || last_insert_id(table, sequence_name || default_sequence_name(table, pk))\n end",
"def before_create_save(record)\n do_save_logic(record)\n record.usr_id = session[:usr_id]\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n execute(sql, name)\n id_value\n end",
"def set_default_exuid\n self.exuid ||=\n Base32.encode(SecureRandom.random_bytes(16)).downcase.sub(/=*$/, '')\n end",
"def insert_sequenced(row)\n sql = row.type.insert_sql_minus_key\n vals = row.field_values_minus_key\n#$stderr.puts sql\n#$stderr.puts vals.inspect\n\n db.do(sql, *vals)\n insert_id = db.select_one(row.type.get_insert_id_sql)[0]\n row.send(row.type.primary_key.setter_name, insert_id)\n row.reset_changed\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n identity = select_value(\"SELECT scope_identity()\")\n if identity.class == System::DBNull\n nil\n else\n System::Convert.to_int32(identity)\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_override_sql(sql)\n case opts[:override]\n when :system\n sql << \" OVERRIDING SYSTEM VALUE\"\n when :user\n sql << \" OVERRIDING USER VALUE\"\n end\n end",
"def user_id=(value)\n if value == @defaults['userId']\n @values.delete 'userId' if @values.key? 'userId'\n else\n @values['userId'] = value\n end\n end",
"def insert(*args)\n r = super\n if s = opts[:sequence]\n with_sql(\"SELECT #{literal(s)}.currval FROM dual\").single_value.to_i\n else\n r\n end\n end",
"def identity_create\n # Potential threat of overlap\n identity = Identity.create(uid:rand(100000000..9999999999), provider: 'registration')\n identity.user_id = resource.id\n identity.name = params['user']['name'] #Looks very ugly\n identity.email = resource.email\n identity.save\n end",
"def store_user_id(user)\n\t\t@id = user\n\tend",
"def last_insert_id(table, sequence_name) #:nodoc:\n Integer(select_value(\"SELECT currval('#{sequence_name}')\"))\n end",
"def next_val_sequence(name)\n if self.class.to_s =~ /ActiveRecord::ConnectionAdapters::Mysql/\n self.insert_sql(\"INSERT INTO %s_sequence VALUES(NULL)\" % name)\n else\n # the default insert_sql is nonsense, but jdbc_mysql doesn't override it\n self.execute(\"INSERT INTO %s_sequence VALUES(NULL)\" % name)\n end\n end",
"def user_id; 1; end",
"def set_default_exuid\n self.exuid ||=\n SecureRandom.random_bytes(8).unpack('Q').first & 0x7fffffffffffffff\n end",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:\r\n execute(sql, name)\r\n identity = last_inserted_id(nil)\r\n retval = id_value if retval == 0\r\n return retval\r\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def user_id=(value)\n @user_id = value\n end",
"def genrateuid\n\n res1=@con.query(\"select uid from user order by uid desc limit 1\")\n row10=res1.fetch_row\n if row10.nil?\n @id=100\n \n else\n no=row10[0].to_i\n @id=no+1\n end\n\n res2=@con.prepare(\"insert into user values(?,?)\")\n res2.execute(@id,@name)\n end",
"def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n exec_insert(to_sql(arel), name, binds)\n retval = last_inserted_id(nil)\n retval = id_value if retval == 0\n return retval\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def set_UserID(value)\n set_input(\"UserID\", value)\n end",
"def identity=(v)\n @identity = v\n end",
"def insert_statement(model, properties, serial)\n statement = \"\"\n # Check if there is a serial property being set directly\n require_identity_insert = !properties.empty? && properties.any? { |property| property.serial? }\n set_identity_insert(model, statement, true) if require_identity_insert\n statement << super\n set_identity_insert(model, statement, false) if require_identity_insert\n statement\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def created_by=(user)\n write_attribute(:created_by, user.id) unless user.nil? or user.id.nil?\n end",
"def set_creator_id_if_appropriate\n if @record.respond_to? :creator_id=\n if current_user.nil?\n log \"Warning: @#{@record.base_model}.creator_id isn't being set, since current_user was nil.\"\n else\n @record.creator_id = current_user.id\n end\n end\n end",
"def generate_identity\n handle_name ||= username if respond_to?(:username)\n handle_name ||= short_name if respond_to?(:short_name)\n handle_name ||= name.parameterize(\"_\").gsub(\"-\",\"_\") if respond_to?(:name)\n\n self.handle = Handle.build_unique(self, handle_name)\n self.handle.identifiable = self\n\n self[identity_field] = handle.name\n instance_variable_set(:\"@#{identity_field}\", handle.name)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def set_UserId(value)\n set_input(\"UserId\", value)\n end",
"def insert_user(con, email_address)\n result = con.exec \"SELECT MAX(id) FROM users\"\n p result.values\n id = result.values[0][0].to_i + 1\n con.exec \"INSERT INTO users VALUES(#{id},'#{email_address}', 0)\"\nend",
"def user_uid=(uid=\"\")\n if uid != \"\"\n write_attribute(:user_uid, uid)\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n insert_sql(sql, name, pk, id_value, sequence_name)\n end"
] |
[
"0.641532",
"0.641532",
"0.6394933",
"0.6344944",
"0.61432767",
"0.5999143",
"0.5999143",
"0.5999143",
"0.59119934",
"0.5821073",
"0.580872",
"0.5805352",
"0.5773662",
"0.57513803",
"0.57405144",
"0.57136685",
"0.5676146",
"0.5676146",
"0.5662054",
"0.5639374",
"0.56253314",
"0.5566092",
"0.55425954",
"0.5502789",
"0.5496775",
"0.54816526",
"0.54748553",
"0.5442147",
"0.5427686",
"0.5398749",
"0.5398749",
"0.5398749",
"0.5398749",
"0.5398749",
"0.5398749",
"0.53512204",
"0.53490394",
"0.53161263",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316059",
"0.5316039",
"0.5316039",
"0.53005946",
"0.52836514",
"0.5275167",
"0.5260519",
"0.5259825",
"0.5258002",
"0.524864",
"0.524864",
"0.5248288",
"0.5248288",
"0.5248288",
"0.5248288",
"0.5248288",
"0.5248288",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.5248108",
"0.52431476",
"0.5234618",
"0.5212702"
] |
0.0
|
-1
|
PostgreSQL supports using the WITH clause in subqueries whenever it supports using WITH at all (i.e., on PostgreSQL 8.4+).
|
def supports_cte_in_subqueries?
supports_cte?
end
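# Hedged sketch of what this capability gates (table names hypothetical):
# when it returns true, a dataset carrying a WITH clause can be embedded
# directly as a subquery instead of having its CTE hoisted to the outer
# query (compare hoist_cte? among the negatives below):
#
#   cheap = DB[:cheap].with(:cheap, DB[:items].where{price < 10})
#   DB[:orders].where(:item_id=>cheap.select(:id))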
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def pipe_cte_with!(subquery)\n return self unless subquery.try(:with_values?)\n\n # Add subquery CTE's to the parents query stack. (READ THE SPECIAL NOTE ABOVE!)\n if @scope.with_values?\n @scope.cte.pipe_cte_with!(subquery.cte)\n else\n # Top level has no with values\n @scope.with!(subquery.cte)\n end\n\n self\n end",
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def subquery\n subqueries.\n compact.\n inject(&:merge)\n end",
"def select_with_sql_base\n opts[:with].any?{|w| w[:recursive]} ? \"WITH RECURSIVE \" : super\n end",
"def with_statement\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 17 )\n return_value = WithStatementReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n string_literal59 = nil\n clause60 = nil\n block61 = nil\n\n tree_for_string_literal59 = nil\n stream_WITH = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token WITH\" )\n stream_block = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule block\" )\n stream_clause = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule clause\" )\n begin\n # at line 371:5: 'with' clause block\n string_literal59 = match( WITH, TOKENS_FOLLOWING_WITH_IN_with_statement_2451 )\n if @state.backtracking == 0\n stream_WITH.add( string_literal59 )\n end\n @state.following.push( TOKENS_FOLLOWING_clause_IN_with_statement_2453 )\n clause60 = clause\n @state.following.pop\n if @state.backtracking == 0\n stream_clause.add( clause60.tree )\n end\n @state.following.push( TOKENS_FOLLOWING_block_IN_with_statement_2455 )\n block61 = block\n @state.following.pop\n if @state.backtracking == 0\n stream_block.add( block61.tree )\n end\n # AST Rewrite\n # elements: WITH, clause, block\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 371:25: -> ^( 'with' clause block )\n # at line 371:28: ^( 'with' clause block )\n root_1 = @adaptor.create_flat_list\n root_1 = @adaptor.become_root( stream_WITH.next_node, root_1 )\n\n @adaptor.add_child( root_1, stream_clause.next_tree )\n @adaptor.add_child( root_1, stream_block.next_tree )\n\n @adaptor.add_child( root_0, root_1 )\n\n\n\n return_value.tree = root_0\n\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 17 )\n\n end\n \n return return_value\n end",
"def with_statement\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 10 )\n return_value = WithStatementReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n\n _last = _first_0 = nil\n string_literal35 = nil\n clause36 = nil\n block37 = nil\n\n tree_for_string_literal35 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 66:5: ^( 'with' clause block )\n _save_last_1 = _last = @input.look\n _first_1 = nil\n root_1 = @adaptor.create_flat_list\n _last = @input.look\n string_literal35 = match( WITH, TOKENS_FOLLOWING_WITH_IN_with_statement_305 )\n\n tree_for_string_literal35 = @adaptor.copy_node( string_literal35 )\n\n root_1 = @adaptor.become_root( tree_for_string_literal35, root_1 )\n\n\n\n match( DOWN, nil )\n _last = @input.look\n @state.following.push( TOKENS_FOLLOWING_clause_IN_with_statement_307 )\n clause36 = clause\n @state.following.pop\n\n @adaptor.add_child( root_1, clause36.tree )\n _last = @input.look\n @state.following.push( TOKENS_FOLLOWING_block_IN_with_statement_309 )\n block37 = block\n @state.following.pop\n\n @adaptor.add_child( root_1, block37.tree )\n\n match( UP, nil )\n @adaptor.add_child( root_0, root_1 )\n _last = _save_last_1\n\n\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 10 )\n\n end\n \n return return_value\n end",
"def db_query_transform__subquery query, tmp_table=\"resultset_table\"\n \"(#{query}) as #{tmp_table}\"\n end",
"def inner_query\n self.class.\n select(\"#{SUBQUERY_TABLE_ALIAS}.*\").\n from(\"#{table_name} AS #{SUBQUERY_TABLE_ALIAS}\")\n end",
"def select_with_sql_cte(sql, cte)\n super\n select_with_sql_cte_search_cycle(sql, cte)\n end",
"def build_subselect(key, o)\n stmt = Nodes::SelectStatement.new\n core = stmt.cores.first\n core.froms = o.relation\n core.wheres = o.wheres\n core.projections = [key]\n stmt.limit = o.limit\n stmt.offset = o.offset\n stmt.orders = []\n stmt\n end",
"def build_subselect key, o\n stmt = Nodes::SelectStatement.new\n core = stmt.cores.first\n core.froms = o.relation\n core.wheres = o.wheres\n core.projections = [key]\n stmt.limit = o.limit\n stmt.orders = o.orders\n stmt\n end",
"def ct_subquery_sql(options)\n # the source query contains a variable number of \"extra\" columns\n # ones needed in the output but not involved in pivoting\n source_sql = \"SELECT ARRAY[#{sql_row_name_columns.join(', ')}],\n #{sql_crosstab_columns.join(', ')}, year, gross_quantity\n FROM (#{subquery_sql(options)}) subquery\n ORDER BY 1, #{sql_crosstab_columns.length + 2}\" # order by row_name and year\n source_sql = ActiveRecord::Base.send(:sanitize_sql_array, [source_sql, years])\n source_sql = ActiveRecord::Base.connection.quote_string(source_sql)\n # the categories query returns values by which to pivot (years)\n categories_sql = 'SELECT * FROM UNNEST(ARRAY[?])'\n categories_sql = ActiveRecord::Base.send(:sanitize_sql_array, [categories_sql, years.map(&:to_i)])\n ct_columns = [\n 'row_name TEXT[]',\n report_crosstab_columns.map.each_with_index { |c, i| \"#{sql_crosstab_columns[i]} #{crosstab_columns[c][:pg_type]}\" },\n years_columns.map { |y| \"#{y} numeric\" }\n ].flatten.join(', ')\n # a set returning query requires that output columns are specified\n <<-SQL\n SELECT * FROM CROSSTAB('#{source_sql}', '#{categories_sql}')\n AS ct(#{ct_columns})\n SQL\n end",
"def build_subselect(key, o)\n subselect = super\n\n # Materialize subquery by adding distinct\n # to work with MySQL 5.7.6 which sets optimizer_switch='derived_merge=on'\n unless has_limit_or_offset_or_orders?(subselect)\n core = subselect.cores.last\n core.set_quantifier = Arel::Nodes::Distinct.new\n end\n\n Nodes::SelectStatement.new.tap do |stmt|\n core = stmt.cores.last\n core.froms = Nodes::Grouping.new(subselect).as(\"__active_record_temp\")\n core.projections = [Arel.sql(quote_column_name(key.name))]\n end\n end",
"def subquery_sql(options)\n gross_exports_query(options)\n end",
"def test_with\n x = with_temp_buffer_string {\n insert_string(\"a\\n\")\n with(:save_excursion) {\n beginning_of_buffer\n insert_string(\"This is inserted at the beginning of buffer.\"); newline\n }\n }\n assert_equal(\"This is inserted at the beginning of buffer.\\na\\n\", x)\n end",
"def with_query\n Arel::Nodes::As.new(recursive_table, union_term.arel)\n end",
"def block_node_taken_by_with_method_with_no_normal_args\n each_backward_chained_node(node, :child_as_second_arg) do |chained_node, child_node|\n next unless chained_node.block_type?\n return nil unless child_node.children[1] == :with\n return nil if child_node.children[2]\n return chained_node\n end\n end",
"def sql_inventory_groups\n \"WITH ooc_groups AS\n (\n SELECT assg.asset_id, grp.ooc_group_id as group_id,grp.ooc_group_name as group_name,\n grp.ooc_group_type as group_type, grp.ooc_group_status as group_status\n FROM hip_ooc_asset_group_v AS assg\n JOIN hip_ooc_group_v AS grp ON grp.ooc_group_id = assg.ooc_group_id\n WHERE grp.ooc_group_status != 'deleted'\n AND grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n hc_groups as(\n SELECT assg.asset_id, grp.hc_group_id as group_id,grp.group_name,'hc cycle'as group_type ,\n grp.is_current\n FROM hip_asset_group_v AS assg\n JOIN hip_hc_group_v AS grp ON grp.hc_group_id = assg.hc_group_id\n WHERE grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n all_groups as (\n select * from ooc_groups\n union\n select * from hc_groups\n )\n SELECT assh.host_name,assh.ip_string_list, assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag,assh.hc_auto_interval_weeks,\n assh.hc_manual_interval_weeks,assh.hc_manual_flag,\n #{group_type_columns_frag}\n CASE\n WHEN assh.hc_auto_flag='y' and assh.hc_manual_flag='y' then 'Yes'\n WHEN assh.hc_auto_flag='n' and assh.hc_manual_flag='n' then 'No'\n ELSE NULL\n END AS hc_required \n FROM dim_comm_tool_asset_hist_v AS assh\n LEFT join all_groups AS g ON g.asset_id = assh.tool_asset_id\n JOIN dim_comm_os_v AS os ON os.os_id=assh.os_id\n WHERE\n assh.org_l1_id=#{org_l1_id} AND assh.org_id=#{org_id}\n AND CURRENT_TIMESTAMP BETWEEN assh.row_from_timestamp AND COALESCE(assh.row_to_timestamp, CURRENT_TIMESTAMP)\n group by assh.host_name,assh.ip_string_list,assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag, assh.hc_auto_interval_weeks, assh.hc_manual_interval_weeks,\n assh.hc_manual_flag\n ORDER BY assh.host_name\"\n end",
"def test_or_and_and_with_nested_context\n names = context_names With::Context.build(['foo', 'bar'], 'baz', 'buz') { with('boz'){ with('bum') } }\n assert_equal [[['foo', 'baz', 'buz', 'boz', 'bum']], [['bar', 'baz', 'buz', 'boz', 'bum']]], names\n end",
"def test_and_or_and_with_nested_context\n names = context_names With::Context.build('foo', ['bar', 'baz'], 'buz') { with('boz') { with('bum') } }\n assert_equal [[['foo', 'bar', 'buz', 'boz', 'bum'], ['foo', 'baz', 'buz', 'boz', 'bum']]], names\n end",
"def subquery_sql(options)\n net_imports_query(options)\n end",
"def test_nested_context\n names = context_names With::Context.build('foo'){ with('bar') { with('baz') }}\n assert_equal [[['foo', 'bar', 'baz']]], names\n end",
"def with(name, dataset, opts=OPTS)\n raise(Error, 'This dataset does not support common table expressions') unless supports_cte?\n if hoist_cte?(dataset)\n s, ds = hoist_cte(dataset)\n s.with(name, ds, opts)\n else\n clone(:with=>((@opts[:with]||EMPTY_ARRAY) + [Hash[opts].merge!(:name=>name, :dataset=>dataset)]).freeze)\n end\n end",
"def test_nesting_behavior_simple_within_method\n Fluid.let([:a, 1]) {\n assert_equal(1, Fluid.a)\n Fluid.let {\n assert_equal(1, Fluid.a)\n Fluid.let([:a, 2]) {\n assert_equal(2, Fluid.a)\n }\n assert_equal(1, Fluid.a)\n }\n assert_equal(1, Fluid.a)\n }\n end",
"def with_block(&block)\n end",
"def subquery_for(relation)\n operand = relation.operand\n subquery = dispatch(operand)\n if collapse_subquery_for?(relation)\n @from\n else\n aliased_subquery(subquery)\n end\n end",
"def inline_fragment_ast(type, with_children: true)\n selections = []\n if with_children\n # Class-based types return all fields in `.fields`\n all_fields = type.respond_to?(:all_fields) ? type.all_fields : type.fields.values\n all_fields = all_fields.sort_by(&:graphql_name)\n all_fields.each do |field|\n field_type = field.type.unwrap\n if node_field?(field) && include?(field_type)\n selections << node_field_ast(field)\n elsif connection_field?(field) && include?(field_type)\n selections << connection_field_ast(field)\n end\n end\n elsif id = type.get_field('id')\n selections << field_ast(id)\n end\n\n selections.compact!\n\n if selections.none?\n nil\n else\n GraphQL::Language::Nodes::InlineFragment.new(\n type: make_type_name_node(type.graphql_name),\n selections: selections,\n )\n end\n end",
"def with(*args, &block)\n return with_and_options(args, &block)\n end",
"def select(&block); end",
"def context(nodes, &block); end",
"def with\n yield self\n end",
"def execute_sql(my_sql)\n pg_result = ActiveRecord::Base.connection.execute(my_sql)\n\n # In this example we are just calling #to_a to convert the PG::Result to an\n # Array. PG::Result has a nice API for slicing and dicing itself so you may\n # want to to something clever instead. See\n # https://www.rubydoc.info/gems/pg/PG/Result for details.\n #\n # The important bit here is that we are copying all the data we care about\n # out of the PG::Result in preparation for later clearing the PG::Result\n results = pg_result.to_a\n\n # Calling #clear on the PG::Result is the important bit of cleanup and the\n # whole reason this method exists. See\n # https://www.rubydoc.info/gems/pg/PG/Result#clear-instance_method\n pg_result.clear\n\n yield results if block_given?\n\n results\nend",
"def select_statement(with_identifier: true)\n return [degenerate_select_fragment] if type == Dimension::TYPES[:degenerate]\n\n ss = [\"#{label_fragment} AS #{name}\"]\n ss << \"#{identifier_fragment} AS #{name}_identifier\" if with_identifier\n ss\n end",
"def lazy_select\n lazify.call(S.select)\n end",
"def run_eager\n root_operation = query.selected_operation\n root_op_type = root_operation.operation_type || \"query\"\n root_type = schema.root_type_for_operation(root_op_type)\n st = get_current_runtime_state\n st.current_object = query.root_value\n st.current_result = @response\n runtime_object = root_type.wrap(query.root_value, context)\n runtime_object = schema.sync_lazy(runtime_object)\n\n if runtime_object.nil?\n # Root .authorized? returned false.\n @response = nil\n else\n call_method_on_directives(:resolve, runtime_object, root_operation.directives) do # execute query level directives\n gathered_selections = gather_selections(runtime_object, root_type, root_operation.selections)\n # This is kind of a hack -- `gathered_selections` is an Array if any of the selections\n # require isolation during execution (because of runtime directives). In that case,\n # make a new, isolated result hash for writing the result into. (That isolated response\n # is eventually merged back into the main response)\n #\n # Otherwise, `gathered_selections` is a hash of selections which can be\n # directly evaluated and the results can be written right into the main response hash.\n tap_or_each(gathered_selections) do |selections, is_selection_array|\n if is_selection_array\n selection_response = GraphQLResultHash.new(nil, nil, false)\n final_response = @response\n else\n selection_response = @response\n final_response = nil\n end\n\n @dataloader.append_job {\n st = get_current_runtime_state\n st.current_object = query.root_value\n st.current_result = selection_response\n # This is a less-frequent case; use a fast check since it's often not there.\n if (directives = selections[:graphql_directives])\n selections.delete(:graphql_directives)\n end\n call_method_on_directives(:resolve, runtime_object, directives) do\n evaluate_selections(\n runtime_object,\n root_type,\n root_op_type == \"mutation\",\n selections,\n selection_response,\n final_response,\n nil,\n )\n end\n }\n end\n end\n end\n delete_all_interpreter_context\n nil\n end",
"def inside_t1\n yield\n end",
"def supports_lateral_subqueries?\n server_version >= 90300\n end",
"def get_statement(ast, count=1)\n result = []\n n = -1\n iters = Array.new(count) {|e| n += 2 }\n iters.each do |i|\n result << ast.children[i]\n end\n result.each {|n| n.remove_from_parent! }\n result\nend",
"def grouping_parentheses(o, collector)\n if o.expr.is_a? Nodes::SelectStatement\n collector << \"(\"\n visit o.expr, collector\n collector << \")\"\n else\n visit o.expr, collector\n end\n end",
"def merge_by_values(relation, other)\n other.cte.with_values.each do |name, expression|\n relation = if other.cte.materialized_key?(name)\n relation.with!.materialized(name => expression)\n elsif other.cte.not_materialized_key?(name)\n relation.with!.not_materialized(name => expression)\n else\n relation.with!(name => expression)\n end\n end\n\n relation\n end",
"def select(*) end",
"def XQuery(model, &block)\n XQuery::Generic.with(model, &block)\nend",
"def subquery(from_mode, to_mode, query)\n if from_mode == :none\n return query\n else\n return ['in', 'certname',\n ['extract', 'certname',\n [\"select_#{to_mode}\", query]]]\n end\n end",
"def with(value)\n yield value\n end",
"def nesting() end",
"def subquery_columns\n explicit_columns_in_subquery? ? explicit_columns : super\n end",
"def aliased_subquery(subquery)\n \"#{subquery.to_subquery} AS #{visit_identifier(subquery.name)}\"\n ensure\n reset_query_state\n end",
"def with!(*args)\n options = args.extract_options!\n args.each do |table|\n instance = table.is_a?(Class) && table < PostgreSQL::AuxiliaryStatement \\\n ? table.new(options) \\\n : PostgreSQL::AuxiliaryStatement.instantiate(table, self, options)\n\n self.auxiliary_statements_values += [instance]\n end\n\n self\n end",
"def select!(&block); end",
"def stars_working_with_ben_affleck\n MovieDatabase.execute(<<-SQL)\n SELECT\n movies.title, actors.name\n FROM\n movies\n JOIN actors ON castings.actor_id = actors.id\n JOIN castings ON castings.movie_id = movies.id\n WHERE\n castings.ord = 1\n AND \n movies.title IN\n (SELECT\n movies.title\n FROM\n movies\n JOIN actors ON castings.actor_id = actors.id\n JOIN castings ON castings.movie_id = movies.id\n WHERE\n actors.name = 'Ben Affleck'\n AND\n castings.ord != 1) \nSQL\nend",
"def context(&block); end",
"def context(&block); end",
"def within_transaction\n if use_transaction\n first.within_transaction do\n yield\n success?\n end\n else\n yield\n end\n end",
"def hoist_cte?(ds)\n ds.is_a?(Dataset) && ds.opts[:with] && !supports_cte_in_subqueries?\n end",
"def each_subexp(include_root = true, &block)\n yield self if include_root\n each do |child|\n if child.is_a?(Sexp)\n child.each_subexp(&block)\n end\n end\n end",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end",
"def all(sql, *args, into: nil, &block)\n raise ArgumentError, \"all no longer support blocks, use each instead.\" if block\n\n rows, pg_source_oid, column_info = each_without_conversion(sql, *args, into: into)\n\n result = convert_rows_to_result rows, into: into, pg_source_oid: pg_source_oid\n\n # [TODO] - resolve associations. Note that this is only possible if the type\n # is not an Array (i.e. into is nil)\n\n result.pagination_scope = sql if sql.is_a?(::Simple::SQL::Connection::Scope) && sql.paginated?\n result.column_info = column_info\n result\n end",
"def user_comments\n multi = {:query1 => \"SELECT target_id FROM connection WHERE source_id='#{@user.id}'\",\n :query2 => \"SELECT post_id FROM stream WHERE source_id IN (SELECT target_id FROM #query1)\",\n :query3 => \"SELECT post_id FROM comment WHERE post_id IN (SELECT post_id FROM #query2)\", # AND (fromid = '#{@user.id}')\",\n :query4 => \"SELECT post_id, message FROM stream WHERE post_id IN (SELECT post_id FROM #query3)\"\n }\n # query = \"SELECT post_id, message FROM stream WHERE post_id IN\n # (SELECT post_id FROM comment WHERE post_id IN \n # (SELECT post_id FROM stream WHERE source_id IN\n # (SELECT target_id FROM connection WHERE source_id='#{@user.id}')) AND \n # (fromid = '#{@user.id}'))\"\n pp @session.fql_multiquery(multi)\nend",
"def prepare_sub_query(base, settings)\n @union_all = settings.union_all if @union_all.nil?\n @sub_query ||= settings.sub_query\n @depth ||= settings.depth\n @path ||= settings.path\n\n # Collect the connection\n @connect ||= settings.connect || begin\n key = base.primary_key\n [key.to_sym, :\"parent_#{key}\"] unless key.nil?\n end\n\n raise ArgumentError, <<-MSG.squish if @sub_query.nil? && @query.is_a?(String)\n Unable to generate sub query from a string query. Please provide a `sub_query`\n property on the \"#{table_name}\" settings.\n MSG\n\n if @sub_query.nil?\n raise ArgumentError, <<-MSG.squish if @connect.blank?\n Unable to generate sub query without setting up a proper way to connect it\n with the main query. Please provide a `connect` property on the \"#{table_name}\"\n settings.\n MSG\n\n left, right = @connect.map(&:to_s)\n condition = @query.arel_table[right].eq(table[left])\n\n if @query.where_values_hash.key?(right)\n @sub_query = @query.unscope(where: right.to_sym).where(condition)\n else\n @sub_query = @query.where(condition)\n @query = @query.where(right => nil)\n end\n elsif @sub_query.respond_to?(:call)\n # Call a proc to get the real sub query\n call_args = @sub_query.try(:arity) === 0 ? [] : [OpenStruct.new(@args)]\n @sub_query = @sub_query.call(*call_args)\n end\n end",
"def add_lock!(sql, options)\n if (lock = options[:lock]) && sql =~ /\\A\\s*SELECT/mi\n # Check for and extract the :limit/:offset sub-query\n if sql =~ /\\A(\\s*SELECT t\\.\\* FROM \\()(.*)(\\) AS t WHERE t._row_num BETWEEN \\d+ AND \\d+\\s*)\\Z/m\n prefix, subselect, suffix = [$1, $2, $3]\n add_lock!(subselect, options)\n return sql.replace(prefix + subselect + suffix)\n end\n unless sql =~ SELECT_FROM_WHERE_RE\n # If you get this error, this driver probably needs to be fixed.\n raise NotImplementedError, \"Don't know how to add_lock! to SQL statement: #{sql.inspect}\"\n end\n select_clause, from_word, from_tables, where_clause = $1, $2, $3, $4\n with_clause = lock.is_a?(String) ? \" #{lock} \" : \" WITH(ROWLOCK,UPDLOCK) \"\n\n # Split the FROM clause into its constituent tables, and add the with clause after each one.\n new_from_tables = []\n scanner = StringScanner.new(from_tables)\n until scanner.eos?\n prev_pos = scanner.pos\n if scanner.scan_until(/,|(INNER\\s+JOIN|CROSS\\s+JOIN|(LEFT|RIGHT|FULL)(\\s+OUTER)?\\s+JOIN)\\s+/mi)\n join_operand = scanner.pre_match[prev_pos..-1]\n join_operator = scanner.matched\n else\n join_operand = scanner.rest\n join_operator = \"\"\n scanner.terminate\n end\n\n # At this point, we have something like:\n # join_operand == \"appointments \"\n # join_operator == \"INNER JOIN \"\n # or:\n # join_operand == \"appointment_details AS d1 ON appointments.[id] = d1.[appointment_id]\"\n # join_operator == \"\"\n if join_operand =~ /\\A(.*)(\\s+ON\\s+.*)\\Z/mi\n table_spec, on_clause = $1, $2\n else\n table_spec = join_operand\n on_clause = \"\"\n end\n\n # Add the \"WITH(ROWLOCK,UPDLOCK)\" option to the table specification\n table_spec << with_clause unless table_spec =~ /\\A\\(\\s*SELECT\\s+/mi # HACK - this parser isn't so great\n join_operand = table_spec + on_clause\n\n # So now we have something like:\n # join_operand == \"appointments WITH(ROWLOCK,UPDLOCK) \"\n # join_operator == \"INNER JOIN \"\n # or:\n # join_operand == \"appointment_details AS d1 WITH(ROWLOCK,UPDLOCK) ON appointments.[id] = d1.[appointment_id]\"\n # join_operator == \"\"\n\n new_from_tables << join_operand\n new_from_tables << join_operator\n end\n sql.replace( select_clause.to_s << from_word.to_s << new_from_tables.join << where_clause.to_s )\n end\n sql\n end",
"def lazy_select(&block)\n Enumerator.new do |y|\n self.each do |x|\n y.yield(x) if block.call(x)\n end\n end\n end",
"def with!\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 82 )\n\n type = WITH\n channel = ANTLR3::DEFAULT_CHANNEL\n\n \n # - - - - main rule block - - - -\n # at line 203:8: 'with'\n match( \"with\" )\n\n \n @state.type = type\n @state.channel = channel\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 82 )\n\n end",
"def get_query_options(&block)\n ActsAsRecursiveTree::Options::QueryOptions.from(&block)\n end",
"def test_nesting_behavior_complex_within_method\n Fluid.let([:a, 1], :b) {\n assert_equal(1, Fluid.a)\n assert_equal(nil, Fluid.b)\n Fluid.let(:a) {\n assert_equal(nil, Fluid.a)\n assert_equal(nil, Fluid.b)\n Fluid.let([:b, 'b'],\n [:a, [1, 2]]) {\n assert_equal([1,2], Fluid.a)\n assert_equal('b', Fluid.b)\n }\n assert_equal(nil, Fluid.a)\n assert_equal(nil, Fluid.b)\n }\n assert_equal(1, Fluid.a)\n assert_equal(nil, Fluid.b)\n }\n end",
"def select_sql\n return super unless l = @opts[:limit]\n o = @opts[:offset] || 0\n order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n irn = Sequel::SQL::Identifier.new(rn).qualify(dsa2)\n subselect_sql(unlimited.\n from_self(:alias=>dsa1).\n select_more(Sequel::SQL::QualifiedIdentifier.new(dsa1, WILDCARD),\n Sequel::SQL::WindowFunction.new(SQL::Function.new(:ROW_NUMBER), Sequel::SQL::Window.new(:order=>order)).as(rn)).\n from_self(:alias=>dsa2).\n select(Sequel::SQL::QualifiedIdentifier.new(dsa2, WILDCARD)).\n where((irn > o) & (irn <= l + o)))\n end",
"def process_subquery_grid(opts = {})\n klass = opts[:klass] || nil\n column_defs = opts[:column_defs] || nil\n q = opts[:q] || nil\n page = opts[:page] || DEFAULT_PAGE\n per_page = opts[:per_page] || DEFAULT_PER_PAGE\n sort_info = opts[:sort_info] || DEFAULT_SORT_INFO\n field_lookup = opts[:field_lookup] || {}\n inner_join_query = opts[:inner_join_query] || nil\n inner_where_query = opts[:inner_where_query] || nil\n inner_group_query = opts[:inner_group_query] || nil\n\n # Enforce default values\n page ||= DEFAULT_PAGE\n per_page ||= DEFAULT_PER_PAGE\n sort_info ||= DEFAULT_SORT_INFO\n\n # Parse parameters into correct format\n column_defs = parse_params(column_defs)\n page = page.to_i\n per_page = per_page.to_i\n sort_info = parse_params(sort_info)\n\n # Check for client errors\n raise \"Invalid per_page parameter. Valid values are #{VALID_PER_PAGES}\" unless (VALID_PER_PAGES).include? per_page\n sort_info ||= {}\n\n # inner query\n inner_select_query = process_select(klass, column_defs, field_lookup)\n inner_select_query += \", #{klass.table_name}.#{inner_group_query} AS #{inner_group_query}\" unless inner_group_query.blank?\n inner_query = %{\n SELECT\n #{inner_select_query}\n FROM\n #{klass.table_name}\n }\n inner_query += \" #{inner_join_query}\" unless inner_join_query.blank?\n inner_query += \" WHERE #{inner_where_query}\" unless inner_where_query.blank?\n inner_query += \" GROUP BY #{inner_group_query}\" unless inner_group_query.blank?\n\n # outer query\n outer_query = %{\n SELECT *\n FROM\n (#{inner_query}) AS internal\n }\n\n # filter & order\n filtered_query = outer_query\n where_query = process_where(column_defs, q, field_lookup, true)\n filtered_query += \" WHERE #{where_query}\" unless where_query.blank?\n order_query = process_order(sort_info, field_lookup)\n filtered_query += \" ORDER BY #{order_query}\" unless order_query.blank?\n\n # pagination\n objects = klass.paginate_by_sql(filtered_query, page: page, per_page: per_page)\n\n objects\n end",
"def recurse_result_set(result, options = {}, &block)\n return result unless block_given? \n inner_recursion = options.delete(:inner_recursion)\n result_set = inner_recursion ? result : result.dup\n \n parent_id = (options.delete(:parent_id) || result_set.first[result_set.first.parent_col_name]) rescue nil\n options[:level] ||= 0\n options[:nested] = true unless options.key?(:nested)\n \n siblings = options[:nested] ? result_set.select { |s| s.parent_id == parent_id } : result_set \n siblings.sort! {|a,b| a.send(options[:sort_on]) <=> b.send(options[:sort_on])} if options[:sort_on]\n siblings.each do |sibling|\n result_set.delete(sibling) \n block.call(sibling, options[:level])\n opts = { :parent_id => sibling.id, :level => options[:level] + 1, :inner_recursion => true, :sort_on => options[:sort_on]} \n recurse_result_set(result_set, opts, &block) if options[:nested]\n end\n result_set.each { |orphan| block.call(orphan, options[:level]) } unless inner_recursion\n end",
"def with( &blk )\n # blk[ self ]\n self.instance_eval &blk\n end",
"def within_transaction\n if use_transactions && !empty?\n first.within_transaction do\n yield\n success?\n end\n else\n yield\n end\n end",
"def with(node, provisioner)\n if node['with']\n node['with'].include?(provisioner)\n end\nend",
"def select_sql\n return super unless o = @opts[:offset]\n l = @opts[:limit]\n order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n irn = Sequel::SQL::Identifier.new(rn).qualify(dsa2)\n subselect_sql(unlimited.\n from_self(:alias=>dsa1).\n select_more(Sequel::SQL::QualifiedIdentifier.new(dsa1, WILDCARD),\n Sequel::SQL::WindowFunction.new(SQL::Function.new(:ROW_NUMBER), Sequel::SQL::Window.new(:order=>order)).as(rn)).\n from_self(:alias=>dsa2).\n select(Sequel::SQL::QualifiedIdentifier.new(dsa2, WILDCARD)).\n where(l ? ((irn > o) & (irn <= l + o)) : (irn > o))) # Leave off limit in case of limit(nil, offset)\n end",
"def with\n @with\n end",
"def to_sql\n \"\n SELECT row_to_json(fc)\n FROM ( SELECT 'FeatureCollection' AS type, array_to_json(array_agg(f)) AS features\n FROM ( SELECT 'Feature' AS type\n , ST_AsGeoJSON(subquery.geom)::json AS geometry\n , row_to_json(\n (SELECT l FROM (SELECT id, geoid) AS l)\n ) AS properties\n\n FROM (\n SELECT\n ct.id,\n ct.geom,\n ct.geoid,\n ST_Area(ST_SetSRID(geom,4326)) as d,\n ST_Area(\n ST_Intersection(\n ST_SetSRID( ST_GeomFromGeoJSON('#{ @geojson }'), 4326),\n ST_SetSRID(geom,4326)\n )\n ) as n\n FROM census_tracts_2010 AS ct\n WHERE\n ST_Intersects(\n ST_SetSRID(geom,4326),\n ST_SetSRID( ST_GeomFromGeoJSON('#{ @geojson }'), 4326)\n )\n ) subquery\n WHERE (n/d*100) >= 15\n\n\n ) AS f\n ) AS fc;\n \"\n end",
"def subgraph_with(vertex)\n vertices = Set.new\n edges = Set.new\n pending = [vertex]\n until pending.empty?\n this_node = pending.shift\n conn_edges, conn_nodes = neighbors(this_node)\n conn_nodes.each do |node|\n pending << node unless vertices.include?(node)\n end\n vertices << this_node\n edges << conn_edges\n end\n PipeGraph.new(edges, vertices)\n end",
"def in_transaction(opts = {})\n yield\n end",
"def with_connection\n ActiveRecord::Base.connection_pool.with_connection do |connection|\n connection.transaction do\n if connection.adapter_name == \"PostgreSQL\"\n connection.execute \"SET TRANSACTION READ ONLY\"\n connection.execute \"SET LOCAL statement_timeout = 100\"\n # TODO support equivalent options for other adapters (such as mysql)\n end\n\n yield connection\n end\n end\n end",
"def best_rank_subquery(group_by)\n @source.respond_to?(:project) or raise ThroughHierarchySourceError, \"#{@source} cannot be converted into a subquery\"\n subq = source.\n project(foreign_type_column, foreign_key_column, group_by, best_rank).\n where(filters).\n group(source[group_by]).\n as(best_rank_table_name)\n\n spawn(subq)\n end",
"def with_transaction\n ActiveRecord::Base.transaction { yield }\n end",
"def with_scope(scope)\n scope = set_current_scope(scope)\n result = yield scope\n set_current_scope(nil)\n result\n end",
"def sub_in(result)\n expression = result\n end",
"def with(*ags, &b)\n @expectation.with(*ags, &b)\n self\n end",
"def expand_forall_in_countall(expr, expression, problem)\n expressions = Array.new\n constraints = process_forall_statement(expr)\n constraints.each do |c|\n expressions << c.expression\n end\n expressions\n end",
"def how_many_bills\n sql = <<-SQL\n WITH bills as (\n SELECT DISTINCT guest_name\n \tFROM guest_appearances\n \tWHERE guest_name LIKE \"Bill %\"\n )\n SELECT count(guest_name)\n FROM bills;\n SQL\n puts DB[:conn].execute(sql)[0][0]\nend",
"def build_base_select\n id_node = base_table[primary_key]\n\n base_table.where(\n ids.apply_to(id_node)\n ).project(\n id_node,\n base_table[parent_key],\n Arel.sql('0').as(depth_column.to_s)\n )\n end",
"def run_tuples(query, options = {})\n GRel::Debugger.debug \"QUERYING SELECT...\"\n GRel::Debugger.debug query\n GRel::Debugger.debug \"** LIMIT #{@last_query_context.limit}\" if @last_query_context.limit\n GRel::Debugger.debug \"** OFFSET #{@last_query_context.offset}\" if @last_query_context.offset\n GRel::Debugger.debug \"----------------------\"\n args = {}\n args[:accept] = options[:accept] if options[:accept]\n args[:offset] = @last_query_context.offset if @last_query_context.offset\n args[:limit] = @last_query_context.limit if @last_query_context.limit\n @connection.query(@db_name,query, args).body\n end",
"def paren_nest; end",
"def paren_nest; end",
"def paren_nest; end",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def block_contents(node, context)\n block_given? ? yield : ss_comments(node)\n node << (child = block_child(context))\n while tok(/;/) || has_children?(child)\n block_given? ? yield : ss_comments(node)\n node << (child = block_child(context))\n end\n node\n end",
"def test_002\n target_sql = \"select shipments.number as shipment_number,\nvariants.sku as sku,\nvariants.price as price,\nvariants.weight as weight,\nvariants.height as height,\nvariants.width as width,\nvariants.depth as length,\nproducts.description as product_description,\norders.number as order_number\nfrom orders\njoin shipments on (shipments.order_id = orders.id)\njoin line_items on (line_items.order_id = orders.id)\njoin variants on (line_items.variant_id = variants.id)\njoin products on (variants.product_id = products.id)\njoin state_events on (state_events.stateful_id = orders.id and state_events.name = 'payment' and state_events.stateful_type = 'Order' and state_events.next_state in ('paid','credit_owed'))\nwhere orders.state = 'complete' and orders.shipment_state = 'ready' and state_events.created_at >= '2012-10-01' and state_events.created_at <= '2015-03-08' and shipments.warehouse_id = 28\"\n \n @sql.select do\n shipments number: 'shipment_number'\n variants :sku, :price, :weight, :height, :width, depth: 'length'\n products description: 'product_description'\n orders number: 'order_number'\n end\n \n @sql.from :orders do\n join :shipments, on: 'shipments.order_id = orders.id'\n join :line_items, on: 'line_items.order_id = orders.id'\n join :variants, on: 'line_items.variant_id = variants.id'\n join :products, on: 'variants.product_id = products.id'\n join :state_events do\n state_events stateful_id: :'orders.id',\n name: 'payment',\n stateful_type: 'Order',\n next_state: %w(paid credit_owed)\n end\n end\n\n begin_date = '2012-10-01'\n end_date = '2015-03-08'\n warehouse_id = 28\n \n @sql.where do\n orders state: 'complete', shipment_state: 'ready'\n \n con '>=' do\n state_events created_at: begin_date\n end\n con '<=' do\n state_events created_at: end_date\n end\n \n shipments warehouse_id: warehouse_id\n end\n\n assert_equal @sql.to_s, target_sql\n end",
"def select_with(entity, material, &block)\n case entity.typename\n when 'Face'\n if entity.material && entity.material.display_name == material.display_name\n yield entity # invoke the block\n # select this face's parents so you can see where the material is used\n select_parents_of entity, &block\n end\n # recursively call this method for this entity's children to fina a match\n when 'Group'\n entity.entities.each { |sub_entity| select_with sub_entity, material, &block }\n when 'ComponentInstance'\n entity.definition.entities.each { |sub_entity| select_with sub_entity, material, &block }\n end\n end",
"def subselect_sql_append(sql, ds)\n ds.clone(:append_sql=>sql, :prepared_args=>prepared_args, :bind_vars=>@opts[:bind_vars]).\n send(:to_prepared_statement, :select, nil, :extend=>prepared_statement_modules).\n prepared_sql\n end",
"def unionise *sub_queries\n sub_queries_with_parens = sub_queries.map do |i| \n \"{ #{i} }\" \n end\n\n sub_queries_with_parens.join(' UNION ')\n end",
"def my_select(&block)\n result = []\n my_each do |elem|\n result << elem if block.call(elem) == true\n end\n result\n end",
"def subquery(definition, other_definition, conditions)\n validate_definition_instance(definition)\n validate_definition_instance(other_definition)\n [conditions].flatten.each { |c| validate_node_or_attribute(c) }\n\n current_model = definition.model\n #current_table = definition.table\n current_joins = definition.joins\n\n other_table = other_definition.table\n other_model = other_definition.model\n #other_joins = other_definition.joins\n\n # build an exist subquery to apply conditions that\n # refer to another table\n\n subquery = other_definition.table\n\n # add conditions to subquery\n [conditions].flatten.each do |c|\n subquery = subquery.where(c)\n end\n\n # add joins that provide other table access to current table\n\n\n which_joins = current_joins\n join_paths_index = nil\n join_path_current_index = nil\n join_path_other_index = nil\n which_joins.each_with_index do |item, index|\n join_path_current_index = item.find_index { |j| j[:join] == current_model }\n join_path_other_index = item.find_index { |j| j[:join] == other_model }\n if !join_path_current_index.nil? && !join_path_other_index.nil?\n join_paths_index = index\n break\n end\n end\n\n first_index = [join_path_current_index, join_path_other_index].min\n last_index = [join_path_current_index, join_path_other_index].max\n relevant_joins = which_joins[join_paths_index][first_index..last_index]\n\n\n relevant_joins.each do |j|\n join_table = j[:join]\n join_condition = j[:on]\n\n # assume this is an arel_table if it doesn't respond to .arel_table\n arel_table = join_table.respond_to?(:arel_table) ? join_table.arel_table : join_table\n\n if arel_table.name == other_table.name && !join_condition.nil?\n # add join as condition if this is the main table in the subquery\n subquery = subquery.where(join_condition)\n elsif arel_table.name != other_table.name && !join_condition.nil?\n # add full join if this is not the main table in the subquery\n subquery = subquery.join(arel_table).on(join_condition)\n end\n\n end\n\n subquery.project(1).exists\n end",
"def select( & block )\n\n load_parent_state\n \n return super\n\n end"
] |
[
"0.65679926",
"0.6192909",
"0.6192909",
"0.58103573",
"0.56718576",
"0.5354886",
"0.53479433",
"0.521933",
"0.5205962",
"0.5173415",
"0.5141739",
"0.4965617",
"0.49448887",
"0.49384052",
"0.49377605",
"0.49164754",
"0.4882845",
"0.4826609",
"0.47812265",
"0.4751949",
"0.47344384",
"0.47047922",
"0.4685685",
"0.46572852",
"0.46065778",
"0.45963642",
"0.456392",
"0.45614594",
"0.45181695",
"0.4487865",
"0.44664568",
"0.44443434",
"0.4431561",
"0.44213235",
"0.44186223",
"0.44089827",
"0.43990117",
"0.43981174",
"0.43749404",
"0.43712586",
"0.43618327",
"0.43439713",
"0.43324322",
"0.43281847",
"0.4319986",
"0.43132344",
"0.4304985",
"0.43001774",
"0.42979357",
"0.42940935",
"0.4292433",
"0.4279563",
"0.4279563",
"0.42779836",
"0.4260754",
"0.425817",
"0.42544436",
"0.42543232",
"0.4252585",
"0.42429233",
"0.42395097",
"0.42391858",
"0.42298922",
"0.4226368",
"0.42250273",
"0.42207515",
"0.4211343",
"0.4210773",
"0.42071828",
"0.4204077",
"0.42018193",
"0.42018032",
"0.41976696",
"0.41952664",
"0.4193607",
"0.41915116",
"0.41908035",
"0.4169653",
"0.41567126",
"0.41565934",
"0.41495833",
"0.41444305",
"0.41392374",
"0.41356716",
"0.4126494",
"0.41228968",
"0.41193783",
"0.41193783",
"0.41193783",
"0.41182098",
"0.41182098",
"0.41182098",
"0.41177356",
"0.4113959",
"0.41131493",
"0.41131252",
"0.41117665",
"0.41074392",
"0.41040528",
"0.41031384"
] |
0.516487
|
10
|
DISTINCT ON is a PostgreSQL extension.
|
def supports_distinct_on?
true
end
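# Minimal usage sketch (hypothetical `events` table); when DISTINCT ON is
# supported, Sequel's Dataset#distinct accepts column arguments:
#
#   DB[:events].distinct(:user_id).order(:user_id, Sequel.desc(:created_at))
#   # SELECT DISTINCT ON ("user_id") * FROM "events"
#   #   ORDER BY "user_id", "created_at" DESC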
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def distinct\n with_opts(:distinct=>true)\n end",
"def merge_distinct_on\n return if other.distinct_on_values.blank?\n relation.distinct_on_values += other.distinct_on_values\n end",
"def query_all(args = {})\n query(args.merge(select: \"DISTINCT #{model.table_name}.*\"))\n end",
"def without_duplicates(bindings); end",
"def apply_distinct_on_eager_limit_strategy(ds)\n keys = predicate_key\n ds.distinct(*keys).order_prepend(*keys)\n end",
"def distinct\n result = empty_dup\n uniq_rows = rows.uniq\n uniq_rows.each do |row|\n result << row\n end\n result\n end",
"def distinct_count_sql(records)\n \"DISTINCT #{records.table_name}.#{records.primary_key}\"\n end",
"def apply_filter_by_associations_distinct_on_limit_strategy(ds)\n k = filter_by_associations_limit_key \n ds.where(k=>apply_distinct_on_eager_limit_strategy(associated_eager_dataset.select(*k)))\n end",
"def distinct(*columns)\n select(*columns) unless columns.empty?\n @conjunction.set_distinct\n nil\n end",
"def distinct(columns, order_by)\n \"DISTINCT #{columns_for_distinct(columns, order_by)}\"\n end",
"def distinct(columns, order_by)\n \"DISTINCT #{columns_for_distinct(columns, order_by)}\"\n end",
"def true_eager_graph_limit_strategy\n if associated_class.dataset.supports_ordered_distinct_on? && !offset\n :distinct_on\n else\n super\n end\n end",
"def assert_distinct_relation(primary_key_value)\n if unique_db_primary_keys.include?(primary_key_value) # Include on Set is O(1)\n # Change the InventoryCollection's :association or :arel parameter to return distinct results. The :through\n # relations can return the same record multiple times. We don't want to do SELECT DISTINCT by default, since\n # it can be very slow.\n if inventory_collection.assert_graph_integrity\n raise(\"Please update :association or :arel for #{inventory_collection} to return a DISTINCT result. \")\n else\n logger.warn(\"Please update :association or :arel for #{inventory_collection} to return a DISTINCT result. \"\\\n \" The duplicate value is being ignored.\")\n return false\n end\n else\n unique_db_primary_keys << primary_key_value\n end\n true\n end",
"def filter_daos!\n unique_daos = {}\n\n daos.keep_if do |dao|\n hash = dao.values_for(composite_key_columns).hash\n keep = (unique_daos[hash] ||= dao) == dao\n\n # unlink to be deleted dao and add a link to\n dao.unlink! replace_with: unique_daos[hash] unless keep\n\n keep\n end\n end",
"def subqueries\n [\n select_distinct_on,\n # default filters -- all scopes have them\n filter_by_subscription_or_topics,\n filter_by_start_date,\n filter_by_end_date,\n # grouping\n group_distinct_on,\n # ordering for GROUP BY\n order_distinct_on,\n ]\n end",
"def uniq\n distinct\n end",
"def assert_distinct_relation(primary_key_value)\n if unique_db_primary_keys.include?(primary_key_value) # Include on Set is O(1)\n # Change the InventoryCollection's :association or :arel parameter to return distinct results. The :through\n # relations can return the same record multiple times. We don't want to do SELECT DISTINCT by default, since\n # it can be very slow.\n unless inventory_collection.assert_graph_integrity\n logger.warn(\"Please update :association or :arel for #{inventory_collection} to return a DISTINCT result. \"\\\n \" The duplicate value is being ignored.\")\n return false\n else\n raise(\"Please update :association or :arel for #{inventory_collection} to return a DISTINCT result. \")\n end\n else\n unique_db_primary_keys << primary_key_value\n end\n true\n end",
"def tags\n Tag.joins(household_items: :moving)\n .where(movings: {id: id})\n .select('DISTINCT tags.name')\n .order('tags.name')\n end",
"def dsa\n\t\t\t\t\twhere(:reference_name => \"dsa\").select('DISTINCT value')\n\t\t\t\tend",
"def supports_count_distinct?\n true\n end",
"def distinct\n self.map('lambda{|x| [x, nil]}')\n .reduce_by_key('lambda{|x,_| x}')\n .map('lambda{|x| x[0]}')\n end",
"def distinct(field, constraints = {})\n query(constraints).distinct(field)\n end",
"def uniq(*args)\n clone(:distinct => args)\n end",
"def grouped_duplicates(collection); end",
"def secunia\n\t\t\t\t\twhere(:reference_name => \"secunia\").select('DISTINCT value')\n\t\t\t\tend",
"def fetch_distinct_values_from_a_jsonb_column(jsonb_column_name, jsonb_column_key, where_conditions_hash = {}, distinct = true)\n table_name = connection.quote_table_name(self.table_name)\n jsonb_column = connection.quote_column_name(jsonb_column_name)\n where_condition_string = ' WHERE ' unless where_conditions_hash.empty?\n where_conditions_hash.each do |k, v|\n where_column = connection.quote_column_name(k)\n value = connection.quote(v)\n where_condition_string += (\"#{where_column} = #{value}\" + \"AND\")\n end\n where_condition_string.delete_suffix!(\"AND\") unless where_conditions_hash.empty?\n jsonb_column_key = ALIASES[jsonb_column_key.to_sym]\n sql_string = \"SELECT #{distinct ? \"DISTINCT\" : \"\"} #{jsonb_column} -> '#{jsonb_column_key}' FROM #{table_name}\" + where_condition_string.to_s\n sql_query = Arel.sql(sql_string)\n result = connection.exec_query(sql_query)\n result.rows.flatten.compact.map do |str|\n new_str = str.delete_prefix('\"') if str.start_with?('\"')\n new_str = new_str.delete_suffix('\"') if new_str && new_str.end_with?('\"')\n new_str || str # A string can be unquoted, in such cases, new_str is nil.\n end\n end",
"def unique\n lambda do |rec, acc|\n acc.uniq!\n end\n end",
"def expression(expression)\n if expression.function_sql == \"DISTINCT\"\n \"#{expression.function_sql} #{expression.attribute.to_sql(self)}\" +\n (expression.alias ? \" AS #{quote_column_name(expression.alias)}\" : '')\n else\n \"#{expression.function_sql}(#{expression.attribute.to_sql(self)})\" +\n (expression.alias ? \" AS #{quote_column_name(expression.alias)}\" : \"\")\n end\n end",
"def make_uniq_by(&block)\n result = []\n self.each do |e|\n unless result.any?{|x| yield(x,e)} then result.push(e) end\n end\n return result\n end",
"def supports_count_distinct?\n false\n end",
"def activity\n PublicActivity::Activity\n .includes(:owner, :trackable, post: [:author, :images, :graetzl, :comments], meeting: [:address, :graetzl, :comments])\n .select('DISTINCT ON(trackable_id, trackable_type) *')\n .where(key: STREAM_ACTIVITY_KEYS)\n .where(\"(owner_id IN (?) AND owner_type = 'User')\n OR\n (trackable_id IN (?) AND trackable_type = 'Meeting')\n OR\n (trackable_id IN (?) AND trackable_type = 'Post')\", users.pluck(:id), meetings.pluck(:id), posts.pluck(:id))\n .order(:trackable_id, :trackable_type, created_at: :desc)\n .sort_by(&:created_at).reverse\n end",
"def values_distinct(column)\n values(column).uniq\n end",
"def distinct(columns, order_by) #:nodoc:\n return \"DISTINCT #{columns}\" if order_by.blank?\n\n # Construct a clean list of column names from the ORDER BY clause, removing\n # any ASC/DESC modifiers\n order_columns = order_by.split(',').collect { |s| s.split.first }\n order_columns.delete_if { |c| c.blank? }\n order_columns = order_columns.zip((0...order_columns.size).to_a).map { |s,i| \"#{s} AS alias_#{i}\" }\n\n # Return a DISTINCT ON() clause that's distinct on the columns we want but includes\n # all the required columns for the ORDER BY to work properly.\n sql = \"DISTINCT ON (#{columns}) #{columns}, \"\n sql << order_columns * ', '\n end",
"def combine!(check_overlap = true)\n if scope = self.aars_options[:scope]\n scope = scope.is_a?(Array) ? scope : [scope]\n select = \"DISTINCT \" + scope.map { |a| connection.quote_column_name(a) }.join(\", \")\n scopes = find(:all, :select => select)\n scopes.each { |scope| combine_with_scope!(scope.attributes, check_overlap)}\n else\n combine_with_scope!(nil, check_overlap)\n end\n end",
"def julie_andrews_stars\n MovieDatabase.execute(<<-SQL)\n SELECT DISTINCT(movies.title), lead_actors.name\n FROM actors AS julie_actors\n JOIN castings AS julie_castings on julie_actors.id = julie_castings.actor_id\n JOIN movies ON julie_castings.movie_id = movies.id\n JOIN castings AS lead_castings ON movies.id = lead_castings.movie_id\n JOIN actors AS lead_actors on lead_actors.id = lead_castings.actor_id\n WHERE julie_actors.name = 'Julie Andrews' AND lead_castings.ord = 1\nSQL\nend",
"def direct_duplicates\n\t\tself.class.unscope.where(archetype: self)\n\tend",
"def owasp\n\t\t\t\t\twhere(:reference_name => \"owasp\").select('DISTINCT value')\n\t\t\t\tend",
"def distinct(columns, order_by)\n return \"DISTINCT #{columns}\" if order_by.blank?\n\n # construct a valid DISTINCT clause, ie. one that includes the ORDER BY columns, using\n # FIRST_VALUE such that the inclusion of these columns doesn't invalidate the DISTINCT\n order_columns = order_by.split(',').map { |s| s.strip }.reject(&:blank?)\n order_columns = order_columns.zip((0...order_columns.size).to_a).map do |c, i|\n \"FIRST_VALUE(#{c.split.first}) OVER (PARTITION BY #{columns} ORDER BY #{c}) AS alias_#{i}__\"\n end\n sql = \"DISTINCT #{columns}, \"\n sql << order_columns * \", \"\n end",
"def iava\n\t\t\t\t\twhere(:reference_name => \"iava\").select('DISTINCT value')\n\t\t\t\tend",
"def uniq() end",
"def scrooge_select_sql( set )\n set.map{|a| attribute_with_table( a ) }.join( ScroogeComma )\n end",
"def query_by_duplicates\n tmp = self.clone\n tmp.list = list.select {|k,v| v.filename.count > 1}\n end",
"def request_distinct_targets\n\t\tWmapRequest.find(:all, :select => 'DISTINCT host,address,port,ssl')\n\tend",
"def distinct_answers\n Answer.select(\"DISTINCT(choice_id)\")\n end",
"def summarize_per_relation(generator)\n @from = \"#{generator.to_subquery} AS #{visit_identifier(generator.name)} NATURAL LEFT JOIN #{@from}\"\n end",
"def distinct_value\n id\n end",
"def evaluate\n distinct? ? distinct_relation : relation\n end",
"def pluck_unique(column_name, results = last_results)\n results.map {|r| r[column_name]}.uniq\nend",
"def unique_entries_by_(key) \n seen = Set.new()\n entries.select { |e|\n k = e.send(key)\n seen.add?(k)\n }.sort{|a, b| a.range.low <=> b.range.low }\n end",
"def rhsa\n\t\t\t\t\twhere(:reference_name => \"rhsa\").select('DISTINCT value')\n\t\t\t\tend",
"def _select_map_single\n rows = []\n clone(:_sequel_pg_type=>:first).fetch_rows(sql){|s| rows << s}\n rows\n end",
"def predicates; distinct_predicates.map(&:predicate).compact; end",
"def request_distinct_targets\n\t\tRequest.find(:all, :select => 'DISTINCT host,port,ssl')\n\tend",
"def getUntappdUnique\n db.execute(\"SELECT name FROM #{@untappdTable} GROUP BY name\")\n end",
"def julie_andrews_stars\n MovieDatabase.execute(<<-SQL)\n SELECT\n m.title, a.name\n FROM\n movie m \n JOIN\n casting c ON m.id = c.movieid\n JOIN\n actor a ON a.id = c.actorid\n WHERE\n c.ord = 1 AND c.movieid in (SELECT\n DISTINCT(m.id)\n FROM\n movie m \n JOIN\n casting c ON m.id = c.movieid\n JOIN\n actor a ON a.id = c.actorid\n WHERE\n a.name = 'Julie Andrews')\n ORDER BY\n m.title;\nSQL\nend",
"def query\n <<-SPARQL\n SELECT DISTINCT ?item ?vglistId WHERE {\n ?item wdt:P8351 ?vglistId.\n }\n SPARQL\nend",
"def uniq_by\n clean = []\n self.collect{|x| yield(x)}.uniq.each do |x|\n clean << self.select{|y| yield(y) == x}.last\n end\n clean\n end",
"def uniq!() end",
"def to_group_sql\n case\n when is_many?, is_string?, ThinkingSphinx.use_group_by_shortcut?\n nil\n else\n @columns.collect { |column|\n column_with_prefix(column)\n }\n end\n end",
"def index\n @submissions = Field.find_by_sql(\"SELECT DISTINCT ON (unique_id) unique_id, form_id , updated_at FROM Fields WHERE user_id= #{current_user.id}\")\n end",
"def latest_answers\n unscope(:order).select('DISTINCT ON (question_id) *').order(:question_id, created_at: :desc)\n end",
"def latest_answers\n unscope(:order).select('DISTINCT ON (question_id) *').order(:question_id, created_at: :desc)\n end",
"def latest_answers\n unscope(:order).select('DISTINCT ON (question_id) *').order(:question_id, created_at: :desc)\n end",
"def build_subselect(key, o)\n subselect = super\n\n # Materialize subquery by adding distinct\n # to work with MySQL 5.7.6 which sets optimizer_switch='derived_merge=on'\n unless has_limit_or_offset_or_orders?(subselect)\n core = subselect.cores.last\n core.set_quantifier = Arel::Nodes::Distinct.new\n end\n\n Nodes::SelectStatement.new.tap do |stmt|\n core = stmt.cores.last\n core.froms = Nodes::Grouping.new(subselect).as(\"__active_record_temp\")\n core.projections = [Arel.sql(quote_column_name(key.name))]\n end\n end",
"def columns_for_distinct(columns, orders)\n # Lifted from the default Postgres implementation\n order_columns = orders.map{ |s|\n # Convert Arel node to string\n s = s.to_sql unless s.is_a?(String)\n # Remove any ASC/DESC modifiers\n s.gsub(/\\s+(ASC|DESC)\\s*(NULLS\\s+(FIRST|LAST)\\s*)?/i, '')\n }.reject(&:blank?).map.with_index { |column, i| \"#{column} AS alias_#{i}\" }\n\n [super, *order_columns].join(', ')\n end",
"def books_not_sorted_to_libraries\n ActiveRecord::Base.connection.exec_query(books_not_sorted_to_libraries_sql).collect &:values\nend",
"def primary_keys(table)\n pks = query(<<-end_sql, 'SCHEMA')\n SELECT DISTINCT attr.attname\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey)\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n pks.present? ? pks[0] : pks\n end",
"def columns_for_distinct(columns, orders) #:nodoc:\n order_columns = orders.reject(&:blank?).map{ |s|\n # Convert Arel node to string\n s = s.to_sql unless s.is_a?(String)\n # Remove any ASC/DESC modifiers\n s.gsub(/\\s+(?:ASC|DESC)\\b/i, '')\n .gsub(/\\s+NULLS\\s+(?:FIRST|LAST)\\b/i, '')\n }.reject(&:blank?).map.with_index { |column, i| \"#{column} AS alias_#{i}\" }\n\n (order_columns << super).join(', ')\n end",
"def cwe\n\t\t\t\t\twhere(:reference_name => \"cwe\").select('DISTINCT value')\n\t\t\t\tend",
"def apply_correlated_subquery_limit_strategy(ds)\n table = ds.first_source_table\n table_alias = ds.first_source_alias\n primary_key = associated_class.primary_key\n key = self[:key]\n cs_alias = :t1\n cs = associated_dataset.\n from(Sequel.as(table, :t1)).\n select(*qualify(cs_alias, primary_key)).\n where(Array(qualify(cs_alias, key)).zip(Array(qualify(table_alias, key)))).\n limit(*limit_and_offset)\n ds.where(qualify(table_alias, primary_key)=>cs)\n end",
"def price_like_shiny_mouse_sub\n # There are multiple 'Shiny Mouse' toys that all have different prices.\n # Your goal is to list all names and prices of the toys with the same prices \n # as the different 'Shiny Mouse' toys. \n\n # Exclude the 'Shiny Mouse' toy from your results.\n # Order your alphabetically by toy name.\n\n # USE A SUBQUERY\n execute(<<-SQL) \n SELECT DISTINCT toys.name, toys.price\n FROM cattoys\n JOIN cats ON cat_id = cats.id\n JOIN toys ON toy_id = toys.id\n WHERE toys.price IN (\n SELECT price\n FROM toys\n WHERE toys.name = 'Shiny Mouse')\n AND toys.name != 'Shiny Mouse'\n ORDER BY toys.name ASC\n SQL\nend",
"def osvdb\n\t\t\t\t\twhere(:reference_name => \"osvdb\").select('DISTINCT value')\n\t\t\t\tend",
"def playing_with(num=10)\n return [] if not page_id or page_id==0\n \nsql = <<-SQL\n select terms.*\n from terms, matches matches1,matches matches2 \n where terms.id=matches2.term_id \n and matches2.page_id=matches1.page_id \n and matches1.date_for_sorting = matches2.date_for_sorting\n and matches1.id=#{id}\n and matches2.id<>#{id}\n and matches2.status='notified'\n group by terms.text\n limit #{num}\n SQL\n terms = Term.find_by_sql(sql) \n Term.uniques(terms)\n end",
"def distinct\n @interactiondistinct = Interaction.select('location').where('DATEDIFF(CURRENT_TIMESTAMP,date_time)<33').uniq\n end",
"def columns_for_distinct(columns, orders) #:nodoc:\n order_columns = orders.reject(&:blank?).map{ |s|\n # Convert Arel node to string\n s = s.to_sql unless s.is_a?(String)\n # Remove any ASC/DESC modifiers\n s.gsub(/\\s+(?:ASC|DESC)\\b/i, '')\n .gsub(/\\s+NULLS\\s+(?:FIRST|LAST)\\b/i, '')\n }.reject(&:blank?).map.with_index { |column, i| \"#{column} AS alias_#{i}\" }\n\n [super, *order_columns].join(', ')\n end",
"def find_uniqable!(uid)\n where_uniqable(uid).take!\n end",
"def generate_array_query\n \"SELECT DISTINCT id, name FROM(SELECT id_col AS id, name_col as name \" + \\\n \"FROM #{CellMetadatum::BIGQUERY_TABLE}, UNNEST(#{self.big_query_id_column}) AS id_col WITH OFFSET id_pos, \" + \\\n \"UNNEST(#{self.big_query_name_column}) as name_col WITH OFFSET name_pos WHERE id_pos = name_pos)\"\n end",
"def duplicates(collection); end",
"def columns_for_distinct(columns, orders) # :nodoc:\n order_columns = orders.compact_blank.map { |s|\n # Convert Arel node to string\n s = visitor.compile(s) unless s.is_a?(String)\n # Remove any ASC/DESC modifiers\n s.gsub(/\\s+(?:ASC|DESC)\\b/i, \"\")\n }.compact_blank.map.with_index { |column, i| \"#{column} AS alias_#{i}\" }\n\n (order_columns << super).join(\", \")\n end",
"def unique_metric_ids\n Answer.select(:metric_id).where(company_id: left.id).uniq.pluck :metric_id\n # pluck seems dumb here, but .all isn't working (returns *all card)\nend",
"def distinct\n \"#{username}\\##{tag}\"\n end",
"def distinct\n \"#{username}\\##{tag}\"\n end",
"def _select_pk_ds\n @_select_pk_ds ||= metadata_dataset.\n from(:pg_class, :pg_attribute, :pg_index, :pg_namespace).\n where{[\n [pg_class[:oid], pg_attribute[:attrelid]],\n [pg_class[:relnamespace], pg_namespace[:oid]],\n [pg_class[:oid], pg_index[:indrelid]],\n [pg_index[:indkey].sql_subscript(0), pg_attribute[:attnum]],\n [pg_index[:indisprimary], 't']\n ]}.\n select{pg_attribute[:attname].as(:pk)}\n end",
"def scrooge_select_sql(set)\n set.collect do |name|\n \"#{@quoted_table_name}.#{connection.quote_column_name(name)}\"\n end.join(ScroogeComma)\n end",
"def uniq_by(&block)\n ret = dup\n ret.uniq_by!(&block)\n ret\n end",
"def distinct(field)\n database.command(\n :distinct => collection.name,\n :key => field.to_s,\n :query => selector\n ).documents.first['values']\n end",
"def test_join1\n\t\tr = prep(\"program foo;\\nfoo(A,B) :- bar(A,B);\\n\")\n\t\t\n\t\t# set up schema table's predicate\n\t\t## require 'ruby-debug'; debugger\n\t\tterm_schema = @terms.schema_of\n\t\tterm_pred = Predicate.new(false,@terms.name,@terms,term_schema.variables)\n\t\tterm_pred.set(r, \"global\", \"r3\", 1)\n\t\t\n\t\tsj = ScanJoin.new(r, term_pred, @preds.schema_of)\t\n\t\tts = TupleSet.new(\"pred\", *@preds.tuples)\n\t\tres = sj.evaluate(ts)\n\n\t\tassert_equal(2, res.tups.length)\n\tend",
"def test_distinct_no_duplicates\n stream = FromArray.new([1, 2, 3, 4, 5])\n collected = stream.distinct.collect\n assert(collected == collected.uniq)\n assert(collected.length == collected.uniq.length)\n end",
"def index\n @public_teams = Team.where(private: nil)\n #@companies_with_public_teams = Team.where(private: nil).group(:company_id)\n @companies_with_public_teams = Team.where(private: nil).select(\"DISTINCT ON (company_id) *\")\n end",
"def include_associations_of_dups(*associations)\n\t\t\tassociations << {except: []} if associations.empty?\n\t\t\tunless associations[0].is_a? Hash\n\t\t\t\tassociations = [{only: associations}]\n\t\t\tend\n\t\t\tassociations = build_associations_from_hash associations[0]\n\t\t\t\n\t\t\t# override each association method\n\t\t\tassociations.each do |name|\n\t\t\t\tnext unless valid_association_for_combining?(name)\n\t\t\t\tdefine_method name do |*arguments|\n\t\t\t\t\treturn super(arguments) if duplicates.empty?\n\t\t\n\t\t\t\t\tw = [] # where clause\n\t\t\t\t\ttype, key, tbl = self.get_sql_names_for_combining name, self.class.reflections[name]\n\t\t\t\n\t\t\t\t\t# do we need to specify the resource type? (if polymorphic, for example)\n\t\t\t\t\tw << \"#{tbl}.#{type}='#{self.class.name}'\" if type.present?\n\n\t\t\t\t\t# get the IDs of each duplicate, and self\n\t\t\t\t\tids = [id] + duplicates.map(&:id)\n\t\t\t\t\tids.map! { |i| \"#{tbl}.#{key}=#{i}\" }\n\t\t\t\t\tw << \"(#{ids.join(' or ')})\" if ids.size > 0\n\n\t\t\t\t\twsql = w.join ' and '\n\t\t\n\t\t\t\t\t# construct relation\n\t\t\t\t\tr = association(name).reader(arguments).unscope(where: key).where(wsql)\n\t\t\t\t\tself.class.reflections[name].options[:uniq] ? r.uniq : r\n\t\t\t\tend\n\t\t\tend\n\t\tend",
"def distinct(key)\n @collection.distinct(self, key)\n end",
"def generate_non_array_query\n \"SELECT DISTINCT #{self.big_query_id_column} AS id, #{self.big_query_name_column} AS name FROM #{CellMetadatum::BIGQUERY_TABLE}\"\n end",
"def select_statement(query)\n model = query.model\n fields = query.fields\n conditions = query.conditions\n limit = query.limit\n offset = query.offset\n order = query.order\n group_by = nil\n\n # FIXME: using a boolean for qualify does not work in some cases,\n # such as when you have a self-referrential many to many association.\n # if you don't qualfiy the columns with a unique alias, then the\n # SQL query will fail. This may mean though, that it might not\n # be enough to pass in a Property, but we may need to know the\n # table and the alias we should use for the column.\n\n qualify = query.links.any?\n\n if query.unique?\n group_by = fields.select { |p| p.kind_of?(Property) }\n end\n\n # create subquery to find all valid keys and then use these keys to retrive all other columns\n use_subquery = qualify\n\n # when we can include ROWNUM condition in main WHERE clause\n use_simple_rownum_limit = limit && (offset||0 == 0) && group_by.blank? && order.blank?\n\n unless (limit && limit > 1) || offset > 0 || qualify\n # TODO: move this method to Query, so that it walks the conditions\n # and finds an OR operator\n\n # TODO: handle cases where two or more properties need to be\n # used together to be unique\n\n # if a unique property is used, and there is no OR operator, then an ORDER\n # and LIMIT are unecessary because it should only return a single row\n if conditions.kind_of?(Query::Conditions::AndOperation) &&\n conditions.any? { |operand| operand.kind_of?(Query::Conditions::EqualToComparison) && operand.subject.respond_to?(:unique?) && operand.subject.unique? } &&\n !conditions.any? { |operand| operand.kind_of?(Query::Conditions::OrOperation) }\n order = nil\n limit = nil\n end\n end\n\n conditions_statement, bind_values = conditions_statement(conditions, qualify)\n\n statement = \"SELECT #{columns_statement(fields, qualify)}\"\n if use_subquery\n statement << \" FROM #{quote_name(model.storage_name(name))}\"\n statement << \" WHERE (#{columns_statement(model.key, qualify)}) IN\"\n statement << \" (SELECT DISTINCT #{columns_statement(model.key, qualify)}\"\n end\n statement << \" FROM #{quote_name(model.storage_name(name))}\"\n statement << join_statement(query, qualify) if qualify\n statement << \" WHERE (#{conditions_statement})\" unless conditions_statement.blank?\n if use_subquery\n statement << \")\"\n end\n if use_simple_rownum_limit\n statement << \" AND rownum <= ?\"\n bind_values << limit\n end\n statement << \" GROUP BY #{columns_statement(group_by, qualify)}\" unless group_by.blank?\n statement << \" ORDER BY #{order_statement(order, qualify)}\" unless order.blank?\n\n add_limit_offset!(statement, limit, offset, bind_values) unless use_simple_rownum_limit\n\n return statement, bind_values\n end",
"def frey_example\n # Find all the cats that are the same color as the cat named 'Freyja'.\n # Including 'Freyja' in the results.\n # DO NOT USE A SUBQUERY\n\n execute(<<-SQL)\n SELECT\n color_cats.name\n FROM\n cats AS freyja_cats\n JOIN\n cats AS color_cats ON freyja_cats.color = color_cats.color\n WHERE\n freyja_cats.name = 'Freyja';\n SQL\nend",
"def uniq\n end",
"def uniq\n end",
"def distinct(field)\n klass.collection.distinct(field, selector)\n end",
"def test_distinct_one_duplicate_element\n stream = FromArray.new([1, 1])\n collected = stream.distinct.collect\n assert(collected.length == 1)\n assert(collected == collected.uniq)\n end",
"def contact_query(select)\n \"SELECT DISTINCT #{select} \n FROM \n people, kassi_event_participations\n WHERE\n people.id = person_id AND\n person_id <> '#{id}' AND \n kassi_event_id IN (\n SELECT kassi_event_id FROM kassi_event_participations WHERE person_id = '#{id}'\n )\"\n end",
"def distinct\n \"#{username}##{discriminator}\"\n end"
] |
[
"0.67026377",
"0.64181125",
"0.6210453",
"0.6127123",
"0.6123081",
"0.58022785",
"0.57420564",
"0.5667245",
"0.5633823",
"0.556171",
"0.556171",
"0.5497905",
"0.5404564",
"0.54010093",
"0.5378762",
"0.53698623",
"0.53211236",
"0.5319152",
"0.5245783",
"0.52205163",
"0.5183464",
"0.51768816",
"0.5160418",
"0.51471984",
"0.51219904",
"0.5120336",
"0.50985473",
"0.5088349",
"0.5076269",
"0.50645465",
"0.50613064",
"0.5045601",
"0.5044231",
"0.50260925",
"0.5024067",
"0.50119674",
"0.5006353",
"0.4988395",
"0.49821264",
"0.4979613",
"0.49161562",
"0.4913216",
"0.4912145",
"0.4895111",
"0.48932245",
"0.48895457",
"0.4879915",
"0.48770013",
"0.48745885",
"0.48742068",
"0.4872799",
"0.4848953",
"0.48484385",
"0.48417953",
"0.48284292",
"0.481337",
"0.4807321",
"0.47907302",
"0.4788665",
"0.47843733",
"0.47813746",
"0.47813746",
"0.47813746",
"0.47718486",
"0.47714382",
"0.47623086",
"0.47512582",
"0.47452277",
"0.47434932",
"0.4728911",
"0.47181666",
"0.4712018",
"0.47108072",
"0.47094136",
"0.4706709",
"0.47024873",
"0.4702008",
"0.46969104",
"0.46886823",
"0.4684718",
"0.46705708",
"0.46695882",
"0.46642274",
"0.46638784",
"0.46603408",
"0.4641162",
"0.46365574",
"0.46343902",
"0.46325943",
"0.46231684",
"0.4622749",
"0.46215132",
"0.46197277",
"0.4617921",
"0.4611363",
"0.4611363",
"0.46035576",
"0.46019623",
"0.45976806",
"0.4588241"
] |
0.737513
|
0
|
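A minimal usage sketch for the supports_distinct_on? flag defined in the record above, using Sequel's public dataset API (Dataset#distinct with column arguments emits DISTINCT ON on PostgreSQL); the connection string and the albums table are hypothetical:

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical database

ds = DB[:albums]
if ds.supports_distinct_on?
  # One row per artist_id, keeping the most recent release:
  # SELECT DISTINCT ON (artist_id) * FROM albums
  #   ORDER BY artist_id, release_date DESC
  ds = ds.distinct(:artist_id).order(:artist_id, Sequel.desc(:release_date))
end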
PostgreSQL 9.5+ supports GROUP CUBE
|
def supports_group_cube?
  server_version >= 90500
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def group_cube\n raise Error, \"GROUP BY CUBE not supported on #{db.database_type}\" unless supports_group_cube?\n clone(:group_options=>:cube)\n end",
"def group_by\n end",
"def group_by\n\n end",
"def grouping\n @grouping ||= :clustered\n end",
"def sql_inventory_groups\n \"WITH ooc_groups AS\n (\n SELECT assg.asset_id, grp.ooc_group_id as group_id,grp.ooc_group_name as group_name,\n grp.ooc_group_type as group_type, grp.ooc_group_status as group_status\n FROM hip_ooc_asset_group_v AS assg\n JOIN hip_ooc_group_v AS grp ON grp.ooc_group_id = assg.ooc_group_id\n WHERE grp.ooc_group_status != 'deleted'\n AND grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n hc_groups as(\n SELECT assg.asset_id, grp.hc_group_id as group_id,grp.group_name,'hc cycle'as group_type ,\n grp.is_current\n FROM hip_asset_group_v AS assg\n JOIN hip_hc_group_v AS grp ON grp.hc_group_id = assg.hc_group_id\n WHERE grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n all_groups as (\n select * from ooc_groups\n union\n select * from hc_groups\n )\n SELECT assh.host_name,assh.ip_string_list, assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag,assh.hc_auto_interval_weeks,\n assh.hc_manual_interval_weeks,assh.hc_manual_flag,\n #{group_type_columns_frag}\n CASE\n WHEN assh.hc_auto_flag='y' and assh.hc_manual_flag='y' then 'Yes'\n WHEN assh.hc_auto_flag='n' and assh.hc_manual_flag='n' then 'No'\n ELSE NULL\n END AS hc_required \n FROM dim_comm_tool_asset_hist_v AS assh\n LEFT join all_groups AS g ON g.asset_id = assh.tool_asset_id\n JOIN dim_comm_os_v AS os ON os.os_id=assh.os_id\n WHERE\n assh.org_l1_id=#{org_l1_id} AND assh.org_id=#{org_id}\n AND CURRENT_TIMESTAMP BETWEEN assh.row_from_timestamp AND COALESCE(assh.row_to_timestamp, CURRENT_TIMESTAMP)\n group by assh.host_name,assh.ip_string_list,assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag, assh.hc_auto_interval_weeks, assh.hc_manual_interval_weeks,\n assh.hc_manual_flag\n ORDER BY assh.host_name\"\n end",
"def group_with_builtin(coll)\n puts \"Grouping by 'Make' (built-in)\"\n coll.group({\n :key => [:Make],\n :initial => {:crashes => 0},\n :reduce => 'function(doc, prev) {prev.crashes += 1;}'\n })\nend",
"def group(group: T.unsafe(nil)); end",
"def group(*) end",
"def ii_groups; end",
"def to_group_sql\n case\n when is_many?, is_string?, ThinkingSphinx.use_group_by_shortcut?\n nil\n else\n @columns.collect { |column|\n column_with_prefix(column)\n }\n end\n end",
"def test_grouping\n make_dummy_source(\"http://groupme/source1\", N::FOAFX.Goat, N::FOAFX.Bee)\n make_dummy_source(\"http://groupme/source2\", N::FOAFX.Goat)\n make_dummy_source(\"http://groupme/source3\", N::FOAFX.Bee)\n results = Source.groups_by_property(:type, [ N::FOAFX.Goat, N::FOAFX.Bee ])\n assert_equal(2, results.size)\n assert_equal(2, results[N::FOAFX.Goat].size)\n assert_equal(2, results[N::FOAFX.Bee].size)\n end",
"def group(name, args)\n raise \":data is needed as an argument to group metrics\" unless args[:data]\n raise \":subgroup is needed as an argument to group metrics\" unless args.include?(:subgroup)\n\n args[:aggregator] = \"sumSeries\" unless args[:aggregator]\n\n group_args = args.clone\n group_args[:data] = \"groupByNode(#{group_args[:data]},#{group_args[:subgroup]},\\\"#{group_args[:aggregator]}\\\")\"\n field \"#{name}_group\", group_args\n\n end",
"def test_grouping_partial_set\n\n i = CartItem.for_product(Product.find(5))\n i.save\n\n groups = CartGroup.groups_for_items([i])\n\n assert_equal(1, groups.size)\n\n assert_equal(1, groups[0].size)\n assert_equal(5, groups[0][0].product_id)\n assert(!groups[0].set_discount?)\n assert_equal(BigDecimal('14.99'), groups[0].total)\n assert_equal(BigDecimal('0.00'), groups[0].savings)\n groups[0].items_with_prices { |item, price| assert_equal(BigDecimal('14.99'), price) }\n\n end",
"def test_grouping\n\n i1 = CartItem.create(:product_id => 1)\n i2 = CartItem.create(:product_id => 4)\n i3 = CartItem.create(:product_id => 5)\n\n groups = CartGroup.groups_for_items([i1, i2, i3])\n\n assert_equal(2, groups.size)\n\n assert_equal(1, groups[0].size)\n assert_equal(1, groups[0][0].product_id)\n assert(!groups[0].set_discount?)\n assert_equal(BigDecimal('9.99'), groups[0].total)\n assert_equal(BigDecimal('0.00'), groups[0].savings)\n groups[0].items_with_prices { |item, price| assert_equal(BigDecimal('9.99'), price) }\n\n assert_equal(2, groups[1].size)\n assert_equal(4, groups[1][0].product_id)\n assert_equal(5, groups[1][1].product_id)\n assert_equal(5, groups[1].sorted_items[0].product_id)\n assert_equal(4, groups[1].sorted_items[1].product_id)\n assert(groups[1].set_discount?)\n assert_equal(BigDecimal('23.98'), groups[1].total)\n assert_equal(ApplicationHelper.round_currency(BigDecimal('6.00')), ApplicationHelper.round_currency(groups[1].savings))\n groups[1].items_with_prices do |item, price|\n assert_equal(BigDecimal('0.00'), price) if item.product.id == 5\n assert_equal(BigDecimal('23.98'), price) if item.product.id == 4\n end\n\n end",
"def ungrouped\n cached_dataset(:_ungrouped_ds){clone(:group => nil, :having => nil)}\n end",
"def grouped?\n !group_by_column.nil?\n end",
"def grouped?\n !group_by_column.nil?\n end",
"def group\n klass.collection.group(\n :key => field_list,\n :cond => selector,\n :initial => { :group => [] },\n :reduce => Javascript.group\n ).collect do |docs|\n docs[\"group\"] = docs[\"group\"].collect do |attrs|\n Mongoid::Factory.from_db(klass, attrs)\n end\n docs\n end\n end",
"def groups\n quads\n end",
"def group_query_builder group\n query_arr = []\n g_question_names = questions_for_group(group).map{|question|sanitize(question.attribute_name)}.uniq\n\n entities_for_group(group).each do |entity|\n cols = []\n e_question_names = questions_for_entity(entity).map{|question|sanitize(question.attribute_name)}\n\n #\n g_question_names.each do |q_name|\n if e_question_names.include? q_name\n cols.push \"CAST(\\\"#{q_name}\\\" AS FLOAT) AS \\\"#{q_name}\\\"\"\n else\n cols.push \"CAST(888 AS FLOAT) AS #{q_name}\"\n end\n end\n\n #\n cols.push \"#{entity.reference_year} AS year\" unless entity.reference_year.nil?\n cols.push sanitize(entity.entity_type_fk)\n\n # convert to sql\n query_arr.push \"SELECT \\n\\t#{cols.join(\",\\n\\t\")} \\nFROM #{entity.entity_type}\"\n end\n\n return query_arr.join(\"\\nUNION ALL\\n\")\n end",
"def taggable_document_collection_groups_container\n Rails.cache.fetch(taggable_document_collection_groups_cache_digest, expires_in: 1.day) do\n DocumentCollection.latest_edition.alphabetical.includes(:groups).flat_map do |collection|\n collection.groups.map { |group| [\"#{collection.title} (#{group.heading})\", group.id] }\n end\n end\n end",
"def group\n return if record.respond_to?(:where)\n record.group\n end",
"def ct_subquery_sql(options)\n # the source query contains a variable number of \"extra\" columns\n # ones needed in the output but not involved in pivoting\n source_sql = \"SELECT ARRAY[#{sql_row_name_columns.join(', ')}],\n #{sql_crosstab_columns.join(', ')}, year, gross_quantity\n FROM (#{subquery_sql(options)}) subquery\n ORDER BY 1, #{sql_crosstab_columns.length + 2}\" # order by row_name and year\n source_sql = ActiveRecord::Base.send(:sanitize_sql_array, [source_sql, years])\n source_sql = ActiveRecord::Base.connection.quote_string(source_sql)\n # the categories query returns values by which to pivot (years)\n categories_sql = 'SELECT * FROM UNNEST(ARRAY[?])'\n categories_sql = ActiveRecord::Base.send(:sanitize_sql_array, [categories_sql, years.map(&:to_i)])\n ct_columns = [\n 'row_name TEXT[]',\n report_crosstab_columns.map.each_with_index { |c, i| \"#{sql_crosstab_columns[i]} #{crosstab_columns[c][:pg_type]}\" },\n years_columns.map { |y| \"#{y} numeric\" }\n ].flatten.join(', ')\n # a set returning query requires that output columns are specified\n <<-SQL\n SELECT * FROM CROSSTAB('#{source_sql}', '#{categories_sql}')\n AS ct(#{ct_columns})\n SQL\n end",
"def test_grouping_bundle\n\n i1 = CartItem.create(:product_id => 1)\n i2 = CartItem.create(:product_id => 2)\n i3 = CartItem.create(:product_id => 4)\n i4 = CartItem.create(:product_id => 5)\n\n groups = CartGroup.groups_for_items([i1, i2, i3, i4])\n\n assert_equal(2, groups.size)\n\n assert_equal(1, groups[0].size)\n assert_equal(1, groups[0][0].product_id)\n\n assert_equal(3, groups[1].size)\n assert_equal(2, groups[1][0].product_id)\n assert_equal(4, groups[1][1].product_id)\n assert_equal(5, groups[1][2].product_id)\n assert(groups[1].bundle_discount?)\n assert(!groups[1].set_discount?)\n assert_equal(ApplicationHelper.round_currency(BigDecimal('31.98')), ApplicationHelper.round_currency(groups[1].total))\n assert_equal(ApplicationHelper.round_currency(BigDecimal('7.99')), ApplicationHelper.round_currency(groups[1].savings))\n groups[1].items_with_prices do |item, price|\n assert_equal(BigDecimal('0.00'), price) if item.product.id == 2\n assert_equal(BigDecimal('0.00'), price) if item.product.id == 5\n assert_equal(BigDecimal('31.98'), price) if item.product.id == 4\n end\n\n end",
"def group_by?; @group_by; end",
"def _reduce_7(val, _values)\n Group.new(val[1])\nend",
"def grouping_sets\n raise Error, \"GROUP BY GROUPING SETS not supported on #{db.database_type}\" unless supports_grouping_sets?\n clone(:group_options=>:\"grouping sets\")\n end",
"def i_groups; end",
"def group; end",
"def group\n raise(NotImplementedError)\n end",
"def group_rollup\n raise Error, \"GROUP BY ROLLUP not supported on #{db.database_type}\" unless supports_group_rollup?\n clone(:group_options=>:rollup)\n end",
"def test_grouping_bundle_multiple\n\n i1 = CartItem.create(:product_id => 3)\n i2 = CartItem.create(:product_id => 2)\n i3 = CartItem.create(:product_id => 4)\n i4 = CartItem.create(:product_id => 5)\n\n groups = CartGroup.groups_for_items([i1, i2, i3, i4])\n\n assert_equal(1, groups.size)\n\n assert_equal(4, groups[0].size)\n assert_equal(3, groups[0][0].product_id)\n assert_equal(2, groups[0][1].product_id)\n assert_equal(4, groups[0][2].product_id)\n assert_equal(5, groups[0][3].product_id)\n assert_equal(3, groups[0].sorted_items[0].product_id)\n assert_equal(2, groups[0].sorted_items[1].product_id)\n assert_equal(5, groups[0].sorted_items[2].product_id)\n assert_equal(4, groups[0].sorted_items[3].product_id)\n assert(groups[0].bundle_discount?)\n assert(!groups[0].set_discount?)\n assert_equal(BigDecimal('43.97'), groups[0].total)\n assert_equal(ApplicationHelper.round_currency(BigDecimal('10.99')), ApplicationHelper.round_currency(groups[0].savings))\n groups[0].items_with_prices do |item, price|\n assert_equal(BigDecimal('0.00'), price) if item.product.id == 2\n assert_equal(BigDecimal('0.00'), price) if item.product.id == 3\n assert_equal(BigDecimal('0.00'), price) if item.product.id == 5\n assert_equal(BigDecimal('43.97'), price) if item.product.id == 4\n end\n\n end",
"def popular_group_by_year\n sql = <<-SQL\n SELECT year, guest_group, MAX(num)\n FROM (\n SELECT year, guest_group, COUNT(*) AS num\n FROM guests\n GROUP BY year, guest_group\n )\n GROUP BY year;\n SQL\n DB[:conn].execute(sql)\nend",
"def group_by(collection, grouping_key)\n [].tap do |providers| #building the returned Array\n extract_uniq_values_for_key(collection, grouping_key).each do |provider_name|\n current_provider_array, collection = collection.partition{|h| h[grouping_key] == provider_name} #reducing the collection\n providers << current_provider_array\n end\n end\nend",
"def group_with(other, num_partitions=nil)\n self.union(other).group_by_key(num_partitions)\n end",
"def group_all\n grp = {}\n grp.merge!(groupby_fields)\n grp.merge!(groupby_values)\n { '$group' => grp }\n end",
"def group_with_map_reduce(coll, limit = 0)\n puts \"Grouping by 'Make' (map/reduce)\"\n opts = {\n :limit => limit.to_i,\n :out => {:inline => true},\n :raw => true\n }\n coll.map_reduce(@map, @reduce, opts)\nend",
"def grouped_duplicates(collection); end",
"def group_by *groupings\n @dimensions.merge! paramerize(groupings, VALID_DIMENSIONS, 'Invalid dimension group')\n self\n end",
"def group_by(collection, grouping_value)\n collection.reduce({}) do |acc, item| #we build a dictionary (or hashmap)\n acc.tap do |acc| #because I love old fashioned functional style\n acc[item[grouping_value]] ||= [] #because a new value\n acc[item[grouping_value]] << item\n end\n end.map do |key, value| #transform the hash to an array\n value\n end\nend",
"def cogroup(*others)\n unioned = self\n others.each do |other|\n unioned = unioned.union(other)\n end\n\n unioned.group_by_key\n end",
"def group_by(*group_cols, **agg_cols)\n sorted_tab = order_by(group_cols)\n groups = sorted_tab.rows.group_by do |r|\n group_cols.map { |k| r[k] }\n end\n grp_types = types.select { |k, _v| group_cols.include?(k) }\n result = Table.new(*group_cols, **grp_types)\n groups.each_pair do |_vals, grp_rows|\n result << row_from_group(grp_rows, group_cols, agg_cols)\n end\n result.normalize_boundaries\n result\n end",
"def evaluate_group(grp)\n true\n end",
"def group_for_key(series_key)\n raise NotImplementedError, \"Implement group_for_key\"\n end",
"def most_popular_group_per_year\n sql = <<-SQL\n -- SELECT year, category FROM guests GROUP BY year, category ORDER BY count(category), year DESC\n SELECT DISTINCT year, category, count(category) FROM guests GROUP BY year, category ORDER BY count(category) DESC\n SQL\n DB[:conn].execute(sql)\nend",
"def group\n raise \"View#reduce must have been set before grouping is permitted\" unless query[:reduce]\n update_query(:group => true)\n end",
"def group\n raise \"View#reduce must have been set before grouping is permitted\" unless query[:reduce]\n update_query(:group => true)\n end",
"def aggregate\n klass.collection.group(\n :key => field_list,\n :cond => selector,\n :initial => { :count => 0 },\n :reduce => Javascript.aggregate\n )\n end",
"def group_key\n columns = []\n\n @columns.each do |column|\n columns.push(column) if column.group\n end\n\n columns\n end",
"def groups(*groups); end",
"def groups(*groups); end",
"def grouped\n # temporary feature flag in case we need a prod 'kill' switch for this feature\n raise ApiErrors::FeatureDisabled unless Panoptes.flipper[:subject_group_selection].enabled?\n\n skip_policy_scope\n\n # setup the selector params from user input, note validation occurs in the operation class\n selector_param_keys = %i[workflow_id subject_set_id num_rows num_columns]\n selector_params = params.permit(*selector_param_keys)\n\n # Sanity check -- use a testing feature flag\n # against an allow listed workflow id env var\n allowed_workflow_ids = ENV.fetch('SUBJECT_GROUP_WORKFLOW_ID_ALLOWLIST').split(',')\n raise ApiErrors::FeatureDisabled unless allowed_workflow_ids.include?(selector_params[:workflow_id])\n\n group_selection_result = SubjectGroups::Selection.run!(\n num_rows: selector_params.delete(:num_rows),\n num_columns: selector_params.delete(:num_columns),\n uploader_id: ENV.fetch('SUBJECT_GROUP_UPLOADER_ID'),\n params: selector_params,\n user: api_user\n )\n # get the list of the groups 'placeholder' group_subject ids\n group_subject_ids = group_selection_result.subject_groups.map(&:group_subject_id)\n\n selected_subject_scope =\n Subject\n .where(id: group_subject_ids)\n .order(\"idx(array[#{group_subject_ids.join(',')}], id)\") # guardrails-disable-line\n\n selection_context = Subjects::SelectorContext.new(\n group_selection_result.subject_selector,\n group_subject_ids\n ).format\n\n # serialize the subject_group's group_subject data\n render json_api: SubjectSelectorSerializer.page(\n group_selection_result.subject_selector.params,\n selected_subject_scope,\n selection_context\n )\n end",
"def grpNull \n \"grpNull\" \n end",
"def to_hash_groups(key_column, value_column = nil, opts = Sequel::OPTS)\n if value_column && !opts[:hash]\n clone(:_sequel_pg_type=>:hash_groups, :_sequel_pg_value=>[key_column, value_column]).fetch_rows(sql){|s| return s}\n {}\n elsif opts.empty?\n super(key_column, value_column)\n else\n super\n end\n end",
"def groups; end",
"def groups; end",
"def groups; end",
"def get_grouping_options\n RedmineCharts::GroupingUtils.types\n end",
"def groups(opts={'start' => nil, 'limit' => nil})\n update if running?\n if succeeded?\n return Cursor.new({'collection' => link('groups'),\n 'start' => opts['start'],\n 'limit' => opts['limit']}.update(@opts)) { |g| g['group_id'] }\n elsif running?\n raise VeritableError.new(\"Grouping on column #{column_id} is still running and not yet ready to return groups.\")\n elsif failed?\n raise VeritableError.new(\"Grouping on column #{column_id} has failed and cannot return groups.\")\n else\n raise VeritableError.new(\"Grouping -- Shouldn't be here -- please let us know at support@priorknowledge.com.\")\n end\n end",
"def group _args\n \"group _args;\" \n end",
"def aggregate_table_name(options={})\n \"#{options[:prefix]}#{cube_class.name.tableize.singularize}_agg\"\n end",
"def group new_collection, collection, options\n @@graph.add(new_collection, collection, options.merge(operation: :group))\n end",
"def find_groups\n initialize_groups\n @edges.each do |e|\n v1 = e[0]\n v2 = e[1]\n g1 = group_for_id(v1)\n g2 = group_for_id(v2)\n merge_groups(g1, g2)\n end\n end",
"def summarize_per_subset\n @having = ANY_ROWS\n end",
"def bucketize(series_name, bucket_name)\n series_with_name(series_name).bucket_with_name(bucket_name).random_group rescue nil\n end",
"def subgroups\n @subgroups ||= [].tap do |subgroups|\n row_data = rows.map(&:elements).map(&:dup)\n 3.times do\n row_set = row_data.pop(3)\n 3.times do\n subgroups << row_set.map{ |row| row.pop(3) }.flatten\n end\n end\n end.map { |elements| Subgroup.new(elements) }\n end",
"def group_by(column)\n @conjunction.add_group_by(column)\n nil\n end",
"def to_hash_groups(key_column, value_column=nil, opts=OPTS)\n if (@opts[:eager_graph] || @opts[:eager]) && !opts.has_key?(:all)\n opts = Hash[opts]\n opts[:all] = true\n end\n super\n end",
"def group(entry)\n push(\"$group\" => evolve(entry.__expand_complex__))\n end",
"def visit_Arel_Nodes_Grouping o, collector\n visit(o.expr, collector)\n end",
"def aggregate_after_grouping?; @aggregate_after_grouping; end",
"def stacked_grouping_query?\n @summary.query_group.group_count == 2\n end",
"def groupFromNetId _args\n \"groupFromNetId _args;\" \n end",
"def group_key(id)\n nil\n end",
"def create_aggregates(db: EventSourcery::Postgres.config.event_store_database,\n table_name: EventSourcery::Postgres.config.aggregates_table_name)\n db.create_table(table_name) do\n uuid :aggregate_id, primary_key: true\n column :version, :bigint, default: 1\n end\n end",
"def aggregation?\n false\n end",
"def group(*columns)\n clone(:group => columns)\n end",
"def map_records_into_measure_groups\n qm = QualityMeasure.new(@measure_id, @sub_id)\n measure = Builder.new(get_db, qm.definition, @parameter_values)\n records = get_db.collection('records')\n records.map_reduce(measure.map_function, \"function(key, values){return values;}\",\n :out => {:reduce => 'patient_cache'}, \n :finalize => measure.finalize_function,\n :query => {:test_id => @parameter_values['test_id']})\n apply_manual_exclusions\n end",
"def build_aggregate_sql(column_mask)\n dimension_column_names = dimensions_to_columns.collect do |c|\n \"#{c.table_alias}.#{c.name}\"\n end\n\n sql = <<-SQL\n SELECT\n #{mask_columns_with_null(dimension_column_names, column_mask).join(\",\")},\n #{aggregated_fact_column_sql}\n FROM #{tables_and_joins}\n SQL\n\n group = mask_columns_with_null(dimension_column_names, column_mask).reject{|o| o == 'null'}.join(\",\")\n sql += \"GROUP BY #{group}\" if !group.empty?\n sql\n end",
"def aggregate op, type = :fixnum\n check_closed\n\n aggregation_impl op, type\n end",
"def stacked_group\n @stacked_group ||= @summary.query_group.query_groups[1]\n end",
"def in_frontend_groups\n # groups = []\n # xrefs = self.racc_route_destination_xrefs\n \n # vlabels = self.racc_routes.map {|rr| rr.vlabel_map }\n # vlabels.compact!\n # vlabels.delete_if {|v| v.group.category != \"f\" || v.group.group_default == false}\n # groups = vlabels.map {|v| Operation.first(:conditions => {:app_id => v.app_id, :newop_rec => v.vlabel}).group }\n Group.all(:conditions => [\"name IN \n (SELECT vlabel_group FROM racc_op WHERE newop_rec IN \n (SELECT vlabel FROM racc_vlabel_map WHERE vlabel IN \n (SELECT route_name FROM racc_route WHERE route_id IN \n (SELECT route_id FROM racc_route_destination_xref WHERE app_id = :app_id AND destination_id = :id)\n AND app_id = :app_id)\n AND app_id = :app_id)\n AND app_id = :app_id)\n AND app_id = :app_id AND category = :category AND group_default = :group_default\", {:id => self.id, :app_id => self.app_id, :category => 'f', :group_default => false}])\n end",
"def group_builder; end",
"def max_snapshots_per_group; end",
"def effective_group(store)\n group(store) || store.default_group\n end",
"def groupID _args\n \"groupID _args;\" \n end",
"def groups(*args)\n end",
"def test_add_ad_group_cpc\n bids = @ad_group_srv.module::ManualCPCAdGroupBids.new\n keyword_max_cpc = {\n :amount => {\n :microAmount => 1000000\n }\n }\n bids.keywordMaxCpc = keyword_max_cpc\n operation = {\n :operand => {\n :name => 'Ad Group #%s' % (Time.new.to_f * 1000).to_i,\n :status => 'PAUSED',\n :campaignId => @cpc_campaign.id,\n :bids => bids,\n },\n :operator => 'ADD'\n }\n\n # Add ad group.\n response = @ad_group_srv.mutate([operation])\n ad_group = response.rval.value.first\n\n assert_not_nil(ad_group, 'Invalid ad group returned')\n assert_added_correctly(operation, ad_group)\n @ad_group = ad_group\n end",
"def grouped_by_access_feature!\n groups = {'audio_described_performance' => [], 'captioned_performance' => [], 'signed_performance' => [], 'touch_tour' => [], 'relaxed_performance' => [], 'talk_back' => []}\n\n @instances.each do |instance|\n instance_types = instance.meta_attributes.select{|attr_key, attr_value| groups.keys.include?(attr_key) && attr_value==\"true\"}.keys\n\n if instance_types.any?\n instance_types.each do |type|\n groups[type].push(instance)\n end\n end\n end\n\n groups.each do |type, instances|\n groups[type] = slice_instances_by_date(instances.reverse)\n end\n\n groups\n end",
"def test_10a\r\n db = build\r\n assert_equal [],db.groups\r\n end",
"def embiggen_grouped_results(values)\n embiggened_results = []\n values.each do |resultset|\n container = {} \n resultset.each do |set|\n data = {}\n set.data.each do |key, value|\n if value.kind_of? Hash\n if value.empty?\n value = []\n else \n value = [value] if value.kind_of? Hash\n end\n end\n data[key] = value\n end\n container.merge!(data) do |key, old, nue|\n if old.kind_of? Array\n old.push nue.first\n else\n nue\n end \n end\n end\n embiggened_results.push container\n end\n\n embiggened_results\n end",
"def aggregate(table, id, start_time, end_time, interval)\n rows = rows(table, id, start_time, end_time)\n return [] if rows.count < 2\n @queries = []\n clusters = cluster_rows(rows, interval)\n clusters.each do |cluster|\n aggregate_cluster(cluster, table, id)\n end\n return @queries\n end",
"def get_group_by\n @group_by\n end",
"def test_add_ad_group_cpm\n bids = @ad_group_srv.module::ManualCPMAdGroupBids.new\n max_cpm = {\n :amount => {\n :microAmount => 3000000\n }\n }\n bids.maxCpm = max_cpm\n operation = {\n :operand => {\n :name => 'Ad Group #%s' % (Time.new.to_f * 1000).to_i,\n :status => 'PAUSED',\n :campaignId => @cpm_campaign.id,\n :bids => bids,\n },\n :operator => 'ADD'\n }\n\n # Add ad group.\n response = @ad_group_srv.mutate([operation])\n ad_group = response.rval.value.first\n\n assert_not_nil(ad_group, 'Invalid ad group returned')\n assert_added_correctly(operation, ad_group)\n end",
"def max_snapshots_per_group=(_arg0); end",
"def supports_grouping_sets?\n server_version >= 90500\n end",
"def multiple_dimensions_output_without_time_column\n mr = Javascript::MultiDimensionalCount.new(cron)\n\n array = []\n cron.multiple_dataset.sources.each do |_source|\n # _source = _source.time_range # TODO\n array += _source.map_reduce(mr.map_func, mr.reduce_func)\n .out(inline: 1) # TODO use replace mode\n .to_a.map do |i|\n v = i['value']\n _h = {:c => v['count']}\n\n cron.group_by_columns.each do |_group_by_column|\n _h[_group_by_column[:column_name]] = v[_group_by_column[:column_name].to_s]\n end\n\n _h[:other_json] = {}\n cron.group_concat_columns.each do |_group_concat_column|\n _h[:other_json][_group_concat_column] = v[\"#{_group_concat_column}_values\"].inject({}) {|_h2, i2| _h2[i2] ||= 0; _h2[i2] += 1; _h2 }\n end\n _h[:other_json] = _h[:other_json].to_json\n\n _h\n end\n end\n array\n\n # TODO support sum_columns\n end",
"def grouping\n if submission_id.nil?\n test_group_result.test_run.grouping\n else\n submission.grouping\n end\n end",
"def iii_groups; end",
"def group(group: 'group_A')\n fetch(\"world_cup.groups.#{group}\")\n end"
] |
[
"0.68689656",
"0.5705153",
"0.56319845",
"0.5569599",
"0.5513369",
"0.5487213",
"0.54530466",
"0.54303354",
"0.53565013",
"0.5345835",
"0.5262092",
"0.5247198",
"0.52150446",
"0.52146983",
"0.52088636",
"0.5204516",
"0.5204516",
"0.520431",
"0.5196585",
"0.51422703",
"0.513014",
"0.5120033",
"0.5110145",
"0.50943184",
"0.50864476",
"0.5071021",
"0.5061688",
"0.50551474",
"0.5032916",
"0.5019074",
"0.49965298",
"0.49958766",
"0.49850127",
"0.49807683",
"0.49411288",
"0.49388656",
"0.49305728",
"0.49125084",
"0.4894666",
"0.4886674",
"0.48820385",
"0.4867489",
"0.4836848",
"0.48363012",
"0.4825022",
"0.48244268",
"0.48199126",
"0.48125824",
"0.4794688",
"0.47943422",
"0.47943422",
"0.47763208",
"0.47612748",
"0.47565922",
"0.47563642",
"0.47563642",
"0.47563642",
"0.47375783",
"0.47349447",
"0.47285315",
"0.47253227",
"0.47228542",
"0.47223157",
"0.4718448",
"0.4714936",
"0.47122827",
"0.47098628",
"0.4709817",
"0.47093907",
"0.47011352",
"0.47007105",
"0.4700226",
"0.46854344",
"0.46852198",
"0.4678205",
"0.4673387",
"0.46567217",
"0.46547434",
"0.46504784",
"0.46472797",
"0.46466342",
"0.4628333",
"0.46230534",
"0.4619797",
"0.4611904",
"0.4604015",
"0.46008024",
"0.4598698",
"0.45931703",
"0.45921034",
"0.45890912",
"0.45808104",
"0.45781088",
"0.4568259",
"0.4536911",
"0.45367414",
"0.45324194",
"0.4529849",
"0.45247334",
"0.4522286"
] |
0.49682838
|
34
|
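The group_cube method in the first negative above raises unless the dataset's supports_group_cube? (the document in this record) returns true. A minimal sketch of the pair in use, assuming a hypothetical sales table with region, product, and amount columns:

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical database

ds = DB[:sales].select{[region, product, sum(amount).as(:total)]}.group(:region, :product)
if ds.supports_group_cube?
  # GROUP BY CUBE (region, product): aggregates for every combination of
  # the grouping columns, including the grand total.
  ds = ds.group_cube
end

On PostgreSQL 9.5+ this should render GROUP BY CUBE (region, product); where supports_group_cube? is false, group_cube raises Sequel::Error, matching the guard in the first negative.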
PostgreSQL 9.5+ supports GROUP ROLLUP
|
def supports_group_rollup?
  server_version >= 90500
end
|
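The rollup variant gated by this record's supports_group_rollup? is symmetric; a brief sketch reusing the hypothetical sales dataset from the CUBE example above:

ds = DB[:sales].select{[region, product, sum(amount).as(:total)]}.group(:region, :product)
# GROUP BY ROLLUP (region, product): hierarchical subtotals only, i.e.
# (region, product), (region), and the grand total -- unlike CUBE, no
# product-only grouping level.
ds = ds.group_rollup if ds.supports_group_rollup?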
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def group_rollup\n raise Error, \"GROUP BY ROLLUP not supported on #{db.database_type}\" unless supports_group_rollup?\n clone(:group_options=>:rollup)\n end",
"def aggregate_after_grouping?; @aggregate_after_grouping; end",
"def commits_rollup\n return commits if @groups.empty?\n @groups.inject([]) {|commits, group| commits |= group.commits_rollup}.sort!\n end",
"def group_rows(group, col_count, group_text = nil)\n mri = options.mri\n grp_output = \"\"\n if mri.extras[:grouping] && mri.extras[:grouping][group] # See if group key exists\n if mri.group == \"c\" # Show counts row\n if group == :_total_\n grp_output << \"<tr><td class='group' colspan='#{col_count}'>Count for All Rows: #{mri.extras[:grouping][group][:count]}</td></tr>\"\n else\n g = group_text ? group_text : group\n grp_output << \"<tr><td class='group' colspan='#{col_count}'>Count for #{g.blank? ? \"<blank>\" : g}: #{mri.extras[:grouping][group][:count]}</td></tr>\"\n end\n else\n if group == :_total_\n grp_output << \"<tr><td class='group' colspan='#{col_count}'>All Rows</td></tr>\"\n else\n g = group_text ? group_text : group\n grp_output << \"<tr><td class='group' colspan='#{col_count}'>#{g.blank? ? \"<blank>\" : g} </td></tr>\"\n end\n end\n MiqReport::GROUPINGS.each do |calc| # Add an output row for each group calculation\n if mri.extras[:grouping][group].key?(calc.first) # Only add a row if there are calcs of this type for this group value\n grp_output << \"<tr>\"\n grp_output << \"<td class='group'>#{calc.last.pluralize}:</td>\"\n mri.col_order.each_with_index do |c, c_idx| # Go through the columns\n next if c_idx == 0 # Skip first column\n grp_output << \"<td class='group' style='text-align:right'>\"\n grp_output << CGI.escapeHTML(mri.format(c.split(\"__\").first,\n mri.extras[:grouping][group][calc.first][c],\n :format => mri.col_formats[c_idx] ? mri.col_formats[c_idx] : :_default_\n )\n ) if mri.extras[:grouping][group].key?(calc.first)\n grp_output << \"</td>\"\n end\n grp_output << \"</tr>\"\n end\n end\n end\n grp_output << \"<tr><td class='group' colspan='#{col_count}'> </td></tr>\" unless group == :_total_\n grp_output\n end",
"def group_with_builtin(coll)\n puts \"Grouping by 'Make' (built-in)\"\n coll.group({\n :key => [:Make],\n :initial => {:crashes => 0},\n :reduce => 'function(doc, prev) {prev.crashes += 1;}'\n })\nend",
"def to_group_sql\n case\n when is_many?, is_string?, ThinkingSphinx.use_group_by_shortcut?\n nil\n else\n @columns.collect { |column|\n column_with_prefix(column)\n }\n end\n end",
"def visit_axiom_aggregate_sum(sum)\n aggregate_function_sql(SUM, sum)\n end",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end",
"def sql_inventory_groups\n \"WITH ooc_groups AS\n (\n SELECT assg.asset_id, grp.ooc_group_id as group_id,grp.ooc_group_name as group_name,\n grp.ooc_group_type as group_type, grp.ooc_group_status as group_status\n FROM hip_ooc_asset_group_v AS assg\n JOIN hip_ooc_group_v AS grp ON grp.ooc_group_id = assg.ooc_group_id\n WHERE grp.ooc_group_status != 'deleted'\n AND grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n hc_groups as(\n SELECT assg.asset_id, grp.hc_group_id as group_id,grp.group_name,'hc cycle'as group_type ,\n grp.is_current\n FROM hip_asset_group_v AS assg\n JOIN hip_hc_group_v AS grp ON grp.hc_group_id = assg.hc_group_id\n WHERE grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n all_groups as (\n select * from ooc_groups\n union\n select * from hc_groups\n )\n SELECT assh.host_name,assh.ip_string_list, assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag,assh.hc_auto_interval_weeks,\n assh.hc_manual_interval_weeks,assh.hc_manual_flag,\n #{group_type_columns_frag}\n CASE\n WHEN assh.hc_auto_flag='y' and assh.hc_manual_flag='y' then 'Yes'\n WHEN assh.hc_auto_flag='n' and assh.hc_manual_flag='n' then 'No'\n ELSE NULL\n END AS hc_required \n FROM dim_comm_tool_asset_hist_v AS assh\n LEFT join all_groups AS g ON g.asset_id = assh.tool_asset_id\n JOIN dim_comm_os_v AS os ON os.os_id=assh.os_id\n WHERE\n assh.org_l1_id=#{org_l1_id} AND assh.org_id=#{org_id}\n AND CURRENT_TIMESTAMP BETWEEN assh.row_from_timestamp AND COALESCE(assh.row_to_timestamp, CURRENT_TIMESTAMP)\n group by assh.host_name,assh.ip_string_list,assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag, assh.hc_auto_interval_weeks, assh.hc_manual_interval_weeks,\n assh.hc_manual_flag\n ORDER BY assh.host_name\"\n end",
"def combine_multi_statements(total_sql)\n total_sql\n end",
"def aggregate op, type = :fixnum\n check_closed\n\n aggregation_impl op, type\n end",
"def group_all\n grp = {}\n grp.merge!(groupby_fields)\n grp.merge!(groupby_values)\n { '$group' => grp }\n end",
"def combine_multi_statements(total_sql)\n total_sql\n end",
"def rollup(*tags)\n\t @budget = 0\n\t #roll up sub organization's accounts\n\t @sub.each do |sub|\n\t sub.rollup\n @budget += sub.budget\n end\n \n #roll up this org's accounts\n @accounts.each do |account|\n @budget += account.dollars\n end\n \n @budget += @Account.dollars # phase this out -- kind of represents sum of all accounts\n\tend",
"def aggregate\n klass.collection.group(\n :key => field_list,\n :cond => selector,\n :initial => { :count => 0 },\n :reduce => Javascript.aggregate\n )\n end",
"def group_by_columns\n @group = \" GROUP BY #{column_list_for(@columns)}\" if @columns.any?\n end",
"def group_by(*group_cols, **agg_cols)\n sorted_tab = order_by(group_cols)\n groups = sorted_tab.rows.group_by do |r|\n group_cols.map { |k| r[k] }\n end\n grp_types = types.select { |k, _v| group_cols.include?(k) }\n result = Table.new(*group_cols, **grp_types)\n groups.each_pair do |_vals, grp_rows|\n result << row_from_group(grp_rows, group_cols, agg_cols)\n end\n result.normalize_boundaries\n result\n end",
"def group_by\n end",
"def build_group_html_rows(group, col_count, label = nil, group_text = nil)\n in_a_widget = self.rpt_options[:in_a_widget] || false\n\n html_rows = []\n\n content =\n if group == :_total_\n _(\"All Rows\")\n else\n group_label = group_text || group\n group_label = _(\"<Empty>\") if group_label.blank?\n \"#{label}#{group_label}\"\n end\n\n if (self.group == 'c') && extras && extras[:grouping] && extras[:grouping][group]\n display_count = _(\"Count: %{number}\") % {:number => extras[:grouping][group][:count]}\n end\n content << \" | #{display_count}\" unless display_count.blank?\n html_rows << \"<tr><td class='group' colspan='#{col_count}'>#{CGI.escapeHTML(content)}</td></tr>\"\n\n if extras && extras[:grouping] && extras[:grouping][group] # See if group key exists\n MiqReport::GROUPINGS.each do |calc| # Add an output row for each group calculation\n if extras[:grouping][group].key?(calc.first) # Only add a row if there are calcs of this type for this group value\n grp_output = \"\"\n grp_output << \"<tr>\"\n grp_output << \"<td#{in_a_widget ? \"\" : \" class='group'\"} style='text-align:right'>#{_(calc.last)}:</td>\"\n col_order.each_with_index do |c, c_idx| # Go through the columns\n next if c_idx == 0 # Skip first column\n grp_output << \"<td#{in_a_widget ? \"\" : \" class='group'\"} style='text-align:right'>\"\n grp_output << CGI.escapeHTML(\n format(\n c.split(\"__\").first, extras[:grouping][group][calc.first][c],\n :format => self.col_formats[c_idx] ? self.col_formats[c_idx] : :_default_\n )\n ) if extras[:grouping][group].key?(calc.first)\n grp_output << \"</td>\"\n end\n grp_output << \"</tr>\"\n html_rows << grp_output\n end\n end\n end\n html_rows << \"<tr><td class='group_spacer' colspan='#{col_count}'> </td></tr>\" unless group == :_total_\n html_rows\n end",
"def group_by\n\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts #GROUP BY will get rid of repeats\n #projects have many pledges, pledges belong to projects\n\"SELECT projects.title, SUM(pledges.amount) \nFROM pledges INNER JOIN projects ON pledges.project_id = projects.id \nGROUP BY projects.title\"\nend",
"def groupby_standardcost\n { 'std_cost' => { '$sum' => '$standard_cost' } } \n end",
"def summarize_per_table_dum\n @having = NO_ROWS\n end",
"def _reduce_7(val, _values)\n Group.new(val[1])\nend",
"def aggregate\n #response = Result.collection.map_reduce(self.map_fn(), _reduce(), :raw => true, :out => {:inline => true}, :query => {:execution_id => id})\n response = Result.where(execution_id: id).map_reduce(self.map_fn(), self.query.reduce).out(inline: true).raw()\n results = response['results']\n if results\n self.aggregate_result = {}\n results.each do |result|\n result = prettify_generated_result(result) if self.query.generated? && result['value']['rereduced']\n self.aggregate_result[result['_id']] = result['value']\n end\n save!\n end\n end",
"def aggregate\n []\n end",
"def ct_subquery_sql(options)\n # the source query contains a variable number of \"extra\" columns\n # ones needed in the output but not involved in pivoting\n source_sql = \"SELECT ARRAY[#{sql_row_name_columns.join(', ')}],\n #{sql_crosstab_columns.join(', ')}, year, gross_quantity\n FROM (#{subquery_sql(options)}) subquery\n ORDER BY 1, #{sql_crosstab_columns.length + 2}\" # order by row_name and year\n source_sql = ActiveRecord::Base.send(:sanitize_sql_array, [source_sql, years])\n source_sql = ActiveRecord::Base.connection.quote_string(source_sql)\n # the categories query returns values by which to pivot (years)\n categories_sql = 'SELECT * FROM UNNEST(ARRAY[?])'\n categories_sql = ActiveRecord::Base.send(:sanitize_sql_array, [categories_sql, years.map(&:to_i)])\n ct_columns = [\n 'row_name TEXT[]',\n report_crosstab_columns.map.each_with_index { |c, i| \"#{sql_crosstab_columns[i]} #{crosstab_columns[c][:pg_type]}\" },\n years_columns.map { |y| \"#{y} numeric\" }\n ].flatten.join(', ')\n # a set returning query requires that output columns are specified\n <<-SQL\n SELECT * FROM CROSSTAB('#{source_sql}', '#{categories_sql}')\n AS ct(#{ct_columns})\n SQL\n end",
"def selects_user_names_and_amounts_of_all_pledges_grouped_by_name_then_orders_them_by_the_amount\n#Select users.name, SUM(pledges.amount) #Group by users.name #order by SUM(pledges.amount)\n#pledges belong to users, users have many pledges --> pledges go after from \n\"SELECT users.name, SUM(pledges.amount)\nFROM pledges INNER JOIN users ON pledges.user_id = users.id \nGROUP BY users.name \nORDER BY SUM(pledges.amount)\"\nend",
"def groupby_bookingnet\n { 'booking_net' => { '$sum' => '$booking_net' } } \n end",
"def replace_aggregate!(&block)\n map! do |op|\n case\n when op.respond_to?(:aggregate?) && op.aggregate?\n yield op\n when op.respond_to?(:replace_aggregate!)\n op.replace_aggregate!(&block) \n else\n op\n end\n end\n self\n end",
"def time_entry_count_by_group\n r = nil\n if grouped?\n begin\n # Rails3 will raise an (unexpected) RecordNotFound if there's only a nil group value\n r = TimeEntry.sum(:hours, :group => group_by_statement, :include => [:status, :project], :conditions => statement)\n rescue ActiveRecord::RecordNotFound\n \n end\n c = group_by_column\n if c.is_a?(QueryCustomFieldColumn)\n r = r.keys.inject({}) {|h, k| h[c.custom_field.cast_value(k)] = r[k]; h}\n end\n end\n r\n rescue ::ActiveRecord::StatementInvalid => e\n raise StatementInvalid.new(e.message)\n end",
"def sum(field)\n grouped(:sum, field.to_s, Javascript.sum)\n end",
"def statement\n case @metric.aggregate\n when :sum\n parts = {\n select: select_statement,\n joins: dimension_joins,\n having: having_statement,\n order: order_by_statement\n }\n\n statement = ([model] + parts.keys).inject do |chain, method|\n chain.public_send(method, parts[method])\n end\n\n statement = process_scope_dimension_filter(statement)\n statement = process_lambda_dimension_filter(statement)\n statement = process_ransack_dimension_filter(statement)\n\n # The original gem did not handle has_many relationships. In order to support\n # has_many, we need to first do an inner query to select out distinct rows _before_\n # attempting the sum. Therefore we build up the query piece\n # by piece rather than using the basic statement.\n\n sum_definition = parts[:select].first\n original_columns = parts[:select].drop(1)\n\n # Collect a list of all renamed columns from the original query so that we can include\n # these in the outer query.\n renamed_columns = []\n original_columns.each do |sel|\n renamed_columns << sel.split(' AS ').last\n end\n\n # In some situations the column we're summing over is not included as a part of the aggregation\n # in the inner query. In such cases we must explicitly select the desired column in the inner\n # query, so that we can sum over it in the outer query.\n summation_metric = if select_aggregate.include?(\"CASE\")\n select_aggregate.split('CASE WHEN ').last.split(' ').first\n else\n ''\n end\n\n outer_columns = ([sum_definition] << renamed_columns).flatten.uniq.join(', ')\n inner_columns = (original_columns << [summation_metric, fact_model.measure.to_s]).flatten.uniq.reject(&:blank?).join(', ').remove(\"\\n\").squeeze(' ')\n inner_from = statement.to_sql.split('FROM').last\n group_by = outer_group_by_statement.join(', ')\n\n # Finally, construct the query we want and return it as a string\n full_statement = \"SELECT #{outer_columns} FROM(SELECT #{distinct}, #{inner_columns} FROM #{inner_from}) AS T\"\n\n # Add the GROUP BY clause only if it's non nil and non empty\n full_statement = \"#{full_statement} GROUP BY #{group_by}\" if group_by.present?\n\n full_statement\n\n else\n parts = {\n select: select_statement,\n joins: dimension_joins,\n group: group_by_statement,\n having: having_statement,\n order: order_by_statement\n }\n\n statement = ([model] + parts.keys).inject do |chain, method|\n chain.public_send(method, parts[method])\n end\n\n statement = process_scope_dimension_filter(statement)\n statement = process_lambda_dimension_filter(statement)\n statement = process_ransack_dimension_filter(statement)\n\n statement.to_sql\n end\n end",
"def group\n raise \"View#reduce must have been set before grouping is permitted\" unless query[:reduce]\n update_query(:group => true)\n end",
"def build_aggregate_sql(column_mask)\n dimension_column_names = dimensions_to_columns.collect do |c|\n \"#{c.table_alias}.#{c.name}\"\n end\n\n sql = <<-SQL\n SELECT\n #{mask_columns_with_null(dimension_column_names, column_mask).join(\",\")},\n #{aggregated_fact_column_sql}\n FROM #{tables_and_joins}\n SQL\n\n group = mask_columns_with_null(dimension_column_names, column_mask).reject{|o| o == 'null'}.join(\",\")\n sql += \"GROUP BY #{group}\" if !group.empty?\n sql\n end",
"def group_by(column)\n @conjunction.add_group_by(column)\n nil\n end",
"def group\n raise \"View#reduce must have been set before grouping is permitted\" unless query[:reduce]\n update_query(:group => true)\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts_alphabetized_by_name\n\"SELECT title , SUM(amount) FROM pledges, projects WHERE pledges.project_id = projects.id GROUP BY(title);\"\nend",
"def row_from_group(rows, grp_cols, agg_cols)\n new_row = {}\n grp_cols.each do |h|\n new_row[h] = rows.first[h]\n end\n agg_cols.each_pair do |h, agg_func|\n items = rows.map { |r| r[h] }\n new_h = \"#{agg_func}_#{h}\".as_sym\n new_row[new_h] = Column.new(header: h,\n items: items).send(agg_func)\n end\n new_row\n end",
"def group_by?; @group_by; end",
"def postprocessed\n by_type(Ungroup, Unfold)\n end",
"def sum_gfooter(*cols)\n gfooter('Group Total', *cols)\n end",
"def sum_gfooter(*cols)\n gfooter('Group Total', nil, *cols)\n end",
"def aggregate name, o, collector\n collector << \"#{name}(\"\n if o.distinct\n collector << \"DISTINCT \"\n end\n collector = inject_join(o.expressions, Arel::Collectors::Sunstone.new, \", \")# << \")\"\n if o.alias\n collector << \" AS \"\n visit o.alias, collector\n else\n collector\n end\n end",
"def groupby_baselist\n { 'base_list' => { '$sum' => '$tms_sales_allocated_bookings_base_list' } } \n end",
"def group_concat(sep = nil, *orders, group: nil, order: nil)\n if orders.present?\n warn(\"Warning : ArelExtensions: group_concat: you should now use the kwarg 'order' to specify an order in the group_concat.\")\n end\n order_tabs = [orders].flatten.map{ |o|\n if o.is_a?(Arel::Nodes::Ascending) || o.is_a?(Arel::Nodes::Descending)\n o\n elsif o.respond_to?(:asc)\n o.asc\n end\n }.compact\n ArelExtensions::Nodes::GroupConcat.new(self, sep, group: group, order: (order || order_tabs))\n end",
"def unionise *sub_queries\n sub_queries_with_parens = sub_queries.map do |i| \n \"{ #{i} }\" \n end\n\n sub_queries_with_parens.join(' UNION ')\n end",
"def calc_query(query, table, aggregate = {}, as=nil)\n\tquery = \"select #{query}\"\n\n\tif !aggregate.empty?\n\t\taggregate.each do |operation, column|\n\t\t\tquery += \"#{operation}(#{column}) \"\n\t\t\tquery += as \" #{as}\"\n\t\tend\n\tend\n\n\tquery += \" from #{table}\"\n\n\treturn query\nend",
"def aggregate_sum(aggr)\n sum = {}\n aggr.each do |ts, counterVals|\n sum[ts] = {} unless sum.has_key?ts\n counterVals.each do |obj, count|\n if obj.respond_to?(:enterprise_id)\n eid = obj.public_send(:enterprise_id).to_s\n sum[ts][eid] = sum[ts].fetch(eid, 0) + count\n end\n end\n end\n sum\n end",
"def db_query_transform__subquery query, tmp_table=\"resultset_table\"\n \"(#{query}) as #{tmp_table}\"\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts_alphabetized_by_name\n<<-SQL\n SELECT DISTINCT title ,SUM(amount) FROM projects INNER JOIN pledges ON projects.id = pledges.project_id\n GROUP BY projects.title\n SQL\nend",
"def groupby_date(query, period: \"month\", column: \"updated_at\")\n query.group(\"DATE_TRUNC('#{period}', #{column})\").count.sort_by { |key, _v| key || Time.utc(1900) }.to_h\nend",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts_alphabetized_by_title\n \"SELECT title, SUM(pledges.amount) FROM pledges LEFT OUTER JOIN projects ON projects.id = pledges.project_id GROUP BY title;\"\n\nend",
"def aggregate(enum)\n finalize(\n enum.inject(least){|memo,tuple| \n happens(memo, tuple)\n })\n end",
"def rollup(plan, src_plan_key, super_id, obj_plan_key)\n id_to_obj = {}\n plan[src_plan_key].each do |o|\n id = o[super_id]\n id_to_obj[id] = [] unless id_to_obj.key?(id)\n id_to_obj[id] << o\n end\n\n plan[obj_plan_key].each do |o|\n id = o['id']\n o[src_plan_key] = id_to_obj[id] if id_to_obj.key?(id)\n end\n plan.delete(src_plan_key)\n end",
"def group(*) end",
"def aggregated_fact_column_sql\n aggregate_fields.collect { |c| \n \"#{c.strategy_name}(#{c.from_table_name}.#{c.name}) AS #{c.label_for_table}\"\n }.join(\",\")\n end",
"def group_query_builder group\n query_arr = []\n g_question_names = questions_for_group(group).map{|question|sanitize(question.attribute_name)}.uniq\n\n entities_for_group(group).each do |entity|\n cols = []\n e_question_names = questions_for_entity(entity).map{|question|sanitize(question.attribute_name)}\n\n #\n g_question_names.each do |q_name|\n if e_question_names.include? q_name\n cols.push \"CAST(\\\"#{q_name}\\\" AS FLOAT) AS \\\"#{q_name}\\\"\"\n else\n cols.push \"CAST(888 AS FLOAT) AS #{q_name}\"\n end\n end\n\n #\n cols.push \"#{entity.reference_year} AS year\" unless entity.reference_year.nil?\n cols.push sanitize(entity.entity_type_fk)\n\n # convert to sql\n query_arr.push \"SELECT \\n\\t#{cols.join(\",\\n\\t\")} \\nFROM #{entity.entity_type}\"\n end\n\n return query_arr.join(\"\\nUNION ALL\\n\")\n end",
"def group(name, args)\n raise \":data is needed as an argument to group metrics\" unless args[:data]\n raise \":subgroup is needed as an argument to group metrics\" unless args.include?(:subgroup)\n\n args[:aggregator] = \"sumSeries\" unless args[:aggregator]\n\n group_args = args.clone\n group_args[:data] = \"groupByNode(#{group_args[:data]},#{group_args[:subgroup]},\\\"#{group_args[:aggregator]}\\\")\"\n field \"#{name}_group\", group_args\n\n end",
"def create_aggregates(db: EventSourcery::Postgres.config.event_store_database,\n table_name: EventSourcery::Postgres.config.aggregates_table_name)\n db.create_table(table_name) do\n uuid :aggregate_id, primary_key: true\n column :version, :bigint, default: 1\n end\n end",
"def group_by_sort_order\n if grouped? && (column = group_by_column)\n column.sortable.is_a?(Array) ?\n column.sortable.collect {|s| \"#{s} #{column.default_order}\"}.join(',') :\n \"#{column.sortable} #{column.default_order}\"\n end\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts_alphabetized_by_name\n #SELECT title, SUM(quantity) FROM groceries GROUP BY aisle ORDER BY SUM(quantity);\n\n \"SELECT projects.title, sum( pledges.amount)\nFROM projects\nJOIN pledges\nwhere projects.id=pledges.project_id\ngroup by projects.title order by projects.title\n;\"\n\nend",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts_alphabetized_by_name\n sql = <<-SQL\n SELECT projects.title, SUM(pledges.amount) FROM projects JOIN pledges ON pledges.project_id = projects.id GROUP BY projects.title;\n SQL\n\nend",
"def group_by_exp(input, variable, expression); end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts\n\"SELECT title, SUM(amount) FROM projects\nLEFT OUTER JOIN pledges ON projects.id = pledges.project_id\nGROUP BY title\"\nend",
"def grouping\n remove_empty_statement\n new_sl = []\n sl = []\n g = []\n @statement_list.each do |st|\n if st.to_exp?\n g.push(st)\n else\n if g.length > 0\n sl.push(g)\n end\n sl.push([st])\n g = []\n end\n end\n if g.length > 0\n sl.push(g)\n end\n\n sl.each do |g|\n if g.length == 1\n new_sl.push(g[0])\n else\n i = 1\n t = ExpParen.new(g[0].to_exp)\n while i < g.length\n t = ExpComma.new(t, ExpParen.new(g[i].to_exp))\n i += 1\n end\n new_sl.push(StExp.new(t))\n end\n end\n\n if idx = new_sl.index{|x| x.class == StReturn}\n idx += 1\n while idx < new_sl.length\n if new_sl[idx].kind_of? StVar\n ;\n elsif new_sl[idx].kind_of? StFunc\n ;\n else\n new_sl[idx] = StEmpty.new\n end\n idx += 1\n end\n end\n\n if self.kind_of? SourceElements\n if new_sl[-1].kind_of? StReturn and new_sl[-1].exp.nil?\n new_sl.pop\n end\n end\n\n if new_sl[-1].kind_of? StReturn and new_sl[-2].kind_of? StExp\n if new_sl[-1].exp\n new_sl[-2] = StReturn.new(ExpComma.new(new_sl[-2].exp, new_sl[-1].exp))\n new_sl.pop\n end\n end\n @statement_list = new_sl\n end",
"def aggregate_values(rows)\n # Convert rows into hash where each key is a column name and the each\n # value is an array of values for that column\n cols = OrderedHash.new\n rows.each do |row|\n row.each do |k,v|\n cols[k] ||= []\n cols[k] << v\n end\n end\n\n # Loop through each column, applying an aggregate proc if one exists\n # to the array of column values. If a proc does not exist we take the\n # last value from the array.\n result = cols.inject(OrderedHash.new) do |hsh, (col, vals)|\n hsh[col] = if @aggregators[col]\n @aggregators[col].call(vals)\n else\n vals.last\n end\n hsh\n end\n\n Row[result]\n end",
"def gfooter(label, *sum_cols, **agg_cols)\n label = label.to_s\n foot = {}\n sum_cols.each do |h|\n unless table.headers.include?(h)\n raise UserError, \"No '#{h}' column in table for group sum footer\"\n end\n\n foot[h] = :sum\n end\n agg_cols.each do |h, agg|\n unless table.headers.include?(h)\n raise UserError, \"No '#{h}' column in table for #{agg} group footer\"\n end\n\n foot[h] = agg\n end\n @gfooters[label] = foot\n self\n end",
"def embiggen_grouped_results(values)\n embiggened_results = []\n values.each do |resultset|\n container = {} \n resultset.each do |set|\n data = {}\n set.data.each do |key, value|\n if value.kind_of? Hash\n if value.empty?\n value = []\n else \n value = [value] if value.kind_of? Hash\n end\n end\n data[key] = value\n end\n container.merge!(data) do |key, old, nue|\n if old.kind_of? Array\n old.push nue.first\n else\n nue\n end \n end\n end\n embiggened_results.push container\n end\n\n embiggened_results\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts\n\"\nSELECT projects.title, SUM(pledges.amount)\nFROM projects LEFT OUTER JOIN pledges\nON projects.id = pledges.project_id\nGROUP BY projects.title\nORDER BY projects.title\n\"\nend",
"def group_append(*columns, &block)\n columns = @opts[:group] + columns if @opts[:group]\n group(*columns, &block)\n end",
"def selects_the_titles_and_amount_over_goal_of_all_projects_that_have_met_their_funding_goal\n\"SELECT projects.title, (SUM(pledges.amount)-projects.funding_goal) FROM projects INNER JOIN pledges ON projects.id = pledges.project_id GROUP BY projects.title HAVING (SUM(pledges.amount)) >= projects.funding_goal\";\n# INSERT INTO projects (id, title, category, funding_goal, start_date, end_date)\n# INSERT INTO pledges (id, amount, user_id, project_id\nend",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts_alphabetized_by_title\n %q(\n SELECT pr.title, \n SUM(pl.amount) AS pledge_amount\n FROM projects pr\n JOIN pledges pl ON pr.id = pl.project_id\n GROUP BY 1\n ORDER BY 1;\n )\nend",
"def execute_union_with_order(options = {})\n execute_sql(union_with_order(options))\n end",
"def group(group: T.unsafe(nil)); end",
"def rollup(plan, src_plan_key, super_id, obj_plan_key)\n id_to_obj = Hash.new()\n plan[src_plan_key].each do |o|\n id = o[super_id]\n if !id_to_obj.has_key?(id)\n id_to_obj[id] = Array.new\n end\n id_to_obj[id] << o\n end\n\n plan[obj_plan_key].each do |o|\n id = o[\"id\"]\n if id_to_obj.has_key?(id)\n o[src_plan_key] = id_to_obj[ id ]\n end\n end\n plan.delete(src_plan_key)\n end",
"def groups_total_hours(records, from, to, rows)\n groups_total_hours = []\n # days difference between rows\n days = ((to - from) / rows).to_i + 1\n (0..rows-1).each do |i|\n i_from = to - (i+1) * days\n i_to = to - i * days\n # break if time period of current row is out of time area delimited by from and to \n break if from > i_to\n date = i_to - days / 2\n total_hours = records.where(\"date(records.created_at) > ? and date(records.created_at) <= ?\", i_from, i_to).sum(\"hours\")\n groups_total_hours << {date: date, total_hours: total_hours}\n end\n groups_total_hours \n end",
"def rl_end_undo_group()\r\n rl_add_undo(UNDO_END, 0, 0, nil)\r\n @_rl_undo_group_level-=1\r\n 0\r\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts\n\"SELECT title, SUM (pledges.amount) FROM projects JOIN pledges ON pledges.project_id = projects.id GROUP BY projects.title\"\nend",
"def aggregation(operation)\n return self unless operation\n clone.tap do |query|\n unless aggregating?\n query.pipeline.concat(query.selector.to_pipeline)\n query.pipeline.concat(query.options.to_pipeline)\n query.aggregating = true\n end\n yield(query.pipeline)\n end\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts\n\"SELECT projects.title, SUM(pledges.amount)\n FROM projects\n INNER JOIN pledges\n ON pledges.project_id = projects.id\n GROUP BY(projects.title);\"\nend",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts_alphabetized_by_name\n<<-SQL\n SELECT projects.title, SUM(pledges.amount)\n FROM projects\n JOIN pledges\n ON projects.id = pledges.project_id\n GROUP BY projects.title;\nSQL\nend",
"def group(entry)\n push(\"$group\" => evolve(entry.__expand_complex__))\n end",
"def grouping_parentheses(o, collector)\n if o.expr.is_a? Nodes::SelectStatement\n collector << \"(\"\n visit o.expr, collector\n collector << \")\"\n else\n visit o.expr, collector\n end\n end",
"def result\n query_group.with_context do\n if primary.summarise?\n summary_result\n else\n simple_result\n end\n end\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts\n <<-SQL\n SELECT projects.title , sum(pledges.amount)\n FROM projects\n INNER JOIN pledges\n ON projects.id = pledges.project_id\n GROUP BY pledges.project_id\n ORDER BY projects.title\n SQL\nend",
"def db_query_transform__count query\n tmp_table = \"resultset_table\"\n make_tmp_table = db_query_transform__subquery query, tmp_table\n \"SELECT COUNT(*) FROM #{make_tmp_table}\"\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts\n\"SELECT projects.title, SUM(pledges.amount) FROM projects JOIN pledges on projects.id = pledges.project_id group by projects.title\"\nend",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts\n\"SELECT title, SUM(amount)\nFROM projects\nLEFT JOIN pledges ON projects.id = pledges.project_id GROUP BY projects.title; \"\nend",
"def aggregate\n # oookay I took some shortcuts with this one.\n # first, sort messages by file and line\n @messages.sort! { |a, b| a.compare_by_file_and_line(b) }\n\n # now create an initial empty message group\n first_group = MessageGroup.new(file: nil,\n line: nil)\n @message_groups = @messages.reduce([first_group]) do |groups, msg|\n # We get to take a shortcut because we sorted the messages earlier - only\n # have to see if we can append msg to the last group in the list\n if groups.last << msg\n # we appended it, so return groups unchanged\n groups\n else\n # have to create a new group since msg wasn't appended to the other\n # group\n new_group = MessageGroup.new(file: msg.file,\n line: msg.line)\n new_group << msg\n groups << new_group\n end\n end\n\n @message_groups.extend(Helpers::MessageGroupsArrayHelper)\n end",
"def popular_group_by_year\n sql = <<-SQL\n SELECT year, guest_group, MAX(num)\n FROM (\n SELECT year, guest_group, COUNT(*) AS num\n FROM guests\n GROUP BY year, guest_group\n )\n GROUP BY year;\n SQL\n DB[:conn].execute(sql)\nend",
"def preprocess\n if group_by?\n build_headers\n group_rows\n else\n enumerator.next\n end\n end",
"def group\n return if record.respond_to?(:where)\n record.group\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts\n\"SELECT title, SUM(pledges.amount) FROM projects\n JOIN pledges ON projects.id = pledges.project_id\n GROUP BY projects.title\"\nend",
"def group_by(hash)\n hash.inject({}) do |ret_hash,(key,el)|\n group_key = yield el\n prev_for_group = ret_hash[group_key] || {}\n group = {key => el}.merge(prev_for_group)\n ret_hash.merge(group_key => group)\n end\n end",
"def add_group_by(field)\n @group_by.push(field)\n end",
"def aggregate(source, options={})\n return send_message(SkyDB::Message::Lua::Aggregate.new(source, options))\n end",
"def create_aggregate( name, arity, step, finalize, type=nil )\n case type\n when :numeric\n type = SQLite::API::NUMERIC\n when :text\n type = SQLite::API::TEXT\n when :args\n type = SQLite::API::ARGS\n end\n\n step_callback = proc do |func,*args|\n ctx = SQLite::API.aggregate_context( func )\n step.call( FunctionProxy.new( func, ctx ), *args )\n end\n\n finalize_callback = proc do |func|\n ctx = SQLite::API.aggregate_context( func )\n finalize.call( FunctionProxy.new( func, ctx ) )\n end\n\n SQLite::API.create_aggregate( @handle, name, arity,\n step_callback, finalize_callback )\n\n SQLite::API.function_type( @handle, name, type ) if type\n\n self\n end",
"def grouped_duplicates(collection); end",
"def add_group_result(result, group: nil)\n data = {result: result}\n if group\n data[:group] = group\n end\n if result.present?\n @group_results << data\n end\n end",
"def selects_the_titles_of_all_projects_and_their_pledge_amounts_alphabetized_by_title\n # group pledge sum by title\n # return titles\n # order by\n \"SELECT projects.title, \n SUM(pledges.amount) \n FROM projects \n JOIN pledges ON projects.id=pledges.project_id \n GROUP BY projects.title \n ORDER BY projects.title;\"\nend"
] |
[
"0.7843713",
"0.5746715",
"0.55350065",
"0.5282332",
"0.51886714",
"0.51368845",
"0.5116343",
"0.5096416",
"0.50795084",
"0.5046495",
"0.5004107",
"0.4995787",
"0.49910265",
"0.4987092",
"0.4955337",
"0.49265525",
"0.4913858",
"0.48521537",
"0.4838019",
"0.4783108",
"0.4770773",
"0.47651398",
"0.47633743",
"0.47616455",
"0.47513467",
"0.47386274",
"0.47239763",
"0.46800232",
"0.46590376",
"0.46437484",
"0.46287444",
"0.46058574",
"0.46044505",
"0.46000928",
"0.45913422",
"0.45907822",
"0.4587362",
"0.4576649",
"0.45619684",
"0.45606738",
"0.45506272",
"0.45436904",
"0.4542718",
"0.45406175",
"0.45391834",
"0.4537375",
"0.45235953",
"0.45165548",
"0.4507442",
"0.45050856",
"0.4502814",
"0.44911262",
"0.44907272",
"0.44905338",
"0.4484286",
"0.44830567",
"0.448121",
"0.44590896",
"0.4446671",
"0.44431105",
"0.44161314",
"0.4411166",
"0.4403734",
"0.44018927",
"0.43899673",
"0.43888608",
"0.4385294",
"0.4376601",
"0.43727016",
"0.4362174",
"0.43564466",
"0.43529812",
"0.43506694",
"0.43503734",
"0.4335968",
"0.4335565",
"0.43274832",
"0.43269762",
"0.4326333",
"0.4326159",
"0.4320294",
"0.43193412",
"0.4311981",
"0.43099624",
"0.43068612",
"0.43018964",
"0.42993626",
"0.4299205",
"0.4298946",
"0.4293456",
"0.42933393",
"0.42919943",
"0.42900798",
"0.42898044",
"0.42846096",
"0.4270582",
"0.4269729",
"0.42684582",
"0.42680234",
"0.4254752",
"0.4252811"
] |
0.0
|
-1
|
PostgreSQL 9.5+ supports GROUPING SETS
|
def supports_grouping_sets?
server_version >= 90500
end
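
A minimal usage sketch for the feature flag above, assuming the sequel gem and a PostgreSQL 9.5+ connection (the URL, table, and columns below are hypothetical): when the flag is true, the dataset's grouping_sets method rewrites GROUP BY into GROUP BY GROUPING SETS.

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical database

ds = DB[:sales].select(:region, :product){sum(:amount).as(:total)}
if ds.supports_grouping_sets?
  # Emits: GROUP BY GROUPING SETS ((region, product), (region), ())
  ds = ds.group([:region, :product], [:region], []).grouping_sets
end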
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def grouping_sets\n raise Error, \"GROUP BY GROUPING SETS not supported on #{db.database_type}\" unless supports_grouping_sets?\n clone(:group_options=>:\"grouping sets\")\n end",
"def group_by\n end",
"def group_by\n\n end",
"def grouped_duplicates(collection); end",
"def group_by?; @group_by; end",
"def to_hash_groups(key_column, value_column=nil, opts=OPTS)\n if (@opts[:eager_graph] || @opts[:eager]) && !opts.has_key?(:all)\n opts = Hash[opts]\n opts[:all] = true\n end\n super\n end",
"def group_all\n grp = {}\n grp.merge!(groupby_fields)\n grp.merge!(groupby_values)\n { '$group' => grp }\n end",
"def to_group_sql\n case\n when is_many?, is_string?, ThinkingSphinx.use_group_by_shortcut?\n nil\n else\n @columns.collect { |column|\n column_with_prefix(column)\n }\n end\n end",
"def group_by(collection, grouping_value)\n collection.reduce({}) do |acc, item| #we build a dictionary (or hashmap)\n acc.tap do |acc| #because I love old fashioned functional style\n acc[item[grouping_value]] ||= [] #because a new value\n acc[item[grouping_value]] << item\n end\n end.map do |key, value| #transform the hash to an array\n value\n end\nend",
"def group_by(column)\n @conjunction.add_group_by(column)\n nil\n end",
"def groups(recursive = false)\n Set.new() << self\n end",
"def group\n return if record.respond_to?(:where)\n record.group\n end",
"def group_by(*args)\n args.each do |arg|\n if arg.is_a?(String)\n self.groups = self.groups.concat(SkyDB::Query::Selection.parse_groups(arg))\n elsif arg.is_a?(Symbol)\n self.groups << SelectionGroup.new(:expression => arg.to_s)\n else\n raise \"Invalid group by argument: #{arg} (#{arg.class})\"\n end\n end\n \n return self\n end",
"def to_hash_groups(key_column, value_column = nil, opts = Sequel::OPTS)\n if value_column && !opts[:hash]\n clone(:_sequel_pg_type=>:hash_groups, :_sequel_pg_value=>[key_column, value_column]).fetch_rows(sql){|s| return s}\n {}\n elsif opts.empty?\n super(key_column, value_column)\n else\n super\n end\n end",
"def embiggen_grouped_results(values)\n embiggened_results = []\n values.each do |resultset|\n container = {} \n resultset.each do |set|\n data = {}\n set.data.each do |key, value|\n if value.kind_of? Hash\n if value.empty?\n value = []\n else \n value = [value] if value.kind_of? Hash\n end\n end\n data[key] = value\n end\n container.merge!(data) do |key, old, nue|\n if old.kind_of? Array\n old.push nue.first\n else\n nue\n end \n end\n end\n embiggened_results.push container\n end\n\n embiggened_results\n end",
"def group_with_builtin(coll)\n puts \"Grouping by 'Make' (built-in)\"\n coll.group({\n :key => [:Make],\n :initial => {:crashes => 0},\n :reduce => 'function(doc, prev) {prev.crashes += 1;}'\n })\nend",
"def group(*) end",
"def group_by(collection, grouping_key)\n [].tap do |providers| #building the returned Array\n extract_uniq_values_for_key(collection, grouping_key).each do |provider_name|\n current_provider_array, collection = collection.partition{|h| h[grouping_key] == provider_name} #reducing the collection\n providers << current_provider_array\n end\n end\nend",
"def group\n klass.collection.group(\n :key => field_list,\n :cond => selector,\n :initial => { :group => [] },\n :reduce => Javascript.group\n ).collect do |docs|\n docs[\"group\"] = docs[\"group\"].collect do |attrs|\n Mongoid::Factory.from_db(klass, attrs)\n end\n docs\n end\n end",
"def test_grouping_partial_set\n\n i = CartItem.for_product(Product.find(5))\n i.save\n\n groups = CartGroup.groups_for_items([i])\n\n assert_equal(1, groups.size)\n\n assert_equal(1, groups[0].size)\n assert_equal(5, groups[0][0].product_id)\n assert(!groups[0].set_discount?)\n assert_equal(BigDecimal('14.99'), groups[0].total)\n assert_equal(BigDecimal('0.00'), groups[0].savings)\n groups[0].items_with_prices { |item, price| assert_equal(BigDecimal('14.99'), price) }\n\n end",
"def group_by\n @group_by ||= (defaults[:group_by] || [])\n end",
"def test_grouping\n make_dummy_source(\"http://groupme/source1\", N::FOAFX.Goat, N::FOAFX.Bee)\n make_dummy_source(\"http://groupme/source2\", N::FOAFX.Goat)\n make_dummy_source(\"http://groupme/source3\", N::FOAFX.Bee)\n results = Source.groups_by_property(:type, [ N::FOAFX.Goat, N::FOAFX.Bee ])\n assert_equal(2, results.size)\n assert_equal(2, results[N::FOAFX.Goat].size)\n assert_equal(2, results[N::FOAFX.Bee].size)\n end",
"def groups(*groups); end",
"def groups(*groups); end",
"def ungrouped\n cached_dataset(:_ungrouped_ds){clone(:group => nil, :having => nil)}\n end",
"def group(*columns)\n clone(:group => columns)\n end",
"def to_expanded_set\r\n (target.map { |x| x.to_i } << to_i).to_set # include the group value as well\r\n end",
"def groups\n quads\n end",
"def _reduce_7(val, _values)\n Group.new(val[1])\nend",
"def group_query_builder group\n query_arr = []\n g_question_names = questions_for_group(group).map{|question|sanitize(question.attribute_name)}.uniq\n\n entities_for_group(group).each do |entity|\n cols = []\n e_question_names = questions_for_entity(entity).map{|question|sanitize(question.attribute_name)}\n\n #\n g_question_names.each do |q_name|\n if e_question_names.include? q_name\n cols.push \"CAST(\\\"#{q_name}\\\" AS FLOAT) AS \\\"#{q_name}\\\"\"\n else\n cols.push \"CAST(888 AS FLOAT) AS #{q_name}\"\n end\n end\n\n #\n cols.push \"#{entity.reference_year} AS year\" unless entity.reference_year.nil?\n cols.push sanitize(entity.entity_type_fk)\n\n # convert to sql\n query_arr.push \"SELECT \\n\\t#{cols.join(\",\\n\\t\")} \\nFROM #{entity.entity_type}\"\n end\n\n return query_arr.join(\"\\nUNION ALL\\n\")\n end",
"def group_by(&block)\n result = {}\n each {|value|\n new_key = block.call(value)\n (result[new_key] ||= []) << value\n }\n result\n end",
"def group_by(&block)\n result = {}\n each {|value|\n new_key = block.call(value)\n (result[new_key] ||= []) << value\n }\n result\n end",
"def visit_Arel_Nodes_Grouping o, collector\n visit(o.expr, collector)\n end",
"def yale_row_as_set i\n require 'set'\n yale_row_as_array(i).to_set\n end",
"def group(group: T.unsafe(nil)); end",
"def grouped?\n !group_by_column.nil?\n end",
"def grouped?\n !group_by_column.nil?\n end",
"def groupings(column_ids)\n update if running?\n if succeeded?\n doc = post(link('group'), {:columns => column_ids}.update(@opts))\n return doc['groupings'].to_a.map {|g| Grouping.new(@opts, g)}\n elsif running?\n raise VeritableError.new(\"Grouping -- Analysis with id #{_id} is still running and not yet ready to calculate groupings.\")\n elsif failed?\n raise VeritableError.new(\"Grouping -- Analysis with id #{_id} has failed and cannot calculate groupings.\")\n else\n raise VeritableError.new(\"Grouping -- Shouldn't be here -- please let us know at support@priorknowledge.com.\")\n end\n end",
"def group_by(*args)\n @group_keys = args\n\n @group_by = Proc.new do |allocations|\n getters = attribute_getters(@group_keys)\n\n allocations.group_by do |allocation|\n getters.map { |getter| getter.call(allocation) }\n end\n end\n\n self\n end",
"def summarize_per_subset\n @having = ANY_ROWS\n end",
"def sql_inventory_groups\n \"WITH ooc_groups AS\n (\n SELECT assg.asset_id, grp.ooc_group_id as group_id,grp.ooc_group_name as group_name,\n grp.ooc_group_type as group_type, grp.ooc_group_status as group_status\n FROM hip_ooc_asset_group_v AS assg\n JOIN hip_ooc_group_v AS grp ON grp.ooc_group_id = assg.ooc_group_id\n WHERE grp.ooc_group_status != 'deleted'\n AND grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n hc_groups as(\n SELECT assg.asset_id, grp.hc_group_id as group_id,grp.group_name,'hc cycle'as group_type ,\n grp.is_current\n FROM hip_asset_group_v AS assg\n JOIN hip_hc_group_v AS grp ON grp.hc_group_id = assg.hc_group_id\n WHERE grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n all_groups as (\n select * from ooc_groups\n union\n select * from hc_groups\n )\n SELECT assh.host_name,assh.ip_string_list, assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag,assh.hc_auto_interval_weeks,\n assh.hc_manual_interval_weeks,assh.hc_manual_flag,\n #{group_type_columns_frag}\n CASE\n WHEN assh.hc_auto_flag='y' and assh.hc_manual_flag='y' then 'Yes'\n WHEN assh.hc_auto_flag='n' and assh.hc_manual_flag='n' then 'No'\n ELSE NULL\n END AS hc_required \n FROM dim_comm_tool_asset_hist_v AS assh\n LEFT join all_groups AS g ON g.asset_id = assh.tool_asset_id\n JOIN dim_comm_os_v AS os ON os.os_id=assh.os_id\n WHERE\n assh.org_l1_id=#{org_l1_id} AND assh.org_id=#{org_id}\n AND CURRENT_TIMESTAMP BETWEEN assh.row_from_timestamp AND COALESCE(assh.row_to_timestamp, CURRENT_TIMESTAMP)\n group by assh.host_name,assh.ip_string_list,assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag, assh.hc_auto_interval_weeks, assh.hc_manual_interval_weeks,\n assh.hc_manual_flag\n ORDER BY assh.host_name\"\n end",
"def group_by_columns\n @group = \" GROUP BY #{column_list_for(@columns)}\" if @columns.any?\n end",
"def aggregate_after_grouping?; @aggregate_after_grouping; end",
"def add_group_by(field)\n @group_by.push(field)\n end",
"def test_0690_group_by\n @@log.debug \"test_0690_group_by starts\" if @@log.debug?\n assert_respond_to(@list, :group_by, \"test_0690_group_by_respond\")\n # Type check\n enum = @list.group_by\n result = enum.is_a? Enumerator\n assert(result,\"test_0690_group_by_class\") \n # Build basic Hash groups\n hash = @list.group_by {|item| item.ndata <= 2 ? \"le2\" : \"gt2\"}\n assert_equal(hash,\n {\"gt2\" => [@aen, @bsb], \"le2\" => [@cab, @dad]},\n \"test_0690_group_by_hash\")\n @@log.debug \"test_0690_group_by ends\" if @@log.debug?\n end",
"def scrooge_select_sql( set )\n set.map{|a| attribute_with_table( a ) }.join( ScroogeComma )\n end",
"def aggregate\n klass.collection.group(\n :key => field_list,\n :cond => selector,\n :initial => { :count => 0 },\n :reduce => Javascript.aggregate\n )\n end",
"def get_groups\n sparql = SPARQL.parse(\"SELECT DISTINCT ?uri ?label\n WHERE {\n ?uri <http://www.bbc.co.uk/ontologies/sport/hasMatch> ?match_uri .\n ?uri <#{RDF::RDFS.label}> ?label .\n }\n ORDER BY ASC(?label)\n \")\n results = QUERYABLE.query(sparql)\n end",
"def group_by_brand(products)\n products.group_by do |product|\n product[:brand]\n end\nend",
"def split_multiple_result_sets\n raise(Error, \"Can't split multiple statements on a graphed dataset\") if opts[:graph]\n ds = clone(:split_multiple_result_sets=>true)\n ds = ds.with_row_proc(proc{|x| x.map{|h| row_proc.call(h)}}) if row_proc\n ds\n end",
"def group_by(key, records)\n records.inject({}) do |table, record|\n table[record.send(key)] ||= []\n table.tap {|x| x[record.send(key)] << record}\n end\n end",
"def cogroup(*others)\n unioned = self\n others.each do |other|\n unioned = unioned.union(other)\n end\n\n unioned.group_by_key\n end",
"def group_with(other, num_partitions=nil)\n self.union(other).group_by_key(num_partitions)\n end",
"def set_GroupBy(value)\n set_input(\"GroupBy\", value)\n end",
"def find_groups\n initialize_groups\n @edges.each do |e|\n v1 = e[0]\n v2 = e[1]\n g1 = group_for_id(v1)\n g2 = group_for_id(v2)\n merge_groups(g1, g2)\n end\n end",
"def group(*values)\n values.inject(self) { |res, val| res._group(val) or fail ArgumentError, \"Unknown value for group: #{val}\" }\n end",
"def group_by_unique(proc = nil, &block)\n result = {}\n\n return result if proc.nil? and !block_given?\n\n proc = block if block_given?\n\n @target.each_entry do |item|\n result[proc.call(item)] = item\n end\n\n result\n end",
"def group_by_statement(with_identifier: true)\n return [degenerate_fragment] if type == Dimension::TYPES[:degenerate]\n\n group = [label_fragment]\n group << identifier_fragment if with_identifier\n group\n end",
"def as_set\n content = as_content_array\n set = Set.new\n content.each {|row| row.each {|column_name, column_data| set << column_data}}\n set\n end",
"def each_set\n \n end",
"def groups; end",
"def groups; end",
"def groups; end",
"def group_by_exp(input, variable, expression); end",
"def option_groups_from_collection_for_select(collection, group_method, group_label_method, option_key_method, option_value_method, selected_key = T.unsafe(nil)); end",
"def groups(opts={'start' => nil, 'limit' => nil})\n update if running?\n if succeeded?\n return Cursor.new({'collection' => link('groups'),\n 'start' => opts['start'],\n 'limit' => opts['limit']}.update(@opts)) { |g| g['group_id'] }\n elsif running?\n raise VeritableError.new(\"Grouping on column #{column_id} is still running and not yet ready to return groups.\")\n elsif failed?\n raise VeritableError.new(\"Grouping on column #{column_id} has failed and cannot return groups.\")\n else\n raise VeritableError.new(\"Grouping -- Shouldn't be here -- please let us know at support@priorknowledge.com.\")\n end\n end",
"def grouped_duplicates(collection)\n collection.group_by { |item| item }.values.reject(&:one?)\n end",
"def groups\n @groups ||= @csv.group_by { |row| row[:id] }\n end",
"def i_groups; end",
"def get_group_by\n @group_by\n end",
"def groups\n c = Hash.new\n @up.keys.each{|key|\n c.append(self.find(key),(key))\n }\n return c.values\n end",
"def grouping\n @grouping ||= :clustered\n end",
"def grouping\n remove_empty_statement\n new_sl = []\n sl = []\n g = []\n @statement_list.each do |st|\n if st.to_exp?\n g.push(st)\n else\n if g.length > 0\n sl.push(g)\n end\n sl.push([st])\n g = []\n end\n end\n if g.length > 0\n sl.push(g)\n end\n\n sl.each do |g|\n if g.length == 1\n new_sl.push(g[0])\n else\n i = 1\n t = ExpParen.new(g[0].to_exp)\n while i < g.length\n t = ExpComma.new(t, ExpParen.new(g[i].to_exp))\n i += 1\n end\n new_sl.push(StExp.new(t))\n end\n end\n\n if idx = new_sl.index{|x| x.class == StReturn}\n idx += 1\n while idx < new_sl.length\n if new_sl[idx].kind_of? StVar\n ;\n elsif new_sl[idx].kind_of? StFunc\n ;\n else\n new_sl[idx] = StEmpty.new\n end\n idx += 1\n end\n end\n\n if self.kind_of? SourceElements\n if new_sl[-1].kind_of? StReturn and new_sl[-1].exp.nil?\n new_sl.pop\n end\n end\n\n if new_sl[-1].kind_of? StReturn and new_sl[-2].kind_of? StExp\n if new_sl[-1].exp\n new_sl[-2] = StReturn.new(ExpComma.new(new_sl[-2].exp, new_sl[-1].exp))\n new_sl.pop\n end\n end\n @statement_list = new_sl\n end",
"def group_by *groupings\n @dimensions.merge! paramerize(groupings, VALID_DIMENSIONS, 'Invalid dimension group')\n self\n end",
"def groups\n groups = []\n relations = self.group_relations\n relations.each do |r|\n groups.push r.group\n end\n groups\n end",
"def execute(input_set = nil)\n resp = super(input_set)\n results = HaplogroupsResultSet.new(resp)\n return results\n end",
"def each\n @mset.each_pair do |key, group| \n group.to_a.each do |value|\n yield [key, value]\n end\n end \n end",
"def group_key\n columns = []\n\n @columns.each do |column|\n columns.push(column) if column.group\n end\n\n columns\n end",
"def set_group_by(attributes)\n @group_by = GroupByClause.new(attributes)\n end",
"def test_grouping\n\n i1 = CartItem.create(:product_id => 1)\n i2 = CartItem.create(:product_id => 4)\n i3 = CartItem.create(:product_id => 5)\n\n groups = CartGroup.groups_for_items([i1, i2, i3])\n\n assert_equal(2, groups.size)\n\n assert_equal(1, groups[0].size)\n assert_equal(1, groups[0][0].product_id)\n assert(!groups[0].set_discount?)\n assert_equal(BigDecimal('9.99'), groups[0].total)\n assert_equal(BigDecimal('0.00'), groups[0].savings)\n groups[0].items_with_prices { |item, price| assert_equal(BigDecimal('9.99'), price) }\n\n assert_equal(2, groups[1].size)\n assert_equal(4, groups[1][0].product_id)\n assert_equal(5, groups[1][1].product_id)\n assert_equal(5, groups[1].sorted_items[0].product_id)\n assert_equal(4, groups[1].sorted_items[1].product_id)\n assert(groups[1].set_discount?)\n assert_equal(BigDecimal('23.98'), groups[1].total)\n assert_equal(ApplicationHelper.round_currency(BigDecimal('6.00')), ApplicationHelper.round_currency(groups[1].savings))\n groups[1].items_with_prices do |item, price|\n assert_equal(BigDecimal('0.00'), price) if item.product.id == 5\n assert_equal(BigDecimal('23.98'), price) if item.product.id == 4\n end\n\n end",
"def group_for_key(series_key)\n raise NotImplementedError, \"Implement group_for_key\"\n end",
"def generate_groups(groups, type)\r\n [].tap do |results|\r\n groups.each do |n, obj|\r\n group = {}\r\n group[\"name\"] = obj['name']\r\n group[\"code\"] = n\r\n group[\"risk_category\"] = \"AUTO\"\r\n group[type] = obj[type]\r\n results << group\r\n end\r\n end\r\nend",
"def date_groups(table, column) # rubocop:disable Metrics/MethodLength\n query = <<~SQL.squish\n SELECT date_trunc('year', #{column}) AS year, count(*) as events\n FROM #{table}\n WHERE #{column} IS NOT NULL\n GROUP BY year\n ORDER BY year ASC\n SQL\n\n ApplicationRecord.connection.execute(query)\n .map { |row| [row[\"year\"].year, row[\"events\"]] }\n .select { |row| (2004..Time.current.year).cover? row.first }\n .to_h\n end",
"def grouped_by_kind\n grouped = Hash.new {|h,k| h[k] = [] }\n all.each { |r| grouped[r.kind] << r }\n grouped\n end",
"def fill_aggregator\n @aggregator = {}\n Setting.where(often: true).each do |sset|\n @aggregator[sset.ident] = sset.value\n end\n end",
"def grouping_by_parameter(argument_ids, grouped_by_param_x, param_x, param_y, simulation_runs)\n simulation_runs = simulation_runs.map do |obj|\n ## search for parameter value value in result or arguments\n param_x_val = argument_ids.index(param_x) ? obj[:arguments][param_x] : obj[:result][param_x]\n param_y_val = argument_ids.index(param_y) ? obj[:arguments][param_y] : obj[:result][param_y]\n\n if grouped_by_param_x.include? param_x_val\n grouped_by_param_x[param_x_val].push(param_y_val)\n else\n grouped_by_param_x[param_x_val] = [param_y_val]\n end\n obj\n end\n grouped_by_param_x\n end",
"def grouping_by_parameter(argument_ids, grouped_by_param_x, param_x, param_y, simulation_runs)\n simulation_runs = simulation_runs.map do |obj|\n ## search for parameter value value in result or arguments\n param_x_val = argument_ids.index(param_x) ? obj[:arguments][param_x] : obj[:result][param_x]\n param_y_val = argument_ids.index(param_y) ? obj[:arguments][param_y] : obj[:result][param_y]\n\n if grouped_by_param_x.include? param_x_val\n grouped_by_param_x[param_x_val].push(param_y_val)\n else\n grouped_by_param_x[param_x_val] = [param_y_val]\n end\n obj\n end\n grouped_by_param_x\n end",
"def groupby_fields\n ids = {}\n ids.merge!(make_grp_prj_periods[0])\n ids.merge!(make_grp_prj_nodes[0])\n { '_id' => ids }\n end",
"def groups=(_arg0); end",
"def group_ids(*ids, **)\n ids.sort!.uniq!\n ids\n end",
"def group_by(grouping_field)\n @grouping_field = grouping_field\n end",
"def ii_groups; end",
"def group\n raise \"View#reduce must have been set before grouping is permitted\" unless query[:reduce]\n update_query(:group => true)\n end",
"def subgroups\n @subgroups ||= [].tap do |subgroups|\n row_data = rows.map(&:elements).map(&:dup)\n 3.times do\n row_set = row_data.pop(3)\n 3.times do\n subgroups << row_set.map{ |row| row.pop(3) }.flatten\n end\n end\n end.map { |elements| Subgroup.new(elements) }\n end",
"def group_by_environment(node_configs)\n node_configs.group_by do |config|\n node_environment(config)\n end\nend",
"def group_by(hash)\n hash.inject({}) do |ret_hash,(key,el)|\n group_key = yield el\n prev_for_group = ret_hash[group_key] || {}\n group = {key => el}.merge(prev_for_group)\n ret_hash.merge(group_key => group)\n end\n end",
"def group_for_grouped_select(items)\n items.group_by { |entry| entry.first.split(\": \").first }\n end",
"def group_rollup\n raise Error, \"GROUP BY ROLLUP not supported on #{db.database_type}\" unless supports_group_rollup?\n clone(:group_options=>:rollup)\n end",
"def each(&block)\n to_set.each(&block)\n end",
"def rset; end"
] |
[
"0.72578466",
"0.64574665",
"0.6363012",
"0.6204091",
"0.60069335",
"0.59798086",
"0.59468204",
"0.59299254",
"0.5926807",
"0.5895744",
"0.58356977",
"0.5835613",
"0.58214027",
"0.5820797",
"0.5771506",
"0.57504207",
"0.57495415",
"0.57292455",
"0.5693723",
"0.5669777",
"0.5630873",
"0.5607406",
"0.5567527",
"0.5567527",
"0.5497081",
"0.5496305",
"0.5486578",
"0.54739153",
"0.5472895",
"0.54555404",
"0.54426455",
"0.54426455",
"0.5425488",
"0.54190964",
"0.54127926",
"0.54021454",
"0.54021454",
"0.5384314",
"0.53620595",
"0.5335348",
"0.53318894",
"0.5314217",
"0.53098935",
"0.5295519",
"0.5276337",
"0.5268612",
"0.52684766",
"0.52552694",
"0.5251422",
"0.5228094",
"0.52259195",
"0.52203184",
"0.5211955",
"0.52074456",
"0.52066976",
"0.52002776",
"0.5187471",
"0.51656234",
"0.5162762",
"0.51593965",
"0.5146778",
"0.5146778",
"0.5146778",
"0.5146048",
"0.5144185",
"0.51412344",
"0.514082",
"0.5139597",
"0.5136045",
"0.513364",
"0.51279926",
"0.5115513",
"0.5108393",
"0.50985444",
"0.5095966",
"0.50919545",
"0.5087347",
"0.5085452",
"0.50853026",
"0.50810087",
"0.507617",
"0.5071293",
"0.50695944",
"0.5066109",
"0.50420386",
"0.5039738",
"0.5039738",
"0.50382304",
"0.5037402",
"0.50328743",
"0.50186",
"0.5013169",
"0.501227",
"0.50018585",
"0.5001036",
"0.4992924",
"0.49834338",
"0.49801898",
"0.4970489",
"0.49700996"
] |
0.53189254
|
41
|
True unless insert returning has been disabled for this dataset.
|
def supports_insert_select?
!@opts[:disable_insert_returning]
end
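
A minimal sketch of what this flag controls, assuming the sequel gem and a PostgreSQL connection (URL and table are hypothetical): insert_select uses INSERT ... RETURNING to fetch the inserted row in a single round trip, and disable_insert_returning opts a dataset out of that behavior.

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical database

ds = DB[:items]
ds.supports_insert_select?             # => true
row = ds.insert_select(name: 'widget') # full inserted row, one query

plain = ds.disable_insert_returning
plain.supports_insert_select?          # => false (RETURNING no longer used)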
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def use_insert?\n !use_copy?\n end",
"def insertable?\n persistable? && !_assigning? && inserts_valid\n end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def disable_insert_returning\n clone(:disable_insert_returning=>true)\n end",
"def inserts_valid\n @inserts_valid\n end",
"def save_on_insert?\n false\n end",
"def dataset_need_primary_key?\n false\n end",
"def dataset_need_primary_key?\n false\n end",
"def new_record?\n !primary_key_value\n end",
"def delete_statement?\n false\n end",
"def generate_anyinsert_adapter?\n params[:gen_anyinsert_adapter_policy] || false\n end",
"def prepared_statements\n false\n end",
"def omittable?\n false\n end",
"def non_sql_option?(key)\n super || key == :cursor || key == :insert_conflict\n end",
"def dataset_need_primary_key?\n true\n end",
"def dataset_need_primary_key?\n true\n end",
"def recording?\n @decision != Decision::DROP\n end",
"def error_on_disabled?\n false\n end",
"def disabled_tx?; @disabled_tx > 0 end",
"def limit_to_single_row?\n !returns_array?\n end",
"def supports_multi_insert?\n true\n end",
"def distracted?\n false\n end",
"def use_insert!\n @use_copy = false\n end",
"def enabled?()\n #This is a stub, used for indexing\n end",
"def ignored?()\n #This is a stub, used for indexing\n end",
"def disabled?; end",
"def disabled?; end",
"def create_preliminary_result?\n record.unlocked? && view_access?\n end",
"def enabled?\n false\n end",
"def record_transaction\n if self.save\n return true\n else\n return false\n end \n end",
"def prepared?\n !!@prepared\n end",
"def prepared?\n !!@prepared\n end",
"def enabled?; end",
"def enabled?; end",
"def include_error_data?\n self.return_error_data\n end",
"def autocommit?()\n #This is a stub, used for indexing\n end",
"def done_preprocessing?(save = false)\n !each_dataset.any? do |d|\n d.ref? && d.active? && !d.done_preprocessing?(save)\n end\n end",
"def enabled?\n false\n end",
"def ddl_batch?\n return true if @ddl_batch\n false\n end",
"def allowed?\n true\n end",
"def auto_exclude?\n @auto_exclude\n end",
"def autocommit?\n raise NoMethodError, \"Not implemented for this raw driver\"\n end",
"def write_data_permitted?\n return write_permitted?\n end",
"def supports_prepared_transactions?\n false\n end",
"def new_record?\n !_persisted_obj\n end",
"def any?\n inserted > 0\n end",
"def deleted?\n return (withdrawn || record.nil?)\n end",
"def modified_existing?\n false\n end",
"def query_yields_statements?\n false\n end",
"def suppressed?\n @suppressed\n end",
"def insert_tab?\n @insert_tab\n end",
"def primary_key?\n false\n end",
"def supports_returning?\n false\n end",
"def persists_state_via_data_import?\n false\n end",
"def updatable?\n true\n end",
"def can_save?\n @key != '1'\n end",
"def disable?\n false\n end",
"def eliminated?\n\t\t@eliminated\n\tend",
"def persisted?\n return false\n end",
"def prepared?\n @prepared\n end",
"def new_record?\n !persisted?\n end",
"def enabled?\n true\n end",
"def dataset?\n true\n end",
"def disabled?\n\n return ! @enabled\n \n end",
"def skip?\n !value_of(entity.only_if)\n end",
"def supports_ddl_transactions?\n false\n end",
"def supports_ddl_transactions?\n false\n end",
"def supports_ddl_transactions?\n false\n end",
"def new_record?\n !persisted?\n end",
"def new_record?\n !persisted?\n end",
"def needs_cursor?\n return true\n end",
"def semact?; false; end",
"def atomic?\n false\n end",
"def skip?\n false \n end",
"def prepared?\n @prepared\n end",
"def deleted?\n return true if !@data and !@id\n return false\n end",
"def deletable?\n persisted? && reserved?\n end",
"def allowed?\n true\n end",
"def allowed?\n true\n end",
"def unspendable?\n (size > 0 && op_return?) || size > Tapyrus::MAX_SCRIPT_SIZE\n end",
"def enabled?(*)\n true\n end",
"def preventing_writes?\n return true if replica?\n return false if connection_class.nil?\n\n connection_class.current_preventing_writes\n end",
"def in_use?\n !batch.nil?\n end",
"def virgin?\n self.transactions.empty?\n end",
"def restrict_primary_key?\n @restrict_primary_key\n end",
"def restrict_primary_key?\n @restrict_primary_key\n end",
"def handle_failed_rows?\n false\n end",
"def expects_statements?\n false\n end",
"def expects_statements?\n false\n end",
"def query_yields_statements?\n false\n end",
"def new_record?\n !self.persisted?\n end",
"def needs_cursor?\n return true\n end",
"def ignore?\n @should_ignore\n end",
"def soy_edificable\n return false\n end",
"def soy_edificable\n return false\n end",
"def transactional?\n true\n end",
"def persisted?\n paranoid? ? !new_record? : super\n end",
"def persisted?\n paranoid? ? !new_record? : super\n end",
"def disabled?\n \n return ! @enabled\n \n end"
] |
[
"0.68219227",
"0.6537745",
"0.6484788",
"0.6484788",
"0.64069223",
"0.6354706",
"0.6287502",
"0.62571484",
"0.6256194",
"0.62128913",
"0.61805964",
"0.61404204",
"0.61000156",
"0.608905",
"0.60511166",
"0.6009329",
"0.5975124",
"0.5947837",
"0.5921701",
"0.5902638",
"0.5899718",
"0.58996165",
"0.58727497",
"0.58273935",
"0.5821703",
"0.5796663",
"0.5792996",
"0.5792996",
"0.5787782",
"0.57673085",
"0.576176",
"0.57602245",
"0.57602245",
"0.57122785",
"0.57122785",
"0.570687",
"0.5701243",
"0.5689131",
"0.56865275",
"0.56786746",
"0.5676301",
"0.56542766",
"0.5650854",
"0.56508315",
"0.56415504",
"0.56391823",
"0.5636108",
"0.56312495",
"0.56162095",
"0.56160223",
"0.56157136",
"0.56132615",
"0.5610396",
"0.56097627",
"0.56076473",
"0.56073403",
"0.5601292",
"0.56007075",
"0.5600366",
"0.559866",
"0.5595788",
"0.55945003",
"0.55911803",
"0.5589122",
"0.5583359",
"0.5576735",
"0.5575545",
"0.5575545",
"0.5575545",
"0.55736226",
"0.55736226",
"0.5572665",
"0.5568479",
"0.556319",
"0.5562108",
"0.5561647",
"0.5555403",
"0.5553347",
"0.55516034",
"0.55516034",
"0.554969",
"0.5547208",
"0.55458033",
"0.5535901",
"0.55346024",
"0.5529725",
"0.5529725",
"0.5524267",
"0.55148023",
"0.55148023",
"0.5513774",
"0.5512187",
"0.551199",
"0.5510922",
"0.5510663",
"0.5510663",
"0.5506092",
"0.5501544",
"0.5501544",
"0.55013824"
] |
0.6667171
|
1
|
PostgreSQL 9.5+ supports the ON CONFLICT clause to INSERT.
|
def supports_insert_conflict?
server_version >= 90500
end
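
A minimal sketch of the guarded feature, assuming the sequel gem and a PostgreSQL 9.5+ connection (URL, table, and columns are hypothetical): insert_conflict adds the ON CONFLICT clause to the generated INSERT.

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical database

ds = DB[:tags]
if ds.supports_insert_conflict?
  # Emits: INSERT INTO tags (name, count) VALUES ('ruby', 1)
  #        ON CONFLICT (name) DO UPDATE SET count = excluded.count
  ds.insert_conflict(target: :name,
                     update: {count: Sequel[:excluded][:count]})
    .insert(name: 'ruby', count: 1)
end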
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def insert_conflict_sql(sql)\n if opts = @opts[:insert_conflict]\n sql << \" ON CONFLICT\"\n\n if target = opts[:constraint] \n sql << \" ON CONSTRAINT \"\n identifier_append(sql, target)\n elsif target = opts[:target]\n sql << ' '\n identifier_append(sql, Array(target))\n if conflict_where = opts[:conflict_where]\n sql << \" WHERE \"\n literal_append(sql, conflict_where)\n end\n end\n\n if values = opts[:update]\n sql << \" DO UPDATE SET \"\n update_sql_values_hash(sql, values)\n if update_where = opts[:update_where]\n sql << \" WHERE \"\n literal_append(sql, update_where)\n end\n else\n sql << \" DO NOTHING\"\n end\n end\n end",
"def sql_for_on_duplicate_key_ignore( *args ) # :nodoc:\n arg = args.first\n conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash )\n \" ON CONFLICT #{conflict_target}DO NOTHING\"\n end",
"def sql_for_on_duplicate_key_ignore( table_name, *args ) # :nodoc:\n arg = args.first\n conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash )\n \" ON CONFLICT #{conflict_target}DO NOTHING\"\n end",
"def _insert_dataset\n if upsert_plugin_upserting\n if postgres?\n super.insert_conflict(update: values_to_update, target: self.class.upsert_plugin_identifying_columns)\n elsif mysql?\n columns_to_update = values_to_update.keys - self.class.upsert_plugin_identifying_columns\n super.on_duplicate_key_update(*columns_to_update)\n else\n super\n end\n else\n super\n end\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( table_name, primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target or :constraint_name to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def set_timestamp_to_now\n puts 'set_timestamp_to_now'\n db_conn.prepare 'set_timestamp_to_now', \"INSERT INTO #{TABLE} (id, updated_at) VALUES (#{ROW_KEY}, $1)\n ON CONFLICT(id) DO UPDATE SET updated_at = excluded.updated_at\"\n db_conn.exec_prepared 'set_timestamp_to_now', [Time.now]\nend",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def on_conflict(column = nil)\n ::MultiInsert::Query::OnConflict.new(self, column)\n end",
"def sneaky_save(avoid_insert_conflict: nil)\n begin\n sneaky_create_or_update(avoid_insert_conflict: avoid_insert_conflict)\n rescue ActiveRecord::StatementInvalid\n false\n end\n end",
"def on_upsert\n #\n end",
"def _merge_insert_sql(sql, data)\n sql << \" THEN INSERT \"\n columns, values = _parse_insert_sql_args(data[:values])\n _insert_columns_sql(sql, columns)\n if override = data[:override]\n sql << override\n end\n _insert_values_sql(sql, values)\n end",
"def insert_or_update(uniq_keys, values_hash, tbl_name='main_table', opts={})\n all_field_names = values_hash.keys\n field_names_as_symbol_string = all_field_names.map{ |k| \":#{k}\" }.join(',') # need to appear as symbols\n sql_statement = \"INSERT INTO #{tbl_name} (#{format_field_names_as_string(all_field_names)}) VALUES (#{field_names_as_symbol_string})\"\n database.execute(sql_statement, values_hash)\n rescue SQLite3::ConstraintException => e\n unique_key_constraint = uniq_keys.map { |k| \"'#{k}'=:#{k}\" }.join(' AND ')\n update_keys = values_hash.keys\n update_keys -= uniq_keys if !opts[:update_unique_keys]\n update_sql = update_keys.map { |k| \"'#{k}'=:#{k}\" }.join(', ')\n sql_statement = \"UPDATE #{tbl_name} SET #{update_sql} WHERE #{unique_key_constraint}\"\n database.execute sql_statement, values_hash\n rescue SQLite3::SQLException => e\n puts \"Exception (#{e.inspect}) raised\" if verbose?\n case e.message\n when /no such table/\n create_table(tbl_name, all_field_names, uniq_keys)\n retry\n when /has no column/\n add_columns(tbl_name, all_field_names)\n retry\n else\n raise e\n end\n end",
"def result(id, key, value)\n @pgsql.exec(\n 'INSERT INTO result (job, key, value) VALUES ($1, $2, $3) ON CONFLICT (job, key) DO UPDATE SET value = $3',\n [id, key, value]\n )\n end",
"def insert_ignore\n insert_conflict\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def _insert(*)\n fail NotImplementedError\n end",
"def conflicting_or_created_record\n conflict || create\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{buffer.parent.table_name.inspect} for selector #{selector.map(&:inspect).join(', ')} and columns #{columns.map(&:inspect).join(', ')}\"\n column_definitions = get_column_definitions\n connection.execute <<-EOS\nCREATE OR REPLACE FUNCTION #{name}(#{column_definitions.map { |c| \"#{c.quoted_input_name} #{c.sql_type} DEFAULT #{c.default || 'NULL'}\" }.join(',') }) RETURNS VOID AS\n$$\nBEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{column_definitions.map { |c| \"#{c.quoted_name} = #{c.quoted_input_name}\" }.join(',')}\n WHERE #{selector.map { |k| \"#{connection.quote_ident(k)} = #{connection.quote_ident([k,'input'].join('_'))}\" }.join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{column_definitions.map { |c| c.quoted_name }.join(',')}) VALUES (#{column_definitions.map { |c| c.quoted_input_name }.join(',')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\nEND;\n$$\nLANGUAGE plpgsql;\nEOS\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def supports_multi_insert?\n true\n end",
"def multi_insert_sql_strategy\n :values\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n\n selector_column_definitions = column_definitions.select { |cd| selector_keys.include?(cd.name) }\n setter_column_definitions = column_definitions.select { |cd| setter_keys.include?(cd.name) }\n update_column_definitions = setter_column_definitions.select { |cd| cd.name !~ CREATED_COL_REGEX && !options[\"ignore_on_update\"].include?(cd.name) }\n\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def save_detecting_duplicate_entry_constraint_violation\n begin\n save\n rescue ActiveRecord::StatementInvalid => e\n # Would that rails gave us the nested exception to check...\n if e.message =~ /.*[Dd]uplicate/\n errors.add_to_base(translate_with_theme('duplicate_entry_please_try_again'))\n false\n else\n raise e\n end\n end\n end",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg) + hstore_delete_handlers.map(&:to_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert_or_merge_entity(table_name, entity_values, options = {})\n options[:create_if_not_exists] = true\n merge_entity(table_name, entity_values, options)\n end",
"def non_sql_option?(key)\n super || key == :cursor || key == :insert_conflict\n end",
"def insert_or_merge_entity(table_name, entity_values, options={})\n options[:create_if_not_exists] = true\n merge_entity(table_name, entity_values, options)\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def insert_nonexist(table, unique, input)\n id = get_id_existing(table, unique)\n\n if id == nil\n # Insert the information\n id = insert(table, input)\n end\n return id\n end",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def on_conflict_sql(sql)\n @sql_on_conflict = sql\n self\n end",
"def _import(columns, values, opts=OPTS)\n if @opts[:returning]\n # no transaction: our multi_insert_sql_strategy should guarantee\n # that there's only ever a single statement.\n sql = multi_insert_sql(columns, values)[0]\n returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}\n elsif opts[:return] == :primary_key\n returning(insert_pk)._import(columns, values, opts)\n else\n super\n end\n end",
"def insert_select(*values)\n return unless supports_insert_select?\n # Handle case where query does not return a row\n server?(:default).with_sql_first(insert_select_sql(*values)) || false\n end",
"def _insert\n return super if model.cti_tables.length == 1\n model.cti_models.each do |m|\n v = {}\n m.cti_table_columns.each{|c| v[c] = @values[c] if @values.include?(c)}\n ds = use_server(m.cti_instance_dataset)\n if ds.supports_insert_select? && (h = ds.insert_select(v))\n @values.merge!(h)\n else\n nid = ds.insert(v)\n @values[primary_key] ||= nid\n end\n end\n db.dataset.supports_insert_select? ? nil : @values[primary_key]\n end",
"def flush\n conn.transaction do\n buffer.flatten.each do |row|\n # check to see if this row's compound key constraint already exists\n # note that the compound key constraint may not utilize virtual fields\n next unless row_allowed?(row)\n\n # add any virtual fields\n add_virtuals!(row)\n \n key_names = []\n key_values = []\n @key_columns.each do |name|\n key_names << \"#{name}\"\n key_values << conn.quote(row[name]) # TODO: this is probably not database agnostic\n end\n\n names = []\n values = []\n (order - @key_columns).each do |name|\n names << \"#{name}\"\n values << conn.quote(row[name]) # TODO: this is probably not database agnostic\n end\n\n all_name_values = (key_names+names).zip(key_values+values)\n\n q = <<EOF\nMERGE INTO #{table_name} d \nUSING (SELECT #{all_name_values.collect {|c,v| \"#{v} #{c}\"}.join(',')} FROM DUAL) s\nON (#{map_src_to_dest(key_names,'s','d').join(' AND ')})\nWHEN MATCHED THEN \nUPDATE SET #{[map_src_to_dest(names,'s','d'), \"d.#{@update_ts_column}=CURRENT_TIMESTAMP\"].flatten.join(',')}\nWHEN NOT MATCHED THEN\nINSERT (#{all_name_values.collect {|c,v| 'd.'+c}.join(',')},d.#{@insert_ts_column})\nVALUES (#{all_name_values.collect {|c,v| 's.'+c}.join(',')},CURRENT_TIMESTAMP)\nEOF\n #q = \"INSERT INTO `#{table_name}` (#{names.join(',')}) VALUES (#{values.join(',')})\"\n ETL::Engine.logger.debug(\"Executing upsert: #{q}\")\n conn.insert(q, \"Upsert row #{current_row}\")\n @current_row += 1\n end\n buffer.clear\n end\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def supports_insert_select?\n !@opts[:disable_insert_returning]\n end",
"def insert_sql(*values)\n if values.size == 1 && (v = values[0]).is_a?(Sequel::Model) && !v.respond_to?(:sql_literal_append)\n Sequel::Deprecation.deprecate(\"Passing Sequel::Model instance argument to Sequel::Dataset#insert\", \"Pass model_instance.values or model_instance.to_hash as the argument instead\")\n super(v.to_hash)\n else\n super\n end\n end",
"def upsert(sobject, field, attrs)\n upsert!(sobject, field, attrs)\n rescue *exceptions\n false\n end",
"def insert_into(table, data)\n\t\tkeys = \"(#{data.keys.join(', ')})\"\n\t\tvalues = \"(#{data.values.map{ |value| \"'#{value}'\" }.join(', ')})\"\n\t\texecute_with_retry \"INSERT INTO #{table} #{keys} VALUES #{values}; \"\n\tend",
"def create_insert(headers, values, model, upsert_fields, ret_vals, ig_cols)\r\n\t\t\tputs \"Creating insert query:\"\r\n\t\t\tputs \"There are #{values.length} rows to insert.\"\r\n\r\n\t\t\tp \"HEADERS:\"\r\n\t\t\tp headers\r\n\r\n\t\t\tp \"IGNORED COLUMNS BEFORE HACK:\"\r\n\t\t\tp ig_cols\r\n\r\n\t\t\tig_cols = [] if ig_cols == nil\t# Weird hack because of an error ruby was throwing\r\n\t\t\treturn_results = []\r\n\r\n\t\t\tp \"IGNORED COLUMNS:\"\r\n\t\t\tp ig_cols\r\n\r\n\t\t\t# Loop through the array of arrays of values to insert\r\n\t\t\tvalues.each do |values_array|\r\n\t\t\t\tupsert_attributes = {}\r\n\t\t\t\tinner_array = []\r\n\t\t\t\t# Now loop through the single array of values\r\n\t\t\t\tp \"VALUES ARRAY:\"\r\n\t\t\t\tp values_array\r\n\r\n\t\t\t\tvalues_array.each_with_index do |val, index|\r\n\t\t\t\t\t# puts \"INDEX: #{index}\"\r\n\r\n\t\t\t\t\tnext if ig_cols.include?(index) # IMPORTANT: Need to ignore the indices of the columns in the CSV that the user specifies\r\n\t\t\t\t\tassociated_column_name = headers.at(index).to_sym\t# Get the header name for the row - need it to match in return values\r\n\r\n\t\t\t\t\t# Store the attributes we want to do the upsert on to pass into find_or_create_by method\r\n\t\t\t\t\tupsert_attributes[associated_column_name] = val if upsert_fields.include?(associated_column_name)\r\n\r\n\t\t\t\t\t# puts \"Line 282: #{upsert_attributes}\"\r\n\t\t\t\tend\r\n\r\n\t\t\t\t# Use ActiveRecord's method to return the updated or inserted row\r\n\t\t\t\t# Workaround - do a select and then insert since I can't figure out how to dynamically add the values to the class\r\n\t\t\t\t# select_result = model.find_by(upsert_attributes)\r\n\t\t\t\tinsert_attributes = {}\r\n\t\t\t\tvalues_array.each_with_index do |val, i|\r\n\t\t\t\t\tif !(upsert_attributes.has_key?(headers[i]))\r\n\t\t\t\t\t\t# puts \"VALUE: #{val}\"\r\n\t\t\t\t\t\tinsert_attributes[headers[i].to_sym] = val\r\n\t\t\t\t\tend\r\n\t\t\t\tend\r\n\t\t\t\tinsert_attributes = insert_attributes.merge upsert_attributes\r\n\r\n\t\t\t\t# if select_result == nil\r\n\t\t\t\t# \tinsert_result = model.create(insert_attributes)\r\n\t\t\t\t# else\r\n\t\t\t\t# \tinsert_result = model.update(insert_attributes)\r\n\t\t\t\t# end\r\n\r\n\t\t\t\t# upsert_result = model.find_or_create_by(upsert_attributes) do |klass|\r\n\t\t\t\t# \t# Check to see that we haven't already included the column and value in the upsert_attributes\r\n\t\t\t\t# \t# and if we haven't, include it as a field we need to add to the database along with the value\r\n\t\t\t\t# \t# puts \"#{klass.instance_variables}\"\r\n\t\t\t\t# \tvalues_array.each_with_index do |val, i|\r\n\t\t\t\t# \t\tif !(upsert_attributes.has_key?(headers[i]))\r\n\t\t\t\t# \t\t\t# puts \"VALUE: #{val}\"\r\n\t\t\t\t# \t\t\tklass.send :write_attribute, headers[i].to_sym, val\r\n\t\t\t\t# \t\tend\r\n\t\t\t\t# \tend\r\n\t\t\t\t# end\r\n\r\n\t\t\t\tp upsert_attributes\r\n\r\n\t\t\t\tupsert_result = model.find_or_initialize_by(upsert_attributes)\r\n\t\t\t\tupsert_result.update_attributes(insert_attributes)\r\n\r\n\t\t\t\t# Return what the user asked for\r\n\t\t\t\t#ret_vals.each { |val| inner_array.push(insert_result[val]) }\r\n\t\t\t\tret_vals.each { |val| inner_array.push(upsert_result[val]) }\r\n\r\n\t\t\t\t# Concatenate the arrays of information the user wants back\r\n\t\t\t\treturn_results.push(inner_array)\r\n\r\n\t\t\tend\r\n\t\t\t# p return_results\r\n\t\t\treturn return_results\r\n\t\tend",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def insert_or_replace_entity(table_name, entity_values, options = {})\n options[:create_if_not_exists] = true\n update_entity(table_name, entity_values, options)\n end",
"def db_insert a, b, c\n if a == \"\" or b == \"\" \n return 'blank'\n end\n $db.execute \"INSERT INTO produtos (cod, prod, prec) VALUES(?, ?, ?)\", a, b, c\nend",
"def sneaky_save!(avoid_insert_conflict: nil)\n sneaky_create_or_update(avoid_insert_conflict: avoid_insert_conflict)\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def InsertOrUpdateItemInInventory(item_id, price, quantity, extraNotes) # either inserts new item in inventory, or updates the quantity if already exists\n @@db.results_as_hash = true\n rowid = @@db.execute \"INSERT OR IGNORE INTO Inventory(item_id, price, quantity, extraNotes) VALUES(#{item_id},#{price},#{quantity},'#{extraNotes}')\"\n # if row id is null or empty, that means the item already exists in the inventory, hence it we can just update the quantity..\n id = @@db.execute \"select last_insert_rowid() as ITEMID\" #Check the last ID of the row that was inserted.\n if id[0][\"ITEMID\"] == 0 #incase the row was not inserted (happens if it exists already, then id will be 0)\n @@db.execute \"UPDATE Inventory set quantity = quantity + #{quantity} where item_id = #{item_id}\"\n end\n end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def execute_insert(sql, opts=OPTS)\n execute(sql, opts.merge(:meth=>:autoid))\n end",
"def insert_or_replace_entity(table_name, entity_values, options={})\n options[:create_if_not_exists] = true\n update_entity(table_name, entity_values, options)\n end",
"def execute_insert(sql, opts={})\n synchronize(opts[:server]) do |conn|\n conn.execute(sql)\n insert_result(conn, opts[:table], opts[:values])\n end\n end",
"def insert_select(*values)\n with_sql_first(insert_select_sql(*values))\n end",
"def duplicate_primary_key(duplicate_row:, key:, node_id:)\n # nothing\n end",
"def test_upsert_date\n create_users\n User.rollup(\"Test\")\n assert_match \"'#{now.to_date}'\", $sql.find { |s| s =~ /ON (CONFLICT|DUPLICATE KEY)/i }\n end",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end",
"def add_exercise(name)\n #want to check for duplicates, will attempt later\n #exercises = db.execute(\"SELECT name FROM Exercise\")\n #exercises.each do |ex|\n #if name != ex[0]\n $db.execute(\"INSERT OR IGNORE INTO Exercise (name) VALUES (?)\", [name])\nend",
"def insert!(*rows)\n Mao.sql(with_options(:insert => rows.flatten).sql) do |pg_result|\n if @options[:returning]\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n else\n pg_result.cmd_tuples\n end\n end\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n insert_sql(sql, name, pk, id_value, sequence_name)\n end",
"def execute_insert(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}; conn.last_insert_rowid}\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def insert_sequenced(row)\n sql = row.type.insert_sql_minus_key\n vals = row.field_values_minus_key\n#$stderr.puts sql\n#$stderr.puts vals.inspect\n\n db.do(sql, *vals)\n insert_id = db.select_one(row.type.get_insert_id_sql)[0]\n row.send(row.type.primary_key.setter_name, insert_id)\n row.reset_changed\n end",
"def upsert_model(model)\n model_hash = model.to_hash\n columns_to_update = model_hash.keys.reject do |k|\n matching_attributes.include?(k) || skip_updating.include?(k)\n end\n upsert_options = { target: matching_attributes }\n unless columns_to_update.empty?\n update_clause = columns_to_update.map { |key| [ key.to_sym, \"excluded__#{key}\".to_sym ] }.to_h\n timestamps = update_timestamps(columns_to_update)\n upsert_options[:update] = update_clause.merge(timestamps) { |key, oldval, newval| oldval }\n end\n model_insert_clause = model_hash.merge(insert_timestamps) { |key, oldval, newval| oldval }\n\n inserted_id = model_class.dataset.insert_conflict(upsert_options).insert(model_insert_clause)\n # If model was not inserted, the above returns nil\n if inserted_id\n model.id = inserted_id\n end\n model.instance_variable_set(:@new, false)\n end",
"def raises_uniqueness_violation?(&block)\n transaction(:savepoint=>:only, &block)\n false\n rescue unique_constraint_violation_class => e\n e\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n execute(sql, name)\n id_value\n end",
"def multiple_value_sets_insert_sql(table_name, column_names, options) # :nodoc:\n \"INSERT #{options[:ignore] ? 'IGNORE ':''}INTO #{table_name} (#{column_names.join(',')}) VALUES \"\n end",
"def exec_insert(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"insert\", binds) { query(sql, binds) }\r\n end",
"def insert(*values)\n raise NotImplementedError, NOTIMPL_MSG\n end",
"def merge_if_exists!\n t = merge_if_exists || self\n t.save!\n end",
"def insert_into_sql(sql)\n sql << \" INTO \"\n if (f = @opts[:from]) && f.length == 1\n identifier_append(sql, server_version >= 90500 ? f.first : unaliased_identifier(f.first))\n else\n source_list_append(sql, f)\n end\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @logger.unknown(\"@iiTable=#{@iiTable}\") if @trace\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}\") if @trace\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n# rescue Exception => e\n# raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def insert(key, values, opt={})\n do_op(:insert, column_family, key, values, opt)\n end",
"def insert(key, values, opt={})\n do_op(:insert, column_family, key, values, opt)\n end",
"def postgresql_not_unique_error_class\n /(PG::UniqueViolation)|(ActiveRecord::RecordNotUnique)|(ActiveRecord::JDBCError)/\n end",
"def duplicate_key_update_error?(exception) # :nodoc:\n exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('duplicate key')\n end",
"def duplicate_key_update_error?(exception) # :nodoc:\n exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('duplicate key')\n end",
"def single_insert(table_name, hash)\n status = true\n begin\n columns = []\n values = []\n hash.keys.each do |item|\n columns.push(item)\n values.push(\"'#{hash[item]}'\")\n end\n columns = columns.join(\",\")\n values = values.join(\",\")\n @mysql_client.query(\"INSERT INTO #{table_name} (#{columns}) VALUES (#{values})\")\n rescue\n status = false\n end\n return status\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n sql = ' ON DUPLICATE KEY UPDATE '\n arg = args.first\n if arg.is_a?( Array )\n sql << sql_for_on_duplicate_key_update_as_array( table_name, arg )\n elsif arg.is_a?( Hash )\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, arg )\n elsif arg.is_a?( String )\n sql << arg\n else\n raise ArgumentError.new( \"Expected Array or Hash\" )\n end\n sql\n end",
"def prepared_insert(cols)\n cached_prepared_statement(:insert, prepared_columns(cols)){prepare_statement(dataset, :insert, prepared_statement_key_hash(cols))}\n end",
"def upsert(kind, item)\n end",
"def generate_pg_insert_query(table_name, keys, rows)\n \"INSERT INTO #{table_name}(#{keys.map { |i| \"\\\"#{i}\\\"\" }.join(',')}) VALUES(#{keys.map { |i| rows[i] == nil ? 'NULL' : \"'\" + pg_conn.escape_string(rows[i]) + \"'\" }.join(',')});\\n\"\n end",
"def insert_select(*values)\n unless @opts[:disable_insert_returning]\n ds = opts[:returning] ? self : returning\n ds.insert(*values){|r| return r}\n end\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def build_insert_sql(insert) # :nodoc:\n if insert.skip_duplicates? || insert.update_duplicates?\n raise NotImplementedError, \"#{self.class} should define `build_insert_sql` to implement adapter-specific logic for handling duplicates during INSERT\"\n end\n\n \"INSERT #{insert.into} #{insert.values_list}\"\n end",
"def run_insert\n raise DataFactory::NoInsertStatement unless @insert_statement\n\n stmt = db_interface.execute_sql(@insert_statement, *@binds)\n stmt.close\n end",
"def do_update(values)\n @query.on_conflict_sql(::MultiInsert::QueryBuilder.on_conflict_do_update(@column, values, @query.opts))\n end",
"def db_insert!( db_file )\n ####db = SQLite3::Database.new( db_file )\n ####db.transaction do |database|\n #### if not database.execute( db_fetch ).empty?\n #### raise(IndexError, \"Entry exists for #{filename}, #{@rmr_number}, #{@timestamp.to_s}... Skipping.\")\n #### end\n #### database.execute( db_insert )\n ####end\n ####db.close\n puts \"SQLite3\"\n end",
"def insert\n DATABASE.execute(\"INSERT INTO locations (city) VALUES ('#{@city}')\")\n @id = DATABASE.last_insert_row_id # will return the value of the row id\n end",
"def save()\n if(@slo_id) #if the row already exists\n update()\n else\n insert()\n end\n end"
] |
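Many of the negatives above assemble PostgreSQL's ON CONFLICT clause by hand. As a minimal sketch of what the options handled by the insert_conflict_sql entry above translate to, assuming a Sequel dataset connected to PostgreSQL 9.5+ (the users table and its columns are hypothetical):

DB[:users]
  .insert_conflict(
    target: :email,                                  # ON CONFLICT (email)
    update: {name: Sequel[:excluded][:name]},        # DO UPDATE SET name = excluded.name
    update_where: {Sequel[:users][:active] => true}  # ... WHERE users.active IS TRUE
  )
  .insert(email: 'a@example.com', name: 'A')

Leaving out :update produces DO NOTHING, and passing :constraint instead of :target emits ON CONFLICT ON CONSTRAINT. The sql_for_on_duplicate_key_update entries reach the same clause from activerecord-import-style arguments; on MySQL the equivalent clause is ON DUPLICATE KEY UPDATE.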
[
"0.70739776",
"0.6682397",
"0.66430146",
"0.6549153",
"0.61710525",
"0.61554676",
"0.60309786",
"0.5983611",
"0.59169257",
"0.58177334",
"0.5790062",
"0.5763213",
"0.5675745",
"0.5648549",
"0.56024617",
"0.55638194",
"0.5546072",
"0.5493775",
"0.54135245",
"0.53846943",
"0.53846943",
"0.5377916",
"0.5372538",
"0.5371362",
"0.53563106",
"0.53530985",
"0.5349955",
"0.5341008",
"0.5332472",
"0.53224",
"0.5311038",
"0.5303167",
"0.5302433",
"0.5297142",
"0.52923185",
"0.5275789",
"0.52669454",
"0.52331823",
"0.5192116",
"0.51906896",
"0.518779",
"0.5186771",
"0.5167183",
"0.5167183",
"0.5159521",
"0.5152691",
"0.5124914",
"0.5116549",
"0.5105324",
"0.5089455",
"0.5085651",
"0.50847954",
"0.5080969",
"0.50750494",
"0.5074355",
"0.50670797",
"0.50670797",
"0.5059572",
"0.50563955",
"0.50468886",
"0.5029599",
"0.50002587",
"0.49861917",
"0.49844638",
"0.4971919",
"0.49675828",
"0.49558294",
"0.49407852",
"0.4935795",
"0.4935795",
"0.4931107",
"0.49209994",
"0.49202344",
"0.4911946",
"0.4905681",
"0.49020597",
"0.48989713",
"0.48970777",
"0.4878288",
"0.48696056",
"0.48560265",
"0.48560265",
"0.4851576",
"0.48503754",
"0.48503754",
"0.48500475",
"0.48415563",
"0.4835108",
"0.48231316",
"0.48156732",
"0.48123947",
"0.48046067",
"0.48046067",
"0.48046067",
"0.48041287",
"0.48023292",
"0.48022008",
"0.47971237",
"0.47926125",
"0.4789057"
] |
0.58374584
|
9
|
PostgreSQL 9.3+ supports lateral subqueries
|
def supports_lateral_subqueries?
server_version >= 90300
end
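The server_version guard above gates Sequel's support for LATERAL, which lets a subquery in FROM reference columns from earlier FROM items; the Dataset#lateral method that appears among the negatives below sets the flag this check enables. A minimal sketch, assuming a Sequel DB connected to PostgreSQL 9.3+ and hypothetical albums/tracks tables:

# Correlated "top 3 tracks per album" subquery, marked LATERAL:
top_tracks = DB[:tracks]
  .where(album_id: Sequel[:albums][:id])  # references the outer albums table
  .order(Sequel.desc(:plays))
  .limit(3)
  .lateral

DB[:albums].cross_join(top_tracks)
# roughly: SELECT * FROM "albums" CROSS JOIN LATERAL
#   (SELECT * FROM "tracks" WHERE ("album_id" = "albums"."id")
#    ORDER BY "plays" DESC LIMIT 3) AS "t1"

On a server older than 9.3 the same SQL is a syntax error, hence the capability check.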
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def subquery\n subqueries.\n compact.\n inject(&:merge)\n end",
"def db_query_transform__subquery query, tmp_table=\"resultset_table\"\n \"(#{query}) as #{tmp_table}\"\n end",
"def pipe_cte_with!(subquery)\n return self unless subquery.try(:with_values?)\n\n # Add subquery CTE's to the parents query stack. (READ THE SPECIAL NOTE ABOVE!)\n if @scope.with_values?\n @scope.cte.pipe_cte_with!(subquery.cte)\n else\n # Top level has no with values\n @scope.with!(subquery.cte)\n end\n\n self\n end",
"def subquery_columns\n explicit_columns_in_subquery? ? explicit_columns : super\n end",
"def subquery_for(relation)\n operand = relation.operand\n subquery = dispatch(operand)\n if collapse_subquery_for?(relation)\n @from\n else\n aliased_subquery(subquery)\n end\n end",
"def inner_query\n self.class.\n select(\"#{SUBQUERY_TABLE_ALIAS}.*\").\n from(\"#{table_name} AS #{SUBQUERY_TABLE_ALIAS}\")\n end",
"def lazy_select\n lazify.call(S.select)\n end",
"def currval(seq)\n $new.select_one(\"SELECT CASE WHEN is_called THEN last_value ELSE last_value-increment_by END from #{seq}\")[0]\nend",
"def build_subselect(key, o)\n subselect = super\n\n # Materialize subquery by adding distinct\n # to work with MySQL 5.7.6 which sets optimizer_switch='derived_merge=on'\n unless has_limit_or_offset_or_orders?(subselect)\n core = subselect.cores.last\n core.set_quantifier = Arel::Nodes::Distinct.new\n end\n\n Nodes::SelectStatement.new.tap do |stmt|\n core = stmt.cores.last\n core.froms = Nodes::Grouping.new(subselect).as(\"__active_record_temp\")\n core.projections = [Arel.sql(quote_column_name(key.name))]\n end\n end",
"def best_rank_subquery(group_by)\n @source.respond_to?(:project) or raise ThroughHierarchySourceError, \"#{@source} cannot be converted into a subquery\"\n subq = source.\n project(foreign_type_column, foreign_key_column, group_by, best_rank).\n where(filters).\n group(source[group_by]).\n as(best_rank_table_name)\n\n spawn(subq)\n end",
"def ct_subquery_sql(options)\n # the source query contains a variable number of \"extra\" columns\n # ones needed in the output but not involved in pivoting\n source_sql = \"SELECT ARRAY[#{sql_row_name_columns.join(', ')}],\n #{sql_crosstab_columns.join(', ')}, year, gross_quantity\n FROM (#{subquery_sql(options)}) subquery\n ORDER BY 1, #{sql_crosstab_columns.length + 2}\" # order by row_name and year\n source_sql = ActiveRecord::Base.send(:sanitize_sql_array, [source_sql, years])\n source_sql = ActiveRecord::Base.connection.quote_string(source_sql)\n # the categories query returns values by which to pivot (years)\n categories_sql = 'SELECT * FROM UNNEST(ARRAY[?])'\n categories_sql = ActiveRecord::Base.send(:sanitize_sql_array, [categories_sql, years.map(&:to_i)])\n ct_columns = [\n 'row_name TEXT[]',\n report_crosstab_columns.map.each_with_index { |c, i| \"#{sql_crosstab_columns[i]} #{crosstab_columns[c][:pg_type]}\" },\n years_columns.map { |y| \"#{y} numeric\" }\n ].flatten.join(', ')\n # a set returning query requires that output columns are specified\n <<-SQL\n SELECT * FROM CROSSTAB('#{source_sql}', '#{categories_sql}')\n AS ct(#{ct_columns})\n SQL\n end",
"def build_subselect(key, o)\n stmt = Nodes::SelectStatement.new\n core = stmt.cores.first\n core.froms = o.relation\n core.wheres = o.wheres\n core.projections = [key]\n stmt.limit = o.limit\n stmt.offset = o.offset\n stmt.orders = []\n stmt\n end",
"def supports_cte_in_subqueries?\n supports_cte?\n end",
"def merge_by_values(relation, other)\n other.cte.with_values.each do |name, expression|\n relation = if other.cte.materialized_key?(name)\n relation.with!.materialized(name => expression)\n elsif other.cte.not_materialized_key?(name)\n relation.with!.not_materialized(name => expression)\n else\n relation.with!(name => expression)\n end\n end\n\n relation\n end",
"def with_query\n Arel::Nodes::As.new(recursive_table, union_term.arel)\n end",
"def build_subselect key, o\n stmt = Nodes::SelectStatement.new\n core = stmt.cores.first\n core.froms = o.relation\n core.wheres = o.wheres\n core.projections = [key]\n stmt.limit = o.limit\n stmt.orders = o.orders\n stmt\n end",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end",
"def execute_sql(my_sql)\n pg_result = ActiveRecord::Base.connection.execute(my_sql)\n\n # In this example we are just calling #to_a to convert the PG::Result to an\n # Array. PG::Result has a nice API for slicing and dicing itself so you may\n # want to to something clever instead. See\n # https://www.rubydoc.info/gems/pg/PG/Result for details.\n #\n # The important bit here is that we are copying all the data we care about\n # out of the PG::Result in preparation for later clearing the PG::Result\n results = pg_result.to_a\n\n # Calling #clear on the PG::Result is the important bit of cleanup and the\n # whole reason this method exists. See\n # https://www.rubydoc.info/gems/pg/PG/Result#clear-instance_method\n pg_result.clear\n\n yield results if block_given?\n\n results\nend",
"def relation_with_join_lateral(parent_column, parents)\n parents_for_lateral = parents.select(:id).to_sql\n\n lateral = filtered_events\n .limit(limit_for_join_lateral)\n .where(\"events.#{parent_column} = parents_for_lateral.id\") # rubocop:disable GitlabSecurity/SqlInjection\n .to_sql\n\n # The outer query does not need to re-apply the filters since the JOIN\n # LATERAL body already takes care of this.\n base_relation\n .from(\"(#{parents_for_lateral}) parents_for_lateral\")\n .joins(\"JOIN LATERAL (#{lateral}) AS #{Event.table_name} ON true\")\n end",
"def subquery_sql(options)\n gross_exports_query(options)\n end",
"def apply_correlated_subquery_limit_strategy(ds)\n table = ds.first_source_table\n table_alias = ds.first_source_alias\n primary_key = associated_class.primary_key\n key = self[:key]\n cs_alias = :t1\n cs = associated_dataset.\n from(Sequel.as(table, :t1)).\n select(*qualify(cs_alias, primary_key)).\n where(Array(qualify(cs_alias, key)).zip(Array(qualify(table_alias, key)))).\n limit(*limit_and_offset)\n ds.where(qualify(table_alias, primary_key)=>cs)\n end",
"def aliased_subquery(subquery)\n \"#{subquery.to_subquery} AS #{visit_identifier(subquery.name)}\"\n ensure\n reset_query_state\n end",
"def select(*) end",
"def subqueries\n [\n select_distinct_on,\n # default filters -- all scopes have them\n filter_by_subscription_or_topics,\n filter_by_start_date,\n filter_by_end_date,\n # grouping\n group_distinct_on,\n # ordering for GROUP BY\n order_distinct_on,\n ]\n end",
"def late_materialization(db)\n part_result_1 = []\n part_result_2 = []\n db[\"country\"][\"dic\"].each_with_index do |v, i|\n if v == \"GER\" \n db[\"country\"][\"av\"].each.with_index do |val, ind|\n if val == i then part_result_1.push(ind) end\n end\n break\n end\n end\n db[\"gender\"][\"dic\"].each_with_index do |v, i|\n if v == \"M\" \n db[\"gender\"][\"av\"].each.with_index do |val, ind|\n if val == i then part_result_2.push(ind) end\n end\n break\n end\n end \n # with this returned array we can materialize the\n # row the length of the array is the aggregation\n # in this case the materialiation is not neccessarry\n (part_result_1 & part_result_2)\nend",
"def test_002\n target_sql = \"select shipments.number as shipment_number,\nvariants.sku as sku,\nvariants.price as price,\nvariants.weight as weight,\nvariants.height as height,\nvariants.width as width,\nvariants.depth as length,\nproducts.description as product_description,\norders.number as order_number\nfrom orders\njoin shipments on (shipments.order_id = orders.id)\njoin line_items on (line_items.order_id = orders.id)\njoin variants on (line_items.variant_id = variants.id)\njoin products on (variants.product_id = products.id)\njoin state_events on (state_events.stateful_id = orders.id and state_events.name = 'payment' and state_events.stateful_type = 'Order' and state_events.next_state in ('paid','credit_owed'))\nwhere orders.state = 'complete' and orders.shipment_state = 'ready' and state_events.created_at >= '2012-10-01' and state_events.created_at <= '2015-03-08' and shipments.warehouse_id = 28\"\n \n @sql.select do\n shipments number: 'shipment_number'\n variants :sku, :price, :weight, :height, :width, depth: 'length'\n products description: 'product_description'\n orders number: 'order_number'\n end\n \n @sql.from :orders do\n join :shipments, on: 'shipments.order_id = orders.id'\n join :line_items, on: 'line_items.order_id = orders.id'\n join :variants, on: 'line_items.variant_id = variants.id'\n join :products, on: 'variants.product_id = products.id'\n join :state_events do\n state_events stateful_id: :'orders.id',\n name: 'payment',\n stateful_type: 'Order',\n next_state: %w(paid credit_owed)\n end\n end\n\n begin_date = '2012-10-01'\n end_date = '2015-03-08'\n warehouse_id = 28\n \n @sql.where do\n orders state: 'complete', shipment_state: 'ready'\n \n con '>=' do\n state_events created_at: begin_date\n end\n con '<=' do\n state_events created_at: end_date\n end\n \n shipments warehouse_id: warehouse_id\n end\n\n assert_equal @sql.to_s, target_sql\n end",
"def select_statement(query)\n model = query.model\n fields = query.fields\n conditions = query.conditions\n limit = query.limit\n offset = query.offset\n order = query.order\n group_by = nil\n\n # FIXME: using a boolean for qualify does not work in some cases,\n # such as when you have a self-referrential many to many association.\n # if you don't qualfiy the columns with a unique alias, then the\n # SQL query will fail. This may mean though, that it might not\n # be enough to pass in a Property, but we may need to know the\n # table and the alias we should use for the column.\n\n qualify = query.links.any?\n\n if query.unique?\n group_by = fields.select { |p| p.kind_of?(Property) }\n end\n\n # create subquery to find all valid keys and then use these keys to retrive all other columns\n use_subquery = qualify\n\n # when we can include ROWNUM condition in main WHERE clause\n use_simple_rownum_limit = limit && (offset||0 == 0) && group_by.blank? && order.blank?\n\n unless (limit && limit > 1) || offset > 0 || qualify\n # TODO: move this method to Query, so that it walks the conditions\n # and finds an OR operator\n\n # TODO: handle cases where two or more properties need to be\n # used together to be unique\n\n # if a unique property is used, and there is no OR operator, then an ORDER\n # and LIMIT are unecessary because it should only return a single row\n if conditions.kind_of?(Query::Conditions::AndOperation) &&\n conditions.any? { |operand| operand.kind_of?(Query::Conditions::EqualToComparison) && operand.subject.respond_to?(:unique?) && operand.subject.unique? } &&\n !conditions.any? { |operand| operand.kind_of?(Query::Conditions::OrOperation) }\n order = nil\n limit = nil\n end\n end\n\n conditions_statement, bind_values = conditions_statement(conditions, qualify)\n\n statement = \"SELECT #{columns_statement(fields, qualify)}\"\n if use_subquery\n statement << \" FROM #{quote_name(model.storage_name(name))}\"\n statement << \" WHERE (#{columns_statement(model.key, qualify)}) IN\"\n statement << \" (SELECT DISTINCT #{columns_statement(model.key, qualify)}\"\n end\n statement << \" FROM #{quote_name(model.storage_name(name))}\"\n statement << join_statement(query, qualify) if qualify\n statement << \" WHERE (#{conditions_statement})\" unless conditions_statement.blank?\n if use_subquery\n statement << \")\"\n end\n if use_simple_rownum_limit\n statement << \" AND rownum <= ?\"\n bind_values << limit\n end\n statement << \" GROUP BY #{columns_statement(group_by, qualify)}\" unless group_by.blank?\n statement << \" ORDER BY #{order_statement(order, qualify)}\" unless order.blank?\n\n add_limit_offset!(statement, limit, offset, bind_values) unless use_simple_rownum_limit\n\n return statement, bind_values\n end",
"def query(rows)\n join_rows = fetch_join_rows(rows)\n assoc_ids = join_rows.map { |row| row[1] }.compact.uniq\n yield assoc_ids.any? ? base_scope.where(@ref.association_primary_key => assoc_ids) : nil, join_rows\n end",
"def all(sql, *args, into: nil, &block)\n raise ArgumentError, \"all no longer support blocks, use each instead.\" if block\n\n rows, pg_source_oid, column_info = each_without_conversion(sql, *args, into: into)\n\n result = convert_rows_to_result rows, into: into, pg_source_oid: pg_source_oid\n\n # [TODO] - resolve associations. Note that this is only possible if the type\n # is not an Array (i.e. into is nil)\n\n result.pagination_scope = sql if sql.is_a?(::Simple::SQL::Connection::Scope) && sql.paginated?\n result.column_info = column_info\n result\n end",
"def true_eager_graph_limit_strategy\n r = super\n ds = associated_dataset\n if r == :ruby && ds.supports_limits_in_correlated_subqueries? && (Array(associated_class.primary_key).length == 1 || ds.supports_multiple_column_in?) && (!offset || ds.supports_offsets_in_correlated_subqueries?)\n :correlated_subquery\n else\n r\n end\n end",
"def select!\n # Ensure we can never be destructive by nilifying :update.\n Mao.sql(with_options(:update => nil).sql) do |pg_result|\n if @options[:join]\n other = Mao.query(@options[:join][0])\n pg_result.map {|result|\n Mao.normalize_join_result(result, self, other)\n }\n else\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n end\n end\n end",
"def select_books_titles_and_years_in_first_series_order_by_year\n \"SELECT title, year FROM books WHERE series_id = 1;\"\nend",
"def lateral\n with_opts(:lateral=>true)\n end",
"def select(&block); end",
"def select_with_sql_cte(sql, cte)\n super\n select_with_sql_cte_search_cycle(sql, cte)\n end",
"def call(*args, &block)\n tuples =\n if hooks?\n prepared =\n if curried?\n apply_hooks(before_hooks, *(curry_args + args))\n else\n apply_hooks(before_hooks, *args)\n end\n\n result = prepared ? execute(prepared, &block) : execute(&block)\n\n if curried?\n if !args.empty?\n apply_hooks(after_hooks, result, *args)\n elsif curry_args.size > 1\n apply_hooks(after_hooks, result, curry_args[1])\n else\n apply_hooks(after_hooks, result)\n end\n else\n apply_hooks(after_hooks, result, *args[1..args.size - 1])\n end\n else\n execute(*(curry_args + args), &block)\n end\n\n if one?\n tuples.first\n else\n tuples\n end\n end",
"def run_tuples(query, options = {})\n GRel::Debugger.debug \"QUERYING SELECT...\"\n GRel::Debugger.debug query\n GRel::Debugger.debug \"** LIMIT #{@last_query_context.limit}\" if @last_query_context.limit\n GRel::Debugger.debug \"** OFFSET #{@last_query_context.offset}\" if @last_query_context.offset\n GRel::Debugger.debug \"----------------------\"\n args = {}\n args[:accept] = options[:accept] if options[:accept]\n args[:offset] = @last_query_context.offset if @last_query_context.offset\n args[:limit] = @last_query_context.limit if @last_query_context.limit\n @connection.query(@db_name,query, args).body\n end",
"def select_with_sql_base\n opts[:with].any?{|w| w[:recursive]} ? \"WITH RECURSIVE \" : super\n end",
"def selects_all_bears_names_and_ages_that_are_alive_and_order_youngest_to_oldest\n 'SELECT bears.name, bears.age\n FROM bears WHERE alive = 1\n ORDER BY age'\nend",
"def tuples\n results = run_tuples(@last_query_context.to_sparql_select)\n results[\"results\"][\"bindings\"].map do |h|\n h.keys.each do |k|\n h[k.to_sym] = QL.from_tuple_binding(h[k])\n h.delete(k)\n end\n h\n end\n end",
"def select_name_and_series_subgenres_of_authors\n \"SELECT authors.name, subgenres.name\n FROM series\n INNER JOIN authors\n ON series.author_id = authors.id\n INNER JOIN subgenres\n ON series.subgenre_id = subgenres.id\"\nend",
"def collect_surrogates(items, last_id, surr)\n next_id= last_id + 1\n surr.surrogates.map do |attr,qin|\n plan = qin.plan\n cols = qin.column_structure\n side = qin.side_effects.plan\n surr = qin.surrogates\n\n colref = items.index(attr) + 1\n\n ser = SerializeRelation.new(\n side, plan,\n Iter.new(1), Pos.new(1), cols.items)\n qp = QueryPlan.new(\n ser, cols, next_id, nil, last_id, colref)\n qps = collect_surrogates(cols.items, next_id, surr)\n next_id += qps.flatten.size + 1\n [qp] + qps\n end\n end",
"def select!(&block); end",
"def selects_oldest_bear_and_returns_name_and_age\n 'SELECT bears.name, bears.age FROM bears ORDER BY age DESC LIMIT 1'\nend",
"def select_sql\n return super unless o = @opts[:offset]\n l = @opts[:limit]\n order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n irn = Sequel::SQL::Identifier.new(rn).qualify(dsa2)\n subselect_sql(unlimited.\n from_self(:alias=>dsa1).\n select_more(Sequel::SQL::QualifiedIdentifier.new(dsa1, WILDCARD),\n Sequel::SQL::WindowFunction.new(SQL::Function.new(:ROW_NUMBER), Sequel::SQL::Window.new(:order=>order)).as(rn)).\n from_self(:alias=>dsa2).\n select(Sequel::SQL::QualifiedIdentifier.new(dsa2, WILDCARD)).\n where(l ? ((irn > o) & (irn <= l + o)) : (irn > o))) # Leave off limit in case of limit(nil, offset)\n end",
"def build_count_subquery(relation, column_name, distinct)\n if column_name == :all\n column_alias = Arel.sql('Id')\n else\n column_alias = Arel.sql(column_name.to_s)\n end\n\n select_value = operation_over_aggregate_column(column_alias || Arel.sql('Id'), \"count\", false)\n\n relation.arel.projections.clear()\n relation.arel.project(select_value)\n relation.arel\n end",
"def summarize_per_relation(generator)\n @from = \"#{generator.to_subquery} AS #{visit_identifier(generator.name)} NATURAL LEFT JOIN #{@from}\"\n end",
"def select_sql\n return super unless l = @opts[:limit]\n o = @opts[:offset] || 0\n order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n irn = Sequel::SQL::Identifier.new(rn).qualify(dsa2)\n subselect_sql(unlimited.\n from_self(:alias=>dsa1).\n select_more(Sequel::SQL::QualifiedIdentifier.new(dsa1, WILDCARD),\n Sequel::SQL::WindowFunction.new(SQL::Function.new(:ROW_NUMBER), Sequel::SQL::Window.new(:order=>order)).as(rn)).\n from_self(:alias=>dsa2).\n select(Sequel::SQL::QualifiedIdentifier.new(dsa2, WILDCARD)).\n where((irn > o) & (irn <= l + o)))\n end",
"def m_ar_and_chain_1\n Rows.where(:x && :y)\n end",
"def unionise *sub_queries\n sub_queries_with_parens = sub_queries.map do |i| \n \"{ #{i} }\" \n end\n\n sub_queries_with_parens.join(' UNION ')\n end",
"def query_return_first_value(sql, *binds)\n mysql.fetch(sql, *binds).single_value\n end",
"def stars_working_with_ben_affleck\n MovieDatabase.execute(<<-SQL)\n SELECT\n movies.title, actors.name\n FROM\n movies\n JOIN actors ON castings.actor_id = actors.id\n JOIN castings ON castings.movie_id = movies.id\n WHERE\n castings.ord = 1\n AND \n movies.title IN\n (SELECT\n movies.title\n FROM\n movies\n JOIN actors ON castings.actor_id = actors.id\n JOIN castings ON castings.movie_id = movies.id\n WHERE\n actors.name = 'Ben Affleck'\n AND\n castings.ord != 1) \nSQL\nend",
"def monadic_expressions_over(operand)\n [operand] + \n MONADIC_OPERATORS\n .select { |op| op.applies_to?(operand)}\n .map { |op| begin\n MonadicExpression.new(op, operand) \n rescue Noop\n nil\n rescue RangeError\n nil\n end \n }\n .reject(&:nil?)\nend",
"def run_eager\n root_operation = query.selected_operation\n root_op_type = root_operation.operation_type || \"query\"\n root_type = schema.root_type_for_operation(root_op_type)\n st = get_current_runtime_state\n st.current_object = query.root_value\n st.current_result = @response\n runtime_object = root_type.wrap(query.root_value, context)\n runtime_object = schema.sync_lazy(runtime_object)\n\n if runtime_object.nil?\n # Root .authorized? returned false.\n @response = nil\n else\n call_method_on_directives(:resolve, runtime_object, root_operation.directives) do # execute query level directives\n gathered_selections = gather_selections(runtime_object, root_type, root_operation.selections)\n # This is kind of a hack -- `gathered_selections` is an Array if any of the selections\n # require isolation during execution (because of runtime directives). In that case,\n # make a new, isolated result hash for writing the result into. (That isolated response\n # is eventually merged back into the main response)\n #\n # Otherwise, `gathered_selections` is a hash of selections which can be\n # directly evaluated and the results can be written right into the main response hash.\n tap_or_each(gathered_selections) do |selections, is_selection_array|\n if is_selection_array\n selection_response = GraphQLResultHash.new(nil, nil, false)\n final_response = @response\n else\n selection_response = @response\n final_response = nil\n end\n\n @dataloader.append_job {\n st = get_current_runtime_state\n st.current_object = query.root_value\n st.current_result = selection_response\n # This is a less-frequent case; use a fast check since it's often not there.\n if (directives = selections[:graphql_directives])\n selections.delete(:graphql_directives)\n end\n call_method_on_directives(:resolve, runtime_object, directives) do\n evaluate_selections(\n runtime_object,\n root_type,\n root_op_type == \"mutation\",\n selections,\n selection_response,\n final_response,\n nil,\n )\n end\n }\n end\n end\n end\n delete_all_interpreter_context\n nil\n end",
"def select_statement(query)\n model = query.model\n fields = query.fields\n conditions = query.conditions\n limit = query.limit\n offset = query.offset\n order = query.order\n group_by = nil\n\n # FIXME: using a boolean for qualify does not work in some cases,\n # such as when you have a self-referrential many to many association.\n # if you don't qualfiy the columns with a unique alias, then the\n # SQL query will fail. This may mean though, that it might not\n # be enough to pass in a Property, but we may need to know the\n # table and the alias we should use for the column.\n\n qualify = query.links.any?\n\n if qualify || query.unique?\n group_by = fields.select { |property| property.kind_of?(Property) }\n end\n\n unless (limit && limit > 1) || offset > 0 || qualify\n # TODO: move this method to Query, so that it walks the conditions\n # and finds an OR operator\n\n # TODO: handle cases where two or more properties need to be\n # used together to be unique\n\n # if a unique property is used, and there is no OR operator, then an ORDER\n # and LIMIT are unecessary because it should only return a single row\n if conditions.kind_of?(Query::Conditions::AndOperation) &&\n conditions.any? { |operand| operand.kind_of?(Query::Conditions::EqualToComparison) && operand.subject.respond_to?(:unique?) && operand.subject.unique? } &&\n !conditions.any? { |operand| operand.kind_of?(Query::Conditions::OrOperation) }\n order = nil\n limit = nil\n end\n end\n\n conditions_statement, bind_values = conditions_statement(conditions, qualify)\n\n statement = \"SELECT #{columns_statement(fields, qualify)}\"\n statement << \" FROM #{quote_name(model.storage_name(name))}\"\n statement << join_statement(query, qualify) if qualify\n statement << \" WHERE #{conditions_statement}\" unless conditions_statement.blank?\n statement << \" GROUP BY #{columns_statement(group_by, qualify)}\" unless group_by.blank?\n statement << \" ORDER BY #{order_statement(order, qualify)}\" unless order.blank?\n\n if limit\n statement << ' LIMIT ?'\n bind_values << limit\n end\n\n if limit && offset > 0\n statement << ' OFFSET ?'\n bind_values << offset\n end\n\n return statement, bind_values\n end",
"def test_multi_results\n rows = @connection.select_rows(\"CALL ten();\")\n assert_equal 10, rows[0][0].to_i, \"ten() did not return 10 as expected: #{rows.inspect}\"\n\n assert @connection.active?, \"Bad connection use by '#{@connection.class}.select_rows'\"\n end",
"def sub_in(result)\n expression = result\n end",
"def query\n resource_id = @params.fetch(:resourceId)\n all_children_ids = db[:archival_object]\n .filter(:root_record_id => resource_id)\n .select(:id)\n db[:instance]\n .inner_join(:sub_container, :instance_id => :instance__id)\n .inner_join(:top_container_link_rlshp, :sub_container_id => :sub_container__id)\n .inner_join(:top_container, :id => :top_container_link_rlshp__top_container_id)\n .left_outer_join(:top_container_profile_rlshp, :top_container_id => :top_container__id)\n .left_outer_join(:container_profile, :id => :top_container_profile_rlshp__container_profile_id)\n .filter {\n Sequel.|({:instance__resource_id => resource_id},\n :instance__archival_object_id => all_children_ids)\n }\n .select(Sequel.as(Sequel.lit(\"CONCAT(COALESCE(container_profile.name, ''), ' ', top_container.indicator)\"), :container),\n Sequel.as(Sequel.lit(\"GetEnumValueUF(sub_container.type_2_id)\"), :container2Type),\n Sequel.as(:sub_container__indicator_2, :container2Indicator),\n Sequel.as(Sequel.lit(\"GetEnumValueUF(sub_container.type_3_id)\"), :container3Type),\n Sequel.as(:sub_container__indicator_3, :container3Indicator))\n .distinct\n end",
"def _reduce_2(val, _values, result)\n result = new_compstmt val[0]\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = new_compstmt val[0]\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = new_compstmt val[0]\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = new_compstmt val[0]\n \n result\nend",
"def query_return_first(sql, *binds)\n mysql.fetch(sql, *binds).first\n end",
"def select_prepared(plan_name, *args)\n # {{{\n args_string = ''\n if args.to_s != '' && args.length > 0 then \n args_string = \"(#{args.flatten.map { |a| \"'#{a}'\" }.join(',')})\" \n end\n query_string = \"EXECUTE #{plan_name.to_s} #{args_string}; \"\n return select_cached(query_string)\n end",
"def conjunct_select(arr, *procs)\n arr.select do |ele|\n procs.all?{ |proc| proc.call(ele)} \n end\nend",
"def select_books_titles_and_years_in_first_series_order_by_year\n \"select books.title, books.year from books join series on books.series_id = series.id where series_id = 1 order by books.year\"\nend",
"def m_ar_or_chain_1\n Rows.where(:x || :y)\n end",
"def fetch\n plans = @pgsql.exec(\n [\n 'SELECT plan.id, plan.completed, plan.schedule,',\n 'SUM(risk.probability * effect.impact) / COUNT(triple.id) AS rank',\n 'FROM plan',\n 'JOIN part ON part.id = plan.part',\n 'JOIN project ON part.project = project.id',\n 'JOIN triple ON cause = plan.part OR risk = plan.part OR effect = plan.part',\n 'JOIN risk ON triple.risk = risk.id',\n 'JOIN effect ON triple.effect = effect.id',\n 'LEFT JOIN task ON task.plan = plan.id',\n 'WHERE project.login = $1 AND task.id IS NULL',\n 'GROUP BY plan.id, plan.completed, plan.schedule'\n ],\n [@login]\n )\n plans.select { |p| deadline(Time.parse(p['completed']), p['schedule'].strip.downcase) < Time.now }\n .map { |p| p['id'].to_i }\n end",
"def to_sql\n \"\n SELECT row_to_json(fc)\n FROM ( SELECT 'FeatureCollection' AS type, array_to_json(array_agg(f)) AS features\n FROM ( SELECT 'Feature' AS type\n , ST_AsGeoJSON(subquery.geom)::json AS geometry\n , row_to_json(\n (SELECT l FROM (SELECT id, geoid) AS l)\n ) AS properties\n\n FROM (\n SELECT\n ct.id,\n ct.geom,\n ct.geoid,\n ST_Area(ST_SetSRID(geom,4326)) as d,\n ST_Area(\n ST_Intersection(\n ST_SetSRID( ST_GeomFromGeoJSON('#{ @geojson }'), 4326),\n ST_SetSRID(geom,4326)\n )\n ) as n\n FROM census_tracts_2010 AS ct\n WHERE\n ST_Intersects(\n ST_SetSRID(geom,4326),\n ST_SetSRID( ST_GeomFromGeoJSON('#{ @geojson }'), 4326)\n )\n ) subquery\n WHERE (n/d*100) >= 15\n\n\n ) AS f\n ) AS fc;\n \"\n end",
"def select_youngest_bear_and_returns_name_and_age\n 'SELECT min(bears.name),(bears.age) FROM bears ORDER BY age ASC LIMIT 1'\nend",
"def _reduce_1(val, _values, result)\n result = Expression.new(val[0]) \n result\nend",
"def explicit_columns_in_subquery?\n @scope.include?(Algebra::Projection) ||\n @scope.include?(Algebra::Rename) ||\n @scope.include?(Algebra::Summarization)\n end",
"def filter_by_associations_conditions_subquery_conditions(obj)\n key = qualify(associated_class.table_name, associated_class.primary_key)\n case obj\n when Array\n {key=>obj.map(&:pk)}\n when Sequel::Dataset\n {key=>obj.select(*Array(qualify(associated_class.table_name, associated_class.primary_key)))}\n else\n Array(key).zip(Array(obj.pk))\n end\n end",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n \n result\nend",
"def lateral\n cached_dataset(:_lateral_ds){clone(:lateral=>true)}\n end",
"def pg_gem_batch__from psql_db, db_queries\n psql_db = array__from psql_db\n db_queries = array__from db_queries\n pg_gem_conn = pg_gem_conn__from psql_db\n pg_connection = pg_gem_conn[5]\n batch = [pg_connection].product db_queries\n end",
"def select_name_and_series_subgenres_of_authors\n \"SELECT authors.name, subgenres.name FROM authors INNER JOIN series ON authors.id=series.author_id INNER JOIN subgenres ON series.subgenre_id = subgenres.id; \"\nend",
"def selects_all_female_bears_return_name_and_age\n <<-SQL \n SELECT \n bears.name, \n bears.age \n FROM bears \n WHERE sex='F';\n SQL\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n\n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n\n result\nend",
"def _reduce_2(val, _values, result)\n result = @builder.compstmt(val[0])\n\n result\nend",
"def select(sql)\n raise(ArgumentError, \"Bad sql parameter\") unless sql.kind_of?(String)\n\n client = ensure_connected\n\n Pod4.logger.debug(__FILE__){ \"select: #{sql}\" }\n query = client.execute(sql)\n\n rows = []\n query.each do |r| \n\n if block_given? \n rows << yield(r)\n else\n rows << r\n end\n\n end\n\n query.cancel \n rows\n\n rescue => e\n handle_error(e)\n end",
"def subquery_sql(options)\n net_imports_query(options)\n end",
"def _reduce_332(val, _values, result)\n result = new_call nil, [:lambda, []], []\n result << new_iter(val[0], val[1])\n \n result\nend",
"def pluck_one(sql, *bindvars)\n result = []\n exec(sql, *bindvars) { |row| result << row.first }\n result\n end",
"def _reduce_573(val, _values, result)\n # TODO? if (!($$ = gettable(p, $1, &@$))) $$ = NEW_BEGIN(0, &@$);\n var, = val\n\n result = var\n\n result\nend",
"def chain(next_link)\n if next_link.is_a? Maybe\n flatten_result(apply(next_link))\n else\n flatten_result(map(next_link))\n end\n end"
] |
[
"0.61836445",
"0.61836445",
"0.58440346",
"0.57117987",
"0.52055484",
"0.5106489",
"0.5051809",
"0.50212246",
"0.4995844",
"0.48434427",
"0.47571778",
"0.47279462",
"0.46696457",
"0.4661552",
"0.4650162",
"0.46348396",
"0.45884544",
"0.45826247",
"0.45632428",
"0.4534584",
"0.45032236",
"0.44775015",
"0.4463196",
"0.44253635",
"0.44126764",
"0.43732888",
"0.43726593",
"0.43674448",
"0.43476373",
"0.43165466",
"0.43161312",
"0.43041787",
"0.42886487",
"0.42831507",
"0.42781115",
"0.4273515",
"0.42724606",
"0.4263476",
"0.42608467",
"0.4254155",
"0.4245283",
"0.42365763",
"0.42317766",
"0.4229671",
"0.42182428",
"0.42107028",
"0.42102966",
"0.42080024",
"0.4198174",
"0.4196313",
"0.4195062",
"0.41946483",
"0.4194056",
"0.41819587",
"0.41552752",
"0.41468233",
"0.41423482",
"0.4139285",
"0.41329592",
"0.41300088",
"0.41178223",
"0.41178223",
"0.41178223",
"0.41178223",
"0.41140383",
"0.41114268",
"0.41114205",
"0.41076413",
"0.4106579",
"0.41060898",
"0.41050094",
"0.41018543",
"0.40934607",
"0.40866515",
"0.4085012",
"0.40801316",
"0.4079767",
"0.4079767",
"0.4079767",
"0.4079767",
"0.4079767",
"0.4079767",
"0.4079767",
"0.4079767",
"0.4079767",
"0.4079767",
"0.4079767",
"0.4071204",
"0.40704957",
"0.40694138",
"0.40637395",
"0.40631774",
"0.40631774",
"0.40631774",
"0.40551206",
"0.40535036",
"0.40505576",
"0.40440506",
"0.40436375",
"0.4037494"
] |
0.5263417
|
4
|
PostgreSQL supports modifying joined datasets
|
def supports_modifying_joins?
true
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def def_many_to_many(opts)\n super\n def_association_pks_getter(opts) do\n _join_table_dataset(opts).filter(opts[:left_key]=>send(opts[:left_primary_key])).select_map(opts[:right_key])\n end\n def_association_pks_setter(opts) do |pks|\n checked_transaction do\n ds = _join_table_dataset(opts).filter(opts[:left_key]=>send(opts[:left_primary_key]))\n ds.exclude(opts[:right_key]=>pks).delete\n pks -= ds.select_map(opts[:right_key])\n pks.each{|pk| ds.insert(opts[:left_key]=>send(opts[:left_primary_key]), opts[:right_key]=>pk)}\n end\n end\n end",
"def _join_table_dataset(opts)\n ds = model.db.from(opts.join_table_source)\n opts[:join_table_block] ? opts[:join_table_block].call(ds) : ds\n end",
"def update_from_sql(sql)\n join_from_sql(:FROM, sql)\n end",
"def update_from_sql(sql)\n join_from_sql(:FROM, sql)\n end",
"def join_from_sql(type, sql)\n if(from = @opts[:from][1..-1]).empty?\n raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join]\n else\n sql << SPACE << type.to_s << SPACE\n source_list_append(sql, from)\n select_join_sql(sql)\n end\n end",
"def join_from_sql(type, sql)\n if(from = @opts[:from][1..-1]).empty?\n raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join]\n else\n sql << ' ' << type.to_s << ' '\n source_list_append(sql, from)\n select_join_sql(sql)\n end\n end",
"def apply_dataset_changes(ds)\n ds = ds.with_extend(AssociationDatasetMethods).\n clone(:association_reflection => self)\n self[:extend].each{|m| ds = ds.with_extend(m)}\n ds = ds.select(*select) if select\n if c = self[:conditions]\n ds = (c.is_a?(Array) && !Sequel.condition_specifier?(c)) ? ds.where(*c) : ds.where(c)\n end\n ds = ds.order(*self[:order]) if self[:order]\n ds = ds.limit(*self[:limit]) if self[:limit]\n ds = ds.limit(1).skip_limit_check if limit_to_single_row?\n ds = ds.eager(self[:eager]) if self[:eager]\n ds = ds.distinct if self[:distinct]\n ds\n end",
"def add_piggy_back_sql_data!(reflection_name, prefix, table_alias, attributes, select, joins, conditions, join_type)\n ktn = table_name\n kpkey = primary_key\n reflection = reflections[reflection_name]\n atn = reflection.table_name\n attributes.each do |attr|\n if table_alias\n select << \", #{table_alias}.#{attr} AS #{prefix}_#{attr}\"\n else\n select << \", #{atn}.#{attr} AS #{prefix}_#{attr}\"\n end\n end\n fkey = reflection.primary_key_name\n fpkey = reflection.klass.primary_key\n\n case reflection.macro\n when :belongs_to\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fpkey}=#{ktn}.#{fkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fpkey}=#{ktn}.#{fkey} \"\n end\n when :has_one\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fkey}=#{ktn}.#{kpkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fkey}=#{ktn}.#{kpkey} \"\n end\n when :has_many\n raise \"piggy_back: aliasing not implemented for has_many\" if table_alias\n if reflection.options[:through]\n ttn = reflection.through_reflection.klass.table_name\n tkfkey = reflection.through_reflection.primary_key_name\n tafkey = reflection.source_reflection.primary_key_name\n\n through_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n source_conditions = reflection.through_reflection.options[:conditions] ?\n \" AND \" + reflection.through_reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{ttn} ON (#{ttn}.#{tkfkey}=#{ktn}.#{kpkey}#{through_conditions})\"\n joins << \" LEFT JOIN #{atn} ON (#{ttn}.#{tafkey}=#{atn}.#{fpkey}#{source_conditions}) \"\n else\n reflection_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{atn} ON (#{atn}.#{fkey}=#{ktn}.#{kpkey}#{reflection_conditions}) \"\n end\n else\n raise \"can't piggy back #{reflection.macro} on class #{klass}\"\n end\n end",
"def select!\n # Ensure we can never be destructive by nilifying :update.\n Mao.sql(with_options(:update => nil).sql) do |pg_result|\n if @options[:join]\n other = Mao.query(@options[:join][0])\n pg_result.map {|result|\n Mao.normalize_join_result(result, self, other)\n }\n else\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n end\n end\n end",
"def join_data_from_source(join_source, category, key_field)\n unless @data.member? category\n @data[category] = @data[join_source][category]\n\n else\n joined_data = {}\n @data[category].each {|i| joined_data[i[key_field]] = i}\n\n @data[join_source][category].each do |v|\n k = v[key_field]\n if joined_data.member?(k)\n joined_data[k].merge!(v)\n else\n joined_data[k] = v\n end\n end\n @data[category] = joined_data.values\n\n end\n @data[join_source].delete category\n end",
"def set_dataset_rel_and_attr\n @dataset.database = @database\n @dataset_id = @dataset.id\n organization = Organization.find_by_id(@organization_id)\n organization.datasets << @dataset\n @dataset.organization = organization\n end",
"def merge_by_values(relation, other)\n other.cte.with_values.each do |name, expression|\n relation = if other.cte.materialized_key?(name)\n relation.with!.materialized(name => expression)\n elsif other.cte.not_materialized_key?(name)\n relation.with!.not_materialized(name => expression)\n else\n relation.with!(name => expression)\n end\n end\n\n relation\n end",
"def replace_related_type(old, new)\n $db.execute \"UPDATE destination.series_relationships SET type = ? WHERE type = ?\", new, old;\nend",
"def join(secDataset,pair)\n #print \"\\n#{pair}\\n\"\n pair[0].downcase! unless @columns.include?(pair[0])\n pair[1].downcase! unless secDataset.columns.include?(pair[1])\n @data.each do |row|\n secDataset.each do |row2|\n #print \"\\nPorovnavam #{row[pair[0]]} s #{row2[pair[1]]}\\n\"\n if (row[pair[0]] == row2[pair[1]])\n row.merge!(row2)\n break\n end\n end\n #secDataset.find(pair[0])\n #TODO zoptimalizovat -> zlepsit slozitost z m*n!\n end\n sloupce = secDataset.columns\n sloupce.each do |sl|\n set_column(sl,nil)\n end\n\n end",
"def _associated_dataset\n associated_class.dataset.clone\n end",
"def populate_relation(args)\n\targs[:keys_B].each do |key_B|\n\t\tapi_url = \"#{@base_url}/#{args[:coll_A]}/#{args[:key_A]}/relation/\" +\n\t\t \"#{args[:relation]}/#{args[:coll_B]}/#{key_B}\"\n\t\tputs do_the_put_call({ url: api_url, user: @user, json: '{}' })\n\tend\nend",
"def join_rows(rows)\n return @join_rows if defined? @join_rows\n\n conn = @model.connection\n join_table = conn.quote_table_name @ref.join_table\n assoc_fkey = conn.quote_column_name @ref.association_foreign_key\n fkey = conn.quote_column_name @ref.foreign_key\n quoted_ids = rows.map { |r| conn.quote r.send @ref.active_record_primary_key }\n\n @join_rows = conn.\n exec_query(\"SELECT #{fkey}, #{assoc_fkey} FROM #{join_table} WHERE #{fkey} IN (#{quoted_ids.join ','})\").\n rows\n end",
"def read_join_relations(obj, res_row, row, join_relations)\n offset = obj.class.serializable_attributes.size\n\n for rel in join_relations\n rel_obj = rel[:target_class].og_allocate(res_row, row)\n rel_obj.og_read(res_row, row, offset)\n offset += rel_obj.class.serializable_attributes.size\n obj.instance_variable_set(\"@#{rel[:name]}\", rel_obj)\n end\n end",
"def reload_nested_set\n reload(\n :select => \"#{quoted_left_column_full_name}, #{quoted_right_column_full_name}, #{quoted_parent_column_full_name}\",\n :lock => true\n )\n end",
"def join_context(context_data, options = {})\n join_column = options[:join_column] || :branch_id\n\n versioned_table = opts[:last_joined_table] || opts[:from].first\n\n ds = join(context_data, { :branch_id => join_column }, options) do |j, lj|\n Sequel.expr(Sequel.qualify(j, :version) => nil) |\n (Sequel.qualify(lj, :version) <= Sequel.qualify(j, :version))\n end\n ds.opts[:versioned_table] = versioned_table\n #ds.opts[:last_record_id] = Sequel.qualify(versioned_table, :record_id)\n ds.opts[:order_columns] = (ds.opts[:order_columns] || []) +\n [Sequel.qualify(ds.opts[:last_joined_table], :depth),\n Sequel.qualify(versioned_table, :version).desc]\n ds\n end",
"def relation_method\n :join\n end",
"def update_sql(values = {}, opts = nil, &block)\n opts = opts ? @opts.merge(opts) : @opts\n\n if opts[:group]\n raise Error::InvalidOperation, \"A grouped dataset cannot be updated\"\n elsif (opts[:from].size > 1) or opts[:join]\n raise Error::InvalidOperation, \"A joined dataset cannot be updated\"\n end\n \n sql = \"UPDATE #{source_list(@opts[:from])} SET \"\n if block\n sql << block.to_sql(self, :comma_separated => true)\n else\n set = if values.is_a?(Hash)\n # get values from hash\n values = transform_save(values) if @transform\n values.map do |k, v|\n # convert string key into symbol\n k = k.to_sym if String === k\n \"#{literal(k)} = #{literal(v)}\"\n end.join(COMMA_SEPARATOR)\n else\n # copy values verbatim\n values\n end\n sql << set\n end\n if where = opts[:where]\n sql << \" WHERE #{literal(where)}\"\n end\n\n sql\n end",
"def update_graph\n connection.execute <<-EOS\n UPDATE #{oqgraph_table_name} \n SET origid = #{self.send(self.class.from_key)}, \n destid = #{self.send(self.class.to_key)}, \n weight = #{self.send(self.class.weight_column)} \n WHERE origid = #{self.send(self.class.from_key + '_was')} AND destid = #{self.send(self.class.to_key + '_was')};\n EOS\n end",
"def def_many_to_many(opts)\n one_through_one = opts[:type] == :one_through_one\n left = (opts[:left_key] ||= opts.default_left_key)\n lcks = opts[:left_keys] = Array(left)\n right = (opts[:right_key] ||= opts.default_right_key)\n rcks = opts[:right_keys] = Array(right)\n left_pk = (opts[:left_primary_key] ||= self.primary_key)\n opts[:eager_loader_key] = left_pk unless opts.has_key?(:eager_loader_key)\n lcpks = opts[:left_primary_keys] = Array(left_pk)\n lpkc = opts[:left_primary_key_column] ||= left_pk\n lpkcs = opts[:left_primary_key_columns] ||= Array(lpkc)\n raise(Error, \"mismatched number of left keys: #{lcks.inspect} vs #{lcpks.inspect}\") unless lcks.length == lcpks.length\n if opts[:right_primary_key]\n rcpks = Array(opts[:right_primary_key])\n raise(Error, \"mismatched number of right keys: #{rcks.inspect} vs #{rcpks.inspect}\") unless rcks.length == rcpks.length\n end\n opts[:uses_left_composite_keys] = lcks.length > 1\n opts[:uses_right_composite_keys] = rcks.length > 1\n opts[:cartesian_product_number] ||= one_through_one ? 0 : 1\n join_table = (opts[:join_table] ||= opts.default_join_table)\n opts[:left_key_alias] ||= opts.default_associated_key_alias\n opts[:graph_join_table_join_type] ||= opts[:graph_join_type]\n opts[:after_load].unshift(:array_uniq!) if opts[:uniq]\n opts[:dataset] ||= opts.association_dataset_proc\n opts[:eager_loader] ||= opts.method(:default_eager_loader)\n \n join_type = opts[:graph_join_type]\n select = opts[:graph_select]\n use_only_conditions = opts.include?(:graph_only_conditions)\n only_conditions = opts[:graph_only_conditions]\n conditions = opts[:graph_conditions]\n graph_block = opts[:graph_block]\n graph_jt_conds = opts[:graph_join_table_conditions] = opts.fetch(:graph_join_table_conditions, []).to_a\n use_jt_only_conditions = opts.include?(:graph_join_table_only_conditions)\n jt_only_conditions = opts[:graph_join_table_only_conditions]\n jt_join_type = opts[:graph_join_table_join_type]\n jt_graph_block = opts[:graph_join_table_block]\n opts[:eager_grapher] ||= proc do |eo|\n ds = eo[:self]\n egls = eo[:limit_strategy]\n if egls && egls != :ruby\n associated_key_array = opts.associated_key_array\n orig_egds = egds = eager_graph_dataset(opts, eo)\n egds = egds.\n inner_join(join_table, rcks.zip(opts.right_primary_keys) + graph_jt_conds, :qualify=>:deep).\n select_all(egds.first_source).\n select_append(*associated_key_array)\n egds = opts.apply_eager_graph_limit_strategy(egls, egds)\n ds.graph(egds, associated_key_array.map(&:alias).zip(lpkcs) + conditions, :qualify=>:deep, :table_alias=>eo[:table_alias], :implicit_qualifier=>eo[:implicit_qualifier], :join_type=>eo[:join_type]||join_type, :from_self_alias=>eo[:from_self_alias], :join_only=>eo[:join_only], :select=>select||orig_egds.columns, &graph_block)\n else\n ds = ds.graph(join_table, use_jt_only_conditions ? jt_only_conditions : lcks.zip(lpkcs) + graph_jt_conds, :select=>false, :table_alias=>ds.unused_table_alias(join_table, [eo[:table_alias]]), :join_type=>eo[:join_type]||jt_join_type, :join_only=>eo[:join_only], :implicit_qualifier=>eo[:implicit_qualifier], :qualify=>:deep, :from_self_alias=>eo[:from_self_alias], &jt_graph_block)\n ds.graph(eager_graph_dataset(opts, eo), use_only_conditions ? 
only_conditions : opts.right_primary_keys.zip(rcks) + conditions, :select=>select, :table_alias=>eo[:table_alias], :qualify=>:deep, :join_type=>eo[:join_type]||join_type, :join_only=>eo[:join_only], &graph_block)\n end\n end\n \n return if opts[:read_only]\n \n if one_through_one\n opts[:setter] ||= proc do |o|\n h = {}\n lh = lcks.zip(lcpks.map{|k| get_column_value(k)})\n jtds = _join_table_dataset(opts).where(lh)\n\n checked_transaction do\n current = jtds.first\n\n if o\n new_values = []\n rcks.zip(opts.right_primary_key_methods).each{|k, pk| new_values << (h[k] = o.get_column_value(pk))}\n end\n\n if current\n current_values = rcks.map{|k| current[k]}\n jtds = jtds.where(rcks.zip(current_values))\n if o\n if current_values != new_values\n jtds.update(h)\n end\n else\n jtds.delete\n end\n elsif o\n lh.each{|k,v| h[k] = v}\n jtds.insert(h)\n end\n end\n end\n opts[:_setter] = proc{|o| set_one_through_one_associated_object(opts, o)}\n else \n opts[:adder] ||= proc do |o|\n h = {}\n lcks.zip(lcpks).each{|k, pk| h[k] = get_column_value(pk)}\n rcks.zip(opts.right_primary_key_methods).each{|k, pk| h[k] = o.get_column_value(pk)}\n _join_table_dataset(opts).insert(h)\n end\n\n opts[:remover] ||= proc do |o|\n _join_table_dataset(opts).where(lcks.zip(lcpks.map{|k| get_column_value(k)}) + rcks.zip(opts.right_primary_key_methods.map{|k| o.get_column_value(k)})).delete\n end\n\n opts[:clearer] ||= proc do\n _join_table_dataset(opts).where(lcks.zip(lcpks.map{|k| get_column_value(k)})).delete\n end\n end\n end",
"def join_table(type, table, *args, &block)\n if table.is_a?(Class) && table < Sequel::Model\n Sequel::Deprecation.deprecate(\"Passing Sequel::Model class to a dataset join method\", \"Pass the model's table name or dataset as the first argument instead\")\n if table.dataset.simple_select_all?\n super(type, table.table_name, *args, &block)\n else\n super(type, table.dataset, *args, &block)\n end\n else\n super\n end\n end",
"def reload_nested_set\n reload(:select => \"#{quoted_left_column_name}, \" +\n \"#{quoted_right_column_name}, #{quoted_parent_column_name}\")\n end",
"def join_table_source\n cached_fetch(:join_table_source){split_join_table_alias[0]}\n end",
"def merge!(assoc_rows, rows)\n joins_by_id = join_rows(rows).reduce({}) { |a, join|\n id = join[0]\n a[id] ||= []\n a[id] << join[1]\n a\n }\n\n assoc_rows_by_id = assoc_rows.reduce({}) { |a, row|\n id = row.send @ref.association_primary_key\n a[id] = row\n a\n }\n\n rows.each do |row|\n id = row.send @ref.active_record_primary_key\n assoc_fkeys = (joins_by_id[id] || []).uniq\n associations = assoc_rows_by_id.values_at(*assoc_fkeys).compact.uniq\n row.send @assign, associations\n end\n end",
"def clean_join_table\n # these 2 lines run pretty much the same sql, self.parses adds a where in clause\n self.parsers = []\n # OR\n # ActiveRecord::Base.connection.execute(\"DELETE FROM parsers_transformations WHERE transformation_id = #{id}\")\n end",
"def wrap_dataset(dataset)\n if relation.is_a?(Relation::Composite)\n relation.new(dataset).to_a\n else\n dataset\n end\n end",
"def merge!(assoc_rows, rows, join_rows)\n joins_by_id = join_rows.reduce({}) { |a, join|\n id = join[0].to_s\n a[id] ||= []\n a[id] << join[1].to_s\n a\n }\n\n assoc_order_cache = {} # maintains the original order of assoc_rows\n assoc_rows_by_id = assoc_rows.each_with_index.reduce({}) { |a, (row, idx)|\n begin\n id = row.send(@ref.association_primary_key).to_s\n rescue NoMethodError => e\n raise MissingColumnError.new(row, e.name)\n end\n assoc_order_cache[id] = idx\n a[id] = row\n a\n }\n\n assign = \"#{name}=\"\n rows.each do |row|\n begin\n id = row.send(@ref.active_record_primary_key).to_s\n rescue NoMethodError => e\n raise MissingColumnError.new(row, e.name)\n end\n assoc_fkeys = (joins_by_id[id] || []).uniq.\n sort_by { |fkey| assoc_order_cache[fkey] || 0 }\n\n associations = assoc_rows_by_id.values_at(*assoc_fkeys).compact.uniq\n row.send assign, associations\n end\n end",
"def join(other, *exps, join_type: :inner)\n unless other.is_a?(Table)\n raise UserError, 'need other table as first argument to join'\n end\n unless JOIN_TYPES.include?(join_type)\n raise UserError, \"join_type may only be: #{JOIN_TYPES.join(', ')}\"\n end\n\n # These may be needed for outer joins.\n self_row_nils = headers.map { |h| [h, nil] }.to_h\n other_row_nils = other.headers.map { |h| [h, nil] }.to_h\n join_exp, other_common_heads =\n build_join_expression(exps, other, join_type)\n ev = Evaluator.new\n result = empty_dup\n other_rows = other.rows\n other_row_matches = Array.new(other_rows.size, false)\n rows.each do |self_row|\n self_row_matched = false\n other_rows.each_with_index do |other_row, k|\n # Same as other_row, but with keys that are common with self and equal\n # in value, removed, so the output table need not repeat them.\n locals = build_locals_hash(row_a: self_row, row_b: other_row)\n matches = ev.evaluate(join_exp, locals: locals)\n next unless matches\n\n self_row_matched = other_row_matches[k] = true\n out_row = build_out_row(row_a: self_row, row_b: other_row,\n common_heads: other_common_heads,\n type: join_type)\n result << out_row\n end\n next unless [:left, :full].include?(join_type)\n next if self_row_matched\n\n result << build_out_row(row_a: self_row,\n row_b: other_row_nils,\n type: join_type)\n end\n if [:right, :full].include?(join_type)\n other_rows.each_with_index do |other_row, k|\n next if other_row_matches[k]\n\n result << build_out_row(row_a: self_row_nils,\n row_b: other_row,\n type: join_type)\n end\n end\n result.normalize_boundaries\n result\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def join_people_by_relationship_type\n\n sql = <<EOF\nselect supers.id, supers.name, subs.id, subs.name\nfrom person_associations pa\ninner join people as supers\n on supers.id = pa.source_id\ninner join people as subs\n on subs.id = pa.sink_id\nand association_type = 'direct_reporting'\norder by supers.id\nEOF\n\n r = ActiveRecord::Base.connection.execute(sql)\n end",
"def test_join1\n\t\tr = prep(\"program foo;\\nfoo(A,B) :- bar(A,B);\\n\")\n\t\t\n\t\t# set up schema table's predicate\n\t\t## require 'ruby-debug'; debugger\n\t\tterm_schema = @terms.schema_of\n\t\tterm_pred = Predicate.new(false,@terms.name,@terms,term_schema.variables)\n\t\tterm_pred.set(r, \"global\", \"r3\", 1)\n\t\t\n\t\tsj = ScanJoin.new(r, term_pred, @preds.schema_of)\t\n\t\tts = TupleSet.new(\"pred\", *@preds.tuples)\n\t\tres = sj.evaluate(ts)\n\n\t\tassert_equal(2, res.tups.length)\n\tend",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n if ds.joined_dataset?\n # raise Error, \"Using a joined dataset as a model dataset is not support, use from_self on the dataset to wrap it in a subquery\" # SEQUEL5\n Sequel::Deprecation.deprecate(\"Using a joined dataset as a Sequel::Model dataset\", respond_to?(:cti_base_model) ? \"Use the class_table_inheritance plugin :alias option in #{cti_base_model.inspect}\" : \"Call from_self on the dataset to wrap it in a subquery\")\n end\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def join_writer\n \"@join_table.create(@source.id,@destination.id)\"\n end",
"def test_003\n\n target_sql = \"select d.id as id,\nd.taxnumber as taxnumber,\nd.social_security_type as social_security_type,\nd.taxnumber_exemption as taxnumber_exemption\nfrom distributors d\nleft join distributor_addons da on (d.id = da.distributor_id)\nwhere d.id = (11,12,13,14,15,16)\norder by d.id\"\n \n @sql.select do\n d :id, :taxnumber, :social_security_type, :taxnumber_exemption\n end\n\n @sql.from(distributors: 'd') do\n left_join distributor_addons: 'da', on: 'd.id = da.distributor_id'\n end\n\n dist_ids = [11,12,13,14,15,16]\n \n @sql.where do\n d id: dist_ids\n end\n\n @sql.order('d.id')\n\n assert_equal @sql.to_s, target_sql\n end",
"def merge_into(target)\n target_id = target.id\n # Find all the Entries attached to this name, that will need to be\n # reindexed after the merge\n entry_ids = entry_ids_to_index_on_update\n\n ids = EntryArtist.where(artist_id: self.id).pluck(:id)\n EntryArtist.where(artist_id: self.id).update_all({ artist_id: target_id })\n EntryArtist.where( id: ids ).each(&:update_bunny)\n\n ids = EntryAuthor.where(author_id: self.id).pluck(:id)\n EntryAuthor.where(author_id: self.id).update_all({ author_id: target_id })\n EntryAuthor.where( id: ids ).each(&:update_bunny)\n\n ids = EntryScribe.where(scribe_id: self.id).pluck(:id)\n EntryScribe.where(scribe_id: self.id).update_all({ scribe_id: target_id })\n EntryScribe.where( id: ids ).each(&:update_bunny)\n\n ids = SaleAgent.where(agent_id: self.id).pluck(:id)\n SaleAgent.where(agent_id: self.id).update_all({ agent_id: target_id })\n SaleAgent.where( id: ids ).each(&:update_bunny)\n\n ids = SourceAgent.where(agent_id: self.id).pluck(:id)\n SourceAgent.where(agent_id: self.id).update_all({ agent_id: target_id })\n SourceAgent.where( id: ids ).each(&:update_bunny)\n\n ids = Provenance.where(provenance_agent_id: self.id).pluck(:id)\n Provenance.where(provenance_agent_id: self.id).update_all({ provenance_agent_id: target_id })\n Provenance.where( id: ids ).each(&:update_bunny)\n\n ids = DericciLink.where(name_id: self.id).pluck(:id)\n DericciLink.where(name_id: self.id).update_all({ name_id: target_id })\n DericciLink.where( id: ids ).each(&:update_bunny)\n\n ids = DericciRecord.where(verified_id: self.id).pluck(:id)\n DericciRecord.where(verified_id: self.id).update_all({verified_id: target_id})\n DericciRecord.where( id: ids ).each(&:update_bunny)\n\n # update flags on the target\n target.is_artist ||= self.is_artist\n target.is_author ||= self.is_author\n target.is_scribe ||= self.is_scribe\n target.is_provenance_agent ||= self.is_provenance_agent\n\n target.save\n\n # but ... CAN't SAVE when name is BLANK (nil)\n # self.name = nil\n self.viaf_id = nil\n self.deleted = true\n self.save!\n\n # slice into managable chunks to avoid running out of space in mysql\n entry_ids.each_slice(200) do |slice|\n SDBMSS::IndexJob.perform_later(Entry.to_s, slice)\n end\n\n Name.update_counters(target.id,\n :authors_count => target.author_entries.where(deprecated: false, draft: false).count - target.authors_count,\n :artists_count => target.artist_entries.where(deprecated: false, draft: false).count - target.artists_count,\n :scribes_count => target.scribe_entries.where(deprecated: false, draft: false).count - target.scribes_count,\n :sale_agents_count => target.sale_entries.where(deprecated: false, draft: false).count - target.sale_agents_count,\n :provenance_count => target.provenance_entries.where(deprecated: false, draft: false).count - target.provenance_count,\n :source_agents_count => target.agent_sources.count - target.source_agents_count\n )\n end",
"def join_tables(db)\n db.execute(\"SELECT users.user_name, platform.platform_name, music.artist, music.song, music.explicit_lyrics FROM music JOIN users ON music.user_id = users.id JOIN platform ON music.platform_id = platform.id\")\nend",
"def enhance_batch!(conn, record_batch)\n return record_batch if record_batch.nil? || record_batch.empty?\n\n copy_info = get_joined_table(\n conn, record_batch,\n :table_name => \"copy\",\n :column_map => settings['horizon.copy_map'],\n :join_clause => settings['horizon.copy_join_clause'],\n :public_only => (settings['horizon.public_only'].to_s == \"true\")\n ) if %w{all copies direct}.include? settings['horizon.include_holdings'].to_s\n\n\n\n item_info = get_joined_table(\n conn, record_batch,\n :table_name => \"item\",\n :column_map => settings['horizon.item_map'],\n :join_clause => settings['horizon.item_join_clause'],\n :public_only => (settings['horizon.public_only'].to_s == \"true\")\n ) if %w{all items direct}.include? settings['horizon.include_holdings'].to_s\n\n\n\n if item_info || copy_info\n record_batch.each do |record|\n id = record['001'].value.to_s\n record_copy_info = copy_info && copy_info[id]\n record_item_info = item_info && item_info[id]\n\n record_copy_info.each do |copy_row|\n field = MARC::DataField.new( settings[\"horizon.copy_tag\"] )\n copy_row.each_pair do |subfield, value|\n field.append MARC::Subfield.new(subfield, value)\n end\n record.append field\n end if record_copy_info\n\n record_item_info.each do |item_row|\n field = MARC::DataField.new( settings[\"horizon.item_tag\"] )\n item_row.each_pair do |subfield, value|\n field.append MARC::Subfield.new(subfield, value)\n end\n record.append field\n end if record_item_info && ((settings['horizon.include_holdings'].to_s != \"direct\") || record_copy_info.empty?)\n end\n end\n\n return record_batch\n end",
"def auto_assoc(opts = {})\n except = opts[:except] || []\n\n assocs = db.schema_parse_associations(table_name)\n relations = process_join_tables(assocs)\n\n relations.each do |row|\n src_tbl = row[:src_tbl]\n src_col = row[:src_col]\n if src_tbl == table_name && ! (src_col & except).empty?\n # TODO enable except for *_to_many\n next\n end\n src_uniq = row[:src_uniq]\n src_cardinality = cardinality(src_uniq)\n\n join_tbl = row[:join_tbl]\n\n dst_tbl = row[:dst_tbl]\n dst_col = row[:dst_col]\n dst_uniq = row[:dst_uniq]\n dst_cardinality = cardinality(dst_uniq)\n\n TABLE_MODELS.wait_all(src_tbl, dst_tbl) do |src_cls, dst_cls|\n self_ref = src_cls == dst_cls\n\n src = self_ref ? :child : underscore(src_cls.name).to_sym\n src = src_uniq ? singularize(src).to_sym : pluralize(src).to_sym\n\n dst = self_ref ? :parent : underscore(dst_cls.name).to_sym\n dst = dst_uniq ? singularize(dst).to_sym : pluralize(dst).to_sym\n\n if join_tbl\n left_col = row[:left_col]\n right_col = row[:right_col]\n send :many_to_many, src, :class => src_cls, :join_table => join_tbl,\n :left_key => left_col, :left_primary_key => dst_col,\n :right_key => right_col, :right_primary_key => src_col\n else\n # TODO name overrides\n\n if self == dst_cls\n # dst holds the foreign key -> one_to_*\n meth = dst_cardinality + '_to_' + src_cardinality\n send meth, src, :class => src_cls, :key => src_col, :primary_key => dst_col\n end\n\n if self == src_cls\n # src holds the foreign key -> *_to_one\n meth = src_cardinality + '_to_' + dst_cardinality\n\n # one_to_one requires to swap pk and fk\n src_col, dst_col = dst_col, src_col if src_uniq\n send meth, dst, :class => dst_cls, :key => src_col, :primary_key => dst_col\n end\n end\n\n end\n end\n end",
"def associated_dataset\n cached_fetch(:_dataset){apply_dataset_changes(_associated_dataset)}\n end",
"def many!(assoc_rows, target_attr, assoc_attr)\n begin\n assoc_rows_by_attr = assoc_rows.group_by(&assoc_attr.to_sym)\n rescue NoMethodError => e\n raise MissingColumnError.new(assoc_rows[0], e.name)\n end\n\n target_rows.each do |row|\n begin\n pkey = row.send target_attr\n rescue NoMethodError => e\n raise MissingColumnError.new(row, e.name)\n end\n row.send @assign, assoc_rows_by_attr[pkey] || []\n end\n end",
"def cross_join(other)\n join(other, join_type: :cross)\n end",
"def relation_by_sql_form\n # Nothing to do here\n end",
"def merge!(assoc_rows, rows)\n Merge.new(rows, name).\n many!(assoc_rows, @ref.active_record_primary_key, @ref.foreign_key)\n end",
"def def_one_to_many(opts)\n super\n return if opts[:type] == :one_to_one\n def_association_pks_getter(opts) do\n send(opts.dataset_method).select_map(opts[:primary_key])\n end\n def_association_pks_setter(opts) do |pks|\n checked_transaction do\n ds = send(opts.dataset_method)\n ds.unfiltered.filter(opts[:primary_key]=>pks).update(opts[:key]=>pk)\n ds.exclude(opts[:primary_key]=>pks).update(opts[:key]=>nil)\n end\n end\n end",
"def join(p1, p2, set_first)\n p1.add(p2, set_first)\n p2.add(p1)\n end",
"def modify_datasource(datasources)\n end",
"def map_association(target, name, rows, session)\n singular_name = name.to_s.singularize.to_sym\n entity_mapping = @mapping.get(singular_name)\n\n if target.is_a? Array\n relations = {}\n target_name = target[0].class.name.underscore\n back_relations = {}\n\n if rows.length > 0\n target_id_name = \"#{target_name.underscore}_id\"\n target_reference = entity_mapping.references[target_name.to_sym]\n\n rows.each do |row|\n relation = map(entity_mapping.type, row, session, singular_name.to_s)\n target_id = row[target_id_name.to_sym]\n\n if target_id.nil?\n raise MapperError, \"Field \\\"#{target_id_name}\\\" is not defined in the query but it's required to construct \\\"#{name} to #{target_name}\\\" association. Just add it to SELECT clause.\"\n end\n\n relations[target_id] ||= []\n relations[target_id] << relation\n back_relations[relation.id] = target.select{|t| t.id == target_id}\n end\n\n target.each do |entry|\n target_id = entry.id\n relation_objects = relations[target_id]\n\n if relation_objects\n if target_reference\n relation_objects.each {|obj| obj.set_reference!(target_reference.name, entry)}\n end\n\n # set forward collection relation\n entry.set_collection!(name, relations[target_id])\n\n # set reverse collection relation if it's present\n if entity_mapping.collections[target_name.pluralize.to_sym]\n relation_objects.each{|obj| obj.set_collection!(target_name.pluralize.to_sym, back_relations[obj.id])}\n end\n end\n end\n end\n\n ids = []\n relations.values.each do |associations|\n ids << associations.map{|a| a.id}\n end\n\n {result: relations, ids: ids.flatten.uniq}\n else\n result = rows.map do |row|\n map(entity_mapping.type, row, session, singular_name.to_s)\n end\n\n target.set_collection!(name, result)\n\n {result: result, ids: result.map {|i| i.id}}\n end\n\n end",
"def add_join(join)\n @clause[:final_join] = join.implicit_joins\n end",
"def link!\n base = ::ActiveRecord::Associations::ClassMethods::JoinDependency.new(\n @model, [], nil\n )\n \n @fields.each { |field|\n field.model ||= @model\n field.columns.each { |col|\n field.associations[col] = associations(col.__stack.clone)\n field.associations[col].each { |assoc| assoc.join_to(base) }\n }\n }\n \n @attributes.each { |attribute|\n attribute.model ||= @model\n attribute.columns.each { |col|\n attribute.associations[col] = associations(col.__stack.clone)\n attribute.associations[col].each { |assoc| assoc.join_to(base) }\n }\n }\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n ds = ds.from_self(:alias=>ds.first_source) if ds.joined_dataset?\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def save\n unless @added.empty? && @deleted.empty?\n # We cannot reuse the allocated space, since the data\n # that is copied would be destroyed.\n if polymorphic?\n offset = @database.allocate_polymorphic_join_elements(@size)\n else\n offset = @database.allocate_join_elements(@size)\n end\n pairs =\n @size.times.map do |index|\n rod_id = id_for(index)\n if rod_id.is_a?(Model)\n object = rod_id\n if object.new?\n if polymorphic?\n object.reference_updaters <<\n ReferenceUpdater.for_plural(self,index,@database)\n else\n object.reference_updaters <<\n ReferenceUpdater.for_plural(self,index,@database)\n end\n next\n else\n rod_id = object.rod_id\n end\n end\n [rod_id,index]\n end.compact\n if polymorphic?\n pairs.each do |rod_id,index|\n class_id = (rod_id == 0 ? 0 : class_for(index).name_hash)\n @database.set_polymorphic_join_element_id(offset,index,rod_id,class_id)\n end\n else\n pairs.each do |rod_id,index|\n @database.set_join_element_id(offset,index,rod_id)\n end\n end\n @offset = offset\n @added.clear\n @deleted.clear\n @map.clear\n @original_size = @size\n end\n @offset\n end",
"def split_join_table_alias\n associated_class.dataset.split_alias(self[:join_table])\n end",
"def prepare_hgt_com_trsf_prkgrs_old()\n \n @conn.execute \\\n \"truncate table hgt_com_trsf_prkgrs\"\n \n puts \"hgt_com_trsf_prkgrs table truncated...\"\n \n #\n sql = \"select id,\n gene_id,\n TXSRC_ID,\n TXDST_ID,\n WEIGHT_TR_TX\n from HGT_COM_TRSF_TAXONS\"\n \n #puts \"sql: #{sql}\"\n \n \n tr_taxons = HgtComTrsfTaxon.find_by_sql(sql)\n \n tr_taxons.each {|tr|\n \n\n #debugging\n #next unless tr.gene_id == 111 and tr.txsrc_id == 768679 and tr.txdst_id == 374847\n \n #puts \"tr: #{tr.inspect}\"\n #puts \"tr.id: #{tr.id}, #{tr.gene_id}\"\n \n #for each chiteria\n (0..1).each {|crit|\n \n #for each criteria and\n #for each source and destination prok groups\n sql = \"select tg.PROK_GROUP_ID,\n tg.WEIGHT_PG\n from TAXON_GROUPS tg \n join PROK_GROUPS pg on pg.id = tg.PROK_GROUP_ID\n where tg.TAXON_ID = #{tr.txsrc_id} and\n pg.GROUP_CRITER_ID = #{crit}\"\n #puts \"sql: \\n #{sql}\"\n \n pg_src = TaxonGroup.find_by_sql(sql)\n \n \n sql = \"select tg.PROK_GROUP_ID,\n tg.WEIGHT_PG\n from TAXON_GROUPS tg \n join PROK_GROUPS pg on pg.id = tg.PROK_GROUP_ID\n where tg.TAXON_ID = #{tr.txdst_id} and\n pg.GROUP_CRITER_ID = #{crit}\"\n #puts \"sql: \\n #{sql}\"\n \n pg_dst = TaxonGroup.find_by_sql(sql)\n \n pg_src.each {|src|\n pg_dst.each {|dst|\n \n #puts \"src: #{src.inspect}\"\n #puts \"dst: #{dst.inspect}\"\n \n #insert alternative\n prkg = HgtComTrsfPrkgr.new \n prkg.gene_id = tr.gene_id\n prkg.hgt_com_trsf_taxon_id = tr.id\n prkg.pgsrc_id = src.prok_group_id\n prkg.pgdst_id = dst.prok_group_id\n prkg.weight_tr_pg = tr.weight_tr_tx * src.weight_pg * dst.weight_pg\n prkg.save\n \n #prkg.gene_id = tr.gene_id \n #prkg.save\n \n \n }\n }\n \n \n \n \n \n } \n }\n \n \n end",
"def compound_dataset_sql_append(sql, ds)\n sql << '('\n super\n sql << ')'\n end",
"def reflection_merge(reflection, from, to)\n foreign_key = reflection.options[:foreign_key] || table_name.classify.foreign_key\n sql = case reflection.macro\n when :has_one, :has_many\n \"UPDATE #{reflection.klass.table_name} SET #{foreign_key} = #{to} WHERE #{foreign_key} = #{from}\\n\" \n when :has_and_belongs_to_many\n join_table = reflection.options[:join_table] || ( table_name < reflection.klass.table_name ? '#{table_name}_#{reflection.klass.table_name}' : '#{reflection.klass.table_name}_#{table_name}')\n \"UPDATE #{join_table} SET #{foreign_key} = #{to} WHERE #{foreign_key} = #{from}\\n\" \n else return\n end\n connection.update(sql)\n end",
"def sample\r\n\t#add 2 test authors\r\n\tlib.execute(\"INSERT INTO authors (l_name, f_name) VALUES ('Robinson', 'Kit')\")\r\n\tlib.execute(\"INSERT INTO authors (l_name, f_name) VALUES ('Lancaster', 'Blythe')\")\r\n\t#add 3 test books\r\n\tlib.execute(\"INSERT INTO books (title, author_id, section, on_shelf) VALUES ('Goodmight Noon', 1, 'Childrens', 'true')\")\r\n\tlib.execute(\"INSERT INTO books (title, author_id, section, on_shelf) VALUES ('Leo the Lapp', 1, 'Childrens', 'true')\")\r\n\tlib.execute(\"INSERT INTO books (title, author_id, section, on_shelf) VALUES ('The Hermaneutical Bealgeshichte', 2, 'Philosophy', 'true')\")\r\n\tputs \"added the minimal sample:\"\r\n\t#output the joined list\r\n\tjoinlist = lib.execute(\"SELECT * FROM books JOIN authors ON books.author_id = authors.id\")\r\n\tjoinlist.each do |book|\r\n\t\tputs \"#{book['title']} by #{book['l_name']} is a #{book['section']} book.\"\r\n\tend\r\nend",
"def _association_join(type, associations)\n clone(:join=>clone(:graph_from_self=>false).eager_graph_with_options(associations, :join_type=>type, :join_only=>true).opts[:join])\n end",
"def ungraphed\n ds = super.clone(:eager_graph=>nil)\n if (eg = @opts[:eager_graph]) && (rp = eg[:row_proc])\n ds = ds.with_row_proc(rp)\n end\n ds\n end",
"def subselect_sql_append(sql, ds)\n ds.clone(:append_sql=>sql, :prepared_args=>prepared_args, :bind_vars=>@opts[:bind_vars]).\n send(:to_prepared_statement, :select, nil, :extend=>prepared_statement_modules).\n prepared_sql\n end",
"def def_one_to_many(opts)\n one_to_one = opts[:type] == :one_to_one\n name = opts[:name]\n key = (opts[:key] ||= opts.default_key)\n km = opts[:key_method] ||= opts[:key]\n cks = opts[:keys] = Array(key)\n opts[:key_methods] = Array(opts[:key_method])\n primary_key = (opts[:primary_key] ||= self.primary_key)\n opts[:eager_loader_key] = primary_key unless opts.has_key?(:eager_loader_key)\n cpks = opts[:primary_keys] = Array(primary_key)\n pkc = opts[:primary_key_column] ||= primary_key\n pkcs = opts[:primary_key_columns] ||= Array(pkc)\n raise(Error, \"mismatched number of keys: #{cks.inspect} vs #{cpks.inspect}\") unless cks.length == cpks.length\n uses_cks = opts[:uses_composite_keys] = cks.length > 1\n opts[:dataset] ||= opts.association_dataset_proc\n opts[:eager_loader] ||= proc do |eo|\n h = eo[:id_map]\n reciprocal = opts.reciprocal\n assign_singular = opts.assign_singular?\n delete_rn = opts.delete_row_number_column\n\n eager_load_results(opts, eo) do |assoc_record|\n assoc_record.values.delete(delete_rn) if delete_rn\n hash_key = uses_cks ? km.map{|k| assoc_record.get_column_value(k)} : assoc_record.get_column_value(km)\n next unless objects = h[hash_key]\n if assign_singular\n objects.each do |object| \n unless object.associations[name]\n object.associations[name] = assoc_record\n assoc_record.associations[reciprocal] = object if reciprocal\n end\n end\n else\n objects.each do |object| \n object.associations[name].push(assoc_record)\n assoc_record.associations[reciprocal] = object if reciprocal\n end\n end\n end\n end\n \n join_type = opts[:graph_join_type]\n select = opts[:graph_select]\n use_only_conditions = opts.include?(:graph_only_conditions)\n only_conditions = opts[:graph_only_conditions]\n conditions = opts[:graph_conditions]\n opts[:cartesian_product_number] ||= one_to_one ? 0 : 1\n graph_block = opts[:graph_block]\n opts[:eager_grapher] ||= proc do |eo|\n ds = eo[:self]\n ds = ds.graph(opts.apply_eager_graph_limit_strategy(eo[:limit_strategy], eager_graph_dataset(opts, eo)), use_only_conditions ? 
only_conditions : cks.zip(pkcs) + conditions, Hash[eo].merge!(:select=>select, :join_type=>eo[:join_type]||join_type, :qualify=>:deep, :from_self_alias=>eo[:from_self_alias]), &graph_block)\n # We only load reciprocals for one_to_many associations, as other reciprocals don't make sense\n ds.opts[:eager_graph][:reciprocals][eo[:table_alias]] = opts.reciprocal\n ds\n end\n \n return if opts[:read_only]\n\n save_opts = {:validate=>opts[:validate]}\n ck_nil_hash ={}\n cks.each{|k| ck_nil_hash[k] = nil}\n\n if one_to_one\n opts[:setter] ||= proc do |o|\n up_ds = _apply_association_options(opts, opts.associated_dataset.where(cks.zip(cpks.map{|k| get_column_value(k)})))\n if o\n up_ds = up_ds.exclude(o.pk_hash) unless o.new?\n cks.zip(cpks).each{|k, pk| o.set_column_value(:\"#{k}=\", get_column_value(pk))}\n end\n checked_transaction do\n up_ds.skip_limit_check.update(ck_nil_hash)\n o.save(save_opts) || raise(Sequel::Error, \"invalid associated object, cannot save\") if o\n end\n end\n opts[:_setter] = proc{|o| set_one_to_one_associated_object(opts, o)}\n else \n save_opts[:raise_on_failure] = opts[:raise_on_save_failure] != false\n\n opts[:adder] ||= proc do |o|\n cks.zip(cpks).each{|k, pk| o.set_column_value(:\"#{k}=\", get_column_value(pk))}\n o.save(save_opts)\n end\n \n opts[:remover] ||= proc do |o|\n cks.each{|k| o.set_column_value(:\"#{k}=\", nil)}\n o.save(save_opts)\n end\n\n opts[:clearer] ||= proc do\n _apply_association_options(opts, opts.associated_dataset.where(cks.zip(cpks.map{|k| get_column_value(k)}))).update(ck_nil_hash)\n end\n end\n end",
"def select_name_and_series_subgenres_of_authors\n \"SELECT authors.name, subgenres.name\n FROM series\n INNER JOIN authors\n ON series.author_id = authors.id\n INNER JOIN subgenres\n ON series.subgenre_id = subgenres.id\"\nend",
"def __foreign_key_list_ds(reverse)\n if reverse\n ctable = Sequel[:att2]\n cclass = Sequel[:cl2]\n rtable = Sequel[:att]\n rclass = Sequel[:cl]\n else\n ctable = Sequel[:att]\n cclass = Sequel[:cl]\n rtable = Sequel[:att2]\n rclass = Sequel[:cl2]\n end\n\n if server_version >= 90500\n cpos = Sequel.expr{array_position(co[:conkey], ctable[:attnum])}\n rpos = Sequel.expr{array_position(co[:confkey], rtable[:attnum])}\n # :nocov:\n else\n range = 0...32\n cpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:conkey], [x]), x]}, 32, ctable[:attnum])}\n rpos = Sequel.expr{SQL::CaseExpression.new(range.map{|x| [SQL::Subscript.new(co[:confkey], [x]), x]}, 32, rtable[:attnum])}\n # :nocov:\n end\n\n ds = metadata_dataset.\n from{pg_constraint.as(:co)}.\n join(Sequel[:pg_class].as(cclass), :oid=>:conrelid).\n join(Sequel[:pg_attribute].as(ctable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).\n join(Sequel[:pg_class].as(rclass), :oid=>Sequel[:co][:confrelid]).\n join(Sequel[:pg_attribute].as(rtable), :attrelid=>:oid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:confkey])).\n join(Sequel[:pg_namespace].as(:nsp), :oid=>Sequel[:cl2][:relnamespace]).\n order{[co[:conname], cpos]}.\n where{{\n cl[:relkind]=>%w'r p',\n co[:contype]=>'f',\n cpos=>rpos\n }}.\n select{[\n co[:conname].as(:name),\n ctable[:attname].as(:column),\n co[:confupdtype].as(:on_update),\n co[:confdeltype].as(:on_delete),\n cl2[:relname].as(:table),\n rtable[:attname].as(:refcolumn),\n SQL::BooleanExpression.new(:AND, co[:condeferrable], co[:condeferred]).as(:deferrable),\n nsp[:nspname].as(:schema)\n ]}\n\n if reverse\n ds = ds.order_append(Sequel[:nsp][:nspname], Sequel[:cl2][:relname])\n end\n\n ds\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def schema_ds_join(table_name, opts)\n [:information_schema__columns, {:table_catalog=>:table_catalog,\n :table_schema => :table_schema, :table_name => :table_name} , :c]\n end",
"def merge_or_replace(mode:)\n target = replication.target_table\n stage = temporary_table_name\n sql = send(\"#{mode}_statement\", target, stage)\n ::RailsRedshiftReplicator.connection.exec sql\n end",
"def copy_table_data(from, to, remaps = [])\n old = columns(from).collect(&:name)\n current = columns(to).collect(&:name)\n remapped_columns = remaps.collect {|c| c.first.to_s}.compact\n common = (current & old).sort - remapped_columns\n from_columns = common.collect {|c| \"`#{c}`\"}\n to_columns = common.collect {|c| \"`#{c}`\"}\n remaps.each do |remap|\n remap = [remap].flatten\n next if remap.length != 2\n from_columns << remap.first\n to_columns << remap.last\n end\n from_columns_to_s = from_columns.join(', ')\n to_columns_to_s = to_columns.join(', ')\n execute \"INSERT INTO #{to}(#{to_columns_to_s}) SELECT #{from_columns_to_s} FROM #{from}\"\n end",
"def touch_associations\n model.touched_associations.each do |assoc, column|\n r = model.association_reflection(assoc)\n next unless r.can_have_associated_objects?(self)\n ds = send(r.dataset_method)\n\n if ds.send(:joined_dataset?)\n # Can't update all values at once, so update each instance individually.\n # Instead if doing a simple save, update via the instance's dataset,\n # to avoid going into an infinite loop in some cases.\n send(assoc).each{|x| x.this.update(column=>touch_association_value)}\n else\n # Update all values at once for performance reasons.\n ds.update(column=>touch_association_value)\n associations.delete(assoc)\n end\n end\n end",
"def unblocked_subsets\n availabilities = Availability.arel_table\n subsets = Subset.arel_table\n account = Membership.arel_table.alias(:account)\n friends = Connection.arel_table.alias(:friends)\n user = Membership.arel_table.alias(:user)\n devices = Device.arel_table\n mutes = Mute.arel_table.alias(:mutes)\n blocked = Mute.arel_table.alias(:blocked)\n sqlblock = \n availabilities. \n join(subsets).on(availabilities[:available_object_id].eq(subsets[:id])).\n join(account).on(subsets[:id].eq(account[:member_id])).\n join(friends).on(account[:account_id].eq(friends[:account_id])).\n join(user).on(user[:account_id].eq(friends[:connectable_id])).\n join(mutes, Arel::Nodes::OuterJoin).on(mutes[:account_id].eq(user[:account_id]).and(mutes[:muted_object_id].eq(subsets[:id]))).\n join(blocked, Arel::Nodes::OuterJoin).on(blocked[:account_id].eq(account[:account_id]).and(blocked[:muted_object_id].eq(user[:account_id]))).\n where(user[:member_type].eq('user').\n and(account[:member_type].eq('subset')).\n and(mutes[:id].eq(nil).or(mutes[:muted_object_type].not_eq('subset'))).\n and(blocked[:id].eq(nil).or(blocked[:muted_object_type].not_eq('account'))).\n and(user[:account_id].eq(self.account.id))\n ).\n project(subsets[:id]).to_sql\n self.connected_subsets.where( id: sqlblock )\nend",
"def join(*args)\n\t\tif args.count > 1\n\t\t\tjoins = args.map { |arg| \"INNER JOIN #{arg} ON #{arg}.#{table}_id = #{table}.id\"}.join(\" \")\n\t\t\trows = connection.execute <<-SQL \n\t\t\t\tSELECT * FROM #{table} #{joins};\n\t\t\tSQL\n\t\telse\n\t\t\tcase args.first\n\t\t\twhen String\n\t\t\t\trows = connection.execute <<-SQL\n\t\t\t\t\tSELECT * FROM #{table} #{BlocRecord::Utility.sql_strings(args.first)};\n\t\t\t\tSQL\n\t\t\twhen Symbol\n\t\t\t\trows = connection.execute <<-SQL \n\t\t\t\t\tSELECT * FROM #{table}\n\t\t\t\t\tINNER JOIN #{args.first} ON #{arg.first}.#{table}_id = #{table}.id;\n\t\t\t\tSQL\n\t\t\twhen Hash \n\t\t\t\t#extract the options from the hash\n\t\t\t\tsecond_table = args[0].keys.first \n\t\t\t\tthird_table = args[0].keys.first\n\t\t\t\trows = connection.execute <<-SQL \n\t\t\t\t\tSELECT * FROM #{table}\n\t\t\t\t\tINNER JOIN #{second_table} ON #{second_table}.#{table}_id = #{table}.id\n\t\t\t\t\tINNER JOIN #{third_table} ON #{third_table}.#{second_table}_id = #{second_table}.id;\n\t\t\t\tSQL\n\n\t\t\tend \n\t\tend\n\t\trows_to_array(rows)\n\tend",
"def graph(dataset, join_conditions = nil, options = OPTS, &block)\n # Allow the use of a dataset or symbol as the first argument\n # Find the table name/dataset based on the argument\n table_alias = options[:table_alias]\n table = dataset\n create_dataset = true\n\n case dataset\n when Symbol\n # let alias be the same as the table name (sans any optional schema)\n # unless alias explicitly given in the symbol using ___ notation\n table_alias ||= split_symbol(table).compact.last\n when Dataset\n if dataset.simple_select_all?\n table = dataset.opts[:from].first\n table_alias ||= table\n else\n table_alias ||= dataset_alias((@opts[:num_dataset_sources] || 0)+1)\n end\n create_dataset = false\n when SQL::Identifier\n table_alias ||= table.value\n when SQL::QualifiedIdentifier\n table_alias ||= split_qualifiers(table).last\n when SQL::AliasedExpression\n return graph(table.expression, join_conditions, {:table_alias=>table.alias}.merge!(options), &block)\n else\n raise Error, \"The dataset argument should be a symbol or dataset\"\n end\n table_alias = table_alias.to_sym\n\n if create_dataset\n dataset = db.from(table)\n end\n\n # Raise Sequel::Error with explanation that the table alias has been used\n raise_alias_error = lambda do\n raise(Error, \"this #{options[:table_alias] ? 'alias' : 'table'} has already been been used, please specify \" \\\n \"#{options[:table_alias] ? 'a different alias' : 'an alias via the :table_alias option'}\") \n end\n\n # Only allow table aliases that haven't been used\n raise_alias_error.call if @opts[:graph] && @opts[:graph][:table_aliases] && @opts[:graph][:table_aliases].include?(table_alias)\n \n table_alias_qualifier = qualifier_from_alias_symbol(table_alias, table)\n implicit_qualifier = options[:implicit_qualifier]\n ds = self\n\n # Use a from_self if this is already a joined table (or from_self specifically disabled for graphs)\n if (@opts[:graph_from_self] != false && !@opts[:graph] && joined_dataset?)\n from_selfed = true\n implicit_qualifier = options[:from_self_alias] || first_source\n ds = ds.from_self(:alias=>implicit_qualifier)\n end\n \n # Join the table early in order to avoid cloning the dataset twice\n ds = ds.join_table(options[:join_type] || :left_outer, table, join_conditions, :table_alias=>table_alias_qualifier, :implicit_qualifier=>implicit_qualifier, :qualify=>options[:qualify], &block)\n\n return ds if options[:join_only]\n\n opts = ds.opts\n\n # Whether to include the table in the result set\n add_table = options[:select] == false ? 
false : true\n # Whether to add the columns to the list of column aliases\n add_columns = !ds.opts.include?(:graph_aliases) # SEQUEL5: Remove graph_aliases support\n\n if graph = opts[:graph]\n graph = graph.dup\n select = opts[:select].dup\n [:column_aliases, :table_aliases, :column_alias_num].each{|k| graph[k] = graph[k].dup}\n else\n # Setup the initial graph data structure if it doesn't exist\n qualifier = ds.first_source_alias\n master = alias_symbol(qualifier)\n raise_alias_error.call if master == table_alias\n\n # Master hash storing all .graph related information\n graph = {}\n\n # Associates column aliases back to tables and columns\n column_aliases = graph[:column_aliases] = {}\n\n # Associates table alias (the master is never aliased)\n table_aliases = graph[:table_aliases] = {master=>self}\n\n # Keep track of the alias numbers used\n ca_num = graph[:column_alias_num] = Hash.new(0)\n\n # All columns in the master table are never\n # aliased, but are not included if set_graph_aliases\n # has been used.\n if add_columns\n if (select = @opts[:select]) && !select.empty? && !(select.length == 1 && (select.first.is_a?(SQL::ColumnAll)))\n select = select.map do |sel|\n raise Error, \"can't figure out alias to use for graphing for #{sel.inspect}\" unless column = _hash_key_symbol(sel)\n column_aliases[column] = [master, column]\n if from_selfed\n # Initial dataset was wrapped in subselect, selected all\n # columns in the subselect, qualified by the subselect alias.\n Sequel.qualify(qualifier, Sequel.identifier(column))\n else\n # Initial dataset not wrapped in subslect, just make\n # sure columns are qualified in some way.\n qualified_expression(sel, qualifier)\n end\n end\n else\n select = columns.map do |column|\n column_aliases[column] = [master, column]\n SQL::QualifiedIdentifier.new(qualifier, column)\n end\n end\n end\n end\n\n # Add the table alias to the list of aliases\n # Even if it isn't been used in the result set,\n # we add a key for it with a nil value so we can check if it\n # is used more than once\n table_aliases = graph[:table_aliases]\n table_aliases[table_alias] = add_table ? dataset : nil\n\n # Add the columns to the selection unless we are ignoring them\n if add_table && add_columns\n column_aliases = graph[:column_aliases]\n ca_num = graph[:column_alias_num]\n # Which columns to add to the result set\n cols = options[:select] || dataset.columns\n # If the column hasn't been used yet, don't alias it.\n # If it has been used, try table_column.\n # If that has been used, try table_column_N \n # using the next value of N that we know hasn't been\n # used\n cols.each do |column|\n col_alias, identifier = if column_aliases[column]\n column_alias = :\"#{table_alias}_#{column}\"\n if column_aliases[column_alias]\n column_alias_num = ca_num[column_alias]\n column_alias = :\"#{column_alias}_#{column_alias_num}\" \n ca_num[column_alias] += 1\n end\n [column_alias, SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(table_alias_qualifier, column), column_alias)]\n else\n ident = SQL::QualifiedIdentifier.new(table_alias_qualifier, column)\n [column, ident]\n end\n column_aliases[col_alias] = [table_alias, column].freeze\n select.push(identifier)\n end\n end\n [:column_aliases, :table_aliases, :column_alias_num].each{|k| graph[k].freeze}\n ds = ds.clone(:graph=>graph.freeze)\n add_columns ? ds.select(*select) : ds\n end",
"def reverse_merge_equally_named_columns(dest, source)\n dest.each{ |dc| dc.reverse_merge!(source.detect{ |sc| sc[:name] == dc[:name] } || {}) }\n end",
"def join_for_works_from_files\n \"{!join from=#{Hyrax.config.id_field} to=file_set_ids_ssim}#{dismax_query}\"\n end",
"def rebuild(table); end",
"def fetch_join_rows(rows)\n conn = @model.connection\n join_table = conn.quote_table_name @ref.join_table\n assoc_fkey = conn.quote_column_name @ref.association_foreign_key\n fkey = conn.quote_column_name @ref.foreign_key\n quoted_ids = rows.map { |row|\n begin\n id = row.send @ref.active_record_primary_key\n rescue NoMethodError => e\n raise MissingColumnError.new(row, e.name)\n end\n conn.quote id\n }\n\n quoted_ids.any? ? conn.\n exec_query(\"SELECT #{fkey}, #{assoc_fkey} FROM #{join_table} WHERE #{fkey} IN (#{quoted_ids.join ','})\").\n rows : []\n end",
"def group_join!(group)\n affiliations.create!( :group_id => group.id )\n end",
"def change_corresponding_herbarium_records(old_format_name)\n HerbariumRecord.joins(observations: :collection_numbers).where(\n accession_number: old_format_name, collection_numbers: { id: id }\n ).update_all(accession_number: format_name)\n end",
"def append_join(join)\n @clause[:join] << join.string \n @clause[:join] << join.implicit_joins\n end",
"def join(other)\n\t\t\n\t\t# Ensure the other CSV has the keys present\n\t\tif not has_keys?(other)\n\t\t\traise \"ERROR: Could not find all key columns #{@keys.to_s} in other CSV\"\n\t\tend\n\t\t\n\t\t# Because .length will change as we delete, we must save ahead of time\n\t\t# and also track the number of rows we've removed\n\t\tlength = @matrix.length\n\t\tremoved_count = 0\n\t\t\n\t\t# Iterate our matrix removing rows not present in the other CSV\n\t\t0.upto length do |index|\n\t\t\t# nil row check\n\t\t\tif not @matrix[index-removed_count]\n\t\t\t\tnext\n\t\t\tend\n\t\t\t\n\t\t\t# If this row is NOT also present in other, delete it here\n\t\t\tif not also_present?(@matrix[index-removed_count], other)\n\t\t\t\t@matrix.delete(index-removed_count)\n\t\t\t\tremoved_count += 1\n\t\t\tend\n\t\tend\n\tend",
"def set_operation(other, oper = :+, distinct: true, add_boundaries: true, inherit_boundaries: false)\n unless columns.size == other.columns.size\n msg = \"can't apply set ops to tables with a different number of columns\"\n raise UserError, msg\n end\n unless columns.map(&:type) == other.columns.map(&:type)\n msg = \"can't apply a set ops to tables with different column types.\"\n raise UserError, msg\n end\n other_rows = other.rows.map { |r| r.replace_keys(headers) }\n result = empty_dup\n new_rows = rows.send(oper, other_rows)\n new_rows.each_with_index do |row, k|\n result << row\n result.mark_boundary if k == size - 1 && add_boundaries\n end\n if inherit_boundaries\n result.explicit_boundaries = boundaries\n result.append_boundaries(other.boundaries, shift: size)\n end\n result.normalize_boundaries\n distinct ? result.distinct : result\n end",
"def join_to_campaigns\n # Inverse of #join_to_photos.\n # Source: http://stackoverflow.com/questions/8425232/sql-select-all-rows-where-subset-exists\n return true if self.photo_tags.count == 0\n in_campaigns = Campaign.joins(:campaign_tags)\n .select(\"campaigns.*, COUNT(campaigns.id)\")\n .where(\n \"campaign_tags.tag_id IN (:tags) and campaigns.end_date > :created_at\",\n {tags: self.tags(true),\n created_at: self.created_at})\n .group(\"campaigns.id\")\n .having(\"COUNT(campaigns.id) = (SELECT count(*) from campaign_tags WHERE campaign_tags.campaign_id = campaigns.id)\")\n\n # Force a reload (hmm)\n self.campaigns(true)\n self.campaigns = in_campaigns\n end",
"def unjoin(other)\t\t\n\t\t# Ensure the other CSV has the keys present\n\t\tif not has_keys?(other)\n\t\t\traise \"ERROR: Could not find all key columns #{@keys.to_s} in other CSV\"\n\t\tend\n\t\t\n\t\t# Because .length will change as we delete, we must save ahead of time\n\t\t# and also track the number of rows we've removed\n\t\tlength = @matrix.length\n\t\tremoved_count = 0\n\t\t\n\t\t# Iterate our matrix removing rows present in the other CSV\n\t\t0.upto length do |index|\n\t\t\t# nil row check\n\t\t\tif not @matrix[index-removed_count]\n\t\t\t\tnext\n\t\t\tend\n\t\t\t\n\t\t\t# If this row IS also present in other, delete it here\n\t\t\tif also_present?(@matrix[index-removed_count], other)\n\t\t\t\t@matrix.delete(index-removed_count)\n\t\t\t\tremoved_count += 1\n\t\t\tend\n\t\tend\t\t\n\tend",
"def with_workers_works_a_bit\n association_join(:workers, select: [:id, :title])\n end",
"def substitute(replacement)\n if replacement\n new_docs, docs = replacement.compact, []\n new_ids = new_docs.map(&:_id)\n remove_not_in(new_ids)\n new_docs.each do |doc|\n docs.push(doc) if doc.send(foreign_key) != _base.send(_association.primary_key)\n end\n concat(docs)\n else\n purge\n end\n self\n end",
"def joined\n meta(joined: true)\n end",
"def update_by_sql(target, set, options = nil)\n set = set.gsub(/@/, '')\n\n if target.is_a? Class\n sql = \"UPDATE #{target.table} SET #{set} \"\n sql << \" WHERE #{options[:condition]}\" if options and options[:condition]\n sql_update(sql)\n else\n sql = \"UPDATE #{target.class.table} SET #{set} WHERE #{pk_field target.class} = #{quote(target.pk)}\"\n sql << \" AND #{options[:condition]}\" if options and options[:condition]\n sql_update(sql)\n end\n end",
"def join_table(type, table, expr=nil, options=OPTS, &block)\n if expr.is_a?(SQL::AliasedExpression) && expr.expression.is_a?(Array) && !expr.expression.empty? && expr.expression.all?\n options = options.merge(:join_using=>true)\n end\n super\n end",
"def prepend_join(join)\n @clause[:join] = join.string << @clause[:join]\n @clause[:join] << join.implicit_joins\n end",
"def update_many_relation(rel, identifier, candidates)\n existing_relations = eval(\"self.#{rel}\")\n existing_ids = existing_relations.map {| trel | eval(\"trel.#{identifier}\")}.compact\n new_ids = candidates-existing_ids\n deleted_ids = existing_ids - candidates\n new_ids.each do | val |\n eval(\"#{rel}.create(#{identifier}: '#{val}')\")\n end\n deleted_ids.each do | val |\n eval( \"#{rel}.where(#{identifier}: '#{val}')[0].destroy\" )\n end\n end",
"def map_in_included_relations!(result, query)\n dat = get_relation_data_as_hash(query)\n result.each do |res|\n dat.each do |name, lookup|\n res[name] = lookup[res[@relations[name][:this_key]]]\n if @relations[name][:type] == :has_one\n res[name] = res[name][0] unless res[name].nil?\n else \n res[name] = [] if res[name].nil?\n end\n end\n end\n end",
"def def_many_to_one(opts)\n name = opts[:name]\n opts[:key] = opts.default_key unless opts.has_key?(:key)\n key = opts[:key]\n opts[:eager_loader_key] = key unless opts.has_key?(:eager_loader_key)\n cks = opts[:graph_keys] = opts[:keys] = Array(key)\n opts[:key_column] ||= key\n opts[:graph_keys] = opts[:key_columns] = Array(opts[:key_column])\n opts[:qualified_key] = opts.qualify_cur(key)\n if opts[:primary_key]\n cpks = Array(opts[:primary_key])\n raise(Error, \"mismatched number of keys: #{cks.inspect} vs #{cpks.inspect}\") unless cks.length == cpks.length\n end\n uses_cks = opts[:uses_composite_keys] = cks.length > 1\n opts[:cartesian_product_number] ||= 0\n\n if !opts.has_key?(:many_to_one_pk_lookup) &&\n (opts[:dataset] || opts[:conditions] || opts[:block] || opts[:select] ||\n (opts.has_key?(:key) && opts[:key] == nil))\n opts[:many_to_one_pk_lookup] = false\n end\n auto_assocs = @autoreloading_associations\n cks.each do |k|\n (auto_assocs[k] ||= []) << name\n end\n\n opts[:dataset] ||= opts.association_dataset_proc\n opts[:eager_loader] ||= proc do |eo|\n h = eo[:id_map]\n pk_meths = opts.primary_key_methods\n\n eager_load_results(opts, eo) do |assoc_record|\n hash_key = uses_cks ? pk_meths.map{|k| assoc_record.get_column_value(k)} : assoc_record.get_column_value(opts.primary_key_method)\n if objects = h[hash_key]\n objects.each{|object| object.associations[name] = assoc_record}\n end\n end\n end\n \n join_type = opts[:graph_join_type]\n select = opts[:graph_select]\n use_only_conditions = opts.include?(:graph_only_conditions)\n only_conditions = opts[:graph_only_conditions]\n conditions = opts[:graph_conditions]\n graph_block = opts[:graph_block]\n graph_cks = opts[:graph_keys]\n opts[:eager_grapher] ||= proc do |eo|\n ds = eo[:self]\n ds.graph(eager_graph_dataset(opts, eo), use_only_conditions ? only_conditions : opts.primary_keys.zip(graph_cks) + conditions, Hash[eo].merge!(:select=>select, :join_type=>eo[:join_type]||join_type, :qualify=>:deep, :from_self_alias=>eo[:from_self_alias]), &graph_block)\n end\n \n return if opts[:read_only]\n \n opts[:setter] ||= proc{|o| cks.zip(opts.primary_key_methods).each{|k, pk| set_column_value(:\"#{k}=\", (o.get_column_value(pk) if o))}}\n opts[:_setter] = proc{|o| set_associated_object(opts, o)}\n end",
"def to_sql\n @join.association_join.gsub(/::ts_join_alias::/,\n \"#{@reflection.klass.connection.quote_table_name(@join.parent.aliased_table_name)}\"\n )\n end",
"def relate base, name, metadata\n base.relations_sleeping_king_studios.update metadata.relation_key => metadata\n end"
] |
[
"0.59809345",
"0.59600765",
"0.58247083",
"0.58247083",
"0.5726891",
"0.5717141",
"0.568549",
"0.5677487",
"0.5632732",
"0.5618993",
"0.5600694",
"0.559718",
"0.55618924",
"0.5530125",
"0.5509987",
"0.54918057",
"0.54159164",
"0.54155934",
"0.5400182",
"0.53923464",
"0.53874177",
"0.5372625",
"0.535616",
"0.53556126",
"0.5314273",
"0.5312108",
"0.5302187",
"0.52588713",
"0.5255211",
"0.52405316",
"0.52387255",
"0.5228078",
"0.52179223",
"0.52179223",
"0.52179223",
"0.52038276",
"0.519633",
"0.5172158",
"0.51699597",
"0.515534",
"0.51539904",
"0.51463014",
"0.51421535",
"0.5096075",
"0.5083826",
"0.50750244",
"0.5048132",
"0.5042142",
"0.5041731",
"0.50403666",
"0.5039421",
"0.50328714",
"0.5032211",
"0.50237733",
"0.50227875",
"0.5015418",
"0.5001226",
"0.500078",
"0.49960965",
"0.49949846",
"0.49923468",
"0.4986276",
"0.49772674",
"0.4975219",
"0.49708426",
"0.49682",
"0.4957818",
"0.49503335",
"0.49462044",
"0.49462044",
"0.49462044",
"0.49412665",
"0.49331903",
"0.49317595",
"0.4929103",
"0.49283865",
"0.49277034",
"0.49214557",
"0.4920942",
"0.49130726",
"0.48995656",
"0.48959342",
"0.48954934",
"0.48852828",
"0.48847994",
"0.48833612",
"0.48808092",
"0.48787543",
"0.48755658",
"0.48752764",
"0.48750174",
"0.48741144",
"0.48719046",
"0.48675215",
"0.48669472",
"0.48615503",
"0.48583695",
"0.48491704",
"0.48457074",
"0.48440933"
] |
0.59849083
|
0
|
PostgreSQL 15+ supports MERGE.
|
def supports_merge?
server_version >= 150000
end
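
Here server_version is the integer form reported by the PostgreSQL adapter (e.g. 150000 for 15.0), so the predicate is true only on servers that implement MERGE. The following is a minimal usage sketch, not part of the dataset record: it assumes Sequel's MERGE dataset API (merge_using/merge_update/merge_insert/merge) and hypothetical items/items_staging tables, and the ON CONFLICT fallback is likewise illustrative.

require 'sequel'

DB = Sequel.connect(ENV.fetch('DATABASE_URL'))
items = DB[:items]

if items.supports_merge?
  # PostgreSQL 15+: upsert the staging rows into items with a single MERGE.
  items.
    merge_using(:items_staging, Sequel[:items][:id] => Sequel[:items_staging][:id]).
    merge_update(name: Sequel[:items_staging][:name]).   # WHEN MATCHED THEN UPDATE
    merge_insert(id: Sequel[:items_staging][:id],
                 name: Sequel[:items_staging][:name]).   # WHEN NOT MATCHED THEN INSERT
    merge
else
  # Pre-15 fallback: emulate the upsert with INSERT ... ON CONFLICT.
  items.
    insert_conflict(target: :id, update: {name: Sequel[:excluded][:name]}).
    multi_insert(DB[:items_staging].select(:id, :name).all)
end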
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def merge_statement(target, stage)\n <<-SQLMERGE\n begin transaction;\n\n delete from #{target}\n using #{stage}\n where #{target}.id = #{stage}.id;\n insert into #{target}\n select * from #{stage};\n\n end transaction;\n SQLMERGE\n end",
"def merge; end",
"def merge!; end",
"def merge_conflict?; end",
"def merge_or_replace(mode:)\n target = replication.target_table\n stage = temporary_table_name\n sql = send(\"#{mode}_statement\", target, stage)\n ::RailsRedshiftReplicator.connection.exec sql\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def merge(with); end",
"def merge_if_exists!\n t = merge_if_exists || self\n t.save!\n end",
"def merge(source); end",
"def merge(other); end",
"def merge!(with); end",
"def pg_hash_only_merge(merge_onto, merge_with)\n # If there are two Hashes, recursively merge.\n if merge_onto.kind_of?(Hash) && merge_with.kind_of?(Hash)\n merge_with.each do |key, merge_with_value|\n merge_onto[key] = pg_hash_only_merge(merge_onto[key], merge_with_value)\n end\n merge_onto\n\n # If merge_with is nil, don't replace merge_onto\n elsif merge_with.nil?\n merge_onto\n\n # In all other cases, replace merge_onto with merge_with\n else\n merge_with\n end\nend",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def try_merge!(pin); end",
"def merge!(other); end",
"def insert_conflict_sql(sql)\n if opts = @opts[:insert_conflict]\n sql << \" ON CONFLICT\"\n\n if target = opts[:constraint] \n sql << \" ON CONSTRAINT \"\n identifier_append(sql, target)\n elsif target = opts[:target]\n sql << ' '\n identifier_append(sql, Array(target))\n if conflict_where = opts[:conflict_where]\n sql << \" WHERE \"\n literal_append(sql, conflict_where)\n end\n end\n\n if values = opts[:update]\n sql << \" DO UPDATE SET \"\n update_sql_values_hash(sql, values)\n if update_where = opts[:update_where]\n sql << \" WHERE \"\n literal_append(sql, update_where)\n end\n else\n sql << \" DO NOTHING\"\n end\n end\n end",
"def merge_to_project(options = {})\n $merge = true \n @opt = {\n :proj_id => nil, \n :person_id => nil,\n :postfix_otu_names => false,\n :postfix_chr_names => false\n }.merge!(options.symbolize_keys)\n\n return false if !@opt[:proj_id] || !@opt[:person_id]\n return false if !@proj = Proj.find(@opt[:proj_id])\n return false if !@person = Person.find(@opt[:person_id])\n\n $proj_id = @proj.id \n $person_id = @person.id\n\n begin\n Proj.transaction do \n # try and do a little matching/transfer in a few cases (could also extend to Keywords)\n self.content_types.each do |o|\n if ct = ContentType.find(:first, :conditions => {:proj_id => @proj.id, :name => o.name})\n # update the templates \n self.content_templates_content_types.each do |ctct|\n if ctct.content_type.name == ct.name\n ctct.content_type = ct\n ctct.save\n end\n end\n \n # update the content\n self.contents.each do |c|\n if c.content_type.name == ct.name\n c.content_type = ct\n c.save\n end\n end \n\n # update the mapped_chr_groups\n self.chr_groups.each do |cg|\n if !cg.content_type.blank? && cg.content_type.name == ct.name\n cg.content_type = ct\n cg.save\n end\n end \n # we have to only delete the merged object below\n end\n end\n\n self.genes.each do |g|\n if g = Gene.find(:first, :conditions => {:proj_id => @proj.id, :name => g.name})\n self.primers.each do |p|\n p.gene = g\n p.save\n end\n self.seqs.each do |s|\n s.gene = g\n s.save\n end\n end\n end\n\n # loop the remaining types\n [:has_many, :has_one, :has_and_belongs_to_many].each do |rel|\n Proj.reflect_on_all_associations(rel).collect{|o| o.name}.each do |r| # r is the class name \n next if r == :text_content_types\n case r\n when :content_types \n self.send(r).each do |o|\n if @ct = ContentType.find(:first, :conditions => {:proj_id => @proj.id, :name => o.name})\n o.destroy \n else\n o.proj_id = @opt[:proj_id]\n o.save\n end \n end \n \n when :genes\n self.send(r).each do |o|\n if @g = Gene.find(:first, :conditions => {:proj_id => @proj.id, :name => o.name})\n o.destroy \n else\n o.proj_id = @opt[:proj_id]\n o.save\n end \n end \n \n when :people\n # do nothing, these remain in the project to be deleted later, otherwise they get touched an pwds get borked\n\n else \n t = self.send(r).each do |o|\n @o = o\n @r = r\n o.name = \"#{o.name} [from: #{o.proj_id}]\" if (@opt[:postfix_otu_names] && o.class == Otu) || (@opt[:postfix_chr_names] && o.class == Chr)\n o.proj_id = @opt[:proj_id]\n o.save\n end\n end\n end\n end # end rel types\n end # end transaction\n\n rescue Exception => e\n $merge = false\n raise \"#{e} o:(#{@o.to_yaml}) o_class: #{@o.class.to_s} r:(#{@r}) p:(#{@proj.id})\"\n end\n $merge = false\n true\n end",
"def _insert_dataset\n if upsert_plugin_upserting\n if postgres?\n super.insert_conflict(update: values_to_update, target: self.class.upsert_plugin_identifying_columns)\n elsif mysql?\n columns_to_update = values_to_update.keys - self.class.upsert_plugin_identifying_columns\n super.on_duplicate_key_update(*columns_to_update)\n else\n super\n end\n else\n super\n end\n end",
"def flush\n conn.transaction do\n buffer.flatten.each do |row|\n # check to see if this row's compound key constraint already exists\n # note that the compound key constraint may not utilize virtual fields\n next unless row_allowed?(row)\n\n # add any virtual fields\n add_virtuals!(row)\n \n key_names = []\n key_values = []\n @key_columns.each do |name|\n key_names << \"#{name}\"\n key_values << conn.quote(row[name]) # TODO: this is probably not database agnostic\n end\n\n names = []\n values = []\n (order - @key_columns).each do |name|\n names << \"#{name}\"\n values << conn.quote(row[name]) # TODO: this is probably not database agnostic\n end\n\n all_name_values = (key_names+names).zip(key_values+values)\n\n q = <<EOF\nMERGE INTO #{table_name} d \nUSING (SELECT #{all_name_values.collect {|c,v| \"#{v} #{c}\"}.join(',')} FROM DUAL) s\nON (#{map_src_to_dest(key_names,'s','d').join(' AND ')})\nWHEN MATCHED THEN \nUPDATE SET #{[map_src_to_dest(names,'s','d'), \"d.#{@update_ts_column}=CURRENT_TIMESTAMP\"].flatten.join(',')}\nWHEN NOT MATCHED THEN\nINSERT (#{all_name_values.collect {|c,v| 'd.'+c}.join(',')},d.#{@insert_ts_column})\nVALUES (#{all_name_values.collect {|c,v| 's.'+c}.join(',')},CURRENT_TIMESTAMP)\nEOF\n #q = \"INSERT INTO `#{table_name}` (#{names.join(',')}) VALUES (#{values.join(',')})\"\n ETL::Engine.logger.debug(\"Executing upsert: #{q}\")\n conn.insert(q, \"Upsert row #{current_row}\")\n @current_row += 1\n end\n buffer.clear\n end\n end",
"def find_merge\n \nend",
"def merge_with(other, unique_id_col = 'id')\n raise \"unmergable objects\" if other.class.column_names != self.class.column_names || self.send(unique_id_col.to_sym) != other.send(unique_id_col.to_sym)\n\n column_names = self.class.column_names\n\n self.trackzored_columns.each do |tc|\n has_updated_by_col = column_names.include?(\"#{tc}_updated_by\")\n has_updated_at_col = column_names.include?(\"#{tc}_updated_at\")\n \n if has_updated_at_col\n self_time = self.send(\"#{tc}_updated_at\".to_sym)\n other_time = other.send(\"#{tc}_updated_at\".to_sym)\n else\n self_time = self.updated_at\n other_time = other.updated_at\n end\n\n if self_time.nil? || (!other_time.nil? && other_time > self_time)\n self.send(\"#{tc}_updated_at=\".to_sym, other_time) if has_updated_at_col\n self.send(\"#{tc}_updated_by=\".to_sym, other.send(\"#{tc}_updated_by\".to_sym)) if has_updated_by_col\n self.send(\"#{tc}=\".to_sym, other.send(tc.to_sym))\n end\n end\n\n if other.updated_at > self.updated_at\n (column_names - self.trackzored_columns - self.trackzor_maintained_columns).each do |c|\n self.send(\"#{c}=\".to_sym, other.send(c.to_sym))\n end\n end\n\n puts \"Merged #{self.send(unique_id_col.to_sym)}: #{self.changes.inspect}\" unless self.changes.empty?\n self.send(:update_without_callbacks)\n end",
"def on_upsert\n #\n end",
"def use_merging(use = T.unsafe(nil)); end",
"def _merge_insert_sql(sql, data)\n sql << \" THEN INSERT \"\n columns, values = _parse_insert_sql_args(data[:values])\n _insert_columns_sql(sql, columns)\n if override = data[:override]\n sql << override\n end\n _insert_values_sql(sql, values)\n end",
"def merge(other_hash); end",
"def merge(...)\n self.clone.merge!(...)\n end",
"def merge(stmt2)\n # print each statement to a file\n [ [ @rows, \"/tmp/merge.1\" ],\n [ stmt2.rows, \"/tmp/merge.2\" ] ].each do | cur_rows, cur_file |\n f = File.open(cur_file, \"w\")\n cur_rows.each do |row| \n if !row[0].nil?\n f.puts(row[0].text) \n end\n end\n f.close\n end\n \n # run an sdiff on it\n @diffs = []\n IO.popen(\"sdiff -w1 /tmp/merge.1 /tmp/merge.2\") do |f|\n f.each { |line| @diffs.push(line.chomp) }\n end\n system(\"rm /tmp/merge.1 /tmp/merge.2\")\n \n # paralellize the arrays, by inserting blank rows\n @diffs.each_with_index do |cur_diff,idx|\n if cur_diff == \"<\"\n new_row = [@rows[idx][0]]\n while new_row.length < stmt2.rows[idx].length\n new_row.push(Cell.new)\n end\n stmt2.rows.insert(idx,new_row)\n elsif cur_diff == \">\"\n new_row = [stmt2.rows[idx][0]]\n while new_row.length < @rows[idx].length\n new_row.push(Cell.new)\n end\n @rows.insert(idx,new_row)\n else\n end\n end\n \n # merge them together\n @rows.size.times do |i|\n @rows[i].concat(stmt2.rows[i])\n end\n end",
"def merge_entity(table_name, entity_values, options = {})\n if_match = \"*\"\n if_match = options[:if_match] if options[:if_match]\n\n uri = entities_uri(table_name,\n entity_values[:PartitionKey] || entity_values[\"PartitionKey\"],\n entity_values[:RowKey] || entity_values[\"RowKey\"], new_query(options))\n\n headers = { \"X-HTTP-Method\" => \"MERGE\" }\n headers[\"If-Match\"] = if_match || \"*\" unless options[:create_if_not_exists]\n\n body = Serialization.hash_to_json(entity_values)\n\n response = call(:post, uri, body, headers, options)\n response.headers[\"etag\"]\n rescue => e\n raise_with_response(e, response)\n end",
"def merge_entity(table_name, entity_values, options={})\n if_match = \"*\"\n if_match = options[:if_match] if options[:if_match]\n\n query = { }\n query[\"timeout\"] = options[:timeout].to_s if options[:timeout]\n\n uri = entities_uri(table_name, \n entity_values[:PartitionKey] || entity_values['PartitionKey'],\n entity_values[:RowKey] || entity_values['RowKey'], query)\n\n headers = { \"X-HTTP-Method\"=> \"MERGE\" }\n headers[\"If-Match\"] = if_match || \"*\" unless options[:create_if_not_exists]\n\n body = Table::Serialization.hash_to_entry_xml(entity_values).to_xml\n\n response = call(:post, uri, body, headers, options)\n response.headers[\"etag\"]\n rescue => e\n raise_with_response(e, response)\n end",
"def successfull_merge_or_nothing(i,j)\n # try a merge and determinize inside a transaction on the ufds\n @ufds.transactional do\n merge_and_determinize(i, j)\n end\n end",
"def merge(hash_1, hash_2)\n\nend",
"def process!()\n # Regarding updates or deletions of :badge_id\n # --------------------------------------------------------------------\n # This has to change only if it refers to a duplicate row;\n # that is, a row that will become a duplicate (and it will have to\n # be deleted) once that the update (and translation) process has been\n # completed.\n #\n # Thus, each linked entity that has a reference to one of these\n # possibly duplicate IDs has to be updated with the new destination value\n # (of which they are the duplicate); else, the value for :badge_id can stay untouched.\n\n merge_done = false\n if check\n process_text_log << \"\\r\\nMerging '#{ @slave_swimmer.complete_name }' (id: #{ @slave_swimmer.id}) => '#{@master_swimmer.complete_name}' (id: #{@master_swimmer.id })...\\r\\n\\r\\n\"\n sql_diff_text_log << \"-- Merging '#{ @slave_swimmer.complete_name }' (id: #{ @slave_swimmer.id}) => '#{@master_swimmer.complete_name}' (id: #{@master_swimmer.id })...\\r\\n\\r\\n\"\n\n # Define merge schema (involved entities)\n @involved_entities = SwimmerMerger.get_involved_entities\n\n # Collect entity row to merge\n @involved_entities.each do |involved_entity|\n collect_swimmer_inolved_entity( involved_entity )\n end\n\n # Collect badges to merge (both swimmers have badges for the same season)\n collect_badges_to_merge\n\n # N.B. Checks for some recalculation needed should be performed before update and delete phases\n # Check if Goggle Cup standards needs recalculation\n needs_goggle_cup_recalculation = needs_goggle_cup_recalculation?\n\n # Check if seasonal personal standards needs recalculation\n needs_season_personal_standard_recalculation = needs_season_personal_standard_recalculation?\n\n # Perform the merge: update and delete phases\n # Bail out from any of these, if something goes wrong:\n merge_done = update_phase! && create_alias_phase! && delete_phase! && execute_delete!( Swimmer )\n\n if merge_done\n if @row_collectors[ MeetingIndividualResult.table_name ].non_duplicates_rows.size > 0\n # Recalculate personal bests\n process_text_log << \"\\r\\n\\r\\nPersonal-bests recalculation needed...\\r\\n\"\n swimmer_best_updater = SwimmerPersonalBestUpdater.new( @master_swimmer )\n swimmer_best_updater.scan_for_personal_best!\n sql_diff_text_log << swimmer_best_updater.sql_diff_text_log\n process_text_log << \"Personal-bests recalculation done!\\r\\n\"\n end\n\n # TODO if needed, also recalculate goggle_cup_standards\n if needs_goggle_cup_recalculation\n process_text_log << \"\\r\\n\\r\\n*** I M P O R T A N T ***\\r\\n\"\n process_text_log << \"Goggle-cup standards recalculation needed...\\r\\n\"\n end\n\n # TODO if needed, also recalculate season_personal_standards\n if needs_season_personal_standard_recalculation?\n process_text_log << \"\\r\\n\\r\\n*** I M P O R T A N T ***\\r\\n\"\n process_text_log << \"Season personal standards recalculation needed...\\r\\n\"\n end\n end\n end\n merge_done\n end",
"def merge!(assoc_rows, rows)\n Merge.new(rows, name).\n many!(assoc_rows, @ref.active_record_primary_key, @ref.foreign_key)\n end",
"def conflicting_or_created_record\n conflict || create\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{buffer.parent.table_name.inspect} for selector #{selector.map(&:inspect).join(', ')} and columns #{columns.map(&:inspect).join(', ')}\"\n column_definitions = get_column_definitions\n connection.execute <<-EOS\nCREATE OR REPLACE FUNCTION #{name}(#{column_definitions.map { |c| \"#{c.quoted_input_name} #{c.sql_type} DEFAULT #{c.default || 'NULL'}\" }.join(',') }) RETURNS VOID AS\n$$\nBEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{column_definitions.map { |c| \"#{c.quoted_name} = #{c.quoted_input_name}\" }.join(',')}\n WHERE #{selector.map { |k| \"#{connection.quote_ident(k)} = #{connection.quote_ident([k,'input'].join('_'))}\" }.join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{column_definitions.map { |c| c.quoted_name }.join(',')}) VALUES (#{column_definitions.map { |c| c.quoted_input_name }.join(',')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\nEND;\n$$\nLANGUAGE plpgsql;\nEOS\n end",
"def supports_multi_insert?\n true\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def supports_insert_conflict?\n server_version >= 90500\n end",
"def merge(opts); end",
"def upsert_rows(reader, table_schemas_lookup, row_transformer, validator = nil, copy_options = [])\n add_rows(reader, table_schemas_lookup, row_transformer, validator, copy_options, AddNewData.new('upsert'))\n end",
"def upsert_rows(reader, table_schemas_lookup, row_transformer, validator = nil, copy_options = [])\n add_rows(reader, table_schemas_lookup, row_transformer, validator, copy_options, AddNewData.new('upsert'))\n end",
"def pfmerge(dest_key, *source_key); end",
"def pfmerge(dest_key, *source_key); end",
"def persist_merge\n updater.update\n end",
"def merge(source,target)\n source.stringify_keys!\n target.stringify_keys!\n source.each do |key,value|\n if not value.is_a? Hash and not target[key] then\n puts \" #{key} not present in target. Copying\"\n target[key] = source[key]\n elsif value.is_a? Hash and not target[key] then\n target[key] = value\n elsif value.is_a? Hash and target[key] then\n target[key] = merge(value,target[key])\n end\n end\n return target\nend",
"def merge(to_be_merged)\n scheme_ids = identifiers.pluck(:identifier_scheme_id)\n # merge logic\n # => answers -> map id\n to_be_merged.answers.update_all(user_id: id)\n # => notes -> map id\n to_be_merged.notes.update_all(user_id: id)\n # => plans -> map on id roles\n to_be_merged.roles.update_all(user_id: id)\n # => prefs -> Keep's from self\n # => auths -> map onto keep id only if keep does not have the identifier\n to_be_merged.identifiers\n .where.not(identifier_scheme_id: scheme_ids)\n .update_all(identifiable_id: id)\n # => ignore any perms the deleted user has\n to_be_merged.destroy\n end",
"def merge_into(target)\n target_id = target.id\n # Find all the Entries attached to this name, that will need to be\n # reindexed after the merge\n entry_ids = entry_ids_to_index_on_update\n\n ids = EntryArtist.where(artist_id: self.id).pluck(:id)\n EntryArtist.where(artist_id: self.id).update_all({ artist_id: target_id })\n EntryArtist.where( id: ids ).each(&:update_bunny)\n\n ids = EntryAuthor.where(author_id: self.id).pluck(:id)\n EntryAuthor.where(author_id: self.id).update_all({ author_id: target_id })\n EntryAuthor.where( id: ids ).each(&:update_bunny)\n\n ids = EntryScribe.where(scribe_id: self.id).pluck(:id)\n EntryScribe.where(scribe_id: self.id).update_all({ scribe_id: target_id })\n EntryScribe.where( id: ids ).each(&:update_bunny)\n\n ids = SaleAgent.where(agent_id: self.id).pluck(:id)\n SaleAgent.where(agent_id: self.id).update_all({ agent_id: target_id })\n SaleAgent.where( id: ids ).each(&:update_bunny)\n\n ids = SourceAgent.where(agent_id: self.id).pluck(:id)\n SourceAgent.where(agent_id: self.id).update_all({ agent_id: target_id })\n SourceAgent.where( id: ids ).each(&:update_bunny)\n\n ids = Provenance.where(provenance_agent_id: self.id).pluck(:id)\n Provenance.where(provenance_agent_id: self.id).update_all({ provenance_agent_id: target_id })\n Provenance.where( id: ids ).each(&:update_bunny)\n\n ids = DericciLink.where(name_id: self.id).pluck(:id)\n DericciLink.where(name_id: self.id).update_all({ name_id: target_id })\n DericciLink.where( id: ids ).each(&:update_bunny)\n\n ids = DericciRecord.where(verified_id: self.id).pluck(:id)\n DericciRecord.where(verified_id: self.id).update_all({verified_id: target_id})\n DericciRecord.where( id: ids ).each(&:update_bunny)\n\n # update flags on the target\n target.is_artist ||= self.is_artist\n target.is_author ||= self.is_author\n target.is_scribe ||= self.is_scribe\n target.is_provenance_agent ||= self.is_provenance_agent\n\n target.save\n\n # but ... CAN't SAVE when name is BLANK (nil)\n # self.name = nil\n self.viaf_id = nil\n self.deleted = true\n self.save!\n\n # slice into managable chunks to avoid running out of space in mysql\n entry_ids.each_slice(200) do |slice|\n SDBMSS::IndexJob.perform_later(Entry.to_s, slice)\n end\n\n Name.update_counters(target.id,\n :authors_count => target.author_entries.where(deprecated: false, draft: false).count - target.authors_count,\n :artists_count => target.artist_entries.where(deprecated: false, draft: false).count - target.artists_count,\n :scribes_count => target.scribe_entries.where(deprecated: false, draft: false).count - target.scribes_count,\n :sale_agents_count => target.sale_entries.where(deprecated: false, draft: false).count - target.sale_agents_count,\n :provenance_count => target.provenance_entries.where(deprecated: false, draft: false).count - target.provenance_count,\n :source_agents_count => target.agent_sources.count - target.source_agents_count\n )\n end",
"def merge(hash); end",
"def merge(hash); end",
"def merge(hash); end",
"def merge!(other)\n raise NotImplementedError.new(\"Method 'merge!' not implemented by '#{self.class.name}'\")\n end",
"def rh_merge!(data)\n _rh_merge(self, data)\n end",
"def merge_branch\n git.merge branch\n rescue Git::MergeFailed\n cli.say \"Merge failed. Please resolve these conflicts.\"\n end",
"def sql_for_on_duplicate_key_ignore( *args ) # :nodoc:\n arg = args.first\n conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash )\n \" ON CONFLICT #{conflict_target}DO NOTHING\"\n end",
"def scaffold_merge_records(from, to, options)\n from, to = from.to_i, to.to_i\n return false if from == to\n from_object = scaffold_get_object(from)\n return false unless scaffold_session_value_matches?(from_object, options[:session])\n to_object = scaffold_get_object(to)\n return false unless scaffold_session_value_matches?(to_object, options[:session])\n scaffold_transaction do\n scaffold_all_associations.each{|reflection| scaffold_reflection_merge(reflection, from, to)}\n scaffold_destroy(from_object)\n end\n true\n end",
"def mergeGbkSeq\n \nend",
"def execute_update!( activerecord_class )\n collector = @row_collectors[ activerecord_class.table_name ]\n process_text_log << \"Updating #{ activerecord_class.name }...\\r\\n\"\n sql_diff_text_log << \"\\r\\n-- Updates for #{ activerecord_class.name }:\\r\\n\"\n is_ok = true\n begin\n collector.non_duplicates_rows.each do |row|\n sql_attributes = {}\n if row.respond_to?(:swimmer_id) # Correct the swimmer_id link:\n row.swimmer_id = @master_swimmer.id\n sql_attributes['swimmer_id'] = @master_swimmer.id\n end\n # Correct the badge link, when included in the matrix:\n if row.respond_to?(:badge_id) && @dup_badge_matrix_ids.has_key?( row.badge_id )\n row.badge_id = @dup_badge_matrix_ids[ row.badge_id ]\n sql_attributes['badge_id'] = row.badge_id\n end\n\n if row.invalid? # Check validation:\n msg = \"\\r\\n\\r\\n-- *** Swimmer Merge: validation ERROR during #{ row.class.name } update!\\r\\n\" <<\n \"- row..............: #{ row.inspect }\\r\\n\" <<\n \"- sql_attributes...: #{ sql_attributes.inspect }\\r\\n\" <<\n \"\\r\\n- ERROR............: #{ ValidationErrorTools.recursive_error_for( row ) }\\r\\n\"\n# DEBUG\n puts msg\n process_text_log << msg\n end\n row.save!\n # Build-up SQL-diff:\n if sql_attributes.size > 0 # (false = no additional comment)\n sql_diff_text_log << to_sql_update( row, false, sql_attributes, \"\\r\\n\" )\n end\n end\n rescue\n process_text_log << \"\\r\\n\\r\\n*** Swimmer Merge: exception caught!\\r\\n\"\n process_text_log << \"*** Phase '#{ activerecord_class.name } UPDATE': #{ $!.to_s }\\r\\n\" if $!\n process_text_log << sql_diff_text_log\n is_ok = false\n end\n is_ok\n end",
"def perform_additional_merge_operations!(other)\n end",
"def multi_insert_sql_strategy\n :values\n end",
"def merge(merge_branch, options = {})\n merge_analysis = rugged.merge_analysis(merge_branch.name)\n if merge_analysis.include?(:fastforward)\n rugged.references.update(helper.head_ref, merge_branch.target_id)\n rugged.checkout_head(strategy: :force)\n elsif merge_analysis.include?(:normal)\n ours = helper.head_target\n theirs = merge_branch.target\n merge_base = rugged.merge_base(ours, theirs)\n raise(NoCommonCommit) unless merge_base\n\n base = rugged.rev_parse(merge_base)\n index = ours.tree.merge(theirs.tree, base.tree)\n\n commit_message =\n if index.conflicts?\n raise(MergeConflict) unless block_given?\n\n message = yield(index, rugged, helper.working_directory)\n raise(MergeConflict) unless message\n\n index.conflict_cleanup\n message\n else\n \"Merge branch '#{helper.head_branch.name}' of #{helper.head_remote.url}\"\n end\n\n helper.commit_create(\n commit_message,\n index.write_tree(rugged),\n [ours, theirs],\n options\n )\n rugged.checkout_head(strategy: :force)\n end\n\n self\n end",
"def merge_trees(base_treeish, local_treeish, remote_treeish)\n invoke(:merge_recursive, base_treeish, \"-- #{local_treeish} #{remote_treeish}\")\n true\n rescue ShellExecutionError => error\n # 'CONFLICT' messages go to stdout.\n raise MergeError, error.out\n end",
"def merge_by_values(relation, other)\n other.cte.with_values.each do |name, expression|\n relation = if other.cte.materialized_key?(name)\n relation.with!.materialized(name => expression)\n elsif other.cte.not_materialized_key?(name)\n relation.with!.not_materialized(name => expression)\n else\n relation.with!(name => expression)\n end\n end\n\n relation\n end",
"def sql_for_on_duplicate_key_ignore( table_name, *args ) # :nodoc:\n arg = args.first\n conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash )\n \" ON CONFLICT #{conflict_target}DO NOTHING\"\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def merge!(other_hash); end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( table_name, primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target or :constraint_name to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def merge(other_person, force: false)\n # Consider just using straight SQL for this --\n # it's not complicated, and the current process generates an\n # enormous amount of SQL\n\n ActiveSupport::Notifications.instrument(\n \"merge.people.admin.racing_on_rails\",\n person_id: id,\n person_name: name,\n other_id: other_person.try(:id),\n other_name: other_person.try(:name)\n ) do\n unless merge?(other_person, force: force)\n ActiveSupport::Notifications.instrument(\n \"failure.merge.people.admin.racing_on_rails\",\n person_id: id,\n person_name: name,\n other_id: other_person.try(:id),\n other_name: other_person.try(:name)\n )\n return false\n end\n\n Person.transaction do\n before_merge other_person\n\n if login.blank? && other_person.login.present?\n self.login = other_person.login\n self.crypted_password = other_person.crypted_password\n PaperTrail.request(enabled: false) do\n other_person.update login: nil\n end\n end\n self.member_from = other_person.member_from if member_from.nil? || (other_person.member_from && other_person.member_from < member_from)\n self.member_to = other_person.member_to if member_to.nil? || (other_person.member_to && other_person.member_to > member_to)\n\n other_person_is_newer = other_person.created_at > created_at\n MERGE_ATTRIBUTES.each do |attribute|\n send(\"#{attribute}=\", other_person.send(attribute)) if other_person.send(attribute).present? && (send(attribute).blank? || other_person_is_newer)\n end\n\n if date_of_birth && other_person.date_of_birth && date_of_birth.day == 1\n self.date_of_birth = Time.zone.local(date_of_birth.year, date_of_birth.month, other_person.date_of_birth.day)\n end\n\n # Prevent unique index collision\n other_person.update_column :license, nil\n\n save!\n\n # save! can trigger automatic deletion for people created for old orders\n # if that happens, don't try and merge associations\n return true unless Person.exists?(id)\n\n aliases << other_person.aliases\n editor_requests << other_person.editor_requests\n editors << (other_person.editors - editors).uniq.reject { |e| e == self }\n events << other_person.events\n event_teams = event_team_memberships.map(&:event_team_id)\n event_team_memberships << other_person.event_team_memberships.reject { |e| event_teams.include?(e.event_team_id) }\n names << other_person.names\n race_numbers << other_person.race_numbers\n results << other_person.results\n versions << other_person.versions\n\n other_person.event_team_memberships.reload.clear\n Person.delete other_person.id\n existing_alias = aliases.detect { |a| a.name.casecmp(other_person.name) == 0 }\n aliases.create(name: other_person.name) if existing_alias.nil? && Person.find_all_by_name(other_person.name).empty?\n end\n end\n\n ActiveSupport::Notifications.instrument(\n \"success.merge.people.admin.racing_on_rails\",\n person_id: id,\n person_name: name,\n other_id: other_person.try(:id),\n other_name: other_person.try(:name)\n )\n\n true\n end",
"def _do_rh_merge(result, key, data, refuse_discordance = false)\n value = data[key]\n\n return if _rh_merge_do_add_key(result, key, value)\n\n return if _rh_merge_recursive(result, key, data)\n\n return if refuse_discordance\n\n return unless _rh_struct_changing_ok?(result, key, data)\n\n return unless _rh_merge_ok?(result, key)\n\n _rh_merge_do_upd_key(result, key, value)\n end",
"def merge(other, new_name = nil)\n schema_1 = self.ruby_type\n db_type_1 = self.database_type\n schema_2 = other.ruby_type\n db_type_2 = other.database_type\n if schema_1 == schema_2 && db_type_1 == db_type_2\n result = schema_1\n else\n type_1 = schema_1[:type]\n opts_1 = schema_1[:opts] || {}\n type_2 = schema_2[:type]\n opts_2 = schema_2[:opts] || {}\n result_type = type_1\n result_opts = schema_1[:opts] ? schema_1[:opts].dup : {}\n\n # type\n if type_1 != type_2\n result_type = first_common_type(type_1, type_2)\n if result_type.nil?\n raise \"Can't merge #{type_1} (#{name}) with #{type_2} (#{other.name})\"\n end\n end\n\n # text\n if opts_1[:text] != opts_2[:text]\n # This can only be of type String.\n result_opts[:text] = true\n result_opts.delete(:size)\n end\n\n # size\n if !result_opts[:text] && opts_1[:size] != opts_2[:size]\n types = [type_1, type_2].uniq\n if types.length == 1 && types[0] == BigDecimal\n # Two decimals\n if opts_1.has_key?(:size) && opts_2.has_key?(:size)\n s_1 = opts_1[:size]\n s_2 = opts_2[:size]\n result_opts[:size] = [ s_1[0] > s_2[0] ? s_1[0] : s_2[0] ]\n\n if s_1[1] && s_2[1]\n result_opts[:size][1] = s_1[1] > s_2[1] ? s_1[1] : s_2[1]\n else\n result_opts[:size][1] = s_1[1] ? s_1[1] : s_2[1]\n end\n else\n result_opts[:size] = opts_1.has_key?(:size) ? opts_1[:size] : opts_2[:size]\n end\n elsif types.include?(String) && types.include?(BigDecimal)\n # Add one to the precision of the BigDecimal (for the dot)\n if opts_1.has_key?(:size) && opts_2.has_key?(:size)\n s_1 = opts_1[:size].is_a?(Array) ? opts_1[:size][0] + 1 : opts_1[:size]\n s_2 = opts_2[:size].is_a?(Array) ? opts_2[:size][0] + 1 : opts_2[:size]\n result_opts[:size] = s_1 > s_2 ? s_1 : s_2\n elsif opts_1.has_key?(:size)\n result_opts[:size] = opts_1[:size].is_a?(Array) ? opts_1[:size][0] + 1 : opts_1[:size]\n elsif opts_2.has_key?(:size)\n result_opts[:size] = opts_2[:size].is_a?(Array) ? opts_2[:size][0] + 1 : opts_2[:size]\n end\n else\n # Treat as two strings\n if opts_1.has_key?(:size) && opts_2.has_key?(:size)\n result_opts[:size] = opts_1[:size] > opts_2[:size] ? opts_1[:size] : opts_2[:size]\n elsif opts_1.has_key?(:size)\n result_opts[:size] = opts_1[:size]\n else\n result_opts[:size] = opts_2[:size]\n end\n end\n end\n\n # fixed\n if opts_1[:fixed] != opts_2[:fixed]\n # This can only be of type String.\n result_opts[:fixed] = true\n end\n\n # collation\n if opts_1[:collate] != opts_2[:collate] || db_type_1 != db_type_2\n result_opts.delete(:collate)\n end\n\n result = {:type => result_type}\n result[:opts] = result_opts unless result_opts.empty?\n end\n\n if new_name\n name = new_name.to_sym\n else\n name = self.name == other.name ? self.name : :\"#{self.name}_#{other.name}\"\n end\n MergeField.new(name, result, db_type_1 == db_type_2 ? db_type_1 : nil)\n end",
"def merge(other)\n schema(other.keys)\n end",
"def merge_to(data)\n\n # prevent cross merging tickets\n target_ticket = Ticket.find_by(id: data[:ticket_id])\n raise 'no target ticket given' if !target_ticket\n raise Exceptions::UnprocessableEntity, 'ticket already merged, no merge into merged ticket possible' if target_ticket.state.state_type.name == 'merged'\n\n # check different ticket ids\n raise Exceptions::UnprocessableEntity, 'Can\\'t merge ticket with it self!' if id == target_ticket.id\n\n # update articles\n Transaction.execute do\n\n Ticket::Article.where(ticket_id: id).each(&:touch)\n\n # quiet update of reassign of articles\n Ticket::Article.where(ticket_id: id).update_all(['ticket_id = ?', data[:ticket_id]]) # rubocop:disable Rails/SkipsModelValidations\n\n # mark target ticket as updated\n # otherwise the \"received_merge\" history entry\n # will be the same as the last updated_at\n # which might be a long time ago\n target_ticket.updated_at = Time.zone.now\n\n # add merge event to both ticket's history (Issue #2469 - Add information \"Ticket merged\" to History)\n target_ticket.history_log(\n 'received_merge',\n data[:user_id],\n id_to: target_ticket.id,\n id_from: id,\n )\n history_log(\n 'merged_into',\n data[:user_id],\n id_to: target_ticket.id,\n id_from: id,\n )\n\n # create new merge article\n Ticket::Article.create(\n ticket_id: id,\n type_id: Ticket::Article::Type.find_by(name: 'note').id,\n sender_id: Ticket::Article::Sender.find_by(name: 'Agent').id,\n body: 'merged',\n internal: false,\n )\n\n # reassign links to the new ticket\n # rubocop:disable Rails/SkipsModelValidations\n Link.where(\n link_object_source_id: Link::Object.find_by(name: 'Ticket').id,\n link_object_source_value: id,\n ).update_all(link_object_source_value: data[:ticket_id])\n Link.where(\n link_object_target_id: Link::Object.find_by(name: 'Ticket').id,\n link_object_target_value: id,\n ).update_all(link_object_target_value: data[:ticket_id])\n # rubocop:enable Rails/SkipsModelValidations\n\n # link tickets\n Link.add(\n link_type: 'parent',\n link_object_source: 'Ticket',\n link_object_source_value: data[:ticket_id],\n link_object_target: 'Ticket',\n link_object_target_value: id\n )\n\n # set state to 'merged'\n self.state_id = Ticket::State.find_by(name: 'merged').id\n\n # rest owner\n self.owner_id = 1\n\n # save ticket\n save!\n\n # touch new ticket (to broadcast change)\n target_ticket.touch # rubocop:disable Rails/SkipsModelValidations\n end\n true\n end",
"def insert_or_merge_entity(table_name, entity_values, options = {})\n options[:create_if_not_exists] = true\n merge_entity(table_name, entity_values, options)\n end",
"def rh_merge(data)\n _rh_merge(clone, data)\n end",
"def merge(b)\n return false unless self.same_book?(b)\n self.merge!(b)\n end",
"def merge(dupe)\n return false if self == dupe\n dupe.traits.update_all(master_trait_id: self.id)\n dupe.destroy\n end",
"def merge *others\n others.inject self.clone do |a,b|\n a.insert b\n end\n end",
"def deep_merge(other, &bloc)\n other.keys.inject(dup) do |result, key|\n begin\n case result[key]\n when Hash\n if other[key].is_a?(Hash)\n result[key] = result[key].deep_merge(other[key], &bloc)\n result\n else\n raise MergeConflict\n end\n when nil then result.merge key => other[key]\n else\n raise MergeConflict\n end\n rescue MergeConflict\n if bloc.nil?\n result[key] = other[key]\n else\n result[key] = bloc.call(result, other, key) \n end\n result\n end\n end\n \n end",
"def insert_or_merge_entity(table_name, entity_values, options={})\n options[:create_if_not_exists] = true\n merge_entity(table_name, entity_values, options)\n end",
"def merge!(hash); end",
"def wonkofile_merge(old, new)\n res = old.merge new\n # changing versions is not allowed by adding another version index (we might end up with a version in the index that\n # we don't have an actual version file for)\n res['versions'] = old['versions']\n res\nend",
"def merge!(other)\n # @todo: use specific exception\n raise Exception, \"Cannot merge\" unless self.class == other.class &&\n coll_name == other.coll_name &&\n db_name == other.db_name\n @spec[batch_key] += other.spec[batch_key]\n @spec[:indexes] = indexes + other.indexes\n self\n end",
"def neo_self_merge_clause\n \"MERGE (self:#{self.class.base_class} {#{neo_self_condition}})\"\n end",
"def on_conflict_sql(sql)\n @sql_on_conflict = sql\n self\n end",
"def merge(other)\n dup.merge!(other)\n end",
"def merge(base_hash, derived_hash); end",
"def merge( other )\n self.dup.merge!(other)\n end",
"def merge(*docs)\n json_op(:merge, self, *docs.map { |d| Sequel.object_to_json(d) })\n end",
"def merge!\n other_tag = Tag.find_by_name(self.name)\n if other_tag.nil? or other_tag.id == self.id\n return self\n else\n taggings.map(&:clone).each { |t| (t.tag = other_tag) && t.save! }\n self.destroy\n return other_tag\n end\n end",
"def perform_merge(src, dest, delete_after)\n src_notes = src.all_notes\n dest_notes = dest.all_notes\n result = false\n\n # Mergeable if there are no fields which are non-blank in both descriptions.\n if src.class.all_note_fields.none? \\\n {|f| !src_notes[f].blank? and !dest_notes[f].blank?}\n result = true\n\n # Copy over all non-blank descriptive fields.\n xargs = {}\n for f, val in src_notes\n if !val.blank?\n dest.send(\"#{f}=\", val)\n xargs[:\"set_#{f}\"] = val\n end\n end\n\n # Store where merge came from in new version of destination.\n dest.merge_source_id = src.versions.latest.id rescue nil\n xargs[:set_merge_source] = src\n\n # Save changes to destination.\n dest.save\n Transaction.send(\"put_#{dest.type_tag}\", xargs)\n\n # Copy over authors and editors.\n src.authors.each {|user| dest.add_author(user)}\n src.editors.each {|user| dest.add_editor(user)}\n\n # Delete old description if requested.\n if delete_after\n if !src.is_admin?(@user)\n flash_warning(:runtime_description_merge_delete_denied.t)\n else\n src_was_default = (src.parent.description_id == src.id)\n Transaction.send(\"delete_#{src.type_tag}\", :id => src)\n flash_notice(:runtime_description_merge_deleted.\n t(:old => src.unique_partial_format_name))\n src.destroy\n\n # Make destination the default if source used to be the default.\n if src_was_default && dest.public\n dest.parent.description = dest\n dest.parent.save\n end\n end\n end\n\n end\n return result\n end",
"def _import(columns, values, opts=OPTS)\n if @opts[:returning]\n # no transaction: our multi_insert_sql_strategy should guarantee\n # that there's only ever a single statement.\n sql = multi_insert_sql(columns, values)[0]\n returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}\n elsif opts[:return] == :primary_key\n returning(insert_pk)._import(columns, values, opts)\n else\n super\n end\n end",
"def store_merge_state; end",
"def store_merge_state; end",
"def merge(other)\n dup.update(other)\n end",
"def merge(other_person)\n # Consider just using straight SQL for this --\n # it's not complicated, and the current process generates an\n # enormous amount of SQL\n\n ActiveSupport::Notifications.instrument(\n \"merge.people.admin.racing_on_rails\",\n person_id: id,\n person_name: name,\n other_id: other_person.try(:id),\n other_name: other_person.try(:name)\n ) do\n\n if other_person.nil? || other_person == self\n ActiveSupport::Notifications.instrument(\n \"failure.merge.people.admin.racing_on_rails\",\n person_id: id,\n person_name: name,\n other_id: other_person.try(:id),\n other_name: other_person.try(:name),\n )\n return false\n end\n\n Person.transaction do\n self.merge_version do\n if login.blank? && other_person.login.present?\n self.login = other_person.login\n self.crypted_password = other_person.crypted_password\n other_person.skip_version do\n other_person.update login: nil\n end\n end\n if member_from.nil? || (other_person.member_from && other_person.member_from < member_from)\n self.member_from = other_person.member_from\n end\n if member_to.nil? || (other_person.member_to && other_person.member_to > member_to)\n self.member_to = other_person.member_to\n end\n\n if license.blank?\n self.license = other_person.license\n end\n\n save!\n aliases << other_person.aliases\n events << other_person.events\n names << other_person.names\n results << other_person.results\n race_numbers << other_person.race_numbers\n\n begin\n versions << other_person.versions\n rescue ActiveRecord::SerializationTypeMismatch => e\n logger.error e\n end\n\n versions.sort_by(&:created_at).each_with_index do |version, index|\n version.number = index + 2\n version.save!\n end\n\n Person.delete other_person.id\n existing_alias = aliases.detect{ |a| a.name.casecmp(other_person.name) == 0 }\n if existing_alias.nil? and Person.find_all_by_name(other_person.name).empty?\n aliases.create(name: other_person.name)\n end\n end\n end\n\n ActiveSupport::Notifications.instrument(\n \"success.merge.people.admin.racing_on_rails\",\n person_id: id,\n person_name: name,\n other_id: other_person.try(:id),\n other_name: other_person.try(:name),\n )\n\n true\n end\n end",
"def updatable_merge_requests_union_sql\n metrics_not_exists_clause =\n 'NOT EXISTS (SELECT 1 FROM merge_request_metrics WHERE merge_request_metrics.merge_request_id = merge_requests.id)'\n\n without_metrics_data = <<-SQL.strip_heredoc\n merge_request_metrics.merged_by_id IS NULL OR\n merge_request_metrics.latest_closed_by_id IS NULL OR\n merge_request_metrics.latest_closed_at IS NULL\n SQL\n\n mrs_without_metrics_record = MergeRequest\n .where(metrics_not_exists_clause)\n .select(:id)\n\n mrs_without_events_data = MergeRequest\n .joins('INNER JOIN merge_request_metrics ON merge_requests.id = merge_request_metrics.merge_request_id')\n .where(without_metrics_data)\n .select(:id)\n\n Gitlab::SQL::Union.new([mrs_without_metrics_record, mrs_without_events_data]).to_sql\n end",
"def merge!(new_args); end",
"def merge(place)\n fail 'Cannot merge a venue into a merged venued' unless merged_with_place_id.blank?\n fail 'Cannot merge place with itself' if id == place.id\n self.class.connection.transaction do\n Event.where(place_id: place.id).each do |event|\n event.update_attribute(:place_id, id) || fail('cannot update event')\n end\n\n Venue.where(place_id: place.id).each do |venue|\n real_venue = Venue.find_or_create_by(place_id: id, company_id: venue.company_id)\n # Update them one by one so the versions are generated\n venue.activities.each { |a| a.update_attribute(:activitable_id, real_venue.id) }\n venue.invites.update_all(venue_id: real_venue.id)\n venue.destroy\n end\n\n Placeable.where(place_id: place.id).update_all(place_id: id)\n\n place.td_linx_code ||= place.td_linx_code\n place.update_attribute(:merged_with_place_id, id)\n end\n end",
"def merge_base(target, source)\n invoke(:merge_base, target, source)\n rescue ShellExecutionError\n nil\n end",
"def merge(base_hash, derived_hash, **opts); end"
] |
[
"0.6710938",
"0.6681831",
"0.6634573",
"0.64860135",
"0.64847136",
"0.64394313",
"0.6314614",
"0.62681806",
"0.61912584",
"0.6133674",
"0.60186386",
"0.59482306",
"0.58233184",
"0.5804145",
"0.5768289",
"0.57538915",
"0.5709405",
"0.5695733",
"0.5656634",
"0.5585567",
"0.5577488",
"0.5531713",
"0.5527096",
"0.5507786",
"0.5495764",
"0.54893184",
"0.54772604",
"0.5443693",
"0.54418653",
"0.5428375",
"0.53797734",
"0.5376607",
"0.5355894",
"0.53476155",
"0.5332091",
"0.53142905",
"0.53075486",
"0.53075486",
"0.53075486",
"0.5305047",
"0.5299691",
"0.529788",
"0.529788",
"0.52803904",
"0.52803904",
"0.52700096",
"0.5267843",
"0.5266987",
"0.5265068",
"0.52481747",
"0.52481747",
"0.52481747",
"0.5247746",
"0.52349395",
"0.5230775",
"0.5220869",
"0.5202584",
"0.5193764",
"0.5192757",
"0.519264",
"0.5187066",
"0.5183742",
"0.5180088",
"0.5179874",
"0.5172688",
"0.51712334",
"0.5167317",
"0.51587355",
"0.515182",
"0.51501876",
"0.5140019",
"0.513331",
"0.5129086",
"0.5117587",
"0.5112617",
"0.510537",
"0.51025456",
"0.5098441",
"0.50877374",
"0.50840116",
"0.5076583",
"0.50631654",
"0.50583786",
"0.5055216",
"0.5050118",
"0.5048323",
"0.50480443",
"0.504326",
"0.5037745",
"0.50334615",
"0.50195026",
"0.50194484",
"0.5006737",
"0.5006737",
"0.50059265",
"0.49928045",
"0.49909937",
"0.49901664",
"0.49851048",
"0.49831924",
"0.49814683"
] |
0.0
|
-1
|
Returning is always supported.
|
def supports_returning?(type)
true
end
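A minimal usage sketch for the document snippet above, assuming it is Sequel's PostgreSQL adapter hook Dataset#supports_returning? (the method name and docstring match Sequel's conventions; the connection URL and table below are hypothetical). When the hook returns true, insert/update/delete datasets accept a RETURNING clause:

require 'sequel'
DB = Sequel.connect('postgres://localhost/mydb') # hypothetical database

# RETURNING on insert: fetch generated values without a second query.
rows = DB[:items].returning(:id).insert(name: 'widget')
# rows => [{:id=>42}]

# RETURNING on delete: collect the rows that were removed.
deleted = DB[:items].where(name: 'widget').returning(:id, :name).delete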
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def supported?\n false\n end",
"def supports?\n fail \"Method 'supports?' must be defined\"\n end",
"def allowed?() raise NotImplementedError end",
"def supports?\n raise \"Method 'supports?' must be defined\"\n end",
"def supported\n @supported ||= []\n end",
"def supported?(name); end",
"def supported?\n !!@supported_proc.call\n end",
"def supports?\n !@version.nil?\n end",
"def allowed?; true end",
"def allowed?\n true\n end",
"def allowed?\n true\n end",
"def supports?(type)\n supported.include? type.to_sym\n end",
"def allowed?\n true\n end",
"def applicable?\n true\n end",
"def available?\n raise ::NotImplementedError\n end",
"def support\n raise \"Not implemented\"\n end",
"def suitable?\n false\n end",
"def suitable?\n false\n end",
"def available?\n return false\n end",
"def valid_for_platform?; true; end",
"def allowed?\n allowed\n end",
"def applicable?\n true\n end",
"def supported?\n supports_platform? && supports_runtime?\n end",
"def available; end",
"def available; end",
"def available?\n true\n end",
"def support\n end",
"def is_allowed?\n self.allowed.include?(@architecture)\n end",
"def supports_options?\n return true\n end",
"def supports_is_true?\n false\n end",
"def available?()\n #This is a stub, used for indexing\n end",
"def isSatisfiable?()\n\tend",
"def version_supported?\n\tKitchenplan::Log.debug \"#{self.class} : Is platform version lower than #{@lowest_version_supported}?\"\n\treturn false if self.version.to_s < @lowest_version_supported\n\ttrue\n end",
"def version_supported?\n\tKitchenplan::Log.debug \"#{self.class} : Is platform version lower than #{@lowest_version_supported}?\"\n\treturn false if self.version.to_s < @lowest_version_supported\n\ttrue\n end",
"def version_supported?\n\tKitchenplan::Log.debug \"#{self.class} : Is platform version lower than #{@lowest_version_supported}?\"\n\treturn false if self.version.to_s < @lowest_version_supported\n\ttrue\n end",
"def controlable?\n respond_to?(:call)\n end",
"def acceptable?(value)\n true\n end",
"def accepted?\n false\n end",
"def test_isSupported07\n doc = nil\n rootNode = nil\n state = nil\n doc = load_document(\"staff\", false)\n rootNode = doc.documentElement()\n state = rootNode.isSupported(\"XML\", \"\")\n assertTrue(\"throw_True\", state)\n \n end",
"def activated?\n raise NotImplementedError\n end",
"def activated?\n raise NotImplementedError\n end",
"def recommendable?() false end",
"def available?\n false\n end",
"def physical?\n fail NotImplementedError\n end",
"def ordinary?\n raise \"Not implemented yet\"\n end",
"def is_supported_by_termination?\r\n self.find_termination_supporter.nil? ? false : true\r\n end",
"def available?\n true\n end",
"def supported?\n CurrencyExchangeRate.supported?(self)\n end",
"def enabled?; end",
"def enabled?; end",
"def supported? docs_source\n fail NotImplementedError\n end",
"def compilable?\n false\n end",
"def ok_enabled?\r\n return true\r\n end",
"def usable?; end",
"def legacy?\n false\n end",
"def match?\n raise NotImplementedError, 'please implement #match? '\\\n \"for #{self.class.name} which should return true or false, \"\\\n 'depending on if it can decide #allowed?'\n end",
"def allowed?(*_)\n true\n end",
"def magical?\n fail NotImplementedError\n end",
"def certain?\n fail NotImplementedError\n end",
"def supported?(method)\n !@operations.include?(MAP.fetch(method))\n end",
"def supported?\n each_accepted_cipher_suite.any?\n end",
"def compilable?\n false\n end",
"def acceptable? *args\n true\n end",
"def is_compliant()\n\t\tend",
"def valid?\n not_implemented\n end",
"def editing_supported?\n !(is_multivalued? || is_resource?)\n end",
"def supports?(feature)\n features.include?(feature)\n end",
"def respond_to?(m)\n m || super\n end",
"def __accepted__\n defined?(accepted) ? accepted : false\n end",
"def enabled_all?; end",
"def requestable?\n (ill? || available? || recallable? ||\n processing? || on_order? || offsite? || ezborrow?)\n end",
"def support?\n Support.new(ENV, verbose: verbose).support?\n end",
"def supported_format?\n !(@file_set.mime_type & self.class.supported_formats).empty?\n end",
"def capable?(key); end",
"def property_supported?(property)\n self.class.property_registered?(property, :supported)\n end",
"def standard?\n STANDARD_CAPABILITIES.include?(@name)\n end",
"def versionable?\n !respond_to?(:is_standard?) || !is_standard?\n end",
"def need_selection?\n fail NotImplementedError\n end",
"def respond_to_missing?(*)\n true\n end",
"def valid?\n raise 'Not Implemented'\n end",
"def valid?\n raise NotImplementedError\n end",
"def supported_versions\n raise NotImplementedError\n end",
"def valid?\n raise Errors::NotImplementedError\n end",
"def check?\n raise NotImplementedError\n end",
"def respond_to?(*)\n true\n end",
"def ok_enabled?\n true\n end",
"def allowed?\n raise \"Unimplemented 'allowed?' for intention: #{self.inspect}!\"\n end",
"def can_have_value?()\n return true\n end",
"def can_have_value?\n return true\n end",
"def recognized?\n return !self.etypes.empty?\n end",
"def support?(mode)\n @modes.keys.include?(mode)\n end",
"def mixed?\n return false\n end",
"def can_emulate\n {\n method: \"Emulation.canEmulate\"\n }\n end",
"def present?\n\t\t\traise 'not implemented'\n\t\tend",
"def valid?\n raise NotImplementedError\n end",
"def supports_deferrable_constraints?\n server_version >= 90000\n end",
"def strict? default = false\n raise NotImplementedError.new(\"Classes including the Api module must provide a strict? method\")\n end",
"def disabled?\n deprecated? || deleted?\n end",
"def is_per_device_acceptance_required\n return @is_per_device_acceptance_required\n end",
"def can_use_api?\n api_enabled\n end",
"def respond_to_missing?(*several_variants)\n super\n end"
] |
[
"0.843344",
"0.7700669",
"0.7631781",
"0.7629838",
"0.76231104",
"0.7519657",
"0.73208463",
"0.7241568",
"0.7153662",
"0.7140632",
"0.7140632",
"0.71038365",
"0.7073607",
"0.6998771",
"0.6959675",
"0.68993336",
"0.68871474",
"0.68871474",
"0.67963094",
"0.678926",
"0.6754085",
"0.67360824",
"0.66793126",
"0.66762227",
"0.66762227",
"0.66729814",
"0.6616343",
"0.6605055",
"0.6596759",
"0.6577984",
"0.6544926",
"0.6544446",
"0.65158963",
"0.65158963",
"0.65158963",
"0.6514209",
"0.6509086",
"0.641482",
"0.64144975",
"0.63879603",
"0.63879603",
"0.63855207",
"0.6374666",
"0.63599014",
"0.6348747",
"0.63349116",
"0.6323082",
"0.6313027",
"0.6307359",
"0.6307359",
"0.6299256",
"0.6290418",
"0.62898016",
"0.62869525",
"0.6258572",
"0.62529576",
"0.6249392",
"0.6242818",
"0.6241471",
"0.62256545",
"0.62235075",
"0.62135595",
"0.6210535",
"0.6194427",
"0.6184473",
"0.6183717",
"0.6181662",
"0.6165529",
"0.6152564",
"0.6135007",
"0.6124178",
"0.6114715",
"0.61077976",
"0.60988283",
"0.609823",
"0.6093431",
"0.60914737",
"0.6088289",
"0.6073752",
"0.6072193",
"0.6066783",
"0.6064549",
"0.6062034",
"0.6061061",
"0.6052451",
"0.60490984",
"0.6047816",
"0.604639",
"0.6042661",
"0.60329026",
"0.60283005",
"0.60265404",
"0.6026098",
"0.6021368",
"0.60177374",
"0.6005852",
"0.6003093",
"0.5998464",
"0.5993521",
"0.59918493",
"0.59869814"
] |
0.0
|
-1
|
PostgreSQL supports pattern matching via regular expressions
|
def supports_regexp?
true
end
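A brief sketch of what this capability enables, assuming the snippet is Sequel's PostgreSQL adapter: when supports_regexp? is true, Ruby Regexp values in dataset filters are literalized to PostgreSQL's ~ / ~* operators (the table and column names here are hypothetical):

# Case-insensitive regexp match, translated to ~* on PostgreSQL.
DB[:users].where(name: /^ali/i).sql
# => roughly: SELECT * FROM "users" WHERE ("name" ~* '^ali')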
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def match(pattern); end",
"def match(regexp); end",
"def sql_match_pattern(column, value, **opt)\n '(%s)' % sql_test(column, value, **opt)\n end",
"def regexp_for_sqlite\n if ActiveRecord::Base.connection.adapter_name.eql?(\"SQLite\")\n db = ActiveRecord::Base.connection.instance_variable_get(:@connection)\n db.create_function(\"regexp\", 2) do |func, expr, value|\n begin\n if value.to_s && value.to_s.match(Regexp.new(expr.to_s))\n func.set_result 1\n else\n func.set_result 0\n end\n rescue => e\n puts \"error: #{e}\"\n end\n end\n end\n end",
"def pattern2regex(pattern); end",
"def regexp; end",
"def regexp; end",
"def select_regexp(table_name)\n %r{SELECT (`?(?:#{table_name})?`?.?\\\\*) FROM}\n end",
"def matches?(pattern); end",
"def matching_lines(regex); end",
"def on_match_pattern(node); end",
"def on_match_pattern_p(node); end",
"def match(input); end",
"def match(regexp)\n return regexp.match(pickle_format)\n end",
"def match(input)\n regexp.match(input)\n end",
"def match(p0) end",
"def match(p0) end",
"def pre_match() end",
"def regexp\n @regexp ||= Regexp.compile(source.to_s, Regexp::IGNORECASE)\n end",
"def extract(pattern); end",
"def where_regex(attr, value, flags: \"e\")\n where_operator(attr, :matches_regexp, \"(?#{flags})\" + value)\n end",
"def where_regex(attr, value, flags: \"e\")\n where_operator(attr, :matches_regexp, \"(?#{flags})\" + value)\n end",
"def regex_col(col, thing)\n all.find_all {|row| Regexp.new(row[col], Regexp::IGNORECASE) =~ thing}\n end",
"def test_extended_patterns_no_flags\n [\n [ \".*\", \"abcd\\nefg\", \"abcd\" ],\n [ \"^a.\", \"abcd\\naefg\", \"ab\" ],\n [ \"^a.\", \"bacd\\naefg\", \"ae\" ],\n [ \".$\", \"bacd\\naefg\", \"d\" ]\n ].each do |reg, str, result|\n m = RustRegexp.new(reg).match(str)\n puts m.inspect\n unless m.nil?\n assert_equal result, m[0]\n end\n end\n end",
"def fnmatch(matcher); end",
"def string_matches_pattern(pattern, string)\n case pattern\n when Regexp\n pattern === node['string']\n when String\n string.include?(pattern)\n end\nend",
"def regexify(label)\n if label.is_a?(Regexp)\n Regexp.new(label.source, Regexp::IGNORECASE)\n elsif label.is_a?(Symbol)\n if ROW_TYPES.keys.include?(label)\n ROW_TYPES[label]\n else\n Regexp.new(\"^#{label.to_s}$\", Regexp::IGNORECASE)\n end\n else\n Regexp.new(Regexp.escape(label.to_s), Regexp::IGNORECASE)\n end\n end",
"def regexp=(_arg0); end",
"def check_scan(s, pattern); end",
"def check_scan(s, pattern); end",
"def test_match_regexp\n pattern = /Hello,*/\n value = \"Hello, World\"\n check = Maze::Checks::AssertCheck.new\n check.match pattern, value\n end",
"def scrooge_select_regex\n @@scrooge_select_regexes[self.table_name] ||= Regexp.compile( \"SELECT (`?(?:#{table_name})?`?.?\\\\*) FROM\" )\n end",
"def show_regexp(string, pattern)\n match = pattern.match(string)\n if match\n \"#{match.pre_match}->#{match[0]}<-#{match.post_match}\"\n else\n \"no match\"\n end\nend",
"def regex(pattern)\n Regexp.new pattern.regex\n end",
"def regexp_with_working_captures?(node); end",
"def match_to_regex(match, type=:fuzzy)\n match.downcase!\n if type == :exact\n /^#{match}$/\n else\n /#{match}/\n end\n end",
"def validateVertexPattern(to_match)\n\t\t# Pattern to check for special charectors\n\t\tpattern = /[A-Za-z]/ \n\t\treturn to_match =~ pattern\n\tend",
"def matches(_ext); end",
"def matches(_ext); end",
"def regexps; end",
"def matches_wildcard_pattern(str, pattern)\n # First we need to build a regex out of the pattern\n regex = build_regex_from_pattern pattern\n # Then we check if the regex matches the string\n match_data = regex.match str\n # and if the matched data is actually the whole string (avoid errors with channels with the same prefix)\n !match_data.nil? && match_data[0] == str\n end",
"def match; end",
"def match; end",
"def word_pattern(pattern, input)\n \nend",
"def scan(pattern); end",
"def pattern\n segs = @tags.map { |tagged_segment| build_segment(tagged_segment) }\n segs.last.gsub!(/\\.$/, '')\n segs.unshift \"^\"\n segs.push \"\\\\.?$\"\n Regexp.new(segs.join)\n end",
"def all_regex(col, regex)\n regex = Regexp.new(regex, Regexp::IGNORECASE) if String === regex\n self.all.find_all {|row| row[col] =~ regex}\n end",
"def methods_matching(re); end",
"def db_string_regex(type)\n \"--#{type}='?([a-zA-Z0-9!@\\#$%^&*-=+]+)'?\\s\"\nend",
"def fnmatch?(matcher); end",
"def submatcher; end",
"def submatcher; end",
"def matches(ext); end",
"def case_insensitive_match; end",
"def match(keyword); end",
"def split_sql(rest_of_query, *regexs)\n results = Array.new\n\n regexs.each do |regex|\n if position = (regex =~ rest_of_query)\n # Extract the matched string and chop the rest_of_query\n matched = rest_of_query[position..-1]\n rest_of_query = rest_of_query[0...position]\n else\n matched = nil\n end\n\n results << matched\n end\n results << rest_of_query\n\n results\n end",
"def regex_search\n if use_regex?\n ::Arel::Nodes::Regexp.new((custom_field? ? field : table[field]), ::Arel::Nodes.build_quoted(formated_value))\n else\n non_regex_search\n end\n end",
"def matches_pattern?(text)\n text.split('|').count == 3\n end",
"def create_match(nominee)\n names = []\n pname = nominee[:name]\n names << pname\n names << pname.sub(%r{ [A-Z]\\. }, ' ') # drop initial\n personname = ASF::Person.find(nominee[:id]).public_name\n names << personname if personname\n list = names.uniq.map{|name| Regexp.escape(name)}.join('|')\n # N.B. \\b does not match if it follows ')', so won't match John (Fred)\n # TODO: Work-round is to also look for EOS, but this needs to be improved\n %r{\\b(#{list})(\\b|$)}i\nend",
"def match_string_to_regexp(str)\n #str = str.split(/(\\(\\(.*?\\)\\))(?!\\))/).map{ |x|\n # x =~ /\\A\\(\\((.*)\\)\\)\\Z/ ? $1 : Regexp.escape(x)\n #}.join\n #str = str.gsub(/\\\\\\s+/, '\\s+')\n #Regexp.new(str, Regexp::IGNORECASE)\n\n #str = str.split(/([#$]\\(.*?\\))/).map{ |x|\n # x =~ /\\A[#$]\\((.*)\\)\\Z/ ? ($1.start_with?('#') ? \"(#{$1})\" : $1 ) : Regexp.escape(x)\n #}.join\n #str = str.gsub(/\\\\\\s+/, '\\s+')\n #Regexp.new(str, Regexp::IGNORECASE)\n\n$stderr.puts \"HERE!!!!!!\"\n\n str = str.split(PATTERN).map{ |x|\n case x\n when /\\A\\(\\((.*)\\)\\)\\Z/\n $1\n when /\\A[#$]\\((.*)\\)\\Z/\n $1.start_with?('#') ? \"(#{$1})\" : $1\n else\n Regexp.escape(x)\n end\n }.join\n\n str = str.gsub(/\\\\\\s+/, '\\s+')\n\n Regexp.new(str, Regexp::IGNORECASE)\n\n #rexps = []\n #str = str.gsub(/\\(\\((.*?)\\)\\)/) do |m|\n # rexps << '(' + $1 + ')'\n # \"\\0\"\n #end\n #str = Regexp.escape(str)\n #rexps.each do |r|\n # str = str.sub(\"\\0\", r)\n #end\n #str = str.gsub(/(\\\\\\ )+/, '\\s+')\n #Regexp.new(str, Regexp::IGNORECASE)\n end",
"def pattern2regex(pattern)\n tail = pattern\n prefix = String.new\n while !tail.empty? do\n head, sep, tail = tail.partition(/[\\*\\?]/)\n prefix = prefix + Regexp.quote(head)\n case sep\n when '*'\n prefix += '.*'\n when '?'\n prefix += '.'\n when ''\n else\n fail \"Unpexpcted sep:#{sep}\"\n end\n end\n Regexp.new(\"^\" + prefix + \"$\", true)\n end",
"def datetime_pattern(field)\n pattern1 = field.scan(/[0-9]\\//)\n pattern2 = field.scan(/[0-9]\\-/)\n pattern3 = field.scan(/[0-9] /)\n pattern4 = field.scan(/[0-9] [A-Z][a-z][a-z] [0-9]|[0-9]-[A-Z][a-z][a-z]-[0-9]|[0-9] [a-z][a-z][a-z] [0-9]|[0-9]-[a-z][a-z][a-z]-[0-9]/)\n if(pattern1.size == 2||pattern2.size == 2||pattern3.size == 2||pattern4.size != 0)\n return true\n else\n return false\n end\nend",
"def regex_for(pattern)\n return pattern if Regexp === pattern\n pattern = pattern.split('.') if String === pattern\n\n source = ''\n pattern.each_with_index do |part, index|\n if part == '*'\n source << '\\\\.' unless index == 0\n source << '[^\\.]+'\n elsif part == '#'\n source << '.*?' # .*? ?\n else\n source << '\\\\.' unless index == 0\n source << part\n end\n end\n\n Regexp.new(\"\\\\A#{source}\\\\Z\")\n end",
"def match(input)\n input \n end",
"def _match column, field, value, field_is_int = false, opts = { }\n b = lambda { | fmt, v | \"(#{field}#{fmt % Content.connection.quote(v)})\" }\n _match_and(column, b, value, field_is_int, opts)\n end",
"def regex(_obj)\n raise NotImplementedError\n end",
"def test_a_regexp_can_search_a_string_for_matching_content\n assert_equal 'match', \"some matching content\"[/match/]\n end",
"def match_query(query); end",
"def to_like(pattern)\n <<~SQL\n REPLACE(REPLACE(REPLACE(#{pattern},\n #{q('%')}, #{q('\\\\%')}),\n #{q('_')}, #{q('\\\\_')}),\n #{q('*')}, #{q('%')})\n SQL\n end",
"def create_filter(regexs, field = nil)\n return nil if regexs.nil? || regexs.empty?\n\n # /<regex>/i match pattern ignore case\n proc { |e| regexs.index do |regex|\n return /#{regex}/i =~ e if field\n return /#{regex}/i =~ e[field] if field\n end }\n end",
"def compile_pattern(pattern)\n Regexp.compile(\"\\\\.(#{pattern})$\")\n end",
"def query_regexp( query )\n read_db do |dbm|\n dbm.each_key do |key|\n puts RDictCcEntry.format_str(dbm[key]) if key =~ /#{query}/\n end\n end\n end",
"def each_match_range(range, regex); end",
"def tag_regexp(prefix = \"\")\n Regexp.new(\"^:#{prefix}(\" + @filters.keys.join('|') + \")(.*)\")\n end",
"def match(regexp)\n self.decoded.match(regexp)\n end",
"def validation_regex\n {\n name: {\n regex: ConvergDB::ValidationRegex::SQL_IDENTIFIER,\n mandatory: true\n }\n }\n end",
"def validation_regex\n {\n name: {\n regex: ConvergDB::ValidationRegex::SQL_IDENTIFIER,\n mandatory: true\n }\n }\n end",
"def regex_arrow_with_function\n Regexp.new(\"-> a#{VM_PREFIX}_line\\\\(\\\\d+\\\\),\")\nend",
"def match?(pattern)\n do_scan pattern, false, false, true\n end",
"def re; end",
"def test_string_match\n s = \"a\"\n assert_equal(0, string_match(\"a\", s))\n assert_equal(0, string_match(/a/, s))\n assert_equal(0, string_match('\\(a\\|\\b\\)', s))\n assert_equal(0, string_match(/a|b/, s))\n assert_equal(0, string_match(/^a/, s))\n assert_equal(0, string_match(/a$/, s))\n assert_equal(0, string_match(/.*/, s))\n assert_equal(nil, string_match(/not-match/, s))\n \n end",
"def regexp\n pattern = '(?:' + Regexp.union([@name] + @aliases).source + ')'\n\n @arguments.each_value do |format|\n arg_regexp = case format\n when Array then Regexp.union(format)\n when Regexp then format\n when Symbol then ARG_FORMATS.fetch(format)\n else Regexp.escape(format.to_s)\n end\n\n pattern << ' (' << arg_regexp.source << ')'\n end\n\n # match the full message\n pattern << '$'\n\n return Regexp.new(pattern)\n end",
"def matchanywhere(rgx, text)\n if rgx[0] == '^'\n return matchhere(rgx[1..-1], text)\n elsif matchhere(rgx, text)\n return true\n elsif text.nil? && !rgx.nil?\n return false\n else\n return matchanywhere(rgx, text[1..-1])\n end\nend",
"def match?(name, literal) true end",
"def regex_match st\n \n x = (/^\\d{5}$/)\n st.match?(x)\nend",
"def match(line)\n regex = self.to_regexp\n regex.match(line.value)\n end",
"def path_regex\n Regexp.new path_pattern, 'i'\n end",
"def match_against filename\n @regexp.match(filename)\n end",
"def regexp(exp)\n begin\n match = exp.match(@data)\n yield match if match\n rescue ArgumentError # velmi zriedkava UTF8 chyba\n end\n end",
"def translate_key_into_regexp(k)\n # replace :id with regular expression\n %r{^#{k.gsub /\\:\\w+/, '[a-z0-9_.-]+'}$}i\n end",
"def regex_for_slug\n if embedded? || Mongoid::Compatibility::Version.mongoid3? || Mongoid::Compatibility::Version.mongoid4?\n Regexp.new(escaped_pattern)\n else\n BSON::Regexp::Raw.new(escaped_pattern)\n end\n end",
"def parse_pattern(pattern)\n return pattern if pattern.is_a?(Regexp)\n\n pattern = pattern.split(/\\//).map do |part|\n if /^:/ =~ part\n \"(?<#{part.sub(\":\", \"\")}>\\\\d+)\"\n else\n part\n end\n end.join(\"/\")\n\n Regexp.new(\"^#{pattern}$\")\n end",
"def quote_regexp(value)\n quote_string(value.source)\n end",
"def match_captures; end",
"def allow_matcher; end",
"def key_matcher(pattern, options); end",
"def short_pattern\n /\n (?:^|\\W) # beginning of string or non-word char\n (?:(#{qualifier_regex})(?:\\s))? # qualifier (optional)\n (?:(#{REPOSITORY_NAME})? # repository name (optional)\n \\#|(?:GH\\-))(\\d+) # issue number\n (?=\n \\.+[ \\t]| # dots followed by space or non-word character\n \\.+$| # dots at end of line\n [^0-9a-zA-Z_.]| # non-word character except dot\n $ # end of line\n )\n /ix\n end",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend",
"def match_in_string?(string, regex)\n string.match(regex).class == MatchData\nend"
] |
[
"0.7205053",
"0.7067555",
"0.7002273",
"0.6855836",
"0.6748494",
"0.6593988",
"0.6593988",
"0.6397198",
"0.6342313",
"0.63078797",
"0.6296916",
"0.6256902",
"0.6157808",
"0.609136",
"0.6068666",
"0.6062908",
"0.6062908",
"0.60525745",
"0.6016549",
"0.59992856",
"0.5995987",
"0.5995987",
"0.59671",
"0.5959094",
"0.59568167",
"0.59148383",
"0.5880098",
"0.58720946",
"0.5862454",
"0.5862454",
"0.58585596",
"0.5842509",
"0.5822905",
"0.5804605",
"0.5737548",
"0.57238173",
"0.5713207",
"0.57054615",
"0.57054615",
"0.5704736",
"0.56853217",
"0.5684757",
"0.5684757",
"0.5678547",
"0.5673388",
"0.5671749",
"0.5671364",
"0.56676364",
"0.5666634",
"0.56498456",
"0.5642395",
"0.5642395",
"0.56345",
"0.56227434",
"0.56157243",
"0.56109697",
"0.5575031",
"0.55701995",
"0.556346",
"0.55564326",
"0.5552713",
"0.552988",
"0.55273193",
"0.55272067",
"0.5512714",
"0.5509139",
"0.5486723",
"0.5484405",
"0.5481279",
"0.5454462",
"0.54502416",
"0.54394263",
"0.54358494",
"0.54203904",
"0.54074556",
"0.54037875",
"0.54037875",
"0.54036945",
"0.5394995",
"0.53787816",
"0.53768736",
"0.5374568",
"0.5368343",
"0.53534263",
"0.5347523",
"0.53455776",
"0.534479",
"0.5341263",
"0.53337157",
"0.5331618",
"0.5327814",
"0.532513",
"0.53216183",
"0.53164274",
"0.531518",
"0.5307416",
"0.5299758",
"0.52976686",
"0.52976686"
] |
0.56653345
|
49
|
PostgreSQL 9.5+ supports SKIP LOCKED.
|
def supports_skip_locked?
server_version >= 90500
end
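The version check corresponds to PostgreSQL 9.5, whose server_version is reported as 90500. A hypothetical job-queue sketch, assuming Sequel's Dataset#for_update and Dataset#skip_locked (the latter is quoted among the negatives below); with SKIP LOCKED, concurrent workers each claim different pending rows instead of blocking on one another:

# Claim one pending job; rows locked by other workers are skipped.
# Run inside a transaction so the row lock is held while working.
DB.transaction do
  job = DB[:jobs].where(state: 'pending').order(:id).for_update.skip_locked.first
  process(job) if job # process is a hypothetical worker method
end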
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def skip_locked\n cached_dataset(:_skip_locked_ds) do\n raise(Error, 'This dataset does not support skipping locked rows') unless supports_skip_locked?\n clone(:skip_locked=>true)\n end\n end",
"def visit_Arel_Nodes_Lock(o, a = nil)\n # SQL Layer does not support row locks\n end",
"def without_locking\n old_value = Parts::WithLocking.locking?\n begin\n Parts::WithLocking.locking = false\n ret_value = yield\n ensure\n Parts::WithLocking.locking = old_value\n end\n ret_value\nend",
"def nolock\n clone(:table_options => \"(NOLOCK)\")\n end",
"def try_await_lock(table, i); end",
"def without_locking(&block)\n current = ActiveRecord::Base.lock_optimistically\n ActiveRecord::Base.lock_optimistically = false if current\n begin\n block.call\n ensure\n ActiveRecord::Base.lock_optimistically = true if current\n end\n end",
"def locked; end",
"def locks\n sql = %q(\n select\n pg_stat_activity.procpid,\n pg_class.relname,\n pg_locks.transactionid,\n pg_locks.granted,\n substr(pg_stat_activity.current_query,1,30) as query_snippet,\n age(now(),pg_stat_activity.query_start) as \"age\"\n from pg_stat_activity,pg_locks left\n outer join pg_class on (pg_locks.relation = pg_class.oid)\n where pg_stat_activity.current_query <> '<insufficient privilege>' and\n pg_locks.pid=pg_stat_activity.procpid and pg_locks.mode = 'ExclusiveLock' order by query_start)\n\n exec_sql(sql, find_uri)\n end",
"def select_lock_sql(sql)\n lock = @opts[:lock]\n if lock == :share\n sql << ' FOR SHARE'\n else\n super\n end\n\n if lock\n if @opts[:skip_locked]\n sql << \" SKIP LOCKED\"\n elsif @opts[:nowait]\n sql << \" NOWAIT\"\n end\n end\n end",
"def blocking\n sql = %q(\n select bl.pid as blocked_pid,\n ka.current_query as blocking_statement,\n now() - ka.query_start as blocking_duration,\n kl.pid as blocking_pid,\n a.current_query as blocked_statement,\n now() - a.query_start as blocked_duration\n from pg_catalog.pg_locks bl\n join pg_catalog.pg_stat_activity a\n on bl.pid = a.procpid\n join pg_catalog.pg_locks kl\n join pg_catalog.pg_stat_activity ka\n on kl.pid = ka.procpid\n on bl.transactionid = kl.transactionid and bl.pid != kl.pid\n where not bl.granted)\n\n exec_sql(sql, find_uri)\n end",
"def lock\n shaz_nolock_lock if !@nolock\n end",
"def locked\n end",
"def try_lock\n end",
"def skip_or_pending_inside_block?(param0 = T.unsafe(nil)); end",
"def select_lock_sql(sql)\n @opts[:lock] == :update ? sql : super\n end",
"def locked_transaction(&block)\n unless connection.open_transactions.zero?\n raise \"locked_transaction cannot be called from within another transaction!\"\n end\n unsafe_locked_transaction do\n yield if block_given?\n end\n end",
"def lock!; end",
"def locked?\n end",
"def non_blocking_lock!( *args, &block )\n unless non_blocking_lock( *args, &block )\n raise ::LockFailed.new( \"Failed to obtain a lock.\" )\n end\n end",
"def supports_locking?\n false #true\n end",
"def exclusive_schema_lock()\n check_return_code(PureHailDB.ib_schema_lock_exclusive(@trx_ptr))\n end",
"def nolock\n clone(:with => \"(NOLOCK)\")\n end",
"def with_database_exclusive_table_lock_postgresql(&block)\n # If we just use the regular :sanitize_sql support, we get:\n # LOCK TABLE 'foo'\n # ...which, for whatever reason, PostgreSQL doesn't like. Escaping it this way works fine.\n escaped = @low_card_model.connection.quote_table_name(@low_card_model.table_name)\n run_sql(\"LOCK TABLE #{escaped}\", { })\n block.call\n end",
"def without_locking(&block)\n self.class.without_locking(&block)\n end",
"def without_transaction\n n = Thread.current['open_transactions']\n Thread.current['open_transactions'] = 1\n yield\n Thread.current['open_transactions'] = n\n end",
"def skip_session_commit\n request.session_options[:skip] = true\n end",
"def transaction_disabled=(_arg0); end",
"def lock; end",
"def lock; end",
"def lock; end",
"def if_access_locked; end",
"def lock\n end",
"def process_only(session, wait = T.unsafe(nil)); end",
"def do_LOCK(req, res)\n end",
"def add_lock!( sql, options )\n sql\n end",
"def enable_lock\n add option: \"-lock=true\"\n end",
"def add_lock!(sql, options)\n sql\n end",
"def acquire_lock\n\t\t@@logger.info { \"Acquiring a lock in the database.\" } if have_logger?\n\t\tTournament.dataset.filter(:id => self.id, :locked => false).update(:locked => true) != 0\n\tend",
"def with_database_exclusive_table_lock_sqlite(&block)\n block.call\n end",
"def lock(mode, opts=OPTS)\n if defined?(yield) # perform locking inside a transaction and yield to block\n @db.transaction(opts){lock(mode, opts); yield}\n else\n sql = 'LOCK TABLE '.dup\n source_list_append(sql, @opts[:from])\n mode = mode.to_s.upcase.strip\n unless LOCK_MODES.include?(mode)\n raise Error, \"Unsupported lock mode: #{mode}\"\n end\n sql << \" IN #{mode} MODE\"\n @db.execute(sql, opts)\n end\n nil\n end",
"def lock_expired?; end",
"def access_locked?; end",
"def msg_MLOCK(source, args)\n return nil\n end",
"def lock_next(worker)\n NotImplementedError\n end",
"def ensure_exclusive\n acquire_locks\n write_pid\n end",
"def return_lock\n\t\t@@logger.info { \"Returning the lock to the database.\" } if have_logger?\n\t\tTournament.dataset.filter(:id => self.id, :locked => true).update(:locked => false) != 0\n\tend",
"def lock_timeout; end",
"def select_and_lock(relation, limit:)\n relation = upcoming(relation)\n\n # FOR UPDATE SKIP LOCKED selects and locks entries, but skips those that\n # are already locked - preventing this transaction from being locked.\n sql = relation.to_sql + \" FOR UPDATE SKIP LOCKED\"\n sql += \" LIMIT #{limit}\" if limit\n\n item_class.find_by_sql(sql)\n end",
"def lock(&block)\n # TODO: only use replace strategy when server is executing the lock\n return call_strategy unless (locked_token = locksmith.lock(&block))\n\n locked_token\n end",
"def skip_wait\n @skip_wait = true\n end",
"def with_lock(key)\n raise NotImplementedError\n end",
"def is_locked?\n locked\n end",
"def supports_advisory_locks?\n false\n end",
"def supports_advisory_locks?\n false\n end",
"def transaction_disabled; end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def lock_list\n super\n end",
"def try_lock\n puts \"I couldn't get a lock.\" unless \n open_lock('contested', 'w', File::LOCK_EX | File::LOCK_NB) do\n puts \"I've got a lock!\" \n true\n end\nend",
"def with_lock_retries(*args, **kwargs, &block)\n if transaction_open?\n if enable_lock_retries?\n Gitlab::AppLogger.warn 'Lock retries already enabled, executing the block directly'\n yield\n else\n raise <<~EOF\n #{__callee__} can not be run inside an already open transaction\n\n Use migration-level lock retries instead, see https://docs.gitlab.com/ee/development/migration_style_guide.html#retry-mechanism-when-acquiring-database-locks\n EOF\n end\n else\n super(*args, **kwargs.merge(allow_savepoints: false), &block)\n end\n end",
"def lock_is_exclusive?\n lockscope == 'exclusive'\n end",
"def unlocked?\n not locked?\n end",
"def disable_paranoid\n if block_given?\n @paranoid = false\n yield\n else\n raise 'Only block form is supported'\n end\n ensure\n @paranoid = true\n end",
"def lock(mode, &block)\n sql = LOCK % [source_list(@opts[:from]), mode]\n @db.synchronize do\n if block # perform locking inside a transaction and yield to block\n @db.transaction {@db.execute(sql); yield}\n else\n @db.execute(sql) # lock without a transaction\n self\n end\n end\n end",
"def locked?\n locked = false\n status_constant == OldOrder::UNPROCESSED ? locked = false : locked = true\n locked = true unless invoice.blank?\n return locked\n end",
"def lock(mode, &block)\n sql = LOCK % [@opts[:from], mode]\n @db.synchronize do\n if block # perform locking inside a transaction and yield to block\n @db.transaction {@db.execute(sql); yield}\n else\n @db.execute(sql) # lock without a transaction\n self\n end\n end\n end",
"def add_lock!(sql, options)\n case lock = options[:lock]\n when true; sql << ' FOR UPDATE'\n when String; sql << \" #{lock}\"\n end\n end",
"def lock(name, mode)\n yield\n end",
"def recover_from_timeout(pid, name)\n with_dedicated_connection do |con|\n lock = select_one(<<~SQL, pid, name, connection: con)\n SELECT locktype, objid, pid, granted FROM pg_locks \\\n WHERE pid = ? AND locktype = 'advisory' AND objid = hashtext(?)\n SQL\n return false unless lock\n\n if lock['granted']\n logger&.info 'DBLock: Lock was acquired after all'\n true\n else\n res = select_value 'SELECT pg_cancel_backend(?)', pid, connection: con\n logger&.warn 'DBLock: Failed to cancel ungranted lock query' unless res == true\n false\n end\n end\n end",
"def lock_timeout=(_arg0); end",
"def locked?\n raise NotImplementedError\n end",
"def lock_changes\n begin\n @lock_count += 1\n yield\n ensure\n @lock_count -= 1\n end\n end",
"def lock\n post :lock\n end",
"def with_instance_locked_for(operation, &block)\n return yield unless operation.version\n\n key = \"#{app}:#{operation.key}\"\n lock = Redis::Lock.new(key, LOCK_OPTIONS.merge(:redis => Promiscuous::Redis.connection))\n\n unless lock.lock\n raise Promiscuous::Error::LockUnavailable.new(lock.key)\n end\n\n begin\n yield\n ensure\n unless lock.unlock\n # TODO Be safe in case we have a duplicate message and lost the lock on it\n raise \"The subscriber lost the lock during its operation. It means that someone else\\n\"+\n \"received a duplicate message, and we got screwed.\\n\"\n end\n end\n end",
"def dont_process_while &blk\n self.update_attributes(:persistence_checksum => \"skip\") \n yield\n self.update_persistence_checksum\n end",
"def lock_table(db, table)\n begin\n db.query(\"LOCK #{table}\")\n rescue Mysql::Error => e\n $stderr.puts \"Error code: #{e.errno}\"\n $stderr.puts \"Error message: #{e.error}\"\n $stderr.puts \"Error SQLSTATE: #{e.sqlstate}\" if e.respond_to?(\"sqlstate\")\n exit\n end\n end",
"def lock_keys\n @lock_keys ||= begin\n [stable_hashcode(lock_name), ENV['WITH_ADVISORY_LOCK_PREFIX']].map do |ea|\n # pg advisory args must be 31 bit ints\n ea.to_i & 0x7fffffff\n end\n end\n end",
"def lock_keys\n @lock_keys ||= begin\n [stable_hashcode(lock_name), ENV['WITH_ADVISORY_LOCK_PREFIX']].map do |ea|\n # pg advisory args must be 31 bit ints\n ea.to_i & 0x7fffffff\n end\n end\n end",
"def work\n stat :attempting_lock_on, item_id: object_id\n if @mutex.try_lock\n stat :has_lock_on, item_id: object_id\n chore\n stat :releasing_lock_on, item_id: object_id\n @mutex.unlock\n else\n stat :bailed_on, item_id: object_id\n end\n end",
"def lock_for_update(name=nil)\n if locked_for_update?(name)\n logger.debug { \"we are locked for update, yield to the block\" }\n yield\n else\n zk_with_lock(:mode => :exclusive, :name => name) { yield }\n end\n end",
"def stuck_merge_requests\n MergeRequest.select('id, merge_jid').with_state(:locked).where.not(merge_jid: nil).reorder(nil)\n end",
"def lock\n self.is_locked = true\n self\n end",
"def exclusive(timeout: 10.minutes)\n counter = 0\n log_wait = proc do |owner|\n if (counter += 1) % 10 == 1\n executor.output.write(\"Waiting for repository lock for #{owner}\\n\")\n end\n end\n MultiLock.lock(repo_cache_dir, outside_caller, timeout: timeout, failed_to_lock: log_wait) { return yield }\n end",
"def create_or_update_with_paranoid\n self.class.disable_paranoid { create_or_update_without_paranoid }\n end",
"def skip\n param 'state' => Patriot::JobStore::JobState::SUCCEEDED\n end",
"def allow_concurrency; end",
"def allow_concurrency; end",
"def lock_type\n @lock_type ||= :mysql\n end",
"def lock_key(*arguments)\n nil\n end",
"def locked _args\n \"locked _args;\" \n end",
"def unlocked?\n unlocked, _, @active_worker_timestamp = Sidekiq.redis do |redis| \n redis.multi do\n redis.setnx(@locking_key, @current_worker_timestamp)\n redis.expire(@locking_key, 600)\n redis.get(@locking_key)\n end\n end\n unlocked\n end",
"def select_statement_lock\n Thread.current[:'Arel::Visitors::SQLServerNG.select_statement_lock']\n end",
"def with_serial_lock\n timeout_cap(15) do\n CLIENT_MUTEX.synchronize { yield }\n end\nend",
"def acquire_one_time_pessimistic_lock(reason)\n acquire_pessimistic_lock(ONE_TIME_LOCKING_LOCK_HOLDER,reason)\n end",
"def do_not_thread; true; end",
"def do_not_thread; true; end",
"def do_not_thread; true; end",
"def acquire_lock(dbconn_reports_transaction,get_or_release, reports_or_recovery_string)\n begin\n if get_or_release == ACQUIRE_LOCK\n @lock_obj = dbconn_reports_transaction.select_one(\"SELECT IS_FREE_LOCK('#{reports_or_recovery_string}')\" )\n if @lock_obj[0] == 1\n @get_lock_stat = dbconn_reports_transaction.select_one(\"SELECT GET_LOCK('#{reports_or_recovery_string}',10)\")\n else\n executed = false\n begin # begin while loop\n #puts \"ROTATE LOOP\"\n @lock_obj = dbconn_reports_transaction.select_one(\"SELECT IS_FREE_LOCK('#{reports_or_recovery_string}')\" )\n if @lock_obj[0] == 1\n executed = true \n @get_lock_stat = dbconn_reports_transaction.select_one(\"SELECT GET_LOCK('#{reports_or_recovery_string}',10)\")\n else\n executed = false\n end # end of @lock_obj[0] == 1 inside begin loop\n sleep(0.1)\n end while (executed == false) \n end # end of @lock_obj[0] == 1\n return @get_lock_stat[0]\n elsif get_or_release == RELEASE_LOCK\n #puts \"RELEASE lock method\"\n dbconn_reports_transaction.select_one(\"SELECT RELEASE_LOCK('#{reports_or_recovery_string}')\")\n end \n rescue Exception =>e\n puts \"Error in ClassName: AnalyticsDataReducer MethodName: acquire_lock ErrInfo:#{e.to_s}\" \n end\n end",
"def lock(key)\n raise NotImplementedError\n end"
] |
[
"0.6517862",
"0.6342078",
"0.62830657",
"0.6273183",
"0.62377363",
"0.62230605",
"0.61991656",
"0.6127176",
"0.61240304",
"0.60525775",
"0.60241866",
"0.59789443",
"0.59540343",
"0.594054",
"0.5928728",
"0.58429885",
"0.578368",
"0.57701516",
"0.57493556",
"0.5722337",
"0.57190907",
"0.5684072",
"0.56644523",
"0.566191",
"0.56502765",
"0.5632477",
"0.56317896",
"0.56088763",
"0.56088763",
"0.56088763",
"0.56027114",
"0.55732775",
"0.5569073",
"0.55574715",
"0.5554091",
"0.5532628",
"0.5526525",
"0.5524372",
"0.55208707",
"0.5520219",
"0.551584",
"0.55039364",
"0.5490394",
"0.546414",
"0.54441506",
"0.542106",
"0.5419448",
"0.5416926",
"0.53999454",
"0.53980595",
"0.5382804",
"0.53815776",
"0.5375047",
"0.5375047",
"0.53707093",
"0.5368494",
"0.5368494",
"0.5368494",
"0.5364537",
"0.53633547",
"0.535854",
"0.53570527",
"0.53568804",
"0.53409564",
"0.5322005",
"0.5320051",
"0.53035706",
"0.5298703",
"0.5296703",
"0.5289916",
"0.5286068",
"0.52840847",
"0.5255989",
"0.52463233",
"0.52446043",
"0.52348596",
"0.5234859",
"0.5233368",
"0.5233368",
"0.52309954",
"0.52194726",
"0.5210437",
"0.5204098",
"0.52032727",
"0.51938784",
"0.5183762",
"0.51834995",
"0.51834995",
"0.51803553",
"0.51795805",
"0.51712245",
"0.515964",
"0.515782",
"0.51527905",
"0.51415765",
"0.5136857",
"0.5136857",
"0.5136857",
"0.5130145",
"0.51279414"
] |
0.54521567
|
44
|
PostgreSQL supports timezones in literal timestamps
|
def supports_timestamp_timezones?
true
end
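A minimal usage sketch for the document above (not part of the dataset record; the connection and timestamp values are illustrative). Sequel's mock adapter can emulate the postgres dialect without a live server, and because that adapter reports supports_timestamp_timezones?, a literalized Time carries its UTC offset:

require 'sequel'

# Mock postgres connection; literalization needs no real server.
DB = Sequel.mock(host: 'postgres')

# The postgres dialect includes the zone offset in timestamp literals,
# producing something like '2024-01-01 12:30:00.000000+0000'.
puts DB.literal(Time.utc(2024, 1, 1, 12, 30))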
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def data_timezone(identifier); end",
"def data_timezone(identifier); end",
"def time_zone=(_arg0); end",
"def time_zone=(_arg0); end",
"def timezone(identifier); end",
"def timezone(identifier); end",
"def timezone(identifier); end",
"def data_timezone_identifiers; end",
"def data_timezone_identifiers; end",
"def data_timezone_identifiers; end",
"def data_timezone_identifiers; end",
"def data_timezone_identifiers; end",
"def data_timezone_identifiers; end",
"def linked_timezone(identifier); end",
"def linked_timezone(identifier); end",
"def linked_timezone(identifier); end",
"def linked_timezone(identifier); end",
"def test_quoted_datetime_local\n with_timezone_config default: :local do\n t = Time.now.change(usec: 0).to_datetime\n assert_equal t.to_fs(:db), @quoter.quoted_date(t)\n end\n end",
"def time_zone; end",
"def time_zone; end",
"def time_zone; end",
"def time_zone; end",
"def time_zone; end",
"def set_timezone(timezone); end",
"def set_timezone(timezone); end",
"def set_timezone(timezone); end",
"def set_timezone(timezone); end",
"def set_timezone(timezone); end",
"def test_quoted_datetime_local\n with_active_record_default_timezone :local do\n t = DateTime.now\n assert_equal t.to_s(:db), @quoter.quoted_date(t)\n end\n end",
"def linked_timezone_identifiers; end",
"def linked_timezone_identifiers; end",
"def linked_timezone_identifiers; end",
"def linked_timezone_identifiers; end",
"def tz\n zone = has_timezone? ? object.zone : \"\"\n zone = \"Z\" if zone == \"+00:00\"\n RDF::Literal(zone)\n end",
"def tz\n zone = has_timezone? ? object.zone : \"\"\n zone = \"Z\" if zone == \"+00:00\"\n RDF::Literal(zone)\n end",
"def get_timestamp\n \"[#{(Time.now.in_time_zone(TIME_ZONE_PACIFIC)).strftime('%m/%d/%y %l:%M%p')}]\"\nend",
"def to_application_timestamp(v)\n Sequel.convert_timestamp(v, timezone)\n end",
"def created_at\n super.in_time_zone if super\n end",
"def utc?() end",
"def default_timestamp_format\n \"TIMESTAMP '%Y-%m-%d %H:%M:%S%N %z'\".freeze\n end",
"def tz\n ActiveSupport::TimeZone.new timezone\n end",
"def type_literal_generic_datetime(column)\n :timestamp\n end",
"def timezone(reference, identifier, latitude_numerator, latitude_denominator, longitude_numerator, longitude_denominator, description = T.unsafe(nil)); end",
"def timezone(reference, identifier, latitude_numerator, latitude_denominator, longitude_numerator, longitude_denominator, description = T.unsafe(nil)); end",
"def time_zone\n super\n end",
"def skip_time_zone_conversion_for_attributes\n []\n end",
"def time_in_zone\n # Time.zone should be UTC\n Time.zone.at((time+utc_offset)/1000)\n end",
"def literal_datetime(v)\n v.strftime(\"TIMESTAMP '%Y-%m-%d %H:%M:%S'\")\n end",
"def convert_for_timestamp_shape(arg)\n return nil if arg.nil?\n\n time_value = case arg\n when Time\n arg\n when Date, DateTime\n arg.to_time\n when Integer, Float\n Time.at(arg)\n else\n Time.parse(arg.to_s)\n end\n time_value.utc.iso8601\n end",
"def tz\n md = self.to_s.match(GRAMMAR)\n zone = md[2].to_s\n zone = \"Z\" if zone == \"+00:00\"\n RDF::Literal(zone)\n end",
"def literal_time(v)\n v.strftime(\"TIMESTAMP '%Y-%m-%d %H:%M:%S'\")\n end",
"def start_at\n super.in_time_zone(time_zone) if super && time_zone\n end",
"def utc() end",
"def from_application_timestamp(v)\n Sequel.convert_output_timestamp(v, timezone)\n end",
"def requires_sql_standard_datetimes?\n true\n end",
"def set_time_zone\n Time.use_zone('Europe/Moscow') { yield }\n end",
"def sql_datetime_minute_mask\n case session[:database][:locale]\n when \"de\" then \"DD.MM.YYYY HH24:MI\"\n when \"en\" then \"YYYY-MM-DD HH24:MI\"\n else \"DD.MM.YYYY HH24:MI\" # Deutsche Variante als default\n end\n end",
"def with_timezone\n Time.use_zone(current_user_timezone) { yield }\n end",
"def utc_offset() end",
"def at_utc(*args)\n extract_and_create_local_time(args, false)\n end",
"def sql_datetime_second_mask\n case session[:database][:locale]\n when \"de\" then \"DD.MM.YYYY HH24:MI:SS\"\n when \"en\" then \"YYYY-MM-DD HH24:MI:SS\"\n else \"DD.MM.YYYY HH24:MI:SS\" # Deutsche Variante als default\n end\n end",
"def in_time_zone(time)\n time.in_time_zone(time_zone)\n end",
"def to_time\n preserve_timezone ? getlocal(utc_offset) : getlocal\n end",
"def at(*args)\n Time.at(*args).utc.in_time_zone(self)\n end",
"def parse_datetime(dt, tz)\n offset = \"+00:00\"\n case tz\n when \"EST\"\n offset = \"−05:00\"\n when \"CST\"\n offset = \"−06:00\"\n when \"MST\"\n offset = \"−07:00\"\n when \"PST\"\n offset = \"−08:00\"\n else\n offset = \"+00:00\"\n end\n\n parsedDate = DateTime.parse(\"#{dt}#{tz}\")\n return parsedDate\nend",
"def parse_timestamp(time_string, time_zone)\n time_string.slice!(0)\n time_zone.chomp(\"]\")\n Time.strptime(time_string.concat(time_zone), \"%d/%b/%Y:%T%z\")\n end",
"def parse(tz_string); end",
"def at\n Timestamp.utc(@timestamp_value)\n end",
"def tz\n time_zone_adjustment.to_r / (24 * 60)\n end",
"def tz\n time_zone_adjustment.to_r / (24 * 60)\n end",
"def parsed_time(timezone = patient.timezone)\n self.datetime.in_time_zone(timezone)\n end",
"def to_sparql(**options)\n \"TZ(\" + operands.to_sparql(**options) + \")\"\n end",
"def create_custom_function\n # puts \"Creating a custom function to make queries with timestamp as a string...\".cyan\n @session.execute('CREATE OR REPLACE FUNCTION timefstring(somearg text)\n RETURNS NULL ON NULL INPUT\n RETURNS timestamp\n LANGUAGE java\n AS $$\n java.text.SimpleDateFormat formatter = new java.text.SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss.SSS\");\n try {\n Date date = formatter.parse(somearg);\n return date;\n } catch(java.text.ParseException e) {\n return new Date();\n }\n $$')\n end",
"def data_timezone_identifiers\n @timezone_index\n end",
"def timezones\n @timezones.freeze\n end",
"def in_zone(zone_name, &block)\n\n EtOrbi.class_eval do\n @local_tzone = nil\n @local_tzone_tz = nil\n @local_tzone_loaded_at = nil\n end\n\n prev_tz = ENV['TZ']\n\n if zone_name == :no_env_tz\n ENV.delete('TZ')\n elsif zone_name == nil\n ENV['TZ'] = EtOrbi.os_tz\n else\n zone_name = EtOrbi.to_windows_tz(zone_name) if Gem.win_platform?\n ENV['TZ'] = zone_name\n end\n\n block.call\n\nensure\n\n ENV['TZ'] = prev_tz\nend",
"def timestamp2date(t)\n Time.at(t.to_i).utc\nend",
"def populate_timezones\n if new_record?\n self.created_at_timezone ||= Time.zone.name\n else\n if self.deleted?\n self.deleted_at_timezone ||= Time.zone.name\n end\n end\n end",
"def set_timezone\n offset = [Time.now.beginning_of_year.utc_offset, Time.now.beginning_of_year.change(month: 7).utc_offset].min\n offset *= 3600 if offset.abs < 13\n Time.zone = ActiveSupport::TimeZone.all.select { |zone| zone.utc_offset == offset }.first\nend",
"def with_timezone\n Time.use_zone(current_user.try(:get_time_zone)) { yield }\n end",
"def set_time_in_time_zone\n return true if time_observed_at.blank? || time_zone.blank?\n return true unless time_observed_at_changed? || time_zone_changed?\n \n # Render the time as a string\n time_s = time_observed_at_before_type_cast\n unless time_s.is_a? String\n time_s = time_observed_at_before_type_cast.strftime(\"%Y-%m-%d %H:%M:%S\")\n end\n \n # Get the time zone offset as a string and append it\n offset_s = Time.parse(time_s).in_time_zone(time_zone).formatted_offset(false)\n time_s += \" #{offset_s}\"\n \n self.time_observed_at = Time.parse(time_s)\n true\n end",
"def end_at\n super.in_time_zone(time_zone) if super && time_zone\n end",
"def UTC_to_PA(t)\n TZ_PA.utc_to_local t\nend",
"def has_floating_timezone?\n false\n end",
"def PA_to_UTC(t)\n TZ_PA.local_to_utc t\nend",
"def timezones\n subcomponents[\"VTIMEZONE\"]\n end",
"def update_timestamp(*_args)\n current_time = current_time_from_proper_timezone\n\n write_attribute('updated_at', current_time) if respond_to?(:updated_at)\n write_attribute('updated_on', current_time) if respond_to?(:updated_on)\n end",
"def create_timezone\n raise_not_implemented('create_timezone')\n end",
"def timezone\n @timezone || Sequel.database_timezone\n end",
"def timezone_identifiers\n @timezone_index\n end",
"def utc_offset(time = T.unsafe(nil)); end",
"def type_literal_generic_only_time(column)\n :time\n end",
"def set_time_zone\n # Make sure blank is always nil\n self.time_zone = nil if time_zone.blank?\n # If there are coordinates, use them to set the time zone, and reject\n # changes to the time zone if the coordinates have not changed\n if georeferenced?\n if coordinates_changed?\n lat = ( latitude_changed? || private_latitude.blank? ) ? latitude : private_latitude\n lng = ( longitude_changed? || private_longitude.blank? ) ? longitude : private_longitude\n self.time_zone = TimeZoneGeometry.time_zone_from_lat_lng( lat, lng ).try(:name)\n self.zic_time_zone = ActiveSupport::TimeZone::MAPPING[time_zone] unless time_zone.blank?\n elsif time_zone_changed?\n self.time_zone = time_zone_was\n self.zic_time_zone = zic_time_zone_was\n end\n end\n # Try to assign a reasonable default time zone\n if time_zone.blank?\n self.time_zone = nil\n self.time_zone ||= user.time_zone if user && !user.time_zone.blank?\n self.time_zone ||= Time.zone.try(:name) unless time_observed_at.blank?\n self.time_zone ||= 'UTC'\n end\n if !time_zone.blank? && !ActiveSupport::TimeZone::MAPPING[time_zone] && ActiveSupport::TimeZone[time_zone]\n # We've got a zic time zone\n self.zic_time_zone = time_zone\n self.time_zone = if rails_tz = ActiveSupport::TimeZone::MAPPING.invert[time_zone]\n rails_tz\n elsif ActiveSupport::TimeZone::INAT_MAPPING[time_zone]\n # Now we're in trouble, b/c the client specified a valid IANA time\n # zone that TZInfo knows about, but it's one Rails chooses to ignore\n # and doesn't provide any mapping for so... we have to map it\n ActiveSupport::TimeZone::INAT_MAPPING[time_zone]\n elsif time_zone =~ /^Etc\\//\n # If we don't have custom mapping and there's no fancy Rails wrapper\n # and it's one of these weird oceanic Etc zones, use that as the\n # time_zone. Rails can use that to cast times into other zones, even\n # if it doesn't recognize it as its own zone\n time_zone\n else\n ActiveSupport::TimeZone[time_zone].name\n end\n end\n self.time_zone ||= user.time_zone if user && !user.time_zone.blank?\n self.zic_time_zone ||= ActiveSupport::TimeZone::MAPPING[time_zone] unless time_zone.blank?\n if !zic_time_zone.blank? && ActiveSupport::TimeZone::MAPPING[zic_time_zone] && ActiveSupport::TimeZone[zic_time_zone]\n self.zic_time_zone = ActiveSupport::TimeZone::MAPPING[zic_time_zone]\n end\n true\n end",
"def quote_time(value)\n offset = value.utc_offset\n if offset >= 0\n offset_string = \"+#{sprintf(\"%02d\", offset / 3600)}:#{sprintf(\"%02d\", (offset % 3600) / 60)}\"\n elsif offset < 0\n offset_string = \"-#{sprintf(\"%02d\", -offset / 3600)}:#{sprintf(\"%02d\", (-offset % 3600) / 60)}\"\n end\n \"'#{value.strftime('%Y-%m-%dT%H:%M:%S')}\" << (value.usec > 0 ? \".#{value.usec.to_s.rjust(6, '0')}\" : \"\") << offset_string << \"'\"\n end",
"def timezone_identifiers\n raise_invalid_data_source('timezone_identifiers')\n end",
"def in_time_zone(zone = ::Time.zone)\n if zone\n ::Time.find_zone!(zone).local(year, month, day)\n else\n to_time\n end\n end",
"def timezone(identifier)\n identifier = StringDeduper.global.dedupe(identifier)\n @timezones << identifier\n @data_timezones << identifier\n end",
"def apply_function_current_timestamp(scope, ast)\n return scope, \"(current_timestamp at time zone 'UTC')\"\n end"
] |
[
"0.6196883",
"0.6196883",
"0.61800057",
"0.61800057",
"0.61647296",
"0.61647296",
"0.61647296",
"0.5974996",
"0.5974996",
"0.5974996",
"0.5974996",
"0.5974996",
"0.5974996",
"0.5963424",
"0.5963424",
"0.5963424",
"0.5963424",
"0.5883919",
"0.58165675",
"0.58165675",
"0.58165675",
"0.58165675",
"0.58165675",
"0.57881415",
"0.57881415",
"0.57881415",
"0.57881415",
"0.57881415",
"0.5748673",
"0.57449245",
"0.57449245",
"0.57449245",
"0.57449245",
"0.57387894",
"0.57387894",
"0.57123566",
"0.57097614",
"0.56620073",
"0.56283695",
"0.5613665",
"0.56089646",
"0.559112",
"0.5585035",
"0.5585035",
"0.55569154",
"0.55412257",
"0.55201554",
"0.55016565",
"0.5487052",
"0.54652774",
"0.5463855",
"0.54381233",
"0.54293287",
"0.5381528",
"0.5365008",
"0.5360411",
"0.5354729",
"0.5348572",
"0.53443724",
"0.5339347",
"0.53352857",
"0.5327793",
"0.5313816",
"0.53067505",
"0.5302498",
"0.53005564",
"0.5299842",
"0.5289578",
"0.5264146",
"0.5264146",
"0.5257175",
"0.5253321",
"0.524836",
"0.524636",
"0.52399",
"0.5228123",
"0.5220944",
"0.52157116",
"0.52104944",
"0.52073336",
"0.5206361",
"0.5205161",
"0.520035",
"0.51907885",
"0.5185477",
"0.5181607",
"0.5180929",
"0.5179095",
"0.5177172",
"0.51613617",
"0.51460063",
"0.51427644",
"0.51394373",
"0.5125151",
"0.5118793",
"0.51144403",
"0.51000625",
"0.5096383"
] |
0.7225166
|
1
|
PostgreSQL 8.4+ supports the WINDOW clause.
|
def supports_window_clause?
server_version >= 80400
end
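A sketch of what this capability enables through Sequel's Dataset#window (the same method visible among the negatives below); the table and column names are hypothetical, and the mock postgres connection is assumed to report a server_version of 8.4 or later:

require 'sequel'

DB = Sequel.mock(host: 'postgres')

# Dataset#window attaches a named WINDOW definition to the query, so a
# window function can reference it by name instead of repeating the spec.
ds = DB[:sales].
     window(:w, partition: :region, order: :sold_at).
     select { [region, sum(:amount).over(window: :w).as(:running_total)] }

# Emits SQL along the lines of:
# SELECT region, sum(amount) OVER (w) AS running_total FROM sales
# WINDOW w AS (PARTITION BY region ORDER BY sold_at)
puts ds.sql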
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def apply_window_function_eager_limit_strategy(ds, limit_and_offset=limit_and_offset())\n rn = ds.row_number_column \n limit, offset = limit_and_offset\n ds = ds.unordered.select_append{|o| o.row_number.function.over(:partition=>predicate_key, :order=>ds.opts[:order]).as(rn)}.from_self\n ds = if !returns_array?\n ds.where(rn => offset ? offset+1 : 1)\n elsif offset\n offset += 1\n if limit\n ds.where(rn => (offset...(offset+limit))) \n else\n ds.where{SQL::Identifier.new(rn) >= offset} \n end\n else\n ds.where{SQL::Identifier.new(rn) <= limit} \n end\n end",
"def window(id); end",
"def true_eager_graph_limit_strategy\n if associated_class.dataset.supports_window_functions?\n :window_function\n else\n :ruby\n end\n end",
"def window(name, opts)\n clone(:window=>((@opts[:window]||EMPTY_ARRAY) + [[name, SQL::Window.new(opts)].freeze]).freeze)\n end",
"def apply_filter_by_associations_window_function_limit_strategy(ds)\n ds.where(filter_by_associations_limit_key=>apply_window_function_eager_limit_strategy(associated_eager_dataset.select(*filter_by_associations_limit_alias_key)).select(*filter_by_associations_limit_aliases))\n end",
"def best_rank_subquery(group_by)\n @source.respond_to?(:project) or raise ThroughHierarchySourceError, \"#{@source} cannot be converted into a subquery\"\n subq = source.\n project(foreign_type_column, foreign_key_column, group_by, best_rank).\n where(filters).\n group(source[group_by]).\n as(best_rank_table_name)\n\n spawn(subq)\n end",
"def windowed_max_range_two(arr,window)\n \nend",
"def receive_next_window\n @window += 1\n receive_window\n end",
"def windowed_max_range_s(arr, window_size)\n \nend",
"def min_window\nend",
"def find_products_in_window(product, options)\n limit = options[:limit]\n \n compare_column = @@order_mapping[options[:sort]].split(',').first.gsub('products.','')\n \n left_sql = find_products_sql(options.merge({:conditions =>\n \"products.#{compare_column} <= #{connection.quote(product[compare_column])}\",\n :sort => options[:sort],\n :desc => true}))\n add_limit_offset!(left_sql, {:limit => limit+15, :offset => 0})\n left_list = Product.find_by_sql(left_sql)\n return nil unless idx = left_list.index(product)\n left_list = left_list[(idx+1)..-1] unless left_list.empty?\n \n right_sql = find_products_sql(options.merge({:conditions =>\n \"products.#{compare_column} >= #{connection.quote(product[compare_column])}\",\n :sort => options[:sort]}))\n add_limit_offset!(right_sql, {:limit => limit+15, :offset => 0})\n right_list = Product.find_by_sql(right_sql)\n return nil unless idx = right_list.index(product)\n right_list = right_list[(idx+1)..-1] unless right_list.empty?\n \n if left_list.size >= right_list.size\n right_list = right_list.slice(0,limit/2)\n left_list = left_list.slice(0,limit-right_list.size-1)\n else\n left_list = left_list.slice(0,limit/2)\n right_list = right_list.slice(0,limit-left_list.size-1)\n end\n \n left_list.reverse + [product] + right_list\n end",
"def current_pick_window\n pick_windows.where(\"window_start < '#{DateTime.now}' AND window_end > '#{DateTime.now}'\").first\n end",
"def within_window\n if session[:window_count] >= WINDOW_LIMIT\n session[:request_restrained] = true\n session[:sec_restrained] = @sec_now # impose a wait of RESTRAIN_SECONDS duration\n else\n session[:window_count] += 1\n end\n end",
"def local_window_size; end",
"def window\r\n return $window\r\n end",
"def max_windowed_range_op(arr, window)\n queue = Min_max_stack_queue.new()\n max_range = nil\n\n arr.each do |ele|\n queue.enqueue(ele)\n\n if queue.size > window\n queue.dequeue()\n end\n\n if queue.size == window\n current_range = queue.max - queue.min\n if max_range == nil || max_range < current_range\n max_range = current_range\n end\n end\n end\n\n return max_range\nend",
"def max_windowed_range(arr, window)\n current_max_range = nil\n\n arr.each_cons(window) do |slice| \n current_range = slice.max - slice.min\n if current_max_range == nil || current_max_range < current_range\n current_max_range = current_range\n end\n end\n\n return current_max_range\nend",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end",
"def group_rank_and_filter_sql_statement(connection, start_date, finish_date)\n # 1. Takes all CodigoExterno with FechaCreacion >= start_date && \n # FechaCreacion <= finish_date\n # 2. Ranks them (densely: https://www.postgresql.org/docs/9.6/static/functions-window.html)\n # 3. Sorts the by FechaCreacion DESC (that is, newest to oldest)\n # 4. Returns the newest one (The first ranked one, hence by_fecha_creacion < 2)\n # 5. Sorts by id.\n connection.execute(\n \"SELECT id FROM (\n SELECT id, updated_at,\n dense_rank() OVER (\n PARTITION BY value -> 'Listado' -> 0 -> 'CodigoExterno'\n ORDER BY value ->> 'FechaCreacion' DESC) as by_fecha_creacion\n FROM results\n WHERE f_cast_isots(value ->> 'FechaCreacion'::text) >= #{start_date}\n AND f_cast_isots(value ->> 'FechaCreacion'::text) <= #{finish_date}\n ) as q WHERE by_fecha_creacion < 2\n ORDER BY id\"\n )\n end",
"def rank_over(source_table, valid_source_table)\n Arel::Nodes::Over.new(\n Arel::Nodes::SqlLiteral.new('rank()'),\n Arel::Nodes::Window.new.partition(\n [ valid_source_table[:parent_id] ]\n ).order( source_table[:cached_valid_taxon_name_id], source_table[:cached], source_table[:name] )\n ).as('ro')\n end",
"def process(group, stream, window)\n end",
"def window_id\n -1\n end",
"def window_id\n -1\n end",
"def windowed_max_range(array, window)\n max_window = nil\n\n array.each_cons(window) do |w|\n value = w.max - w.min\n max_window ||= value\n max_window = value if max_window < value\n end\n\n max_window\nend",
"def window_with_count(count, skip)\n raise ArgumentError.new 'Count must be greater than zero' if count <= 0\n raise ArgumentError.new 'Skip must be greater than zero' if skip <= 0\n\n AnonymousObservable.new do |observer|\n q = []\n n = 0\n\n m = SingleAssignmentSubscription.new\n ref_count_disposable = RefCountSubscription.new m\n\n create_window = lambda {\n s = Subject.new\n q.push s\n observer.on_next(s.add_ref(ref_count_disposable))\n }\n\n create_window.call\n\n new_obs = Observer.configure do |o|\n o.on_next do |x|\n q.each {|s| s.on_next x}\n\n c = n - count + 1\n q.shift.on_completed if c >=0 && c % skip == 0\n\n n += 1\n create_window.call if n % skip == 0\n end\n\n o.on_error do |err|\n q.shift.on_error err while q.length > 0\n observer.on_error err\n end\n\n o.on_completed do\n q.shift.on_completed while q.length > 0\n observer.on_completed\n end\n end\n\n m.subscription = subscribe new_obs\n ref_count_disposable\n end\n end",
"def get_window; @window; end",
"def window_width\n end",
"def window_width\n return WINDOW_WIDTH\n end",
"def window_width\n return WINDOW_WIDTH\n end",
"def window_width\n return WINDOW_WIDTH\n end",
"def window_width\n return WINDOW_WIDTH\n end",
"def window_width\n return WINDOW_WIDTH\n end",
"def test_window_search\n @obj.window_search(4) do |subseq|\n assert_equal(20, @obj.size)\n end\n end",
"def windowed_max_range(array, w)\nend",
"def supports_window_function_frame_option?(option)\n case option\n when :rows, :range\n true\n when :offset\n server_version >= 90000\n when :groups, :exclude\n server_version >= 110000\n else\n false\n end\n end",
"def receive_window\n @sum_buffer_len += @max_buffer_len\n @foo = (@foo || 0) + 1\n\n @max_buffer_len = 0\n @buffer_len = 0\n\n @next_frame = (@window * @options[:window_size])\n\n @options[:window_size].times do |i|\n @buffer[i + ((@window % 2) * @options[:window_size])] = nil\n end\n end",
"def max_sliding_window(nums, k)\n max_array = []\n start_index = 0\n end_index = k - 1\n window = nums[start_index..end_index]\n \n (k..nums.size).each do |index|\n max_array << window.last(k).max\n window << nums[index]\n end\n \n max_array\nend",
"def width\n return @window_width # outer width\nend",
"def delete_row_number_column(ds=associated_dataset)\n if eager_limit_strategy == :window_function\n ds.row_number_column \n end\n end",
"def window\n Window\n end",
"def row(row_number, get_by = :copy)\n rank(0, row_number, get_by)\n end",
"def _window_size\n unless @window_size\n rows = `tput lines`\n cols = `tput cols`\n @window_size = [cols.chomp.to_i, rows.chomp.to_i]\n end\n @window_size\n end",
"def windowed_max_range(arr, window)\n current_max_range = nil\n length = arr.count\n arr.each_with_index do |num, idx|\n break if idx + window > length\n window_arr = arr[idx...(idx + window)]\n range = find_range(window_arr)\n current_max_range = range if current_max_range.nil? || \\\n range > current_max_range\n end\n return current_max_range\nend",
"def local_maximum_window_size; end",
"def apply_correlated_subquery_limit_strategy(ds)\n table = ds.first_source_table\n table_alias = ds.first_source_alias\n primary_key = associated_class.primary_key\n key = self[:key]\n cs_alias = :t1\n cs = associated_dataset.\n from(Sequel.as(table, :t1)).\n select(*qualify(cs_alias, primary_key)).\n where(Array(qualify(cs_alias, key)).zip(Array(qualify(table_alias, key)))).\n limit(*limit_and_offset)\n ds.where(qualify(table_alias, primary_key)=>cs)\n end",
"def next_window\n return unless peek_event\n\n window = History::Window.new\n\n while event = next_event\n window.add(event)\n\n break if event.type == 'DecisionTaskCompleted'\n end\n\n # Find the end of the window by exhausting all the commands\n window.add(next_event) while command?(peek_event)\n\n window.freeze\n end",
"def window_line_size\n lines - 2\n end",
"def windowed_max_range_q(arr, window_size)\n max = 0\n q = arr[0...window_size]\n \n (window_size..arr.length).each do |i|\n sum = q.max - q.min \n max = sum if max < sum \n p q\n q.shift \n q << arr[i]\n end\n # until arr.empty? && q.length != window_size\n # sum = q.max - q.min \n # max = sum if max < sum \n \n # q.shift \n # break if arr.empty?\n # q << arr.shift \n # end\n\n max \nend",
"def window\n self\n end",
"def count_distinct(input, window)\n counter = 0\n len = 0\n elems = {}\n output = []\n input.each do |elem|\n counter2 = counter + 1\n sum = 1\n elems = {}\n elems[elem] = 1\n win = window - 1\n while counter2 < input.size && win > 0\n unless elems.key?(input[counter2])\n sum += 1\n elems[input[counter2]] = 1\n end\n counter2 += 1\n win -= 1\n end\n if win == 0\n p elems\n p sum\n end\n counter += 1\n end\nend",
"def running_score\n events.where('created_at > ?', 1.weeks.ago.to_s(:db)).sum(:value)\n end",
"def windowed_max_range(array, window)\n arr = MinMaxStackQueue.new(array[0...window])\n res = arr.max - arr.min\n array[window..-1].each do |el|\n arr.slide_window(el)\n temp_dif = arr.max - arr.min\n res = temp_dif if temp_dif > res\n end\n\n res\nend",
"def limit(count)\n scoped(:row_limit => count)\n end",
"def window\n base = @collection.offset\n high = base + @collection.per_page\n high = @collection.total_entries if high > @collection.total_entries\n\n # TODO: What's the best way to allow customization of this text, particularly \"of\"?\n tag(:span, \" #{base + 1} - #{high} of #{@collection.total_entries} \",\n :class => WillPaginateRenderers.pagination_options[:gmail_window_class])\n end",
"def page_window window_number=0\n get_page source_window window_number\n nil\n end",
"def count\n ensure_aggregate!\n SQLite::API.aggregate_count( @func )\n end",
"def scan_window\n return nil unless scan_windows\n scan_windows.first\n end",
"def max_windowed_range(array, window_size)\n # byebug\n stack_queue = StackQueue.new\n window_size.times { stack_queue.enqueue(array.shift) }\n current_max = stack_queue.max - stack_queue.min\n\n array.each do |el|\n stack_queue.dequeue\n stack_queue.enqueue(el)\n diff = stack_queue.max - stack_queue.min\n\n current_max = diff if diff > current_max\n end\n\n current_max\nend",
"def windowed_max_range(arr, w)\n\n windows = []\n win = 2\n until win == w\n a = arr.each_cons(win).to_a\n windows << a\n\n win += 1\n end\n\n values = []\n\n windows.flatten(1).each do |array|\n sum = array.last - array.first\n values << sum\n end\n #p windows.flatten(1)\n p values.max\nend",
"def true_eager_graph_limit_strategy\n r = super\n ds = associated_dataset\n if r == :ruby && ds.supports_limits_in_correlated_subqueries? && (Array(associated_class.primary_key).length == 1 || ds.supports_multiple_column_in?) && (!offset || ds.supports_offsets_in_correlated_subqueries?)\n :correlated_subquery\n else\n r\n end\n end",
"def max_snapshots_per_group; end",
"def window\n @window || create_window\n end",
"def window_params\n params[:window]\n end",
"def max_window_range(arr, window)\n queue = MinMaxStackQueue.new\n curr_max = nil\n\n until arr.empty?\n queue.enqueue(arr.shift)\n next if queue.size < window\n range = queue.max - queue.min\n curr_max = range if curr_max.nil? || range > curr_max\n queue.dequeue\n end\n\n curr_max\nend",
"def windowed_max_range(arr, window_size)\n current_max_range = nil\n arr.each_with_index do |el, i|\n\n sub_arr = arr[i...i+window_size]\n diff = sub_arr.max - sub_arr.min\n if !current_max_range || current_max_range < diff\n current_max_range = diff\n\n end\n end\n current_max_range\nend",
"def popular_group_by_year\n sql = <<-SQL\n SELECT year, guest_group, MAX(num)\n FROM (\n SELECT year, guest_group, COUNT(*) AS num\n FROM guests\n GROUP BY year, guest_group\n )\n GROUP BY year;\n SQL\n DB[:conn].execute(sql)\nend",
"def apply_eager_graph_limit_strategy(strategy, ds)\n case strategy\n when :distinct_on\n apply_distinct_on_eager_limit_strategy(ds.order_prepend(*self[:order]))\n when :window_function\n apply_window_function_eager_limit_strategy(ds.order_prepend(*self[:order])).select(*ds.columns)\n else\n ds\n end\n end",
"def year_with_most_guests\n sql = <<-SQL\n SELECT year FROM guests GROUP BY year\n ORDER BY count(*) DESC LIMIT 1;\n SQL\n DB[:conn].execute(sql)[0][0]\nend",
"def win_mp\n maxmp * features_sum(:mp_on_win)\n end",
"def last_x_rows_ocr_group\n DB[\"SELECT ocr, COUNT(ocr) AS count FROM (SELECT ocr FROM v3_lists WHERE ocr != '' AND ocr IS NOT NULL LIMIT 30) GROUP BY ocr\"]\nend",
"def send_next_window\n @window += 1\n send_window\n end",
"def zrangebyscore(key, min, max, withscores: T.unsafe(nil), with_scores: T.unsafe(nil), limit: T.unsafe(nil)); end",
"def get(key, window=@max_window)\n @redis.multi { do_get(key.to_s, Time.now.to_f, window) }.last\n end",
"def window\n @win\n end",
"def windowed_max_range(arr, window_size)\n current_max_range = 0 #start at minimum possible\n (0...arr.length).each do |i1|\n i2 = i1 + window_size\n window_range = arr[i1...i2].max - arr[i1...i2].min\n current_max_range = window_range if window_range > current_max_range\n end\n current_max_range\nend",
"def random_weighted_order_clause(ranking_column, opts={})\n connection = opts[:connection]\n\n if connection.adapter_name =~ /sqlite/i\n # computer multiplication is faster than division I was once taught...so translate here\n max_int = 9223372036854775807.0\n multiplier = 1.0 / max_int\n \"(#{ranking_column} * ABS(#{random_for(opts)} * #{multiplier}) ) DESC\"\n else\n \"(#{ranking_column} * #{random_for(opts)}) DESC\"\n end\n end",
"def ranked_reports_query\n Report.joins(:variant).where({relation_name => relation}).select(<<-SQL).to_sql\n reports.*,\n DENSE_RANK() OVER (\n PARTITION BY product_id\n ORDER BY reported_at DESC\n ) AS product_price_index\n SQL\n end",
"def new_window(type); end",
"def select_sql\n return super unless o = @opts[:offset]\n raise(Error, \"#{db.database_type} requires an order be provided if using an offset\") unless order = @opts[:order]\n dsa1 = dataset_alias(1)\n rn = row_number_column\n sql = @opts[:append_sql] || ''\n subselect_sql_append(sql, unlimited.\n unordered.\n select_append{ROW_NUMBER(:over, :order=>order){}.as(rn)}.\n from_self(:alias=>dsa1).\n limit(@opts[:limit]).\n where(SQL::Identifier.new(rn) > o).\n order(rn))\n sql\n end",
"def cost_window=(cost_window)\n @cost_window = cost_window\n end",
"def window_width\r\n @options[:window_width]\r\n end",
"def max_windowed_range(_array, _window_size)\n queue = MinMaxStackQueue.new\n best_range = nil\n\n array.each_with_index do |el, _i|\n queue.enqueue(el)\n queue.dequeue if queue.size > window_size\n\n if queue.size == window_size\n current_range = queue.max - queue.min\n best_range = current_range if !best_range || current_range > best_range\n end\n end\n\n # p max_windowed_range([1, 0, 2, 5, 4, 8], 3) == 5\n # 0, 2, 5\n\n # queue.dequeue if queue.size > window_size\n # queue(new) = [8,4,5,2] - przestawiamy na nowy stos\n # queue(old) = [2,5,4,8] , i usuwamy ostatni (8)\n # queue(old) = [2,5,4] , i usuwamy ostatni (8) -> [2,5,4]\n # moge obliczyć okno\n #\n # zabrakło rozpisania na papierze ( w głowie nie widać! aż tak dobrze)\n # i dobrego zrozumienia struktury danych minmaxStackQueue, gdzie były\n # wystarczające wskazówki\nend",
"def each_row_batch_by_sql(sql, options={}, &block)\n options = {:connection => self.connection}.merge(options)\n cursor = PostgreSQLCursor::Cursor.new(sql, options)\n return cursor.each_row_batch(&block) if block_given?\n cursor.iterate_batched\n end",
"def most_popular_group_per_year\n sql = <<-SQL\n -- SELECT year, category FROM guests GROUP BY year, category ORDER BY count(category), year DESC\n SELECT DISTINCT year, category, count(category) FROM guests GROUP BY year, category ORDER BY count(category) DESC\n SQL\n DB[:conn].execute(sql)\nend",
"def next_row\n observation_matrix.observation_matrix_rows.where('position > ?', position).order(:position).first\n end",
"def window\n Window_Base.new(0, 0, 0, 0)\n end",
"def max_snapshots_per_group=(_arg0); end",
"def windowed_max_range_v2(arr,w)\n stack = MinMaxStackQueue.new\n \n\nend",
"def max_windowed_range(array, window_size)\n current_max_range = 0\n\n array.each_with_index do |el, idx|\n end_index = idx + window_size - 1\n break if end_index >= array.length\n\n slice = array[idx..end_index]\n difference = slice.max - slice.min\n\n current_max_range = difference if difference > current_max_range\n end\n\n current_max_range\nend",
"def window_width\n return ACTOR_WINDOW_WIDTH\n end",
"def active_window\n return @active_members_window if @last_window == :active_members_window\n return @reserve_members_window if @last_window == :reserve_members_window\n nil\n end",
"def windowed_max_range(arr, w)\n current_max_range = nil\n\n w.downto(1).each do |ww|\n idx = 0\n\n while idx <= arr.length - ww\n\n window = arr[idx..(idx + ww - 1)]\n max = arr[idx + ww - 1]\n min = arr[idx]\n range = max - min\n\n if current_max_range.nil? || range > current_max_range\n current_max_range = range\n end\n\n idx += 1\n end\n end\n\n current_max_range\nend",
"def select_sql\n return super unless o = @opts[:offset]\n raise(Error, 'MSSQL requires an order be provided if using an offset') unless order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n unlimited.\n unordered.\n from_self(:alias=>dsa2).\n select{[WILDCARD, ROW_NUMBER(:over, :order=>order){}.as(rn)]}.\n from_self(:alias=>dsa1).\n limit(@opts[:limit]).\n where(rn > o).\n select_sql\n end",
"def add_limit_offset!(statement, limit, offset, bind_values)\n # Limit and offset is handled by subqueries (see #select_statement).\n if limit\n # If there is just a limit on rows to return, but no offset, then we\n # can use TOP clause.\n statement.sub!(/^\\s*SELECT(\\s+DISTINCT)?/i) { \"SELECT#{$1} TOP #{limit}\" }\n # bind_values << limit\n end\n end",
"def windowed_max_range_1(array, window_size)\n curr_max = 0\n i = 0\n while i < array.length-window_size+1\n curr_wind = array[i...i+window_size]\n curr_max = [curr_wind.max - curr_wind.min, curr_max].max\n i+=1\n end\n curr_max\nend",
"def windowed_max_range(array, window_size)\n current_max_range = nil\n\n array.each_index do |i|\n subarray = array[i...(i + window_size)] \n range = subarray.max - subarray.min\n\n if current_max_range.nil? || range > current_max_range\n current_max_range = range \n end\n end\n\n current_max_range\nend",
"def select_sql\n return super unless l = @opts[:limit]\n o = @opts[:offset] || 0\n order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n irn = Sequel::SQL::Identifier.new(rn).qualify(dsa2)\n subselect_sql(unlimited.\n from_self(:alias=>dsa1).\n select_more(Sequel::SQL::QualifiedIdentifier.new(dsa1, WILDCARD),\n Sequel::SQL::WindowFunction.new(SQL::Function.new(:ROW_NUMBER), Sequel::SQL::Window.new(:order=>order)).as(rn)).\n from_self(:alias=>dsa2).\n select(Sequel::SQL::QualifiedIdentifier.new(dsa2, WILDCARD)).\n where((irn > o) & (irn <= l + o)))\n end",
"def get_window_area()\n get_children_area(\"WINDOW\")\n end",
"def windowed_max_range(array, w)\n queue = MinMaxStackQueue.new\n max = 0\n\n array.each do |num|\n queue.enqueue(num)\n end\n queue.max\nend",
"def get_window_area()\n get_children_area(\"WINDOW\")\n end"
] |
[
"0.6295806",
"0.5677758",
"0.53731406",
"0.5138089",
"0.50766724",
"0.49283078",
"0.4863528",
"0.4830885",
"0.48183408",
"0.47789818",
"0.47674927",
"0.47634125",
"0.47199786",
"0.46511826",
"0.46487308",
"0.4641359",
"0.4581243",
"0.4581204",
"0.45763293",
"0.45496976",
"0.45447746",
"0.45363635",
"0.45363635",
"0.45108518",
"0.45100605",
"0.45024323",
"0.44708806",
"0.44601697",
"0.44601697",
"0.44601697",
"0.44601697",
"0.44601697",
"0.4407748",
"0.44003212",
"0.43724307",
"0.43415594",
"0.43361676",
"0.43316928",
"0.4323722",
"0.43203443",
"0.43102854",
"0.4306258",
"0.4299331",
"0.42844883",
"0.42546144",
"0.42443517",
"0.42313513",
"0.4220784",
"0.42089796",
"0.4196558",
"0.41931605",
"0.41811252",
"0.41779494",
"0.4164219",
"0.41611677",
"0.4158912",
"0.41500792",
"0.4149237",
"0.4148737",
"0.41479135",
"0.414326",
"0.41405636",
"0.4139611",
"0.413247",
"0.41274783",
"0.41243702",
"0.41221827",
"0.41142756",
"0.41124433",
"0.41083324",
"0.41024542",
"0.40981117",
"0.40945947",
"0.40837574",
"0.40829876",
"0.408187",
"0.40801746",
"0.40739557",
"0.4069114",
"0.40674794",
"0.40528828",
"0.4048889",
"0.404235",
"0.4037882",
"0.4036682",
"0.40297073",
"0.40262234",
"0.40255806",
"0.40190703",
"0.40166408",
"0.40130487",
"0.40075877",
"0.4002062",
"0.40005738",
"0.39979362",
"0.39912322",
"0.3987158",
"0.39869037",
"0.39857844",
"0.39852023"
] |
0.5358746
|
3
|
PostgreSQL 8.4+ supports window functions
|
def supports_window_functions?
server_version >= 80400
end
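A sketch of calling a window function once supports_window_functions? is true; the identifier.function.over chain mirrors the row_number usage in the first negative below, while the table and column names are hypothetical:

require 'sequel'

DB = Sequel.mock(host: 'postgres')

# rank() OVER (PARTITION BY dept_id ORDER BY salary DESC) AS dept_rank
ds = DB[:employees].select do |o|
  [o.name,
   o.rank.function.over(partition: :dept_id, order: Sequel.desc(:salary)).as(:dept_rank)]
end

puts ds.sql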
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def apply_window_function_eager_limit_strategy(ds, limit_and_offset=limit_and_offset())\n rn = ds.row_number_column \n limit, offset = limit_and_offset\n ds = ds.unordered.select_append{|o| o.row_number.function.over(:partition=>predicate_key, :order=>ds.opts[:order]).as(rn)}.from_self\n ds = if !returns_array?\n ds.where(rn => offset ? offset+1 : 1)\n elsif offset\n offset += 1\n if limit\n ds.where(rn => (offset...(offset+limit))) \n else\n ds.where{SQL::Identifier.new(rn) >= offset} \n end\n else\n ds.where{SQL::Identifier.new(rn) <= limit} \n end\n end",
"def window(id); end",
"def true_eager_graph_limit_strategy\n if associated_class.dataset.supports_window_functions?\n :window_function\n else\n :ruby\n end\n end",
"def rank_over(source_table, valid_source_table)\n Arel::Nodes::Over.new(\n Arel::Nodes::SqlLiteral.new('rank()'),\n Arel::Nodes::Window.new.partition(\n [ valid_source_table[:parent_id] ]\n ).order( source_table[:cached_valid_taxon_name_id], source_table[:cached], source_table[:name] )\n ).as('ro')\n end",
"def windowed_max_range_two(arr,window)\n \nend",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end",
"def supports_window_clause?\n server_version >= 80400\n end",
"def apply_filter_by_associations_window_function_limit_strategy(ds)\n ds.where(filter_by_associations_limit_key=>apply_window_function_eager_limit_strategy(associated_eager_dataset.select(*filter_by_associations_limit_alias_key)).select(*filter_by_associations_limit_aliases))\n end",
"def group_rank_and_filter_sql_statement(connection, start_date, finish_date)\n # 1. Takes all CodigoExterno with FechaCreacion >= start_date && \n # FechaCreacion <= finish_date\n # 2. Ranks them (densely: https://www.postgresql.org/docs/9.6/static/functions-window.html)\n # 3. Sorts the by FechaCreacion DESC (that is, newest to oldest)\n # 4. Returns the newest one (The first ranked one, hence by_fecha_creacion < 2)\n # 5. Sorts by id.\n connection.execute(\n \"SELECT id FROM (\n SELECT id, updated_at,\n dense_rank() OVER (\n PARTITION BY value -> 'Listado' -> 0 -> 'CodigoExterno'\n ORDER BY value ->> 'FechaCreacion' DESC) as by_fecha_creacion\n FROM results\n WHERE f_cast_isots(value ->> 'FechaCreacion'::text) >= #{start_date}\n AND f_cast_isots(value ->> 'FechaCreacion'::text) <= #{finish_date}\n ) as q WHERE by_fecha_creacion < 2\n ORDER BY id\"\n )\n end",
"def window_with_count(count, skip)\n raise ArgumentError.new 'Count must be greater than zero' if count <= 0\n raise ArgumentError.new 'Skip must be greater than zero' if skip <= 0\n\n AnonymousObservable.new do |observer|\n q = []\n n = 0\n\n m = SingleAssignmentSubscription.new\n ref_count_disposable = RefCountSubscription.new m\n\n create_window = lambda {\n s = Subject.new\n q.push s\n observer.on_next(s.add_ref(ref_count_disposable))\n }\n\n create_window.call\n\n new_obs = Observer.configure do |o|\n o.on_next do |x|\n q.each {|s| s.on_next x}\n\n c = n - count + 1\n q.shift.on_completed if c >=0 && c % skip == 0\n\n n += 1\n create_window.call if n % skip == 0\n end\n\n o.on_error do |err|\n q.shift.on_error err while q.length > 0\n observer.on_error err\n end\n\n o.on_completed do\n q.shift.on_completed while q.length > 0\n observer.on_completed\n end\n end\n\n m.subscription = subscribe new_obs\n ref_count_disposable\n end\n end",
"def windowed_max_range_s(arr, window_size)\n \nend",
"def best_rank_subquery(group_by)\n @source.respond_to?(:project) or raise ThroughHierarchySourceError, \"#{@source} cannot be converted into a subquery\"\n subq = source.\n project(foreign_type_column, foreign_key_column, group_by, best_rank).\n where(filters).\n group(source[group_by]).\n as(best_rank_table_name)\n\n spawn(subq)\n end",
"def window(name, opts)\n clone(:window=>((@opts[:window]||EMPTY_ARRAY) + [[name, SQL::Window.new(opts)].freeze]).freeze)\n end",
"def delete_row_number_column(ds=associated_dataset)\n if eager_limit_strategy == :window_function\n ds.row_number_column \n end\n end",
"def running_score\n events.where('created_at > ?', 1.weeks.ago.to_s(:db)).sum(:value)\n end",
"def ranked_reports_query\n Report.joins(:variant).where({relation_name => relation}).select(<<-SQL).to_sql\n reports.*,\n DENSE_RANK() OVER (\n PARTITION BY product_id\n ORDER BY reported_at DESC\n ) AS product_price_index\n SQL\n end",
"def max_snapshots_per_group; end",
"def currval(seq)\n $new.select_one(\"SELECT CASE WHEN is_called THEN last_value ELSE last_value-increment_by END from #{seq}\")[0]\nend",
"def find_products_in_window(product, options)\n limit = options[:limit]\n \n compare_column = @@order_mapping[options[:sort]].split(',').first.gsub('products.','')\n \n left_sql = find_products_sql(options.merge({:conditions =>\n \"products.#{compare_column} <= #{connection.quote(product[compare_column])}\",\n :sort => options[:sort],\n :desc => true}))\n add_limit_offset!(left_sql, {:limit => limit+15, :offset => 0})\n left_list = Product.find_by_sql(left_sql)\n return nil unless idx = left_list.index(product)\n left_list = left_list[(idx+1)..-1] unless left_list.empty?\n \n right_sql = find_products_sql(options.merge({:conditions =>\n \"products.#{compare_column} >= #{connection.quote(product[compare_column])}\",\n :sort => options[:sort]}))\n add_limit_offset!(right_sql, {:limit => limit+15, :offset => 0})\n right_list = Product.find_by_sql(right_sql)\n return nil unless idx = right_list.index(product)\n right_list = right_list[(idx+1)..-1] unless right_list.empty?\n \n if left_list.size >= right_list.size\n right_list = right_list.slice(0,limit/2)\n left_list = left_list.slice(0,limit-right_list.size-1)\n else\n left_list = left_list.slice(0,limit/2)\n right_list = right_list.slice(0,limit-left_list.size-1)\n end\n \n left_list.reverse + [product] + right_list\n end",
"def popular_group_by_year\n sql = <<-SQL\n SELECT year, guest_group, MAX(num)\n FROM (\n SELECT year, guest_group, COUNT(*) AS num\n FROM guests\n GROUP BY year, guest_group\n )\n GROUP BY year;\n SQL\n DB[:conn].execute(sql)\nend",
"def process(group, stream, window)\n end",
"def count\n ensure_aggregate!\n SQLite::API.aggregate_count( @func )\n end",
"def receive_next_window\n @window += 1\n receive_window\n end",
"def max_windowed_range(arr, window)\n current_max_range = nil\n\n arr.each_cons(window) do |slice| \n current_range = slice.max - slice.min\n if current_max_range == nil || current_max_range < current_range\n current_max_range = current_range\n end\n end\n\n return current_max_range\nend",
"def windowed_max_range(array, window)\n max_window = nil\n\n array.each_cons(window) do |w|\n value = w.max - w.min\n max_window ||= value\n max_window = value if max_window < value\n end\n\n max_window\nend",
"def min_window\nend",
"def window_id\n -1\n end",
"def window_id\n -1\n end",
"def group_with_builtin(coll)\n puts \"Grouping by 'Make' (built-in)\"\n coll.group({\n :key => [:Make],\n :initial => {:crashes => 0},\n :reduce => 'function(doc, prev) {prev.crashes += 1;}'\n })\nend",
"def initial_window_months(order)\n order.total > 0 ? 1 : 12\n end",
"def windowed_max_range(array, w)\nend",
"def aggregate(property, resolution)\n # Look up the date/time dimensions for the resolution.\n date_time_dimensions = date_time_dimensions_for_resolution(resolution)\n\n # Build the timestamp from the date/time dimensions.\n timestamp = Sequel::SQL::NumericExpression.new(:+, *date_time_dimensions).cast(:timestamp).as(:timestamp)\n\n # Build a window function to sum the counts.\n count_window_function = Sequel::SQL::Function.new(:sum, :count).over(partition: date_time_dimensions).as(:count)\n\n # Build the aggregation window functions.\n aggregation_window_functions = AGGREGATIONS.map do |aggregation|\n Sequel::SQL::Function.new(aggregation, :\"#{property}\").over(partition: date_time_dimensions).as(:\"#{aggregation}_#{property}\")\n end\n\n facts_dataset\n .join(:dimension_dates, date: Sequel.cast(:timestamp, :date))\n .join(:dimension_times, time: Sequel.cast(:timestamp, :time))\n .distinct(*date_time_dimensions)\n .select(timestamp, count_window_function, *aggregation_window_functions)\n end",
"def year_with_most_guests\n sql = <<-SQL\n SELECT year FROM guests GROUP BY year\n ORDER BY count(*) DESC LIMIT 1;\n SQL\n DB[:conn].execute(sql)[0][0]\nend",
"def apply_eager_graph_limit_strategy(strategy, ds)\n case strategy\n when :distinct_on\n apply_distinct_on_eager_limit_strategy(ds.order_prepend(*self[:order]))\n when :window_function\n apply_window_function_eager_limit_strategy(ds.order_prepend(*self[:order])).select(*ds.columns)\n else\n ds\n end\n end",
"def date_groups(table, column) # rubocop:disable Metrics/MethodLength\n query = <<~SQL.squish\n SELECT date_trunc('year', #{column}) AS year, count(*) as events\n FROM #{table}\n WHERE #{column} IS NOT NULL\n GROUP BY year\n ORDER BY year ASC\n SQL\n\n ApplicationRecord.connection.execute(query)\n .map { |row| [row[\"year\"].year, row[\"events\"]] }\n .select { |row| (2004..Time.current.year).cover? row.first }\n .to_h\n end",
"def most_popular_group_per_year\n sql = <<-SQL\n -- SELECT year, category FROM guests GROUP BY year, category ORDER BY count(category), year DESC\n SELECT DISTINCT year, category, count(category) FROM guests GROUP BY year, category ORDER BY count(category) DESC\n SQL\n DB[:conn].execute(sql)\nend",
"def max_snapshots_per_group=(_arg0); end",
"def row(row_number, get_by = :copy)\n rank(0, row_number, get_by)\n end",
"def agg_pubkeys\n index = latest_agg_pubkey_index\n (index + 1).times.map { |i| agg_pubkey(i) }\n end",
"def monotonically_increasing_mileage\n if transam_asset\n previous_mileage_update = transam_asset.asset_events\n .where.not(current_mileage: nil)\n .where(\"event_date < ? OR (event_date = ? AND created_at < ?)\", self.event_date, self.event_date, (self.new_record? ? Time.current : self.created_at) ) # Define a window that runs up to this event\n .where('object_key != ?', self.object_key)\n .order(:event_date, :created_at => :asc).last\n next_mileage_update = transam_asset.asset_events\n .where.not(current_mileage: nil)\n .where('event_date > ? OR (event_date = ? AND created_at > ?)', self.event_date, self.event_date, (self.new_record? ? Time.current : self.created_at )) # Define a window that backs up to this event\n .where('object_key != ?', self.object_key)\n .order(:event_date, :created_at => :desc).first\n\n if previous_mileage_update\n errors.add(:current_mileage, \"can't be less than last update (#{previous_mileage_update.current_mileage})\") if current_mileage < previous_mileage_update.current_mileage\n end\n if next_mileage_update\n errors.add(:current_mileage, \"can't be more than next update (#{next_mileage_update.current_mileage})\") if current_mileage > next_mileage_update.current_mileage\n end\n end\n end",
"def aggregate_after_grouping?; @aggregate_after_grouping; end",
"def max_windowed_range_op(arr, window)\n queue = Min_max_stack_queue.new()\n max_range = nil\n\n arr.each do |ele|\n queue.enqueue(ele)\n\n if queue.size > window\n queue.dequeue()\n end\n\n if queue.size == window\n current_range = queue.max - queue.min\n if max_range == nil || max_range < current_range\n max_range = current_range\n end\n end\n end\n\n return max_range\nend",
"def fast_bucket_function\n Proc.new do |item|\n if item.is_a?(Float) && item.nan?\n nil\n else\n bucket_number = (item - min)/increment\n if bucket_number > counter_size || bucket_number < 0\n nil\n else\n [bucket_number.to_i, counter_size-1].min\n end\n end\n end\n end",
"def most_popular_group_per_year\n sql = <<-SQL\n WITH mp AS (\n SELECT show_year,\n guest_group,\n COUNT(id) as group_count\n FROM guest_appearances\n GROUP BY guest_group, show_year)\n SELECT show_year, guest_group, MAX(group_count) as frequency\n FROM mp\n GROUP BY show_year;\n SQL\n DB[:conn].execute(sql).each {|record| puts \"#{record[0]}: #{record[1]}, #{record[2]}.\"}\nend",
"def groupby_date(query, period: \"month\", column: \"updated_at\")\n query.group(\"DATE_TRUNC('#{period}', #{column})\").count.sort_by { |key, _v| key || Time.utc(1900) }.to_h\nend",
"def local_window_size; end",
"def order_with_pinned_sql\n -\"CASE\n WHEN (COALESCE(topics.pinned_at, '#{lowest_date}') > COALESCE(tu.cleared_pinned_at, '#{lowest_date}'))\n THEN topics.pinned_at + interval '9999 years'\n ELSE topics.bumped_at\n END DESC\"\n end",
"def create_aggregates(db: EventSourcery::Postgres.config.event_store_database,\n table_name: EventSourcery::Postgres.config.aggregates_table_name)\n db.create_table(table_name) do\n uuid :aggregate_id, primary_key: true\n column :version, :bigint, default: 1\n end\n end",
"def aggregate_f(*args)\n aggregate_function.f(*args)\n end",
"def count_distinct(input, window)\n counter = 0\n len = 0\n elems = {}\n output = []\n input.each do |elem|\n counter2 = counter + 1\n sum = 1\n elems = {}\n elems[elem] = 1\n win = window - 1\n while counter2 < input.size && win > 0\n unless elems.key?(input[counter2])\n sum += 1\n elems[input[counter2]] = 1\n end\n counter2 += 1\n win -= 1\n end\n if win == 0\n p elems\n p sum\n end\n counter += 1\n end\nend",
"def running_average; end",
"def current_pick_window\n pick_windows.where(\"window_start < '#{DateTime.now}' AND window_end > '#{DateTime.now}'\").first\n end",
"def last_x_rows_ocr_group\n DB[\"SELECT ocr, COUNT(ocr) AS count FROM (SELECT ocr FROM v3_lists WHERE ocr != '' AND ocr IS NOT NULL LIMIT 30) GROUP BY ocr\"]\nend",
"def max_sliding_window(nums, k)\n max_array = []\n start_index = 0\n end_index = k - 1\n window = nums[start_index..end_index]\n \n (k..nums.size).each do |index|\n max_array << window.last(k).max\n window << nums[index]\n end\n \n max_array\nend",
"def supports_window_function_frame_option?(option)\n case option\n when :rows, :range\n true\n when :offset\n server_version >= 90000\n when :groups, :exclude\n server_version >= 110000\n else\n false\n end\n end",
"def functions\n pg_major = ::PgSaurus::Engine.pg_server_version[0]\n res = select_all <<-SQL\n SELECT n.nspname AS \"Schema\",\n p.proname AS \"Name\",\n pg_catalog.pg_get_function_result(p.oid) AS \"Returning\",\n CASE\n WHEN #{pg_major >= 11 ? \"p.prokind = 'w'\" : \"p.proiswindow\"} THEN 'window'\n WHEN p.prorettype = 'pg_catalog.trigger'::pg_catalog.regtype THEN 'trigger'\n ELSE 'normal'\n END AS \"Type\",\n p.oid AS \"Oid\"\n FROM pg_catalog.pg_proc p\n LEFT JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace\n WHERE pg_catalog.pg_function_is_visible(p.oid)\n AND n.nspname <> 'pg_catalog'\n AND n.nspname <> 'information_schema'\n AND #{pg_major >= 11 ? \"p.prokind <> 'a'\" : \"p.proisagg <> TRUE\"}\n ORDER BY 1, 2, 3, 4;\n SQL\n res.inject([]) do |buffer, row|\n returning = row['Returning']\n function_type = row['Type']\n oid = row['Oid']\n\n function_str = select_value(\"SELECT pg_get_functiondef(#{oid});\")\n\n name = parse_function_name(function_str)\n language = parse_function_language(function_str)\n definition = parse_function_definition(function_str)\n volatility = parse_function_volatility(function_str)\n\n if definition\n buffer << ::PgSaurus::ConnectionAdapters::FunctionDefinition.new(name,\n returning,\n definition.strip,\n function_type,\n language,\n oid,\n volatility)\n end\n buffer\n end\n end",
"def true_eager_graph_limit_strategy\n r = super\n ds = associated_dataset\n if r == :ruby && ds.supports_limits_in_correlated_subqueries? && (Array(associated_class.primary_key).length == 1 || ds.supports_multiple_column_in?) && (!offset || ds.supports_offsets_in_correlated_subqueries?)\n :correlated_subquery\n else\n r\n end\n end",
"def increment_bucket(bucket)\n (bucket + 1) % table.length\n end",
"def apply_correlated_subquery_limit_strategy(ds)\n table = ds.first_source_table\n table_alias = ds.first_source_alias\n primary_key = associated_class.primary_key\n key = self[:key]\n cs_alias = :t1\n cs = associated_dataset.\n from(Sequel.as(table, :t1)).\n select(*qualify(cs_alias, primary_key)).\n where(Array(qualify(cs_alias, key)).zip(Array(qualify(table_alias, key)))).\n limit(*limit_and_offset)\n ds.where(qualify(table_alias, primary_key)=>cs)\n end",
"def windowed_max_range(arr, window)\n current_max_range = nil\n length = arr.count\n arr.each_with_index do |num, idx|\n break if idx + window > length\n window_arr = arr[idx...(idx + window)]\n range = find_range(window_arr)\n current_max_range = range if current_max_range.nil? || \\\n range > current_max_range\n end\n return current_max_range\nend",
"def apply(aggregate)\n raise NotImplementedError\n end",
"def windowed_max_range(array, window)\n arr = MinMaxStackQueue.new(array[0...window])\n res = arr.max - arr.min\n array[window..-1].each do |el|\n arr.slide_window(el)\n temp_dif = arr.max - arr.min\n res = temp_dif if temp_dif > res\n end\n\n res\nend",
"def run_sql(sql)\n conn = PG.connect(dbname: 'movies')\n result = conn.exec(sql)\n conn.close\n result\nend",
"def zrangebyscore(key, min, max, withscores: T.unsafe(nil), with_scores: T.unsafe(nil), limit: T.unsafe(nil)); end",
"def prev_ranking\n Ranking.all(:jtop_id => Ranking.all(:jtop_id.lt => jtop_id, :clip_id => clip_id).aggregate(:jtop_id.max), :clip_id => clip_id).first\n end",
"def subsequent(obj, timestamp_arg = false)\n if timestamp_arg != true && primary_key_is_int?\n return where(arel_table[primary_key].gt(obj.id)).order(arel_table[primary_key].asc)\n end\n\n obj = obj.send(PaperTrail.timestamp_field) if obj.is_a?(self)\n where(arel_table[PaperTrail.timestamp_field].gt(obj)).order(timestamp_sort_order)\n end",
"def window_width\n end",
"def windowed_max_range(arr, window_size)\n current_max_range = nil\n arr.each_with_index do |el, i|\n\n sub_arr = arr[i...i+window_size]\n diff = sub_arr.max - sub_arr.min\n if !current_max_range || current_max_range < diff\n current_max_range = diff\n\n end\n end\n current_max_range\nend",
"def to_sql\n \"\n SELECT row_to_json(fc)\n FROM ( SELECT 'FeatureCollection' AS type, array_to_json(array_agg(f)) AS features\n FROM ( SELECT 'Feature' AS type\n , ST_AsGeoJSON(subquery.geom)::json AS geometry\n , row_to_json(\n (SELECT l FROM (SELECT id, geoid) AS l)\n ) AS properties\n\n FROM (\n SELECT\n ct.id,\n ct.geom,\n ct.geoid,\n ST_Area(ST_SetSRID(geom,4326)) as d,\n ST_Area(\n ST_Intersection(\n ST_SetSRID( ST_GeomFromGeoJSON('#{ @geojson }'), 4326),\n ST_SetSRID(geom,4326)\n )\n ) as n\n FROM census_tracts_2010 AS ct\n WHERE\n ST_Intersects(\n ST_SetSRID(geom,4326),\n ST_SetSRID( ST_GeomFromGeoJSON('#{ @geojson }'), 4326)\n )\n ) subquery\n WHERE (n/d*100) >= 15\n\n\n ) AS f\n ) AS fc;\n \"\n end",
"def variant_for_poisson(seq)\n seq_size = seq.size\n l = seq[0].size - 1\n var = []\n (0..l).to_a.each do |pos|\n nt = []\n seq.each do |s|\n nt << s[pos]\n end\n count_nt = count(nt)\n v = seq_size - count_nt.values.max\n var << v\n end\n var_count = count(var)\n var_count.sort_by{|key,value|key}.to_h\nend",
"def interval_function(interval=:week)\n if Rails.env.development?\n \"strftime('%Y%W',#{@date_column}) AS _key\"\n else\n \"YEARWEEK(#{@date_column}) AS _key\"\n end\n end",
"def group_by\n end",
"def _test_replicate(sql)\n sql = [ sql ] unless sql.is_a? Enumerable\n connection do |c|\n c.query \"select setval('t_seq', 1, false)\"\n sql.each do |q|\n c.query q\n end\n end\n\n config['backend']['nodes'].map do |node|\n PGconn.open(:port => node['port'], :dbname => @dbname) do |c|\n break c.query(\"select nextval('t_seq')\")[0]\n end\n end\n end",
"def windowed_max_range_q(arr, window_size)\n max = 0\n q = arr[0...window_size]\n \n (window_size..arr.length).each do |i|\n sum = q.max - q.min \n max = sum if max < sum \n p q\n q.shift \n q << arr[i]\n end\n # until arr.empty? && q.length != window_size\n # sum = q.max - q.min \n # max = sum if max < sum \n \n # q.shift \n # break if arr.empty?\n # q << arr.shift \n # end\n\n max \nend",
"def within_window\n if session[:window_count] >= WINDOW_LIMIT\n session[:request_restrained] = true\n session[:sec_restrained] = @sec_now # impose a wait of RESTRAIN_SECONDS duration\n else\n session[:window_count] += 1\n end\n end",
"def windowed_max_range(arr, window_size)\n current_max_range = 0 #start at minimum possible\n (0...arr.length).each do |i1|\n i2 = i1 + window_size\n window_range = arr[i1...i2].max - arr[i1...i2].min\n current_max_range = window_range if window_range > current_max_range\n end\n current_max_range\nend",
"def trigger\n trigger_function = \"insert_#{master_table}\"\n unless @column == 'page'\n column = \"#{@column},\"\n column_function = \"coalesce(quote_literal(NEW.#{@column}), 'NULL') || ',' ||\"\n end\n \n cmd = <<-COMMAND\n CREATE OR REPLACE FUNCTION #{trigger_function}() \n RETURNS TRIGGER AS $$ \n DECLARE\n ins_sql TEXT; \n BEGIN\n ins_sql := 'INSERT INTO daily_#{@column}_views_' || (NEW.writer_id % #{@partition_size}) ||\n '(date,article_id,#{column}count,writer_id,partition_id) \n VALUES ' ||\n '('|| quote_literal(NEW.date) || ',' || NEW.article_id ||',' ||\n \t#{column_function} \n \t\t\tNEW.count || ',' || \n \t\t\tNEW.writer_id || ',' || (NEW.writer_id % #{@partition_size}) ||')'\n ; \n EXECUTE ins_sql;\n RETURN NULL;\n END; \n $$\n LANGUAGE plpgsql;\n \n CREATE TRIGGER #{trigger_function}_trigger\n BEFORE INSERT ON #{master_table}\n FOR EACH ROW EXECUTE PROCEDURE #{trigger_function}();\n COMMAND\n @conns.each{|conn| conn.exec(cmd)}\n end",
"def resample_hybrid upsample_factor, downsample_factor, filter_order\n return self.clone.resample_hybrid!(upsample_factor, downsample_factor, filter_order)\n end",
"def latest_agg_pubkey\n agg_pubkey(latest_agg_pubkey_index)[1]\n end",
"def apply_distinct_on_eager_limit_strategy(ds)\n keys = predicate_key\n ds.distinct(*keys).order_prepend(*keys)\n end",
"def calculate_function(function)\n raise \"invalid function '#{function}'\" unless [:sum, :avg, :min, :max, :count].include?(function.to_sym)\n Sequel::SQL::Function.new(function.to_sym, :value)\n end",
"def random_weighted_order_clause(ranking_column, opts={})\n connection = opts[:connection]\n\n if connection.adapter_name =~ /sqlite/i\n # computer multiplication is faster than division I was once taught...so translate here\n max_int = 9223372036854775807.0\n multiplier = 1.0 / max_int\n \"(#{ranking_column} * ABS(#{random_for(opts)} * #{multiplier}) ) DESC\"\n else\n \"(#{ranking_column} * #{random_for(opts)}) DESC\"\n end\n end",
"def all_proposal_details_rows_sql\n <<-SQL\n SELECT CAST(DocumentID AS INT), ProposalNature, ProposalOutcome, ProposalRepresentation, ProposalNo\n FROM #{table_name}\n GROUP BY DocumentID, ProposalNature, ProposalOutcome, ProposalRepresentation, ProposalNo\n SQL\n end",
"def window\r\n return $window\r\n end",
"def each_row_batch_by_sql(sql, options={}, &block)\n options = {:connection => self.connection}.merge(options)\n cursor = PostgreSQLCursor::Cursor.new(sql, options)\n return cursor.each_row_batch(&block) if block_given?\n cursor.iterate_batched\n end",
"def max_snapshot_groups; end",
"def select_sql\n return super unless o = @opts[:offset]\n raise(Error, \"#{db.database_type} requires an order be provided if using an offset\") unless order = @opts[:order]\n dsa1 = dataset_alias(1)\n rn = row_number_column\n sql = @opts[:append_sql] || ''\n subselect_sql_append(sql, unlimited.\n unordered.\n select_append{ROW_NUMBER(:over, :order=>order){}.as(rn)}.\n from_self(:alias=>dsa1).\n limit(@opts[:limit]).\n where(SQL::Identifier.new(rn) > o).\n order(rn))\n sql\n end",
"def summarize_per_subset\n @having = ANY_ROWS\n end",
"def windowed_max_range_1(array, window_size)\n curr_max = 0\n i = 0\n while i < array.length-window_size+1\n curr_wind = array[i...i+window_size]\n curr_max = [curr_wind.max - curr_wind.min, curr_max].max\n i+=1\n end\n curr_max\nend",
"def determineQuery(metric_param, time_window, time_period, series_length) \n \tif current_user\n user_id = MyRainBarrel.where(user_id: current_user.id).first\n else\n user_id = MyRainBarrel.where(id: 1).first\n end \n\n @total_metric = DailyWaterLog.find_by_sql([\"SELECT \n DISTINCT date_trunc(?, series.date)::date as date,\n id, \n #{metric_param.downcase.gsub(\" \", \"_\")},\n sum(COALESCE(#{metric_param.downcase.gsub(\" \", \"_\")}, 0)) OVER WIN as total_#{metric_param.downcase.gsub(\" \", \"_\")},\n overflowed, water_used, amount_overflown, ph, tds\n FROM (\n SELECT generate_series(?, 0) + (current_date-13)::date as date\n ) series \n LEFT OUTER JOIN \n daily_water_logs on series.date = daily_water_logs.created_at::date AND \n (daily_water_logs.created_at BETWEEN ? AND ?)\n WINDOW \n WIN AS (PARTITION BY date_trunc(?, series.date)::date)\n ORDER BY \n date ASC\", time_period, series_length, time_window, Time.now.tomorrow.midnight, time_period])\n end",
"def ps\n sql = %q(\n select\n procpid,\n application_name as source,\n age(now(),query_start) as running_for,\n waiting,\n current_query as query\n from pg_stat_activity\n where\n current_query <> '<insufficient privilege>'\n AND current_query <> '<IDLE>'\n and procpid <> pg_backend_pid()\n order by 3 desc\n )\n\n exec_sql(sql, find_uri)\n end",
"def apply_eager_limit_strategy(ds, strategy=eager_limit_strategy, limit_and_offset=limit_and_offset())\n case strategy\n when :distinct_on\n apply_distinct_on_eager_limit_strategy(ds)\n when :window_function\n apply_window_function_eager_limit_strategy(ds, limit_and_offset)\n else\n ds\n end\n end",
"def windowed_max_range(arr, w)\n\n windows = []\n win = 2\n until win == w\n a = arr.each_cons(win).to_a\n windows << a\n\n win += 1\n end\n\n values = []\n\n windows.flatten(1).each do |array|\n sum = array.last - array.first\n values << sum\n end\n #p windows.flatten(1)\n p values.max\nend",
"def year_with_most_guests\n sql = <<-SQL\n SELECT show_year, count(id) AS guest_count\n FROM guest_appearances\n GROUP BY show_year\n ORDER BY guest_count DESC\n LIMIT 1;\n SQL\n mg = DB[:conn].execute(sql)[0]\n puts \"#{mg[0]}: #{mg[1]} guests.\"\nend",
"def rank\n\t\trr = self.clone\n\t\trr.row_reduce_below\n\t\trr.rank_rr\n\tend",
"def existing_clients_new_revenue \n existing_clients.where(new_money: true).sum(:first_year_comp)\nend",
"def sql_with_postgres_pgp(event)\n filter = /(pgp_sym_(encrypt|decrypt))\\(((.|\\n)*?)\\)/i\n\n event.payload[:sql] = event.payload[:sql].gsub(filter) do |_|\n \"#{$1}([FILTERED])\"\n end\n\n sql_without_postgres_pgp(event)\n end",
"def replication_lag_time\n row = query_and_release('SELECT EXTRACT(EPOCH FROM (now() - pg_last_xact_replay_timestamp()))::float as lag')\n\n row['lag'].to_f if row.any?\n end",
"def win_mp\n maxmp * features_sum(:mp_on_win)\n end",
"def rows\n @pz.group_by.each_with_index { |val, n| row_id(n) }\n end",
"def incr(key, window=@max_window)\n key = key.to_s\n now = Time.now.to_f\n @redis.multi do\n @redis.zadd(key, now, SecureRandom.uuid)\n @redis.expire(key, @max_window.ceil)\n do_get(key, now, window)\n end.last\n end"
] |
[
"0.5796378",
"0.52517116",
"0.5064502",
"0.4594665",
"0.45769972",
"0.45617715",
"0.45282045",
"0.45062193",
"0.44971704",
"0.44761306",
"0.44666955",
"0.44301325",
"0.43804103",
"0.43185493",
"0.43053",
"0.42579615",
"0.42533994",
"0.42289972",
"0.42187497",
"0.4182494",
"0.41639137",
"0.41462022",
"0.41369388",
"0.41256392",
"0.41082263",
"0.41071656",
"0.41043326",
"0.41043326",
"0.4080945",
"0.40726945",
"0.40562582",
"0.4035456",
"0.40074536",
"0.4001893",
"0.39999643",
"0.39963776",
"0.39880398",
"0.39856702",
"0.3981514",
"0.3956906",
"0.39539295",
"0.39519662",
"0.39382178",
"0.3937709",
"0.3931462",
"0.38808396",
"0.38681534",
"0.38675514",
"0.38665062",
"0.3856989",
"0.38561457",
"0.38525215",
"0.38461983",
"0.38440755",
"0.38420358",
"0.38371423",
"0.3834632",
"0.38345662",
"0.38174975",
"0.3809737",
"0.38087484",
"0.37904897",
"0.37860474",
"0.37817714",
"0.37811613",
"0.37782922",
"0.37750354",
"0.3768344",
"0.3765902",
"0.3760584",
"0.37586585",
"0.37559342",
"0.3752559",
"0.37520817",
"0.3750974",
"0.37506658",
"0.3749313",
"0.37490362",
"0.37469822",
"0.37463096",
"0.37442586",
"0.37431994",
"0.3740588",
"0.37214872",
"0.37182927",
"0.3714637",
"0.37107596",
"0.37045947",
"0.37029865",
"0.36966935",
"0.36930114",
"0.369223",
"0.36885077",
"0.36838138",
"0.36784723",
"0.3677379",
"0.36710703",
"0.36653146",
"0.36623135",
"0.3661718",
"0.36616784"
] |
0.0
|
-1
|
Base support added in 8.4, offset support added in 9.0, GROUPS and EXCLUDE support added in 11.0.
|
def supports_window_function_frame_option?(option)
case option
when :rows, :range
true
when :offset
server_version >= 90000
when :groups, :exclude
server_version >= 110000
else
false
end
end
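# Illustrative sketch, not part of the original method: how a caller might
# consult this capability check before choosing a window frame type. The
# `db` variable and the column names in the comment are hypothetical.
frame =
  if db.supports_window_function_frame_option?(:groups)
    # e.g. SUM(amount) OVER (ORDER BY id GROUPS BETWEEN 1 PRECEDING AND 1 FOLLOWING)
    {type: :groups, start: 1, end: 1}
  else
    # ROWS frames are safe back to the base 8.4 support
    {type: :rows, start: 1, end: 1}
  end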
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def normalize_offset(offset); end",
"def offset; end",
"def offset; end",
"def offset; end",
"def offset(*) end",
"def first_offset; end",
"def first_offset; end",
"def check_offset(*)\n false\n end",
"def current_offset; end",
"def limit_and_offset\n r = super\n if r.first == 1\n r\n else\n [1, r[1]]\n end\n end",
"def offset(arg0)\n end",
"def x_offset; end",
"def test_small_offset_for_index\n expected = [0x001ac4c3, 0x001bcaab, 0x001a82fe, 0x0003518e]\n 0.upto(3) do |idx|\n assert_equal expected[idx], @index.offset_for_index(idx)\n end\n end",
"def data_offset?(offset, group)\n log_offset = offset % group.size\n log_no, block_no, block_offset = location_of(offset, group)\n\n status ||= log_no > group.logs\n status ||= log_offset <= LOG_HEADER_SIZE\n status ||= block_no.negative?\n status ||= block_no >= group.log(log_no).blocks\n status ||= block_offset < Innodb::LogBlock::DATA_OFFSET\n status ||= block_offset >= Innodb::LogBlock::TRAILER_OFFSET\n\n !status\n end",
"def default_offset_amount\n 50\n end",
"def offset_matches_rule?(offset, rule_offset); end",
"def offset_matches_rule?(offset, rule_offset); end",
"def test_big_offset_for_index\n expected = 0x0000bbbb00000000\n assert_equal expected, @index.offset_for_index(8)\n expected = 0x0000ffff00000000\n assert_equal expected, @index.offset_for_index(9)\n end",
"def char_offset(byte_offset)\n end",
"def local_header_offset; end",
"def private; end",
"def line_for_offset(offset)\n end",
"def test_generate_offset_return_value\n offset_generator = OffsetGenerator.new(260317)\n assert_equal String, offset_generator.generate_offset(\"a\").class\n assert_equal 1, offset_generator.generate_offset(\"a\").length\n #assert_equal \"FailingValue\", offset_generator.generate_offset(\"a\")\n #assert_equal \"FailingValue\", offset_generator.generate_offset(\"b\")\n #assert_equal \"FailingValue\", offset_generator.generate_offset(\"c\")\n #assert_equal \"FailingValue\", offset_generator.generate_offset(\"d\")\n end",
"def char_offset(offset)\n raise \"Should not be called\"\n end",
"def offset(offset)\n @conjunction.add_offset(offset)\n nil\n end",
"def slice_range(limit_and_offset = limit_and_offset())\n limit, offset = limit_and_offset\n if limit || offset\n (offset||0)..(limit ? (offset||0)+limit-1 : -1)\n end\n end",
"def location_of(offset, group)\n log_no, log_offset = offset.divmod(group.size)\n block_no, block_offset = (log_offset - LOG_HEADER_SIZE).divmod(LOG_BLOCK_SIZE)\n [log_no, block_no, block_offset]\n end",
"def hash_offset\n super\n end",
"def offset_value #:nodoc:\n start == 0 && !@group.nil? && @group['groups'][0]['doclist']['start'] > 0 ? @group['groups'][0]['doclist']['start'] : start\n end",
"def offset_of(member)\n self.class.offset_of(member)\n end",
"def standard_offset\n return @standard_offset\n end",
"def modify_limit_offset(sql)\n modified_sql = \"\"\n subquery_sql = \"\"\n in_single_quote = false\n in_double_quote = false\n nesting_level = 0\n if sql =~ /(OFFSET|LIMIT)/xmi then\n if sql =~ /\\(/ then\n sql.split(//).each_with_index do |x, i|\n case x[0]\n when 40 # left brace - (\n modified_sql << x if nesting_level == 0\n subquery_sql << x if nesting_level > 0\n nesting_level = nesting_level + 1 unless in_double_quote || in_single_quote\n when 41 # right brace - )\n nesting_level = nesting_level - 1 unless in_double_quote || in_single_quote\n if nesting_level == 0 and !in_double_quote and !in_single_quote then\n modified_sql << modify_limit_offset(subquery_sql)\n subquery_sql = \"\"\n end\n modified_sql << x if nesting_level == 0\n subquery_sql << x if nesting_level > 0 \n when 39 # single quote - '\n in_single_quote = in_single_quote ^ true unless in_double_quote\n modified_sql << x if nesting_level == 0\n subquery_sql << x if nesting_level > 0 \n when 34 # double quote - \"\n in_double_quote = in_double_quote ^ true unless in_single_quote\n modified_sql << x if nesting_level == 0\n subquery_sql << x if nesting_level > 0\n else\n modified_sql << x if nesting_level == 0\n subquery_sql << x if nesting_level > 0\n end\n raise ActiveRecord::StatementInvalid.new(\"Braces do not match: #{sql}\") if nesting_level < 0\n end\n else\n modified_sql = sql\n end\n raise ActiveRecord::StatementInvalid.new(\"Quotes do not match: #{sql}\") if in_double_quote or in_single_quote\n return \"\" if modified_sql.nil?\n select_components = modified_sql.scan(/\\ASELECT\\s+(DISTINCT)?(.*?)(?:\\s+LIMIT\\s+(.*?))?(?:\\s+OFFSET\\s+(.*?))?\\Z/xmi)\n return modified_sql if select_components[0].nil?\n final_sql = \"SELECT #{select_components[0][0]} \"\n final_sql << \"TOP #{select_components[0][2].nil? ? 1000000 : select_components[0][2]} \" \n final_sql << \"START AT #{(select_components[0][3].to_i + 1).to_s} \" unless select_components[0][3].nil?\n final_sql << \"#{select_components[0][1]}\"\n return final_sql\n else\n return sql\n end\n end",
"def new_offset(offset=0)\n self.class.new!(:civil=>civil, :parts=>time_parts, :offset=>(offset*86400).to_i)\n end",
"def set_rack_ip_offsets # These Rack base addresses get added to 10.31.0.0 and 172.31.0.0\n base = IPv4.new('0.0.80.0').to_i\n @ip_offset ||= { # Rack, and IP address offset from base\n 'b15'=> (IPv4.new('0.0.82.100') - base).to_i, \n 'd15'=> (IPv4.new('0.0.81.150') -base).to_i ,\n 'e15'=> (IPv4.new('0.0.81.50') - base).to_i,\n 'h15'=> (IPv4.new('0.0.83.150') - base).to_i,\n 'i15'=> (IPv4.new('0.0.83.50') - base).to_i,\n 'b18'=> (IPv4.new('0.0.82.0') - base).to_i,\n 'd18'=> (IPv4.new('0.0.81.100') - base).to_i,\n 'e18'=> (IPv4.new('0.0.81.0') - base).to_i,\n 'h18'=> (IPv4.new('0.0.83.100') - base).to_i,\n 'i18'=> (IPv4.new('0.0.83.0') - base).to_i\n }\nend",
"def _lex_index_offsets; end",
"def _lex_index_offsets; end",
"def _lex_index_offsets; end",
"def _lex_index_offsets; end",
"def required_positionals; end",
"def get_offset\n @offset\n end",
"def to_find_offset\n @offset\n end",
"def validate_offset!(options)\r\n return unless options[:offset]\r\n\r\n if options[:offset].to_s == \"0\"\r\n options.delete :offset\r\n elsif options[:offset].to_i < 0 || options[:offset].to_s[/\\D+/]\r\n raise InvalidOffset,\r\n \"offset must be a number zero or greater\"\r\n end\r\n\r\n options[:offset]\r\n end",
"def offset_after_match\n offset + match_length\n end",
"def struct_offsets(definition, offset)\n padding = 0\n offsets = []\n definition.each do |mapping|\n key, data_type = mapping\n if sizeof_type(data_type) > padding\n offset = offset + padding\n end\n\n offsets.push(offset)\n\n offset = offset + sizeof_type(data_type)\n padding = calc_padding(offset)\n end\n\n offsets\n end",
"def offset(value)\n using(offset: value)\n end",
"def address_container_for_image_offset(img, offset)\n address_containers.select { |ac| \n (ac.image.base_image == img.base_image) && \n (ac.contains_image_offset? offset)\n }.first\n end",
"def offsetsAndConfigurationIndex(file)\n endiannessMarkerBytes = nil\n result = {}\n \n def readInt(endianness, bytes)\n if endianness == :little\n # Little endian\n (bytes[0] << 0 |\n bytes[1] << 8 |\n bytes[2] << 16 |\n bytes[3] << 24)\n else\n # Big endian\n (bytes[0] << 24 |\n bytes[1] << 16 |\n bytes[2] << 8 |\n bytes[3] << 0)\n end\n end\n \n def prepareMagic(endianness, numbers)\n magicBytes = []\n numbers.each {\n | number |\n currentBytes = []\n 4.times {\n currentBytes << (number & 0xff)\n number >>= 8\n }\n if endianness == :big\n currentBytes.reverse!\n end\n magicBytes += currentBytes\n }\n magicBytes\n end\n \n fileBytes = []\n \n File.open(file, \"rb\") {\n | inp |\n loop {\n byte = inp.getbyte\n break unless byte\n fileBytes << byte\n }\n }\n \n def sliceByteArrays(byteArray, pattern)\n result = []\n lastSlicePoint = 0\n (byteArray.length - pattern.length + 1).times {\n | index |\n foundOne = true\n pattern.length.times {\n | subIndex |\n if byteArray[index + subIndex] != pattern[subIndex]\n foundOne = false\n break\n end\n }\n if foundOne\n result << byteArray[lastSlicePoint...index]\n lastSlicePoint = index + pattern.length\n end\n }\n \n result << byteArray[lastSlicePoint...(byteArray.length)]\n \n result\n end\n \n [:little, :big].each {\n | endianness |\n headerMagicBytes = prepareMagic(endianness, OFFSET_HEADER_MAGIC_NUMBERS)\n magicBytes = prepareMagic(endianness, OFFSET_MAGIC_NUMBERS)\n \n bigArray = sliceByteArrays(fileBytes, headerMagicBytes)\n unless bigArray.size <= 1\n bigArray[1..-1].each {\n | configArray |\n array = sliceByteArrays(configArray, magicBytes)\n index = readInt(endianness, array[1])\n offsets = []\n array[2..-1].each {\n | data |\n offsets << readInt(endianness, data)\n }\n result[index] = offsets\n }\n end\n }\n \n raise MissingMagicValuesException unless result.length >= 1\n \n # result is {index1=>offsets1, index2=>offsets2} but we want to return\n # [[offsets1, index1], [offsets2, index2]].\n return result.map {\n | pair |\n pair.reverse\n }\nend",
"def offset\n loffset + 6\n end",
"def utc_offset() end",
"def all_data_zone_identifiers; end",
"def all_data_zone_identifiers; end",
"def offset_range\n return nil unless original_tag\n if original_end_tag\n end_offset = end_pos.offset + original_end_tag.bytesize\n else\n end_offset = start_pos.offset + original_tag.bytesize\n end\n\n start_pos.offset...end_offset\n end",
"def offset_on_line(offset)\n end",
"def rest_positionals; end",
"def offset()\n @offset__\n end",
"def starting_position; end",
"def import!(foreign_rewriter, offset: T.unsafe(nil)); end",
"def b_offset\n @last_4[1].to_i\n end",
"def sub_d8\n end",
"def offset_of(lsn, offset, new_lsn, group)\n log_size = group.log_size\n group_capacity = group.capacity\n\n # Calculate the offset in LSN.\n if new_lsn >= lsn\n lsn_offset = new_lsn - lsn\n else\n lsn_offset = lsn - new_lsn\n lsn_offset %= group_capacity\n lsn_offset = group_capacity - lsn_offset\n end\n\n # Transpose group size offset to a group capacity offset.\n group_offset = offset - (LOG_HEADER_SIZE * (1 + (offset / log_size)))\n\n offset = (lsn_offset + group_offset) % group_capacity\n\n # Transpose group capacity offset to a group size offset.\n offset + (LOG_HEADER_SIZE * (1 + (offset / (log_size - LOG_HEADER_SIZE))))\n end",
"def offsetsList(ast)\n ast.filter(StructOffset).uniq.sort\nend",
"def on_has_internal_subset\n end",
"def test_get_node_single_position_no_features\n node = @container.get_node('1',7,0)\n assert_equal([6,10,0,0,nil], [node.start, node.stop, node.count, node.flag, node.sum])\n assert_equal([], node.feature_byte_offsets)\n end",
"def call(offset = T.unsafe(nil)); end",
"def true_offset\n offset_flags >> 16\n end",
"def offset!(offset)\n @offset = offset || 0\n self\n end",
"def offset(value = EMPTY)\n if value == EMPTY\n @offset\n else\n clone(offset: value)\n end\n end",
"def parse_offset(offset)\n case offset.to_s\n when /^\\d+$/\n offset.to_i\n when /^0x\\h/i\n offset.to_i(16)\n when /^$/\n 0\n else\n raise(OptionParser::InvalidArgument, 'Invalid offset')\n end\nend",
"def require_group_x\n end",
"def sum_offset\n offset + operand.offset\n end",
"def observed_utc_offset; end",
"def observed_utc_offset; end",
"def observed_utc_offset; end",
"def observed_utc_offset; end",
"def extended(*) end",
"def content_offset(offset, dimension)\n return 0 unless offset >= dimension\n\n offset - dimension\n end",
"def populate(pre_offset=nil)\n if pre_offset.nil?\n calculate_fill_indexes(nil)\n raw_populate()\n elsif @pre_offset && pre_offset == @pre_offset # Nothing need to change -- new data set bounded the same\n return self.index_range\n elsif @pre_offset && pre_offset < @pre_offset\n map_local_range()\n else\n calculate_fill_indexes(pre_offset)\n raw_populate()\n end\n end",
"def get_offset\n if @offset == nil and @rep\n attribs = @rep.get_attributes \"camera\"\n if( attribs )\n eo = attribs[\"eo\"]\n if( eo.kind_of?(Array) and eo.length == 3 )\n @offset = eo\n end\n end\n @offset = false if @offset == nil\n end\n @offset\nend",
"def at(offset)\n if loaded? || (offset >= 0 ? lazy_possible?(head, offset + 1) : lazy_possible?(tail, offset.abs))\n super\n elsif offset >= 0\n first(:offset => offset)\n else\n last(:offset => offset.abs - 1)\n end\n end",
"def extra; end",
"def offset\n 1\n end",
"def index(loc, offset=0) end",
"def standard_offset=(value)\n @standard_offset = value\n end",
"def offset\n limit_and_offset.last\n end",
"def derive_offsets(transitions, offsets); end",
"def derive_offsets(transitions, offsets); end",
"def test_match_case_sensitive_offset_no_match\r\n\t\t#content with exact match\r\n\t\tcontent = \"123MyMatch and some more\"\r\n\t\tsnort_rule_content = SnortRuleContent.new\r\n\t\tsnort_rule_content.unescaped_string = \"MyMatch\"\r\n\t\tsnort_rule_content.offset = 4\r\n\t\tsnort_rule_content.nocase = false\r\n\t\tassert(!snort_rule_content.match(content,0),\"incorrect match on content with offset.\")\r\n\tend",
"def bytepos; end",
"def offset(offset)\n self.class.new(@grouping, @date_time + offset.send(@grouping.identifier))\n end",
"def unexpected_indent_offset; end",
"def groups_base\n get_groups_base\n# get_attribute_from_auth_source('groups_base')\n end",
"def optional_positionals; end",
"def extra_annotations; end",
"def zone_identifiers; end",
"def zone_identifiers; end",
"def to_offset(offset)\n if @orig.is_a?(DateTime)\n off = OffsetRationals.rational_for_offset(offset)\n TimeOrDateTime.new(@orig.new_offset(off), false)\n elsif @orig.is_a?(Time)\n time = time_with_offset(@orig, offset)\n TimeOrDateTime.new(time, false)\n elsif @orig.is_a?(TimestampWithOffset)\n TimeOrDateTime.new(TimestampWithOffset.new(@orig.timestamp, offset), false)\n else # Integer\n if offset == 0\n TimeOrDateTime.new(@orig, false)\n else\n TimeOrDateTime.new(TimestampWithOffset.new(@orig, offset), false)\n end\n end\n end",
"def find_group_results(group, offset, limit)\n #place solution here\n @coll.find(:group => group).skip(offset).limit(limit).sort({:secs => 1}).projection(group: false, _id: false)\n end",
"def offset\n operation.offset\n end",
"def wildmask\n super\n end",
"def deco_pos; end",
"def api_offset_and_limit(options=params)\n offset = 0\n\n if options[:offset].present?\n offset = options[:offset].to_i\n offset = 0 if offset < 0\n end\n\n limit = options[:limit].to_i\n if limit < 1\n limit = 25\n elsif limit > 100\n limit = 100\n end\n\n if offset.nil? && options[:page].present?\n offset = (options[:page].to_i - 1) * limit\n offset = 0 if offset < 0\n end\n\n [offset, limit]\n end"
] |
[
"0.62224346",
"0.61713517",
"0.61713517",
"0.61713517",
"0.61191493",
"0.58633673",
"0.58633673",
"0.5722773",
"0.57086873",
"0.569345",
"0.5534722",
"0.55226004",
"0.5475919",
"0.5419492",
"0.53431475",
"0.5339308",
"0.5339308",
"0.53376627",
"0.5272072",
"0.5266593",
"0.5243627",
"0.51451856",
"0.51376754",
"0.5131626",
"0.5121149",
"0.51114494",
"0.50657165",
"0.50350654",
"0.50340843",
"0.49908787",
"0.49760732",
"0.494115",
"0.4937592",
"0.49373454",
"0.4931503",
"0.4931503",
"0.4931503",
"0.4931503",
"0.49208727",
"0.49046797",
"0.49018663",
"0.48944318",
"0.48911813",
"0.4890494",
"0.4884398",
"0.48838812",
"0.48837662",
"0.4879086",
"0.4878507",
"0.48697102",
"0.48697102",
"0.48659852",
"0.4860838",
"0.4845799",
"0.4843521",
"0.4837148",
"0.4830844",
"0.48108608",
"0.48089",
"0.48083997",
"0.47757632",
"0.47746366",
"0.4772276",
"0.4761869",
"0.47593486",
"0.47542512",
"0.47439966",
"0.47389922",
"0.47385305",
"0.47363707",
"0.47334424",
"0.47334424",
"0.47334424",
"0.47334424",
"0.4732756",
"0.47199488",
"0.47156563",
"0.47027385",
"0.47024706",
"0.47022185",
"0.46963552",
"0.46942186",
"0.4688151",
"0.46826392",
"0.4674784",
"0.4674784",
"0.46646103",
"0.4664253",
"0.46631542",
"0.4653091",
"0.46483287",
"0.4645661",
"0.4641455",
"0.4640078",
"0.4640078",
"0.46390617",
"0.46330836",
"0.46330017",
"0.46310788",
"0.46301758",
"0.46247625"
] |
0.0
|
-1
|
Truncates the dataset. Returns nil. Options:

:cascade :: whether to use the CASCADE option, useful when truncating tables with foreign keys.
:only :: truncate using ONLY, so child tables are unaffected
:restart :: use RESTART IDENTITY to restart any related sequences

:only and :restart only work correctly on PostgreSQL 8.4+.

Usage:

  DB[:table].truncate
  # TRUNCATE TABLE "table"

  DB[:table].truncate(cascade: true, only: true, restart: true)
  # TRUNCATE TABLE ONLY "table" RESTART IDENTITY CASCADE
|
def truncate(opts = OPTS)
if opts.empty?
super()
else
clone(:truncate_opts=>opts).truncate
end
end
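# Illustrative note, not part of the original method: when options are given,
# truncate clones the dataset so that SQL generation can read
# opts[:truncate_opts] (compare the _truncate_sql snippet among the negatives
# below). A hedged usage sketch; the table name is hypothetical:
#
#   DB[:events].truncate(only: true, restart: true, cascade: true)
#   # TRUNCATE TABLE ONLY "events" RESTART IDENTITY CASCADE
#   # => nil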
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def truncate!\n return if truncated?\n update_column :metadata, nil\n end",
"def truncate\n connection.truncate(table_name)\n end",
"def _truncate_sql(table)\n to = @opts[:truncate_opts] || OPTS\n \"TRUNCATE TABLE#{' ONLY' if to[:only]} #{table}#{' RESTART IDENTITY' if to[:restart]}#{' CASCADE' if to[:cascade]}\"\n end",
"def truncate(truncate)\n self.query.truncate = truncate\n self\n end",
"def truncate() end",
"def truncate(model_klass, table_name)\n model_klass.connection.truncate(model_klass.table_name)\n end",
"def mssql_truncate(table_name)\n execute \"TRUNCATE TABLE #{quote_table_name(table_name)}\", 'Truncate Tables'\n rescue => e\n if e.message =~ /Cannot truncate table .* because it is being referenced by a FOREIGN KEY constraint/\n execute \"DELETE FROM #{quote_table_name(table_name)}\", 'Truncate Tables with Delete'\n else\n raise\n end\n end",
"def exec_query_truncate_table\n execute_remote(query_truncate_table)\n end",
"def truncate_table\n\t\t\t transaction { connection.execute(\"TRUNCATE TABLE #{quoted_table_name};\") }\n\t\t end",
"def truncate\n end",
"def TableTruncate\n begin\n unless getDBConn\n raise _(\"Please open a database before trying to truncate it.\")\n end\n\n tables = treeview_getSelection(@tv_tables)\n raise _(\"You have to select a table to truncate.\") if count(tables) <= 0\n\n # Confirm and truncate.\n tables.each do |table|\n table_ob = @dbconn.getTable(table[0])\n\n if msgbox(_(\"Question\"), sprintf(_(\"Do you want to truncate the table: %s?\"), table[0]), \"yesno\") == \"yes\"\n table_ob.truncate\n end\n end\n rescue => e\n knj_msgbox.error_exc(e)\n end\n\n @dbpage.TablesUpdate()\n end",
"def truncate_table(table = quoted_table_name)\n connection.execute(\"TRUNCATE TABLE #{table}\")\n end",
"def truncate(namespace, set_name = nil, before_last_update = nil, options = {})\n policy = create_policy(options, Policy, default_info_policy)\n\n node = @cluster.random_node\n\n if set_name && !set_name.to_s.strip.empty?\n str_cmd = \"truncate:namespace=#{namespace}\"\n str_cmd << \";set=#{set_name}\" unless set_name.to_s.strip.empty?\n else\n if node.supports_feature?(Aerospike::Features::TRUNCATE_NAMESPACE)\n str_cmd = \"truncate-namespace:namespace=#{namespace}\"\n else\n str_cmd = \"truncate:namespace=#{namespace}\"\n end\n end\n\n if before_last_update\n lut_nanos = (before_last_update.to_f * 1_000_000_000.0).round\n str_cmd << \";lut=#{lut_nanos}\"\n elsif supports_feature?(Aerospike::Features::LUT_NOW)\n # Servers >= 4.3.1.4 require lut argument\n str_cmd << \";lut=now\"\n end\n\n response = send_info_command(policy, str_cmd, node).upcase\n return if response == \"OK\"\n raise Aerospike::Exceptions::Aerospike.new(Aerospike::ResultCode::SERVER_ERROR, \"Truncate failed: #{response}\")\n end",
"def truncate(table_name, name = nil)\n execute \"TRUNCATE TABLE #{quote_table_name(table_name)}\", name\n end",
"def truncate\n push pop.truncate\n end",
"def truncate_all_tables(options = {})\n options[:verbose] ||= false\n EOL::Db.all_connections.uniq.each do |conn|\n count = 0\n conn.tables.each do |table|\n next if table == 'schema_migrations'\n count += 1\n if conn.respond_to? :with_master\n conn.with_master do\n truncate_table(conn, table)\n end\n else\n truncate_table(conn, table)\n end\n end\n if options[:verbose]\n puts \"-- Truncated #{count} tables in \" +\n conn.instance_eval { @config[:database] } +\n \".\"\n end\n end\n # EOL.forget_everything # expensive, but without it, would risk errors.\n end",
"def truncate_revisions! (options = nil)\n options = {:limit => acts_as_revisionable_options[:limit], :minimum_age => acts_as_revisionable_options[:minimum_age]} unless options\n RevisionRecord.truncate_revisions(self.class, self.id, options)\n end",
"def truncate_revisions!(options = nil)\n options = {:limit => self.class.acts_as_revisionable_options[:limit], :minimum_age => self.class.acts_as_revisionable_options[:minimum_age]} unless options\n revision_record_class.truncate_revisions(self.class, self.id, options)\n end",
"def truncate( length ) File.truncate( expand_tilde, length ) end",
"def truncate\r\n truncate_records\r\n redirect_to action: 'index'\r\n end",
"def truncate_preserve(table_name_str)\n puts \"Truncating '#{table_name_str}' table (it may take a while):\"\n table_name = TableName.valueOf(table_name_str)\n\n if enabled?(table_name_str)\n puts 'Disabling table...'\n disable(table_name_str)\n end\n\n puts 'Truncating table...'\n @admin.truncateTable(table_name, true)\n end",
"def truncate(length)\n #This is a stub, used for indexing\n end",
"def truncate(table_name_str)\n puts \"Truncating '#{table_name_str}' table (it may take a while):\"\n table_name = TableName.valueOf(table_name_str)\n\n if enabled?(table_name_str)\n puts 'Disabling table...'\n disable(table_name_str)\n end\n\n puts 'Truncating table...'\n @admin.truncateTable(table_name, false)\n end",
"def truncate(length) File.truncate(path, length) end",
"def hard_clean!\n open_db\n @file.truncate(PREAMBLE_SIZE)\n write_fresh_database\n end",
"def truncate(arg0)\n end",
"def truncate(path=nil,len=nil)\n if path.class == String && len.class == Fixnum && block_given?\n @j_del.java_method(:truncate, [Java::java.lang.String.java_class,Java::long.java_class,Java::IoVertxCore::Handler.java_class]).call(path,len,(Proc.new { |ar| yield(ar.failed ? ar.cause : nil) }))\n return self\n end\n raise ArgumentError, \"Invalid arguments when calling truncate(path,len)\"\n end",
"def truncate(ctx,path,offset)\n return wrap_context(ctx,__method__,path,offset) if ctx\n\n unless @root.can_write?(path)\n raise Errno::EACESS.new(path)\n end\n\n current_size = size(path)\n unless @root.raw_truncate(path,offset)\n contents = @root.read_file(path)\n if (offset <= 0)\n @root.write_to(path,\"\")\n elsif offset < contents.length\n @root.write_to(path,contents[0..offset] )\n end\n end\n @adj_size = @adj_size - current_size + (offset <= 0 ? 0 : offset)\n end",
"def truncate(decplace = 0)\n #This is a stub, used for indexing\n end",
"def truncate_trait\n options[:style] = [options.try(:[], :style), 'min-width: 0;'].join(' ').squish\n end",
"def truncate_db\n drop_table\n create_table\n end",
"def truncate( len = 0 )\n _must_be_a_file # raises error\n _must_be_writable # raises error\n \n old_size = self.size\n padding = len > old_size ? \"\\0\" * (len - old_size) : \"\"\n \n self.write( self.read( len ) + padding )\n end",
"def clear(options = nil)\n rescue_error_with(nil) { @data.with { |c| c.flush_all } }\n end",
"def TruncateAllClicked\n unless getDBConn.conn\n msgbox(_(\"Warning\"), _(\"You need to open a database, before you can truncate its databases\"), \"warning\")\n return null\n end\n\n if msgbox(_(\"Question\"), _(\"Do you really want to truncate all databases on the current connection?\"), \"yesno\") != \"yes\"\n return null\n end\n\n begin\n dbs = getDBConn.GetDBs()\n\n dbs.each do |value|\n getDBConn.ChooseDB(value)\n tables = getDBConn.GetTables(value)\n\n tables.each do |table|\n getDBConn.TruncateTable(table[\"name\"])\n end\n end\n rescue => e\n msgbox(_(\"Warning\"), sprintf(_(\"An error occurred:\\n\\n%s\"), e.getMessage), \"warning\")\n end\n\n @dbpage.TablesUpdate()\n end",
"def truncate(input, length = T.unsafe(nil), truncate_string = T.unsafe(nil)); end",
"def truncate(text, options={})\n options[:length] ||= 30\n options[:omission] ||= \"...\"\n\n if text\n l = options[:length] - options[:omission].length\n chars = text\n stop = options[:separator] ? (chars.rindex(options[:separator], l) || l) : l\n (chars.length > options[:length] ? chars[0...stop] + options[:omission] : text).to_s\n end\n end",
"def truncate(truncate_at, options = {})\n return dup unless length > truncate_at\n\n omission = options[:omission] || '...'\n length_with_room_for_omission = truncate_at - omission.length\n stop =\n if options[:separator]\n rindex(options[:separator], length_with_room_for_omission) || length_with_room_for_omission\n else\n length_with_room_for_omission\n end\n\n \"#{self[0, stop]}#{omission}\"\n end",
"def truncate_all\n Content::Version.all.map(&:destroy)\n ContentKey::Version.all.map(&:destroy)\n Content.all.map(&:destroy)\n ContentKey.all.map(&:destroy)\n end",
"def truncate(truncate_at, options = {})\n return dup unless length > truncate_at\n\n omission = options[:omission] || '...'\n length_with_room_for_omission = truncate_at - omission.length\n stop = \\\n if options[:separator]\n rindex(options[:separator], length_with_room_for_omission) || length_with_room_for_omission\n else\n length_with_room_for_omission\n end\n\n \"#{self[0, stop]}#{omission}\"\n end",
"def clear(options = nil)\n @data.flushdb\n end",
"def truncate_index(options = {})\n truncate(\n options[:value].join(I18n.t('support.array.words_connector')),\n length: INDEX_TRUNCATION_VALUE,\n omission: I18n.t('meta.search.index.truncated_field'),\n escape: false\n )\n end",
"def truncate(truncate_at, options = {})\n return dup unless length > truncate_at\n\n options[:omission] ||= '...'\n length_with_room_for_omission = truncate_at - options[:omission].length\n stop = \\\n if options[:separator]\n rindex(options[:separator], length_with_room_for_omission) || length_with_room_for_omission\n else\n length_with_room_for_omission\n end\n\n self[0...stop] + options[:omission]\n end",
"def truncate(symbol_or_array)\n symbol_or_array = [ symbol_or_array ] unless symbol_or_array.is_a? Array\n total_count = count = 0\n startt = Time.now\n\n symbol_or_array.each do |symbol|\n name = send(symbol).name\n logger.info \"(#{chunk_id}) Begining truncate of #{name}...\" if log? :basic\n case symbol\n when :entry_trigger then\n count += entry_trigger.positions.count\n entry_trigger.positions.clear\n when :entry_strategy then\n count += entry_strategy.positions.count\n entry_strategy.positions.clear\n when :exit_trigger then\n count += exit_trigger.positions.count\n exit_trigger.positions.clear\n when :exit_strategy then\n count += exit_strategy.positions.count\n exit_strategy.positions.clear\n when :scan then\n count += scan.positions.count\n scan.positions.clear\n else\n raise ArgumentError, \":truncate must take one or an array of the following: :entry_trigger, :entry_strategy, :exit_strategy or :scan\"\n end\n total_count += count\n end\n delta = Time.now - startt\n logger.info \"(#{chunk_id}) Truncated #{total_count} positions in #{Backtester.format_et(delta)}\" if log? :basic\n end",
"def delete_all\n begin\n db.execute \"TRUNCATE TABLE #{table}\"\n\n rescue SQLite3::Exception => e\n puts \"Database Exception occured\"\n puts e\n \n ensure\n db.close if db\n end\n end",
"def truncate(*fields, length:, strip_whitespaces: true)\n strip(*fields) if strip_whitespaces\n sanitize(*fields) { |value| value[0...length] }\n strip(*fields) if strip_whitespaces\n end",
"def ftruncate(ctx,path,offset,ffi)\n\n return wrap_context(ctx,__method__,path,offset,ffi) if ctx\n\n fh = ffi.fh\n\n if fh.raw\n @root.raw_truncate(path,offset,fh.raw)\n if (offset <= 0)\n fh.contents = \"\"\n else\n fh.contents = fh.contents[0..offset]\n end\n end\n end",
"def truncate(length)\n range_stop = -1 - length\n # Get the keys to delete from the hash\n keys = @index.range(0, range_stop)\n # Remove the keys from the index\n @index.delete_by_rank(0, range_stop)\n\n # Remove the values from the hash\n multi do\n keys.each do |key|\n @hash.delete(key)\n end\n end\n self\n end",
"def truncate(max)\n resize([max, self.length].min)\n end",
"def truncate content # :nodoc:\n maxlen = @maxlen ||= @width-2\n if !content.nil? \n if content.length > maxlen # only show maxlen\n @longest_line = content.length if content.length > @longest_line\n #content = content[@pcol..@pcol+maxlen-1] \n content.replace content[@pcol..@pcol+maxlen-1] \n else\n # can this be avoided if pcol is 0 XXX\n content.replace content[@pcol..-1] if @pcol > 0\n end\n end\n content\n end",
"def truncate_name(name, options={})\n options = {:length => NAME_MAX_LENGTH, :separator => ' '}.merge options\n h truncate(name, options)\n end",
"def truncate(max_length)\n output = deparse(@tree)\n\n # Early exit if we're already below the max length\n return output if output.size <= max_length\n\n truncations = find_possible_truncations\n\n # Truncate the deepest possible truncation that is the longest first\n truncations.sort_by! { |t| [-t.location.size, -t.length] }\n\n tree = deep_dup(@tree)\n truncations.each do |truncation|\n next if truncation.length < 3\n\n find_tree_location(tree, truncation.location) do |expr, k|\n expr[k] = { A_TRUNCATED => nil }\n expr[k] = [expr[k]] if truncation.is_array\n end\n\n output = deparse(tree)\n return output if output.size <= max_length\n end\n\n # We couldn't do a proper smart truncation, so we need a hard cut-off\n output[0..max_length - 4] + '...'\n end",
"def clean_truncate_html(len=30, ellipsis=\"...\")\n Nokogiri::HTML::DocumentFragment.parse(HTML_Truncator.truncate(self, len, :ellipsis => ellipsis, :length_in_chars => true)).to_xhtml\n end",
"def clean_annuaire()\n puts \"TRUNCATE ALL USER RELATED TABLES\"\n [\n :last_uid, :telephone, :email, :relation_eleve, :ressource,\n :enseigne_dans_regroupement, :role_user, :profil_user, :user, :regroupement, :eleve_dans_regroupement\n ].each do |table|\n if table == :ressource\n truncate_ressource()\n else \n DB[table].truncate()\n end\n end\n\n create_super_admin_and_ressource_laclasse()\nend",
"def truncated_content\n\t\t\tid.to_s+\" - \"+content.truncate(50,{omission: '...'})\n\t end",
"def truncated_content\n\t\tid.to_s+\" - \"+content.truncate(50,{omission: '...'})\n end",
"def truncate(string, limit = nil, natural_separator = \" \")\n return string if limit.to_i == 0\n\n helpers.truncate(\n string,\n length: limit,\n separator: natural_separator,\n omission: \"\",\n escape: true\n )\n end",
"def truncate\n @text.truncate_words(@word_num, omission: \"#{@cut_off}\")\n end",
"def truncate(p0) end",
"def truncate(p0) end",
"def empty!\n underlying_table.delete\n end",
"def truncate(s, truncate_at, options = {})\n return s unless s.length > truncate_at\n omission = options[:omission] || '...'\n with_extra_room = truncate_at - omission.length\n stop = \\\n if options[:separator]\n rindex(options[:separator], with_extra_room) || with_extra_room\n else\n with_extra_room\n end\n \"#{s[0, stop]}#{omission}\"\n end",
"def truncate_index(options = {})\n truncate(\n options[:value].join(I18n.t('support.array.two_words_connector')),\n length: INDEX_TRUNCATION_VALUE,\n omission: I18n.t('search.index.truncated_field')\n )\n end",
"def truncate(integer)\n #This is a stub, used for indexing\n end",
"def truncate(s, truncate_at, options = {})\r\n return s unless s.length > truncate_at\r\n omission = options[:omission] || '...'\r\n with_extra_room = truncate_at - omission.length\r\n stop = \\\r\n if options[:separator]\r\n rindex(options[:separator], with_extra_room) || with_extra_room\r\n else\r\n with_extra_room\r\n end\r\n \"#{s[0, stop]}#{omission}\"\r\n end",
"def clear_keyspace!(options = {})\n schema.keys.each { |column_family| clear_column_family!(column_family, options) }\n end",
"def cascade\n if relation\n if relation.is_a?(Enumerable)\n relation.entries\n relation.each { |doc| doc.destroy }\n else\n relation.destroy\n end\n end\n end",
"def truncate(text, length = 100, truncate_string = '...')\n l = length - truncate_string.length\n (text.length > length ? text[0...l] + truncate_string : text).to_s\n end",
"def clear(options=nil)\n instrument_with_log(:clear, 'flushing all keys') do\n with { |c| c.flush_all }\n end\n rescue Dalli::DalliError => e\n log_dalli_error(e)\n instrument_error(e) if instrument_errors?\n raise if raise_errors?\n nil\n end",
"def truncate content # :nodoc:\n maxlen = @maxlen || @width-@internal_width\n maxlen = @width-@internal_width if maxlen > @width-@internal_width\n if maxlen == 0 # (otherwise it becoems -1 below)\n content.replace \"\"\n return\n end\n if !content.nil? \n cl = content.length\n if cl > maxlen # only show maxlen\n @longest_line = cl if cl > @longest_line\n #content = content[@pcol..@pcol+maxlen-1] \n ## taking care of when scrolling is needed but longest_line is misreported\n # So we scroll always and need to check 2013-03-06 - 00:09 \n content.replace(content[@pcol..@pcol+maxlen-1] || \" \")\n else\n ## taking care of when scrolling is needed but longest_line is misreported\n # So we scroll always and need to check 2013-03-06 - 00:09 \n content.replace(content[@pcol..-1]||\" \") if @pcol > 0 \n end\n end\n content\n end",
"def check_truncation_allowed!\n raise(InvalidOperation, \"Grouped datasets cannot be truncated\") if opts[:group]\n raise(InvalidOperation, \"Joined datasets cannot be truncated\") if opts[:join]\n end",
"def delete_metadata_full_path\n FileUtils.safe_unlink(metadata_full_path)\n @metadata_full_path = nil\n end",
"def delete_metadata_full_path\n FileUtils.safe_unlink(metadata_full_path)\n @metadata_full_path = nil\n end",
"def clear!\n transaction do\n Schema::TABLES.each do |table|\n execute(\"DELETE FROM #{table}\")\n execute(\"DELETE FROM sqlite_sequence WHERE name = '#{table}'\") # reset the auto-increment sequence\n end\n end\n self\n end",
"def truncate(text, *args)\n options = args.extract_options!\n unless args.empty?\n options[:length] = args[0] || 30\n options[:omission] = args[1] || \"...\"\n end\n options.reverse_merge!(:length => 30, :omission => \"...\")\n\n if text\n l = options[:length] - options[:omission].mb_chars.length\n chars = text.mb_chars\n (chars.length > options[:length] ? chars[0...l] + options[:omission] : text).to_s\n end\n end",
"def truncate(str, truncate_at, options = {})\n return str unless str.length > truncate_at\n\n options[:separator] ||= ' '\n stop = str.rindex(options[:separator], truncate_at) || truncate_at\n\n \"#{str[0, stop]}\"\nend",
"def truncate(text, length = 30, truncate_string = \"...\")\n if text\n l = length - truncate_string.chars.length\n chars = text.chars\n #$log.info(chars[0..10])\n (chars.length > length ) ?\n (chars[0...l] + truncate_string.chars).join('').to_s :\n text\n end\n end",
"def truncate(stream)\n stream = stream_name(stream)\n connection.xtrim(stream, 0)\n end",
"def trunc()\n return MicrosoftGraph::Drives::Item::Items::Item::Workbook::Functions::Trunc::TruncRequestBuilder.new(@path_parameters, @request_adapter)\n end",
"def truncate(length = 30)\n return self if self.length < length\n self[0..length].gsub(/\\s\\w+\\s*$/, '...')\n end",
"def truncate(length, options = {})\n text = self.dup\n options[:omission] ||= \"...\"\n\n length_with_room_for_omission = length - options[:omission].mb_chars.length\n chars = text.mb_chars\n stop = options[:separator] ?\n (chars.rindex(options[:separator].mb_chars, length_with_room_for_omission) || length_with_room_for_omission) : length_with_room_for_omission\n\n (chars.length > length ? chars[0...stop] + options[:omission] : text).to_s\n end",
"def wipe!\n resource_factory.orm_class.delete_all\n end",
"def truncate(string, options = {})\n length = options[:length] || 30\n\n if string.length > length\n string[0..length - 3] + \"...\"\n else\n string\n end\n end",
"def purge\n limit = Time.now.to_i - (@conf[:obsolete] * 60) # conf in minutes\n\n @db.execute(\"DELETE FROM #{@dataset} WHERE timestamp < #{limit}\")\n end",
"def trunc(size)\n\t#Loop through all of the found log files and send them to the ftunc functon one by one to be nuked :)\n\tif not @bin_logz.empty?\n\t\t@bin_logz.each do |log|\n\t\t\tftrunc(log, size)\n\t\t\tputs \"#{HC}#{FWHT}#{log} #{FGRN}if done#{FWHT}!#{RS}\"\n\t\tend\n\tend\n\tif not @logz.empty?\n\t\t@logz.each do |log|\n\t\t\tftrunc(log, size)\n\t\t\tputs \"#{HC}#{FWHT}#{log} #{FGRN}if done#{FWHT}!#{RS}\"\n\t\tend\n\tend\nend",
"def ftrunc(file, size)\n\tfoo = File.stat(file)\n\toatime=foo.atime #atime before edit\n\tomtime=foo.mtime #mtime before edit\n\n\tFile.truncate(file, size.to_i) #Rewind\n\tFile.utime(oatime, omtime, file) #Make the atime & mtime look they did before we did the rewind :)\nend",
"def purge\n @db.execute( \"DELETE FROM #{TABLE_NAME};\" )\n end",
"def truncate_attributes\n self.title = truncate(title, length: 1000)\n self.site_name = truncate(site_name, length: 200)\n self\n end",
"def clean_schema\n # AppControl.restart_server # if Rails.env.production?\n ActiveRecord::Base.connection.schema_cache.clear!\n end",
"def clear!\n @columns.clear\n @columns_hash.clear\n @primary_keys.clear\n @tables.clear\n @version = nil\n end",
"def truncate(text, length = 30, truncate_string = \"...\")\n if text.nil? then return end\n l = length - truncate_string.chars.length\n (text.chars.length > length ? text.chars[0...l] + truncate_string : text).to_s\n end",
"def truncate(*args)\n i=args[0]\n limit=args[1].to_i\n begin\n if i.split(\"\").size > limit\n k=0\n s=''\n for j in i.split(\"\")\n if k.to_i<limit\n s=s+j\n end\n k+=1\n end\n return s=s+'..'\n else\n return i\n end\n rescue Exception=>e\n puts \"DEBUG :: ERROR :: AdminXMLBuilder Component > truncate Action. Exception is #{e.to_s}\"\n return i\n end\n end",
"def clear(options = {})\n invoke(:clear, '*') do |store|\n if options[:async]\n store.flushdb(async: true)\n else\n store.flushdb\n end\n end\n end",
"def clear\n table_ = [keyspace, table].compact.join '.'\n statement = \"DROP TABLE IF EXISTS #{table_} ;\"\n session.execute(statement)\n end",
"def clear_metadata(section, *args)\n configuration.clear_metadata(section, *args)\n end",
"def truncate(text, length = 30, truncate_string = \"...\")\n return \"\" if text.nil?\n l = length - truncate_string.size\n text.size > length ? (text[0...l] + truncate_string).to_s : text\n end",
"def truncate!\n self.name = name[LENGTH_RANGE] if name && name.size > MAX_LENGTH\n self.scope = scope[LENGTH_RANGE] if scope && scope.size > MAX_LENGTH\n end",
"def wikitext_truncate_and_strip(markup, options = {})\n squished = strip_tags(markup.w).squish # may have entities, eg. " etc\n truncated = truncate squished, options.merge(escape: false)\n if truncated == squished # string wasn't changed\n truncated.html_safe\n else # string was chopped\n omission = options[:omission] || '...'\n truncated.gsub! /&[^;]+?#{Regexp.escape omission}\\z/, omission\n truncated.html_safe\n end\n end",
"def log_reset\n @log_device.truncate(0) if defined? @log_device\n self\n end",
"def clear\n instrument(:clear, nil, nil) do\n failsafe(:clear) do\n with(&:flushdb)\n end\n end\n end",
"def cleanup(options = nil)\n raise NotImplementedError.new(\"#{self.class.name} does not support cleanup\")\n end"
] |
[
"0.70971954",
"0.70765924",
"0.7043742",
"0.69265467",
"0.6460157",
"0.62650967",
"0.62350446",
"0.62074137",
"0.61999035",
"0.60787535",
"0.6010918",
"0.6002308",
"0.5951963",
"0.5832811",
"0.5784054",
"0.5650781",
"0.5630688",
"0.5513483",
"0.5496783",
"0.5469438",
"0.54579407",
"0.5392101",
"0.53192407",
"0.5317605",
"0.5316841",
"0.5312045",
"0.5285506",
"0.5274315",
"0.5246738",
"0.5242563",
"0.52369446",
"0.5220433",
"0.5213184",
"0.51907814",
"0.516641",
"0.5130397",
"0.5119107",
"0.51187855",
"0.5115779",
"0.50963134",
"0.5084692",
"0.50704837",
"0.5005944",
"0.4984507",
"0.4983478",
"0.49778554",
"0.49494925",
"0.4943433",
"0.49274698",
"0.49026158",
"0.48821676",
"0.4868018",
"0.48412892",
"0.4814537",
"0.47740692",
"0.47611082",
"0.47530422",
"0.4717869",
"0.47166672",
"0.47092485",
"0.47063965",
"0.47006366",
"0.46929792",
"0.46866816",
"0.4677645",
"0.46671477",
"0.46655515",
"0.46619877",
"0.4658424",
"0.4636275",
"0.46308103",
"0.46308103",
"0.4620224",
"0.46073785",
"0.45957845",
"0.4583226",
"0.45790842",
"0.45674992",
"0.45672497",
"0.45478243",
"0.45446613",
"0.45445243",
"0.45402798",
"0.4536578",
"0.45326772",
"0.45262086",
"0.45228338",
"0.45220542",
"0.4515397",
"0.4513939",
"0.4497985",
"0.44876966",
"0.4477479",
"0.44763947",
"0.44760883",
"0.44551983",
"0.4444895",
"0.44436222",
"0.44057253",
"0.43799263"
] |
0.63920975
|
5
|
Use WITH TIES when limiting the result set to also include additional rows that have the same values for the order columns as the final row. Requires PostgreSQL 13.
|
def with_ties
clone(:limit_with_ties=>true)
end
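# Hedged usage sketch, assuming PostgreSQL 13+ (the dataset and column names
# are hypothetical):
#
#   DB[:scores].order(Sequel.desc(:points)).limit(3).with_ties
#   # SELECT * FROM "scores" ORDER BY "points" DESC
#   # FETCH FIRST 3 ROWS WITH TIES
#
# Any rows tied with the third row on "points" are also returned.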
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def apply_distinct_on_eager_limit_strategy(ds)\n keys = predicate_key\n ds.distinct(*keys).order_prepend(*keys)\n end",
"def apply_eager_graph_limit_strategy(strategy, ds)\n case strategy\n when :distinct_on\n apply_distinct_on_eager_limit_strategy(ds.order_prepend(*self[:order]))\n when :window_function\n apply_window_function_eager_limit_strategy(ds.order_prepend(*self[:order])).select(*ds.columns)\n else\n ds\n end\n end",
"def apply_filter_by_associations_window_function_limit_strategy(ds)\n ds.where(filter_by_associations_limit_key=>apply_window_function_eager_limit_strategy(associated_eager_dataset.select(*filter_by_associations_limit_alias_key)).select(*filter_by_associations_limit_aliases))\n end",
"def apply_window_function_eager_limit_strategy(ds, limit_and_offset=limit_and_offset())\n rn = ds.row_number_column \n limit, offset = limit_and_offset\n ds = ds.unordered.select_append{|o| o.row_number.function.over(:partition=>predicate_key, :order=>ds.opts[:order]).as(rn)}.from_self\n ds = if !returns_array?\n ds.where(rn => offset ? offset+1 : 1)\n elsif offset\n offset += 1\n if limit\n ds.where(rn => (offset...(offset+limit))) \n else\n ds.where{SQL::Identifier.new(rn) >= offset} \n end\n else\n ds.where{SQL::Identifier.new(rn) <= limit} \n end\n end",
"def apply_filter_by_associations_distinct_on_limit_strategy(ds)\n k = filter_by_associations_limit_key \n ds.where(k=>apply_distinct_on_eager_limit_strategy(associated_eager_dataset.select(*k)))\n end",
"def find_expired_orders_row\n\n orders = []\n row = nil\n offset = 0\n\n while row.nil? || row.size > 0\n row = Order.where(gateway_id: self.id).order(Sequel.desc(:keychain_id), Sequel.desc(:reused)).limit(Config.reuse_address_orders_threshold).offset(offset).to_a\n\n row.reject! do |o|\n reject = false\n row.each do |o2|\n reject = true if o.keychain_id == o2.keychain_id && o.reused < o2.reused\n end\n reject\n end\n\n row.sort! { |o1, o2| o2.id <=> o1.id }\n\n row.each do |o|\n if o.status == Order::STATUSES[:expired]\n orders.unshift(o)\n elsif o.status == Order::STATUSES[:new]\n next\n else\n return orders[0...Config.reuse_address_orders_threshold]\n end\n end\n offset += Config.reuse_address_orders_threshold\n end\n\n orders\n\n end",
"def order_with(expr)\n unless expr.is_a?(String)\n raise \"must call FatTable::Table\\#order_with with a single string expression\"\n end\n\n rev = false\n if expr.match?(/\\s*!\\s*\\z/)\n rev = true\n expr = expr.sub(/\\s*!\\s*\\z/, '')\n end\n sort_sym = rev ? :sort_key! : :sort_key\n dup.select(*headers, sort_key: expr).order_by(sort_sym)\n end",
"def ranked_reports_query\n Report.joins(:variant).where({relation_name => relation}).select(<<-SQL).to_sql\n reports.*,\n DENSE_RANK() OVER (\n PARTITION BY product_id\n ORDER BY reported_at DESC\n ) AS product_price_index\n SQL\n end",
"def apply_filter_by_associations_limit_strategy(ds)\n case filter_by_associations_limit_strategy\n when :correlated_subquery\n apply_correlated_subquery_limit_strategy(ds)\n else\n super\n end\n end",
"def apply_filter_by_associations_limit_strategy(ds)\n case filter_by_associations_limit_strategy\n when :distinct_on\n apply_filter_by_associations_distinct_on_limit_strategy(ds)\n when :window_function\n apply_filter_by_associations_window_function_limit_strategy(ds)\n else\n ds\n end\n end",
"def true_eager_graph_limit_strategy\n if associated_class.dataset.supports_ordered_distinct_on? && !offset\n :distinct_on\n else\n super\n end\n end",
"def apply_correlated_subquery_limit_strategy(ds)\n table = ds.first_source_table\n table_alias = ds.first_source_alias\n primary_key = associated_class.primary_key\n key = self[:key]\n cs_alias = :t1\n cs = associated_dataset.\n from(Sequel.as(table, :t1)).\n select(*qualify(cs_alias, primary_key)).\n where(Array(qualify(cs_alias, key)).zip(Array(qualify(table_alias, key)))).\n limit(*limit_and_offset)\n ds.where(qualify(table_alias, primary_key)=>cs)\n end",
"def optimize_order!(rules)\n first_can_in_group = -1\n rules.each_with_index do |rule, i|\n (first_can_in_group = -1) && next unless rule.base_behavior\n (first_can_in_group = i) && next if first_can_in_group == -1\n next unless rule.subjects == [:all]\n\n rules[i] = rules[first_can_in_group]\n rules[first_can_in_group] = rule\n first_can_in_group += 1\n end\n end",
"def apply_eager_limit_strategy(ds, strategy=eager_limit_strategy, limit_and_offset=limit_and_offset())\n case strategy\n when :distinct_on\n apply_distinct_on_eager_limit_strategy(ds)\n when :window_function\n apply_window_function_eager_limit_strategy(ds, limit_and_offset)\n else\n ds\n end\n end",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end",
"def apply_eager_graph_limit_strategy(strategy, ds)\n case strategy\n when :correlated_subquery\n apply_correlated_subquery_limit_strategy(ds)\n else\n super\n end\n end",
"def default_eager_limit_strategy\n super if self[:order] || offset\n end",
"def filter_by_associations_add_conditions?\n super || self[:order] || self[:eager_limit_strategy] || self[:filter_limit_strategy]\n end",
"def filter_by_associations_limit_strategy\n super if self[:order] || offset || self[:eager_limit_strategy] || self[:filter_limit_strategy]\n end",
"def issues_with_ayty_fix_problem_order_by(options={})\n if group_by_sort_order.is_a?(String)\n # caso a variavel group_by_sort_order seja uma String, converte a mesma para Array\n # para funcionar um distinct depois nas colunas\n order_option = [group_by_sort_order.split(','), options[:order]].flatten.reject(&:blank?)\n else\n order_option = [group_by_sort_order, options[:order]].flatten.reject(&:blank?)\n end\n\n scope = Issue.visible.\n joins(:status, :project).\n where(statement).\n includes(([:status, :project] + (options[:include] || [])).uniq).\n where(options[:conditions]).\n order(order_option).\n joins(joins_for_order_statement(order_option.join(','))).\n limit(options[:limit]).\n offset(options[:offset])\n\n scope = scope.preload(:custom_values)\n if has_column?(:author)\n scope = scope.preload(:author)\n end\n\n issues = scope.to_a\n\n if has_column?(:spent_hours)\n Issue.load_visible_spent_hours(issues)\n end\n if has_column?(:total_spent_hours)\n Issue.load_visible_total_spent_hours(issues)\n end\n if has_column?(:relations)\n Issue.load_visible_relations(issues)\n end\n issues\n rescue ::ActiveRecord::StatementInvalid => e\n raise StatementInvalid.new(e.message)\n end",
"def eligibleOrders(orders, available_cookies)\n eligibleorders = Array.new\n # iterate through each order, and if it is not fulfilled, then iterate over each product it has, then see if it is\n # an eligible order based on it it has less cookies than what is currently the available cookies\n orders.each do |order|\n if !order.isFulfilled\n order.getProducts.each do |product|\n if product.getTitle == 'Cookie' and product.getAmount <= available_cookies\n eligibleorders.push(order)\n end\n end\n end\n end\n return eligibleorders\nend",
"def latest_answers\n unscope(:order).select('DISTINCT ON (question_id) *').order(:question_id, created_at: :desc)\n end",
"def latest_answers\n unscope(:order).select('DISTINCT ON (question_id) *').order(:question_id, created_at: :desc)\n end",
"def latest_answers\n unscope(:order).select('DISTINCT ON (question_id) *').order(:question_id, created_at: :desc)\n end",
"def ex(order,cost)\n order.select do |key, value|\n value >cost\n end\nend",
"def remove_invalid_ordering_from_select_statement(node)\n return unless Arel::Nodes::SelectStatement === node\n\n node.orders = [] unless node.offset || node.limit\n end",
"def refd_qings\n qing_group.display_conditions.map(&:ref_qing).uniq.sort_by(&:full_rank)\n end",
"def best_rank_subquery(group_by)\n @source.respond_to?(:project) or raise ThroughHierarchySourceError, \"#{@source} cannot be converted into a subquery\"\n subq = source.\n project(foreign_type_column, foreign_key_column, group_by, best_rank).\n where(filters).\n group(source[group_by]).\n as(best_rank_table_name)\n\n spawn(subq)\n end",
"def ordering_query; end",
"def true_eager_graph_limit_strategy\n r = super\n ds = associated_dataset\n if r == :ruby && ds.supports_limits_in_correlated_subqueries? && (Array(associated_class.primary_key).length == 1 || ds.supports_multiple_column_in?) && (!offset || ds.supports_offsets_in_correlated_subqueries?)\n :correlated_subquery\n else\n r\n end\n end",
"def ties\n placings.select { |s| s.participated? && !s.disqualified? }\n .group_by { |s| [s.tier, s.score, s.tiebreaker_place] }\n .values\n .reject { |g| g.size == 1 }\n end",
"def shift_up_orders()\n Feature.update_all('\"order\" = \"order\" - 1', '\"order\" > '+order.to_s)\n end",
"def order(expr)\n from(default_table).order(expr)\n end",
"def grouped_order_details\n sorted_order_details = order_details.sort_by(&:safe_group_id)\n sorted_order_details.slice_when do |before, after|\n before.group_id.nil? || before.group_id != after.group_id\n end\n end",
"def solve\n # get the json object from the REST endpoint\n json = readJSONFromUrl($Leading_json_url)\n orders = json['orders']\n allorders = Array.new\n i = 1\n # iterate through the paginated api until there is no more valid orders, and build the allorders array\n while orders.size() != 0\n # get the json from the new page\n url = $Leading_json_url + $Url_page_param + i.to_s\n json = readJSONFromUrl(url)\n # get the orders and append them all into allorders array\n orders = json['orders']\n orders.each{|orderObject| allorders.push(buildOrder(orderObject)) }\n i+=1\n end\n\n # store temp values for output\n # store available cookies output\n retlineone = Hash.new\n # store unfulfilled_orders output\n retlinetwo = Hash.new\n\n available_cookies = json['available_cookies']\n retlineone['remaining_cookies'] = available_cookies\n\n unfulfilled_orders = Array.new\n # start building the unfulfilled_orders array by iterating over each order and see if the cookie count is higher than\n # the available cookies, if it is, then add it to unfulfilled_orders\n allorders.each do |order|\n if !order.isFulfilled\n order.getProducts.each do |product|\n if product.getTitle() == 'Cookie' and getCookie(order.getProducts).getAmount > available_cookies\n unfulfilled_orders.push(order.getId)\n end\n end\n end\n end\n retlinetwo['unfulfilled_orders'] = unfulfilled_orders\n # get all eligible orders to do challenge requirements on\n orderstocheck = eligibleOrders(allorders, available_cookies)\n\n # accomplishes sorting the eligibleorders ()using custom compare method)\n # in descending cookie count and then if there is any repeats, it sorts the repeats so that the lowest id comes first\n orderstocheck.sort!{|a, b| compare(a, b)}.reverse!\n # iterate through each order, and do cases for challenge. Since the list is 'doubly-sorted', a simple iteration down\n # from most to least cookies solves the challenge\n orderstocheck.each do |order|\n # if the cookies of the order is less than currently available cookies, decrement our available_cookies by how\n # much cookies the order has\n if getCookie(order.getProducts).getAmount <= available_cookies\n available_cookies -= getCookie(order.getProducts).getAmount\n else\n # otherwise the amount of cookies the order required was more than what was available, so it is unfulfilled, and\n # added to the array of unfulfilled_orders\n unfulfilled_orders.push(order.getId)\n end\n end\n # ensure that the final unfulfilled_orders array is sorted in ascending order\n unfulfilled_orders.sort!\n # create the hash that represents the output json object\n output_hash = {\n 'remaining_cookies' => available_cookies,\n 'unfulfilled_orders' => unfulfilled_orders\n }\n # get the output json object from the hash representation, in a properly formatted output\n output_json = JSON.pretty_generate(output_hash)\n # return the solved json\n return output_json\nend",
"def use_composite_row_comparison(values)\n columns = Arel::Nodes::Grouping.new(column_definitions.map(&:column_expression))\n values = Arel::Nodes::Grouping.new(column_definitions.map do |column_definition|\n value = values[column_definition.attribute_name]\n build_quoted(value, column_definition.column_expression)\n end)\n\n if column_definitions.first.ascending_order?\n [columns.gt(values)]\n else\n [columns.lt(values)]\n end\n end",
"def order_by_cached_appeal_priority_clause\n Arel.sql(<<-SQL)\n (CASE\n WHEN cached_appeal_attributes.case_type = 'Court Remand' THEN 1\n ELSE 0\n END) DESC,\n cached_appeal_attributes.is_aod DESC,\n cached_appeal_attributes.docket_number ASC\n SQL\n end",
"def resort(*args)\n fresh.tap do |criteria|\n criteria.sort_values = args\n end\n end",
"def summarize_per_subset\n @having = ANY_ROWS\n end",
"def group_rank_and_filter_sql_statement(connection, start_date, finish_date)\n # 1. Takes all CodigoExterno with FechaCreacion >= start_date && \n # FechaCreacion <= finish_date\n # 2. Ranks them (densely: https://www.postgresql.org/docs/9.6/static/functions-window.html)\n # 3. Sorts the by FechaCreacion DESC (that is, newest to oldest)\n # 4. Returns the newest one (The first ranked one, hence by_fecha_creacion < 2)\n # 5. Sorts by id.\n connection.execute(\n \"SELECT id FROM (\n SELECT id, updated_at,\n dense_rank() OVER (\n PARTITION BY value -> 'Listado' -> 0 -> 'CodigoExterno'\n ORDER BY value ->> 'FechaCreacion' DESC) as by_fecha_creacion\n FROM results\n WHERE f_cast_isots(value ->> 'FechaCreacion'::text) >= #{start_date}\n AND f_cast_isots(value ->> 'FechaCreacion'::text) <= #{finish_date}\n ) as q WHERE by_fecha_creacion < 2\n ORDER BY id\"\n )\n end",
"def select_best_rank(group_by:)\n sub = best_rank_subquery(group_by)\n @source.\n join(sub.source).\n on(\n hierarchy_rank.eq(sub.best_rank_column).\n and(@source[group_by].eq(sub.source[group_by]))\n ).\n order(@source[group_by])\n end",
"def select_sql\n return super unless o = @opts[:offset]\n raise(Error, 'MSSQL requires an order be provided if using an offset') unless order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n unlimited.\n unordered.\n from_self(:alias=>dsa2).\n select{[WILDCARD, ROW_NUMBER(:over, :order=>order){}.as(rn)]}.\n from_self(:alias=>dsa1).\n limit(@opts[:limit]).\n where(rn > o).\n select_sql\n end",
"def reduce!\n least_important_key = if reverse_priority\n reverse_priority.detect do |k|\n current.has_key? k\n end\n else\n current.keys.last\n end\n if least_important_key\n current.delete least_important_key\n else\n raise ::RuntimeError, \"[cohort_analysis] Priority improperly specified\"\n end\n end",
"def check_order\n return if order.present? && Faq.where(order: order).where.not(id: id).empty?\n self.order = begin\n Faq.order(order: :desc).limit(1).first.order + 1.0\n rescue\n 1\n end\n end",
"def top_aggregate_options(options, requested_count)\n suggested_options = options.sort.select { |option|\n option.count.positive?\n }.take(requested_count)\n applied_options = options.select(&:applied)\n suggested_options.concat(applied_options).uniq.sort.map(&:as_hash)\n end",
"def apply_search_order( ds, options )\n\t\tif (( fields = options[:order] ))\n\t\t\tds = ds.to_a.sort_by do |uuid|\n\t\t\t\t@storage[ uuid ].values_at( *fields.compact ).map {|val| val || ''}\n\t\t\tend\n\t\tend\n\n\t\treturn ds\n\tend",
"def find_products_in_window(product, options)\n limit = options[:limit]\n \n compare_column = @@order_mapping[options[:sort]].split(',').first.gsub('products.','')\n \n left_sql = find_products_sql(options.merge({:conditions =>\n \"products.#{compare_column} <= #{connection.quote(product[compare_column])}\",\n :sort => options[:sort],\n :desc => true}))\n add_limit_offset!(left_sql, {:limit => limit+15, :offset => 0})\n left_list = Product.find_by_sql(left_sql)\n return nil unless idx = left_list.index(product)\n left_list = left_list[(idx+1)..-1] unless left_list.empty?\n \n right_sql = find_products_sql(options.merge({:conditions =>\n \"products.#{compare_column} >= #{connection.quote(product[compare_column])}\",\n :sort => options[:sort]}))\n add_limit_offset!(right_sql, {:limit => limit+15, :offset => 0})\n right_list = Product.find_by_sql(right_sql)\n return nil unless idx = right_list.index(product)\n right_list = right_list[(idx+1)..-1] unless right_list.empty?\n \n if left_list.size >= right_list.size\n right_list = right_list.slice(0,limit/2)\n left_list = left_list.slice(0,limit-right_list.size-1)\n else\n left_list = left_list.slice(0,limit/2)\n right_list = right_list.slice(0,limit-left_list.size-1)\n end\n \n left_list.reverse + [product] + right_list\n end",
"def filter_by_associations_limit_strategy\n v = fetch(:filter_limit_strategy, self[:eager_limit_strategy])\n if v || self[:limit] || !returns_array?\n case v ||= self[:model].default_eager_limit_strategy\n when true, :union, :ruby\n # Can't use a union or ruby-based strategy for filtering by associations, switch to default eager graph limit\n # strategy.\n true_eager_graph_limit_strategy\n when Symbol\n v\n end\n end\n end",
"def ensure_contiguous_ranks(_field_name = \"rank\")\n sort_by! { |e| e.rank || 1_000_000_000 }\n each_with_index { |e, idx| e.rank = idx + 1 }\n end",
"def get_results(query, conditions, order)\n query_call = \"select * from #{domain} \"\n query_call << \"where #{conditions.compact.join(' and ')}\" if conditions.length > 0\n query_call << \" #{order}\"\n if query.limit!=nil\n query_limit = query.limit\n query_call << \" limit #{query.limit}\" \n else\n #on large items force the max limit\n query_limit = 999999999 #TODO hack for query.limit being nil\n #query_call << \" limit 2500\" #this doesn't work with continuation keys as it halts at the limit passed not just a limit per query.\n end\n results = sdb.select(query_call)\n \n sdb_continuation_key = results[:next_token]\n while (sdb_continuation_key!=nil && results[:items].length < query_limit)do\n old_results = results\n results = sdb.select(query_call, sdb_continuation_key)\n results[:items] = old_results[:items] + results[:items]\n sdb_continuation_key = results[:next_token]\n end\n\n results = results[:items][0...query_limit]\n end",
"def order_items_by_article_categories_and_taxes\n object.order_items.includes(:product).where(product_type: 'Article').reduce({}) do |hash, order_item|\n hash[\"#{order_item.product_category_name}_#{order_item.product_taxes_rate}\"] ||= []\n hash[\"#{order_item.product_category_name}_#{order_item.product_taxes_rate}\"].push(order_item)\n hash\n end\n end",
"def find_reusable_order\n return # FIXME: currently it may return address which is being used by other active order\n expired_orders = find_expired_orders_row\n if expired_orders.size >= Config.reuse_address_orders_threshold &&\n fetch_transactions_for(expired_orders.last.address).empty?\n return expired_orders.last\n end\n nil\n end",
"def agregate_order_by(params, options)\n case params[:order_by].to_s\n when \"direction\" then\n order_by = \"destinations.direction_code\"\n when \"destination\" then\n order_by = \"destinations.name\"\n when \"customer_orig\" then\n order_by = \"nice_user\"\n when \"customer_term\" then\n order_by = \"terminators.name\"\n when \"billed_orig\" then\n order_by = \"originating_billed\"\n when \"billed_term\" then\n order_by = \"terminating_billed\"\n when \"billsec_orig\" then\n order_by = \"originating_billsec\"\n when \"billsec_term\" then\n order_by = \"terminating_billsec\"\n when \"duration\" then\n order_by = \"duration\"\n when \"answered_calls\" then\n order_by = \"answered_calls\"\n when \"total_calls\" then\n order_by = \"total_calls\"\n when \"asr\" then\n order_by = \"asr\"\n when \"acd\" then\n order_by = \"acd\"\n else\n options[:order_by] ? order_by = options[:order_by] : order_by = \"\"\n end\n\n without = order_by\n order_by = \"users.first_name \" + (options[:order_desc] == 1 ? \"DESC\" : \"ASC\") + \", users.last_name\" if order_by.to_s == \"users.first_name\"\n order_by = \"ds.direction_code \" + (options[:order_desc] == 1 ? \"DESC\" : \"ASC\") + \", ds.name\" if order_by.to_s == \"destinations.name\"\n order_by = \"ds.direction_code \" + (options[:order_desc] == 1 ? \"DESC\" : \"ASC\") + \", ds.subcode\" if order_by.to_s == \"destinations.name\"\n order_by = \"t.name\" if order_by.to_s == \"terminators.name\"\n\n order_by += \" ASC\" if options[:order_desc] == 0 and order_by != \"\"\n order_by += \" DESC\" if options[:order_desc] == 1 and order_by != \"\"\n return without, order_by\n end",
"def latest_orders\n @latest_orders ||= OrderDecorator.decorate_collection(\n object.\n orders.\n includes(:order_items).\n order('created_at DESC').\n limit(3))\n end",
"def refd_qings\n return @refd_qings if defined?(@refd_qings)\n qings = display_conditions.map(&:ref_qing) + skip_rules.flat_map(&:ref_qings)\n @refd_qings = qings.uniq.sort_by(&:full_rank)\n end",
"def delete_nth(order,max_e)\n #z\n # each перебирает каждый элемент массива order. добавить его в новый массив res\n # за исключением когда счётчик этого элемента будет равен числу за скобками\n res = []\n order.each { |num| res << num unless res.count(num) == max_e }\n p res\n\n # другой вариант записи\n # p order.select.with_index { |x, index| order[0..index].count(x) <= max_e }\n\n\n# то же самое через хеш\n# result = []\n# hash = Hash.new(0)\n# order.each do |a|\n# hash[a] += 1\n# result << a if hash[a] <= max_e\n# end\n# result\n\n\n\n\n# лучшее решение по kate\n# hash = Hash.new(0)\n# order.reject { |a| (hash[a] += 1) > max_e }\n\n\n# p order.delete_if {|i| order.count(i) > max_e} # порядок не сохраняется\nend",
"def order_with_pinned_sql\n -\"CASE\n WHEN (COALESCE(topics.pinned_at, '#{lowest_date}') > COALESCE(tu.cleared_pinned_at, '#{lowest_date}'))\n THEN topics.pinned_at + interval '9999 years'\n ELSE topics.bumped_at\n END DESC\"\n end",
"def critical_risks_unique_sorted\n\t\t\t\t\t#Item.select(\"items.*\").select(\"count(*) as count_all\").where(:severity => 4).group(:plugin_id).order(\"count_all DESC\")\n\t\t\t\t\tItem.where(:severity => 4).group(:plugin_id).order(Arel.sql('COUNT(*) DESC'))\n\t\t\t\tend",
"def evaluateOrder(order, orders)\n chosen=[]\n sum_indexes=0\n\n order.each do |person|\n orders[person].each_with_index do |figure, idx|\n unless(chosen.include?(figure))\n sum_indexes += (idx + 1)\n chosen << figure\n break\n end\n end\n end\n return [sum_indexes, chosen]\nend",
"def accepted_values(count)\n sheetcells.select('case when category_id >0 then (select short from categories where id = sheetcells.category_id) else accepted_value end AS accepted_value')\n .where(status_id: [2, 3, 4])\n .limit(count).distinct.order('accepted_value')\n end",
"def high_risks_unique_sorted\n\t\t\t\t\tItem.where(:severity => 3).group(:plugin_id).order(Arel.sql('COUNT(*) DESC'))\n\t\t\t\t\t#select(\"items.*\").select(\"count(*) as count_all\").where(:severity => 3).group(:plugin_id).order(\"count_all DESC\")\n\t\t\t\tend",
"def sort_criteria_clause\n return nil unless sort_criteria.present?\n sort_criteria.map { |name, order| order_clause(name, order) if name != group_by }.reject(&:blank?).join(',')\n end",
"def next_items(column=nil)\n column = column || default_orderable_column\n orderable_scoped(column).where(orderable_column(column).gt => send(column))\n end",
"def filter_articles(articles)\n include PopularitySearch\n #this isn't fair to more recent articles\n popularity_cutoff = 300\n articles.each do |article|\n article[:twitter_pop] = twitter_popularity(article[:url])\n end\n articles.select do |article|\n article[:twitter_pop] > popularity_cutoff\n end\n articles = articles.sort_by{|article| article[:twitter_pop]}.reverse\n return articles[0..2] #only pick the top 3 if there's more than 3\nend",
"def ordered_by_qualifications (collection)\n collection.sort_by {|x| [x[:years_of_experience], x[:github_points]]}.reverse!\nend",
"def most_popular_group_per_year\n sql = <<-SQL\n -- SELECT year, category FROM guests GROUP BY year, category ORDER BY count(category), year DESC\n SELECT DISTINCT year, category, count(category) FROM guests GROUP BY year, category ORDER BY count(category) DESC\n SQL\n DB[:conn].execute(sql)\nend",
"def delete_nth(order, max_e)\n result = []\n occurrences = Hash.new(0)\n\n order.each do |el|\n occurrences[el] += 1\n result << el unless occurrences[el] > max_e\n end\n\n result\nend",
"def ordered_by_qualifications(candidates)\n ordered_candidates = candidates.sort_by { |candidate| [candidate[:years_of_experience], candidate[:github_points]] }\n return (ordered_candidates).reverse\n\n # @ordered_by_qualifications = []\n\n # candidates.each do |candidate|\n # years_exp s=candidate[:years_of_experience]\n # @ordered_by_qualifications << years_exp\n # if years_exp == years_exp += 1\n # candidate[:github_points] > candidate[github_points] += 1\n # end \n \n # end\n # return @ordered_by_qualifications.sort!.reverse\n #This line returns the values 12..1 \n # return @ordered_by_qualifications.sort!.reverse\nend",
"def include_eager_order?(options)\n order = options[:order]\n return false unless order\n order.scan(/([\\.\\w]+)\\.[\\[\"]?\\w+[\\]\"]?/).flatten.any? do |order_table_name|\n order_table_name != table_name\n end\n end",
"def order_more(*order)\n order(*((@opts[:order] || []) + order))\n end",
"def score(user_id, order, question_id, limit, adj=true)\n joins = \"INNER JOIN users ON items.user_id=#{user_id}\"\n joins += \" INNER JOIN items_questions ON (items_questions.question_id=#{question_id} AND items_questions.item_id=items.id)\" if question_id.to_i > 0\n conditions = {\n :joins => joins,\n :group => 'items.id'\n }\n items = Item.find(:all, conditions)\n\n conditions = {\n :conditions => { :active => false },\n :order => 'prompts.created_at',\n :include => [:items, :votes],\n :group => 'prompts.id'\n }\n conditions[:conditions].merge!(:question_id => question_id) if question_id > 0\n prompts = Prompt.all(conditions)\n\n elo = {}\n items.each { |item| elo[item] = START_RATING }\n @adj = adj\n prompts.each do |prompt|\n prompt.votes.each do |vote|\n # elo values static during update\n old_elo = elo.clone\n # if vote has items it has winner(s)\n unless vote.items.empty?\n vote.items.each do |item|\n lost_items = prompt.items - vote.items\n lost_items.each do |loser|\n elo[item] += adjust_elo(WIN_SCORE, old_elo[item], elo[loser])\n elo[loser] += adjust_elo(LOSS_SCORE, elo[loser], old_elo[item])\n end\n end\n else\n # otherwise consider as draw between all prompt items\n prompt.items.each do |item|\n (prompt.items - [item]).each do |other|\n elo[item] += adjust_elo(DRAW_SCORE, elo[item], old_elo[other])\n end\n end # item_ids each\n end # else\n end # votes each\n end # prompts each\n elo = (order == \"asc\") ? elo.sort_by { |k, v| v } : elo.sort_by { |k, v| -v }\n elo = elo.first(limit) if limit > 0\n elo.transpose\n end",
"def ordered_by_qualifications(candidates)\n if [:years_of_experience] == [:years_of_experience]\n @candidates.sort_by { |experience| experience[:years_of_experience] }.reverse\n else\n @candidates.sort_by { |github| github[:github_points]}.reverse\n end\nend",
"def find_good_items_for_room\n return Item.where({\"room_id\" => self.id, \"condition\" => 4..5}).order('condition desc')\n end",
"def apply\n jobs.sort_by.with_index { |job, ix| order(job, ix) }\n end",
"def sort_relevant_rules\r\n # sort array in rule priority order\r\n @relevant_rules = @relevant_rules.sort do |r1, r2| \r\n r2.priority <=> r1.priority \r\n end\r\n end",
"def rank_quotes\n @quotes.sort! do |quote1, quote2| \n compare = quote1.price_with_taxes <=> quote2.price_with_taxes\n compare.zero? ? (quote1.supplier <=> quote2.supplier) : compare #in case quotes are the same, but with different suppliers\n end\n \n lowest_price = @quotes.first.price_with_taxes\n output = []\n @quotes.each_with_index do |quote, rank|\n differential = (quote.price_with_taxes - lowest_price).round(4)\n output.push([@id, quote.supplier, quote.price_with_taxes.round(4), rank + 1, differential])\n end\n output\n \n end",
"def order_unordered\n self.reset_column_information\n self.group(self.orderable_scope).each do |obj|\n unordered_conditions = \"#{self.orderable_column} IS NULL OR #{self.table_name}.#{self.orderable_column} = 0\"\n ordered_conditions = \"#{self.orderable_column} IS NOT NULL AND #{self.table_name}.#{self.orderable_column} != 0\"\n order_nr = obj.all_orderable.order(self.orderable_column).last[self.orderable_column] || 0\n obj.all_orderable.where(unordered_conditions).find_each do |item|\n order_nr += 1\n raw_orderable_update(item.id, order_nr)\n end\n end\n end",
"def visits_by_organization group_by_disregarded: true, disregard_threshold: 5\n organizations_query = visits_by_organization_query\n\n if group_by_disregarded\n pertinent_organizations = Organization.find_by_sql(\n [organizations_query, pertinent_organization_query].join(\" \")\n )\n\n non_pertinent_organizations = Organization.find_by_sql(\n [\n organizations_query,\n non_pertinent_organization_query(without: pertinent_organizations)\n ].join(\" \")\n )\n\n orgs = {\n pertinent: pertinent_organizations,\n non_pertinent: non_pertinent_organizations\n }\n else\n orgs = Organization.find_by_sql([\n organizations_query,\n \"ORDER BY visit_count DESC\"\n ].join(\" \"))\n end\n\n orgs\n end",
"def calculate_similar_items(preferences, limit = 10)\n result = {} \n # Invert the preference matrix to be item-centric \n item_preferences = transform_preferences(preferences) \n item_preferences.keys.each do |item|\n # Find the most similar items to this one \n scores = top_matches(item_preferences, item, limit) \n result[item] = scores\n end\n return result\nend",
"def ezproxy_top_x_data (model, x, group)\n requests = model.group(group)\n .sum(:requests)\n .sort_by{|k,v| -v}\n .first(x)\n .to_h\n\n sessions = model.group(group)\n .sum(:sessions)\n .select{|k,v| requests.keys.include? k}\n\n [\n {\n name: \"Requests\",\n data: requests\n },\n {\n name: \"Sessions\",\n data: sessions\n }\n ]\n end",
"def apply_criteria_options\n if spec = criteria.options[:sort]\n query.sort(spec)\n end\n if spec = criteria.options[:fields]\n query.select(spec)\n end\n end",
"def oldest(constraints = {})\n constraints.merge!(order: :created_at.asc)\n _q = query(constraints)\n _q.define_singleton_method(:method_missing) { |m, *args, &block| self.results.send(m, *args, &block) }\n _q\n end",
"def filter_by_associations_limit_strategy\n nil\n end",
"def where(expr)\n expr = expr.to_s\n result = empty_dup\n ev = Evaluator.new(ivars: { row: 0, group: 0 })\n rows.each_with_index do |row, k|\n grp = row_index_to_group_index(k)\n ev.update_ivars(row: k + 1, group: grp)\n ev.eval_before_hook(locals: row)\n result << row if ev.evaluate(expr, locals: row)\n ev.eval_after_hook(locals: row)\n end\n result.normalize_boundaries\n result\n end",
"def index\n @things = Thing.rank(:row_order).all\n end",
"def select_sql\n return super unless o = @opts[:offset]\n raise(Error, \"#{db.database_type} requires an order be provided if using an offset\") unless order = @opts[:order]\n dsa1 = dataset_alias(1)\n rn = row_number_column\n sql = @opts[:append_sql] || ''\n subselect_sql_append(sql, unlimited.\n unordered.\n select_append{ROW_NUMBER(:over, :order=>order){}.as(rn)}.\n from_self(:alias=>dsa1).\n limit(@opts[:limit]).\n where(SQL::Identifier.new(rn) > o).\n order(rn))\n sql\n end",
"def item_number_conditions( object )\n\t\t# nothing to do: no order specified\n\t\treturn {} if order.nil?\n\n\t\ts = ''\n\t\tv = []\n\n# First, we'll start with the last and we'll build our query from the inside out\n\t\tordering = order.reverse\n# End case: the last query:\n\t\to = ordering.shift\n\t\tif o\n\t\t\tcolumn = o.first\n\t\t\tdirection = o.last\n\t\t\ts = \"( #{absolute_column_name( column )} #{order_to_sign( direction )}= ? )\"\n\t\t\tif table_join_accessors.has_key?( column )\n\t\t\t\tv << table_join_accessors[column].call( object )\n\t\t\telse\n\t\t\t\tv << object.send( column )\n\t\t\tend\n\t\tend\n\n# Now, the rest of the clauses are built to include all previous clauses\n\t\tordering.each do |column, direction|\n\t\t\ts = \"( #{absolute_column_name( column )} #{order_to_sign( direction )} ? OR ( #{absolute_column_name( column )} = ? AND #{s} ) )\"\n\t\t\tif table_join_accessors.has_key?( column )\n\t\t\t\tv << table_join_accessors[column].call( object )\n\t\t\t\tv << table_join_accessors[column].call( object )\n\t\t\telse\n\t\t\t\tv << object.send( column )\n\t\t\t\tv << object.send( column )\n\t\t\tend\n\t\tend\n\n\t\t{ :conditions => [\"(#{s})\"].concat(v.reverse) }\n\tend",
"def build_subselect(key, o)\n subselect = super\n\n # Materialize subquery by adding distinct\n # to work with MySQL 5.7.6 which sets optimizer_switch='derived_merge=on'\n unless has_limit_or_offset_or_orders?(subselect)\n core = subselect.cores.last\n core.set_quantifier = Arel::Nodes::Distinct.new\n end\n\n Nodes::SelectStatement.new.tap do |stmt|\n core = stmt.cores.last\n core.froms = Nodes::Grouping.new(subselect).as(\"__active_record_temp\")\n core.projections = [Arel.sql(quote_column_name(key.name))]\n end\n end",
"def return_cards_from_top(count)\n if cards and count > 0\n cards.where(\"card_order > ?\", (get_top_order-count) )\n end\n end",
"def ordered_results_for_activity(activity_object)\n all_results_for_activity = Result.where({\"activity_id\" => activity_object.id}).where.not({\"student_score\" => nil})\n ordered_results = all_results_for_activity.order(student_score: :desc)\n return ordered_results\n end",
"def newest(constraints = {})\n constraints.merge!(order: :created_at.desc)\n _q = query(constraints)\n _q.define_singleton_method(:method_missing) { |m, *args, &block| self.results.send(m, *args, &block) }\n _q\n end",
"def true_eager_limit_strategy\n if self[:eager_graph] || (offset && !associated_dataset.supports_offsets_in_correlated_subqueries?)\n # An SQL-based approach won't work if you are also eager graphing,\n # so use a ruby based approach in that case.\n :ruby\n else\n :union \n end\n end",
"def select_with_sql_cte(sql, cte)\n super\n select_with_sql_cte_search_cycle(sql, cte)\n end",
"def by_priority; end",
"def add_top_driver(results_table,top_drivers,rank_by)\n top_score = results_table.max_by{|h|h[rank_by]}[rank_by]\n top_driver = results_table.select{|h| h[rank_by] == top_score }.map{|h| h[:driver_id]}\n top_drivers[rank_by] = {top_score: top_score, drivers: top_driver}\nend",
"def order_by(*sort_heads)\n # Sort the rows in order and add to new_rows.\n key_hash = partition_sort_keys(sort_heads)\n new_rows = rows.sort do |r1, r2|\n # Set the sort keys based on direction\n key1 = []\n key2 = []\n key_hash.each_pair do |h, dir|\n if dir == :forward\n key1 << r1[h]\n key2 << r2[h]\n else\n key1 << r2[h]\n key2 << r1[h]\n end\n end\n # Make any booleans comparable with <=>\n key1 = key1.map_booleans\n key2 = key2.map_booleans\n\n # If there are any nils, <=> will return nil, and we have to use the\n # special comparison method, compare_with_nils, instead.\n result = (key1 <=> key2)\n result.nil? ? compare_with_nils(key1, key2) : result\n end\n\n # Add the new_rows to the table, but mark a group boundary at the points\n # where the sort key changes value. NB: I use self.class.new here\n # rather than Table.new because if this class is inherited, I want the\n # new_tab to be an instance of the subclass. With Table.new, this\n # method's result will be an instance of FatTable::Table rather than of\n # the subclass.\n new_tab = empty_dup\n last_key = nil\n new_rows.each_with_index do |nrow, k|\n new_tab << nrow\n key = nrow.fetch_values(*key_hash.keys)\n new_tab.mark_boundary(k - 1) if last_key && key != last_key\n last_key = key\n end\n new_tab.normalize_boundaries\n new_tab\n end",
"def filter_mutually_exclusive_discounts\n superset = Set.new\n discount_lines.each { |dl| superset |= dl.mutually_exclusive_with }\n superset.each do |x|\n mx = discount_lines.select { |dl| dl.mutually_exclusive_with.member?(x) }\n .sort_by(&:price_adjustment)\n # Keep the best.\n mx.shift\n # Delete the rest.\n mx.each { |dl| discount_lines.delete(dl) }\n end\n end",
"def ordered_by_qualifications(candidates)\n sortedCandidates = candidates.sort_by! { |candidate| [candidate[:years_of_experience], candidate[:github_points]] }\n return sortedCandidates.reverse! \nend",
"def replace_by_most_seen_value!\n each_sample do |k, s|\n each_feature do |f|\n fv = get_feature_values(f)\n next if fv.size == get_sample_size # no missing values\n \n seen_count, seen_value = 0, nil\n fv.uniq.each do |v|\n count = fv.count(v)\n if count > seen_count\n seen_count = count\n seen_value = v\n end\n end\n \n if not s.has_key? f\n s[f] = seen_value\n end\n end\n end\n \n # clear variables\n clear_vars\n end",
"def ordered_by_qualifications(candidates)\n\n candidates.sort_by{ |candidate| [-candidate[:years_of_experience], -candidate[:github_points]] }\n\nend",
"def last_partitions_order_by_clause\n unless @last_partitions_order_by_clause\n @last_partitions_order_by_clause = collect_first(&:last_partitions_order_by_clause)\n end\n return @last_partitions_order_by_clause\n end"
] |
[
"0.5299166",
"0.5172057",
"0.48928833",
"0.48259202",
"0.48073283",
"0.4780117",
"0.4733667",
"0.4678507",
"0.46374992",
"0.46358874",
"0.4614721",
"0.4611258",
"0.45715606",
"0.45108175",
"0.4510816",
"0.44654173",
"0.44593266",
"0.44482273",
"0.44270986",
"0.44252208",
"0.44130385",
"0.44028774",
"0.44028774",
"0.44028774",
"0.4386451",
"0.43518084",
"0.43470395",
"0.4313078",
"0.43051806",
"0.4277129",
"0.42676428",
"0.42567754",
"0.42436934",
"0.42356414",
"0.4224531",
"0.4213285",
"0.42105192",
"0.42015737",
"0.4179946",
"0.41521284",
"0.4151924",
"0.414646",
"0.41416726",
"0.41351923",
"0.41223142",
"0.41144022",
"0.41128564",
"0.409641",
"0.4052926",
"0.40508503",
"0.40480176",
"0.40320948",
"0.4031087",
"0.40298462",
"0.40216213",
"0.4020131",
"0.40136552",
"0.4013581",
"0.4012494",
"0.40116107",
"0.40112126",
"0.4003117",
"0.39925152",
"0.39835075",
"0.39828327",
"0.39799356",
"0.3976553",
"0.39685184",
"0.3964456",
"0.39640433",
"0.3963479",
"0.39630923",
"0.39622343",
"0.3960128",
"0.39525053",
"0.39471453",
"0.39403936",
"0.39386883",
"0.39378497",
"0.39371258",
"0.39301094",
"0.3918999",
"0.3918218",
"0.3910996",
"0.39103833",
"0.39043918",
"0.39041594",
"0.39040318",
"0.3903546",
"0.39017433",
"0.38964015",
"0.38960767",
"0.38956067",
"0.38951364",
"0.38949114",
"0.3891304",
"0.38847598",
"0.38837564",
"0.38793668",
"0.38769713",
"0.38734907"
] |
0.0
|
-1
|
If returning primary keys is requested, use RETURNING unless it is already set on the dataset. If RETURNING is already set, use the existing returning values. If RETURNING is set to return only a single column, return an array of just that column. Otherwise, return an array of hashes.
|
def _import(columns, values, opts=OPTS)
if @opts[:returning]
# no transaction: our multi_insert_sql_strategy should guarantee
# that there's only ever a single statement.
sql = multi_insert_sql(columns, values)[0]
returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}
elsif opts[:return] == :primary_key
returning(insert_pk)._import(columns, values, opts)
else
super
end
end
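
A minimal usage sketch of the behavior described above (assuming Sequel's PostgreSQL adapter, where Dataset#import accepts a :return option; the DB[:items] dataset and its :id/:name columns are illustrative):

# Forcing RETURNING to the primary key yields one value per inserted row:
DB[:items].import([:name], [['a'], ['b']], :return=>:primary_key) # => [1, 2]

# If RETURNING was already set on the dataset, its columns are used instead;
# a single returned column gives a flat array, several columns give hashes:
DB[:items].returning(:id).import([:name], [['a'], ['b']]) # => [1, 2]
DB[:items].returning(:id, :name).import([:name], [['a'], ['b']])
# => [{:id=>1, :name=>'a'}, {:id=>2, :name=>'b'}]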
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def returning(*values)\n if values.empty?\n cached_dataset(:_returning_ds) do\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>EMPTY_ARRAY)\n end\n else\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>values.freeze)\n end\n end",
"def returning(*columns)\n columns = columns.flatten\n\n columns.each do |column|\n check_column(column, @table, @col_types)\n end\n\n with_options(:returning => columns)\n end",
"def ids\n primary_key_array = Array(primary_key)\n\n if loaded?\n result = records.map do |record|\n if primary_key_array.one?\n record._read_attribute(primary_key_array.first)\n else\n primary_key_array.map { |column| record._read_attribute(column) }\n end\n end\n return @async ? Promise::Complete.new(result) : result\n end\n\n if has_include?(primary_key)\n relation = apply_join_dependency.group(*primary_key_array)\n return relation.ids\n end\n\n columns = arel_columns(primary_key_array)\n relation = spawn\n relation.select_values = columns\n\n result = if relation.where_clause.contradiction?\n ActiveRecord::Result.empty\n else\n skip_query_cache_if_necessary do\n klass.connection.select_all(relation, \"#{klass.name} Ids\", async: @async)\n end\n end\n\n result.then { |result| type_cast_pluck_values(result, columns) }\n end",
"def insert_returning_columns(ds)\n return unless ds.supports_returning?(:insert)\n return unless values = ds.opts[:select]\n\n values = values.map{|v| ds.unqualified_column_for(v)}\n if values.all?\n values\n end\n end",
"def returning(columns)\n @sql_returning = ::MultiInsert::QueryBuilder.returning(columns)\n @returning_flat = false\n self\n end",
"def primary_key\n unless @primary_key\n pk_column_names = Set.new( primary_key_columns.collect { |c| c.name } )\n unique_indexes = indexes.values.find_all { |i| i.unique? }\n\n pk_result = []\n\n unique_indexes.each do |idx|\n idx_column_names = Set.new( idx.columns.collect { |c| c.name } )\n r = idx_column_names ^ pk_column_names\n if r.size == 0 then\n pk_result = idx.columns\n break\n end\n end\n\n # no joy, see about just using all the columns that say the are primary\n # keys\n if pk_result.empty? then\n pk_result = self.primary_key_columns\n end\n @primary_key = pk_result\n end\n return @primary_key\n end",
"def query_return_array(sql, *binds)\n mysql.fetch(sql, *binds).all\n end",
"def _select_pk_ds\n @_select_pk_ds ||= metadata_dataset.\n from(:pg_class, :pg_attribute, :pg_index, :pg_namespace).\n where{[\n [pg_class[:oid], pg_attribute[:attrelid]],\n [pg_class[:relnamespace], pg_namespace[:oid]],\n [pg_class[:oid], pg_index[:indrelid]],\n [pg_index[:indkey].sql_subscript(0), pg_attribute[:attnum]],\n [pg_index[:indisprimary], 't']\n ]}.\n select{pg_attribute[:attname].as(:pk)}\n end",
"def sql_with_returning(sql)\n table_ref = extract_table_ref_from_update_sql(sql)\n\n returning_columns = quote_returning_column_names(table_ref, nil, :update)\n\n return sql if returning_columns.blank?\n\n \"#{sql} RETURNING #{returning_columns.join(', ')}\"\n end",
"def primary_keys\n cached_fetch(:primary_keys){Array(primary_key)}\n end",
"def primary_keys(field)\n sql = \"SELECT #{field.primary_key_col} from #{field.table} \"\n sql += \"#{where_and(sql)} #{field.column} IS NOT NULL \" if field.leave_null\n field.where&.each_pair do |column, value|\n sql += \"#{where_and(sql)} #{column} = #{value} \"\n end\n sql += \"ORDER BY #{field.primary_key_col};\"\n execute(sql).split(\"\\n\")\nend",
"def rows\n @rows ||= if ActiveRecord::Base.connection.adapter_name == \"PostgreSQL\"\n result.entries\n else\n [].tap do |row_hashes|\n result.entries.map do |row|\n hash = {}\n result.fields.each do |field|\n hash[field] = row[result.fields.index(field)]\n end\n row_hashes << hash\n end\n end\n end\n end",
"def exec_raw(sql, options = {})\n cursor = $connection.exec(sql)\n if(options[:return_hash])\n recordset, = pack_cursor(cursor, :return => \"hash\")\n return recordset\n else\n return_data = []\n while current_row = cursor.fetch()\n return_data.push(current_row)\n end\n return return_data\n end\n end",
"def primary_key_columns\n @columns.values.find_all { |c| c.primary_key? }\n end",
"def to_a\n column_names = [@query.klass.primary_key] | connect_by_columns\n column_names.map { |name| @query.table[name] }\n end",
"def fetch_rows(sql)\n execute(sql) do |stmt|\n columns = []\n convert = convert_smallint_to_bool\n cps = db.conversion_procs\n stmt.num_fields.times do |i|\n k = stmt.field_name i\n key = output_identifier(k)\n type = stmt.field_type(i).downcase.to_sym\n # decide if it is a smallint from precision\n type = :boolean if type == :int && convert && stmt.field_precision(i) < 8\n type = :blob if type == :clob && db.use_clob_as_blob\n columns << [key, cps[type]]\n end\n cols = columns.map{|c| c[0]}\n self.columns = cols\n\n while res = stmt.fetch_array\n row = {}\n res.zip(columns).each do |v, (k, pr)|\n row[k] = ((pr ? pr.call(v) : v) if v)\n end\n yield row\n end\n end\n self\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def all(sql, *args, into: nil, &block)\n raise ArgumentError, \"all no longer support blocks, use each instead.\" if block\n\n rows, pg_source_oid, column_info = each_without_conversion(sql, *args, into: into)\n\n result = convert_rows_to_result rows, into: into, pg_source_oid: pg_source_oid\n\n # [TODO] - resolve associations. Note that this is only possible if the type\n # is not an Array (i.e. into is nil)\n\n result.pagination_scope = sql if sql.is_a?(::Simple::SQL::Connection::Scope) && sql.paginated?\n result.column_info = column_info\n result\n end",
"def pick(*column_names)\n if loaded? && all_attributes?(column_names)\n result = records.pick(*column_names)\n return @async ? Promise::Complete.new(result) : result\n end\n\n limit(1).pluck(*column_names).then(&:first)\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def primary_key\n select(&:primary_key?)\n end",
"def result\n ActiveRecord::Base.connection.select_all(sql).entries\n end",
"def pluck(*column_names)\n return [] if @none\n\n if loaded? && all_attributes?(column_names)\n result = records.pluck(*column_names)\n if @async\n return Promise::Complete.new(result)\n else\n return result\n end\n end\n\n if has_include?(column_names.first)\n relation = apply_join_dependency\n relation.pluck(*column_names)\n else\n klass.disallow_raw_sql!(column_names.flatten)\n columns = arel_columns(column_names)\n relation = spawn\n relation.select_values = columns\n result = skip_query_cache_if_necessary do\n if where_clause.contradiction?\n ActiveRecord::Result.empty(async: @async)\n else\n klass.connection.select_all(relation.arel, \"#{klass.name} Pluck\", async: @async)\n end\n end\n result.then do |result|\n type_cast_pluck_values(result, columns)\n end\n end\n end",
"def returning_id\n @sql_returning = ::MultiInsert::QueryBuilder.returning([:id])\n @returning_flat = true\n self\n end",
"def each\n return enum_for(:each) unless block_given?\n\n relation = @query.all(batch_order).all(:limit => @per_chunk)\n records = relation.all(primary_key.name.gte => 0)\n\n while records.any?\n yield records\n\n break if records.size < @per_chunk\n\n if primary_key_offset = records.last.key.first\n records = relation.all(primary_key.name.gt => primary_key_offset)\n else\n raise \"Primary key not included in the custom select clause\"\n end\n end\n\n return self\n end",
"def primary_key(_table_name)\n []\n end",
"def fetch_rows(sql)\n return cursor_fetch_rows(sql){|h| yield h} if @opts[:cursor]\n execute(sql){|res| yield_hash_rows(res, fetch_rows_set_cols(res)){|h| yield h}}\n end",
"def result_columns_or_default\n self.result_columns || DEFAULT_RESULT_COLUMNS.dup\n end",
"def get_records(params, columns)\n []\n end",
"def fetch(sql, *params)\n rs = self.execute(sql, *params)\n self.execute(\"flush privileges\") # Always flush in case user wants to alter users\n return [] if self.interpreter.preview? && ! rs\n return rs.fetch_all rescue nil\n end",
"def select_all(stmt, bindvars={}, &p)\n sanity_check(stmt)\n rows = nil\n execute(stmt, bindvars) do |sth|\n if block_given?\n sth.each(&p)\n else\n rows = sth.fetch_all\n end\n end\n return rows\n end",
"def primary_key_names(table_name, options = {})\n return connection.primary_key_names(table_name) if options[:raw]\n \n self.primary_key_names_cache ||= {}\n result = primary_key_names_cache[table_name]\n unless result\n result = manual_primary_keys[table_name] || connection.primary_key_names(table_name)\n primary_key_names_cache[table_name] = result\n end\n result\n end",
"def extract_key(row)\n row.reject {|column, value| not primary_key_names.include? column }\n end",
"def pluck(*columns)\n fail ArgumentError, 'No columns specified for Query#pluck' if columns.size.zero?\n\n query = return_query(columns)\n columns = query.response.columns\n\n if columns.size == 1\n column = columns[0]\n query.map { |row| row[column] }\n else\n query.map { |row| columns.map { |column| row[column] } }\n end\n end",
"def async_lookup args\n unless Hash === args\n args = primary_key_hash(args)\n end\n\n dataset.where(args).limit(1).async_all{ |rows|\n if rows.any?\n yield rows.first\n else\n yield nil\n end\n }\n nil\n end",
"def primary_keys\n ::Kernel.raise Errors::NotImplemented\n end",
"def execute\n result = nil\n ActiveRecord::Base.connection_pool.with_connection do |con|\n result = con.execute(to_sql)\n end\n if @sql_returning.nil?\n nil\n else\n if @returning_flat\n result.values.map{|r| r.first}\n else\n result\n end\n end\n end",
"def recordset_from_stored_procedure(sp)\n \n plsql = $connection.parse(sp)\n plsql.bind_param(':out', OCI8::Cursor)\n plsql.exec\n \n cursor = plsql[':out']\n \n \n recordset, = pack_cursor(cursor, :return => 'hash')\n plsql.close\n return recordset\n end",
"def primary_key_lookup(pk)\n if sql = @fast_pk_lookup_sql\n sql = sql.dup\n ds = dataset\n ds.literal_append(sql, pk)\n ds.fetch_rows(sql){|r| return ds.row_proc.call(r)}\n nil\n else\n dataset.first(primary_key_hash(pk))\n end\n end",
"def primary_key_lookup(pk)\n if sql = @fast_pk_lookup_sql\n sql = sql.dup\n ds = dataset\n ds.literal_append(sql, pk)\n ds.fetch_rows(sql){|r| return ds.row_proc.call(r)}\n nil\n elsif dataset.joined_dataset?\n # SEQUEL5: Remove as joined model datasets are not allowed\n dataset.first(qualified_primary_key_hash(pk))\n else\n dataset.first(primary_key_hash(pk))\n end\n end",
"def all_hash\n results = CONNECTION.execute(\"SELECT * FROM #{get_table_name};\")\n return array_list = make_object_array(results)\n end",
"def keys\n @keys ||= [column_for_order_by(relation), primary_key].compact.uniq\n end",
"def _select_map_multiple(ret_cols)\n rows = []\n clone(:_sequel_pg_type=>:array).fetch_rows(sql){|s| rows << s}\n rows\n end",
"def get_primary_key_value_map\n # {{{\n return @primary_key_value_map if (@primary_key_value_map && !@touched)\n \n accessor = self.class\n base_models = accessor.__associations__.base_klasses()\n table_name = accessor.table_name\n pkey_fields = accessor.get_primary_keys\n \n if !pkey_fields[table_name] then\n raise ::Exception.new(\"Unable to resolve pkey fields for #{self.class.to_s}. Known fields are: #{pkey_fields.inspect}\")\n end\n\n @primary_key_value_map = { table_name => {} }\n pkey_fields[table_name].each { |own_pkey|\n @primary_key_value_map[table_name][own_pkey] = @attribute_values_flat[own_pkey]\n }\n \n # Map own foreign key values back to foreign primary key \n # values. This is necessary as joined primary key field names are \n # shadowed. \n accessor.__associations__.pkey_value_lookup.each { |mapping|\n foreign_pkeys = {}\n mapping.at(1).each_with_index { |fkey,idx|\n value = @attribute_values_flat[fkey]\n foreign_pkeys[mapping.at(2).at(idx)] = value \n }\n @primary_key_value_map[mapping.at(0)] = foreign_pkeys\n }\n return @primary_key_value_map\n end",
"def fetch_rows(sql)\n execute(sql) do |r|\n i = -1\n cps = db.conversion_procs\n cols = r.fetch_fields.map do |f| \n # Pretend tinyint is another integer type if its length is not 1, to\n # avoid casting to boolean if convert_tinyint_to_bool is set.\n type_proc = f.type == 1 && cast_tinyint_integer?(f) ? cps[2] : cps[f.type]\n [output_identifier(f.name), type_proc, i+=1]\n end\n self.columns = cols.map(&:first)\n if opts[:split_multiple_result_sets]\n s = []\n yield_rows(r, cols){|h| s << h}\n yield s\n else\n yield_rows(r, cols){|h| yield h}\n end\n end\n self\n end",
"def pack_cursor(cursor, options = {})\n \n recordset = []\n column_names = []\n var_cursor = cursor.get_col_names\n \n while current_row = cursor.fetch() \n case options[:return]\n when 'hash'\n current_record = {}\n current_row.each_index{ |index| \n current_record[var_cursor[index]] = current_row[index] \n column_names[index] = var_cursor[index].split('_').join(' ')\n }\n when 'array'\n current_record = []\n current_row.each_index{ |index| \n current_record[index] = current_row[index] \n column_names[index] = var_cursor[index].split('_').join(' ')\n } \n end\n \n recordset.push(current_record)\n end\n \n return recordset, column_names\n end",
"def all \n results = CONNECTION.execute(\"SELECT * FROM #{self.table_name}\")\n \n return self.results_as_objects(results)\n end",
"def select_rows(sql, name = nil)\r\n rs = ADS.instance.api.ads_execute_direct(@connection, sql)\r\n raise ActiveRecord::StatementInvalid.new(\"#{ADS.instance.api.ads_error(@connection)}:#{sql}\") if rs.nil?\r\n record = []\r\n while ADS.instance.api.ads_fetch_next(rs) == 1\r\n max_cols = ADS.instance.api.ads_num_cols(rs)\r\n result = Array.new(max_cols)\r\n max_cols.times do |cols|\r\n result[cols] = ADS.instance.api.ads_get_column(rs, cols)[1]\r\n end\r\n record << result\r\n end\r\n ADS.instance.api.ads_free_stmt(rs)\r\n return record\r\n end",
"def primary_key(table_name)\n stmt = @connection.primary_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n result[0] && result[0][3]\n end",
"def fetch_rows(sql)\n execute(sql) do |stmt|\n self.columns = cols = stmt.result_fields.map{|c| output_identifier(c)}\n col_count = cols.size\n stmt.each do |result|\n row = {}\n col_count.times{|i| row[cols[i]] = result[i]}\n yield row\n end\n end\n end",
"def hash_query(sql, name = nil, binds = [])\n \n return if sql.nil?\n #sql = modify_limit_offset(sql)\n\n # ActiveRecord allows a query to return TOP 0. SQL Anywhere requires that the TOP value is a positive integer.\n return Array.new() if sql =~ /TOP 0/i\n\n stmt = SA.instance.api.sqlany_prepare(@connection, sql)\n \n # sql may contain unbounded params\n \n i = 0\n binds.map do |col, val|\n result, param = SA.instance.api.sqlany_describe_bind_param(stmt, i)\n param.set_value(type_cast(val, col)) if result\n result = SA.instance.api.sqlany_bind_param(stmt, i, param) if param\n i = i + 1\n end\n \n # Executes the query, iterates through the results, and builds an array of hashes.\n # rs = SA.instance.api.sqlany_execute_direct(@connection, sql)\n return [] if stmt.nil?\n result = SA.instance.api.sqlany_execute(stmt)\n if result.nil?\n result, errstr = SA.instance.api.sqlany_error(@connection)\n raise SQLAnywhereException.new(errstr, result, sql)\n end\n \n record = []\n if( SA.instance.api.sqlany_num_cols(stmt) > 0 ) \n while SA.instance.api.sqlany_fetch_next(stmt) == 1\n max_cols = SA.instance.api.sqlany_num_cols(stmt)\n result = Hash.new()\n max_cols.times do |cols|\n result[SA.instance.api.sqlany_get_column_info(stmt, cols)[2]] = SA.instance.api.sqlany_get_column(stmt, cols)[1]\n end\n record << result\n end\n @affected_rows = 0\n else\n @affected_rows = SA.instance.api.sqlany_affected_rows(stmt)\n end \n SA.instance.api.sqlany_free_stmt(stmt)\n\n SA.instance.api.sqlany_commit(@connection)\n \n return record\n end",
"def primary_keys(table)\n pks = query(<<-end_sql, 'SCHEMA')\n SELECT DISTINCT attr.attname\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey)\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n pks.present? ? pks[0] : pks\n end",
"def select!\n # Ensure we can never be destructive by nilifying :update.\n Mao.sql(with_options(:update => nil).sql) do |pg_result|\n if @options[:join]\n other = Mao.query(@options[:join][0])\n pg_result.map {|result|\n Mao.normalize_join_result(result, self, other)\n }\n else\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n end\n end\n end",
"def primary_keys(table)\n row = exec_query(<<-end_sql, 'SCHEMA').rows.map do |row|\n SELECT DISTINCT(attr.attname)\n FROM pg_attribute attr\n INNER JOIN pg_depend dep ON attr.attrelid = dep.refobjid AND attr.attnum = dep.refobjsubid\n INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = cons.conkey[1]\n WHERE cons.contype = 'p'\n AND dep.refobjid = '#{quote_table_name(table)}'::regclass\n end_sql\n row && row.first\n end\n end",
"def prepared_lookup\n # SEQUEL5: Remove\n cached_prepared_statement(:fixed, :lookup){prepare_explicit_statement(filter(prepared_statement_key_array(primary_key)), :first)}\n end",
"def attributes_protected_by_default\n begin\n default = [primary_key, inheritance_column]\n\n if !primary_key.eql?('id')\n default << 'id'\n end\n rescue ActiveRecord::NoDatabaseError\n default = []\n end\n\n return default\n end",
"def generate_dataset\n return nil if query.nil?\n # execute the query\n data = Array ActiveRecord::Base.connection.execute(query)\n return [] if data.empty?\n # Convert the query into an easy to read format\n @dataset = [data.first.keys]+data.map{|dd|dd.values}\n return @dataset\n end",
"def columns\n unless defined?(@columns) && @columns\n @columns = connection.columns(table_name, \"#{name} Columns\").select do |column| \n column.name =~ Regexp.new(\"^#{self.to_s.underscore}__\") || column.name == primary_key\n end\n @columns.each { |column| column.primary = column.name == primary_key }\n end\n @columns\n end",
"def reload_sql(primary_keys, fetched_columns)\n sql_keys = primary_keys.collect{|pk| \"'#{pk}'\"}.join(ScroogeComma)\n cols = scrooge_select_sql(missing_columns(fetched_columns))\n \"SELECT #{cols} FROM #{@quoted_table_name} WHERE #{@quoted_primary_key} IN (#{sql_keys})\"\n end",
"def query_return_first(sql, *binds)\n mysql.fetch(sql, *binds).first\n end",
"def map_to_hash_by_primary_key(result) \n hash = {}\n\n result.each do |record|\n hash[record[@primary_key]] = record\n end\n\n hash\n end",
"def synthetic_columns\n @columns ||= [:id]\n end",
"def result\n Hash[\n metadata.map do |key, value|\n [key, PostgresValue.for(value).result]\n end\n ]\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def select\n (@select ||= (defaults[:select] || [])).collect { |c| c == '*' ? all_columns.keys : c }.flatten\n end",
"def fetch_rows(sql)\n execute(sql) do |res|\n columns = set_columns(res)\n yield_hash_rows(res, columns) {|h| yield h}\n end\n end",
"def query_return_first_value(sql, *binds)\n mysql.fetch(sql, *binds).single_value\n end",
"def query_columns\n explicit_columns\n end",
"def _select_map_single\n rows = []\n clone(:_sequel_pg_type=>:first).fetch_rows(sql){|s| rows << s}\n rows\n end",
"def quote_returning_column_names(table_ref, pk, action)\n returning_columns = []\n returning_columns << pk if suppress_composite_primary_key(pk)\n returning_columns += ApplicationRecord.custom_returning_columns(table_ref, action)\n returning_columns.map { |column| quote_column_name(column) }\n end",
"def returning_clause(serial)\n \" RETURNING #{quote_name(serial.field)} INTO :insert_id\"\n end",
"def pluck_one(sql, *bindvars)\n result = []\n exec(sql, *bindvars) { |row| result << row.first }\n result\n end",
"def _schema_ds\n @_schema_ds ||= begin\n ds = metadata_dataset.select{[\n pg_attribute[:attname].as(:name),\n SQL::Cast.new(pg_attribute[:atttypid], :integer).as(:oid),\n SQL::Cast.new(basetype[:oid], :integer).as(:base_oid),\n SQL::Function.new(:format_type, basetype[:oid], pg_type[:typtypmod]).as(:db_base_type),\n SQL::Function.new(:format_type, pg_type[:oid], pg_attribute[:atttypmod]).as(:db_type),\n SQL::Function.new(:pg_get_expr, pg_attrdef[:adbin], pg_class[:oid]).as(:default),\n SQL::BooleanExpression.new(:NOT, pg_attribute[:attnotnull]).as(:allow_null),\n SQL::Function.new(:COALESCE, SQL::BooleanExpression.from_value_pairs(pg_attribute[:attnum] => SQL::Function.new(:ANY, pg_index[:indkey])), false).as(:primary_key),\n Sequel[:pg_type][:typtype],\n (~Sequel[Sequel[:elementtype][:oid]=>nil]).as(:is_array),\n ]}.\n from(:pg_class).\n join(:pg_attribute, :attrelid=>:oid).\n join(:pg_type, :oid=>:atttypid).\n left_outer_join(Sequel[:pg_type].as(:basetype), :oid=>:typbasetype).\n left_outer_join(Sequel[:pg_type].as(:elementtype), :typarray=>Sequel[:pg_type][:oid]).\n left_outer_join(:pg_attrdef, :adrelid=>Sequel[:pg_class][:oid], :adnum=>Sequel[:pg_attribute][:attnum]).\n left_outer_join(:pg_index, :indrelid=>Sequel[:pg_class][:oid], :indisprimary=>true).\n where{{pg_attribute[:attisdropped]=>false}}.\n where{pg_attribute[:attnum] > 0}.\n order{pg_attribute[:attnum]}\n\n # :nocov:\n if server_version > 100000\n # :nocov:\n ds = ds.select_append{pg_attribute[:attidentity]}\n\n # :nocov:\n if server_version > 120000\n # :nocov:\n ds = ds.select_append{Sequel.~(pg_attribute[:attgenerated]=>'').as(:generated)}\n end\n end\n\n ds\n end\n end",
"def each_without_conversion(sql, *args, into: nil)\n pg_result = exec_logged(sql, *args)\n\n column_info = collect_column_info(pg_result)\n rows = []\n pg_source_oid = nil\n\n if pg_result.ntuples > 0 && pg_result.nfields > 0\n decoder = Decoder.new(pg_result, into: (into ? Hash : nil), column_info: column_info)\n pg_source_oid = pg_result.ftable(0)\n\n pg_result.each_row do |row|\n rows << decoder.decode(row)\n end\n end\n\n [rows, pg_source_oid, column_info]\n ensure\n # optimization: If we wouldn't clear here the GC would do this later.\n pg_result.clear if pg_result && !pg_result.autoclear?\n end",
"def select_one(sql)\n result = execute(sql)\n result.fetch_hash\n end",
"def pluck(sql, *bindvars)\n result = []\n exec(sql, *bindvars) { |row| result << row }\n result\n end",
"def all_rows\n @rows\n end",
"def real_get(k, opt = {})\n if k.is_a?(Array)\n do_op(:multi_get, column_family, k, opt)\n elsif opt[:count]\n do_op(:get, column_family, k, opt)\n else\n opt = opt.clone\n opt[:count] = DEFAULT_COUNT\n columns = Cassandra::OrderedHash.new\n loop do\n chunk = do_op(:get, column_family, k, opt)\n columns.merge!(chunk)\n if chunk.size == opt[:count]\n # Assume there are more chunks, use last key as start of next get\n opt[:start] = chunk.keys.last\n else\n # This must be the last chunk\n break\n end\n end\n columns\n end\n end",
"def real_get(k, opt = {})\n if k.is_a?(Array)\n do_op(:multi_get, column_family, k, opt)\n elsif opt[:count]\n do_op(:get, column_family, k, opt)\n else\n opt = opt.clone\n opt[:count] = DEFAULT_COUNT\n columns = Cassandra::OrderedHash.new\n loop do\n chunk = do_op(:get, column_family, k, opt)\n columns.merge!(chunk)\n if chunk.size == opt[:count]\n # Assume there are more chunks, use last key as start of next get\n opt[:start] = chunk.keys.last\n else\n # This must be the last chunk\n break\n end\n end\n columns\n end\n end",
"def as_array\n row_names = []\n rows = [] \n if @db == @@sdb\n @@sdb.select('select * from `' + @table_name + '`')[:items].each do |row| \n row.each do |row_name, row_data| \n row_names << row_name\n rows << reassemble_sdb_items(row_data)\n end\n end\n elsif @db == @@google_storage\n row_names = as_name_array\n rows = get_rows_from_names(row_names)\n end\n return row_names, rows\n end",
"def recordset_from_plsql(sp)\n logger.debug \"\\n\" << sp << \"\\n\"\n cursor = $connection.exec(sp)\n recordset, = pack_cursor(cursor, :return => 'hash')\n return recordset\n end",
"def primary_key(table_name, opts=OPTS)\n quoted_table = quote_schema_table(table_name)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n out_identifier, in_identifier = identifier_convertors(opts)\n schema, table = schema_or_current_and_table(table_name, opts)\n dataset = metadata_dataset.\n select(:kc__column_name).\n from(Sequel.as(:information_schema__key_column_usage, 'kc')).\n join(Sequel.as(:information_schema__table_constraints, 'tc'),\n [:table_name, :table_schema, :constraint_name]).\n where(:kc__table_name => in_identifier.call(table),\n :kc__table_schema => schema,\n :tc__constraint_type => 'PRIMARY KEY')\n value = dataset.map do |row|\n out_identifier.call(row.delete(:column_name))\n end\n value = case value.size\n when 0 then nil\n when 1 then value.first\n else value\n end\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def pluck_unique(column_name, results = last_results)\n results.map {|r| r[column_name]}.uniq\nend",
"def dataset_need_primary_key?\n true\n end",
"def dataset_need_primary_key?\n true\n end",
"def results\n @scope.where(@scope.primary_key => @ids).to_a\n end",
"def to_a\n res = data\n return [] unless data\n res = apply_selects(res, opts[:fast_selects] || [])\n res = strip_unused(res)\n unless opts[:includes].blank?\n res = enrich(res, opts[:includes])\n res = strip_unenriched(res, opts[:includes])\n end\n res = apply_selects(res, opts[:slow_selects] || [])\n res = apply_maps(res, opts[:maps] || [])\n res = wrap(res)\n res\n end",
"def retrieve_from_database\n query = \"SELECT * FROM #{@_table} WHERE #{@_pkey} = #{@_pkey_id}\"\n result = Taupe::Database.fetch(query, true)\n\n return nil if result.nil? || result.empty?\n\n result.each do |k, v|\n @_values[k.to_sym] = v if k.is_a? Symbol\n end\n\n Taupe::Cache.set @_cache_key, @_values unless @_cache_key.nil?\n end",
"def resultset_to_hash(resultset)\n meta = resultset.meta_data\n rows = []\n\n while resultset.next\n row = {}\n\n (1..meta.column_count).each do |i|\n name = meta.column_name i\n row[name] = case meta.column_type(i)\n when -6, -5, 5, 4\n # TINYINT, BIGINT, INTEGER\n resultset.get_int(i).to_i\n when 41\n # Date\n resultset.get_date(i)\n when 92\n # Time\n resultset.get_time(i).to_i\n when 93\n # Timestamp\n resultset.get_timestamp(i)\n when 2, 3, 6\n # NUMERIC, DECIMAL, FLOAT\n case meta.scale(i)\n when 0\n resultset.get_long(i).to_i\n else\n BigDecimal.new(resultset.get_string(i).to_s)\n end\n when 1, -15, -9, 12\n # CHAR, NCHAR, NVARCHAR, VARCHAR\n resultset.get_string(i).to_s\n else\n resultset.get_string(i).to_s\n end\n end\n\n rows << row\n end\n rows\nend",
"def fetch_rows(sql, &block)\n execute(sql) do |r|\n r.each(:symbolize_keys => true, &block)\n end\n self\n end",
"def sql_load\n row = DBIntf.get_first_row(\"SELECT * FROM #{tbl_name} #{generate_where_on_pk};\")\n return row.nil? ? reset : load_from_row(row)\n end",
"def pluck_rows(*cols)\n options = cols.last.is_a?(Hash) ? cols.pop : {}\n all.each_row(options).pluck(*cols)\n end",
"def dataset_need_primary_key?\n false\n end",
"def columns\n get_metadata unless @columns\n return @columns\n end",
"def query_thredis(prepare_only)\n if prepare_only\n @rows = @connection.redis.sqlprepare(@sql)\n else\n @rows = @connection.redis.sql(@sql, *@params)\n @prepare_only = false\n end\n if @rows.is_a? Integer\n @rows, @columns, @connection.changes = [], [], @rows\n else\n @columns = @rows.shift\n## @rows = convert_type(@rows, @columns)\n end\n end",
"def resultset; end",
"def dataset_need_primary_key?\n false\n end",
"def columns\r\n unless @columns\r\n @columns = connection.columns(table_name, \"#{name} Columns\")\r\n @columns.each {|column| column.primary = primary_keys.include?(column.name.to_sym)}\r\n end\r\n @columns\r\n end",
"def get_data sql\n #$log.debug \"SQL: #{sql} \"\n columns, *rows = @db.execute2(sql)\n #$log.debug \"XXX COLUMNS #{sql}, #{rows.count} \"\n content = rows\n return content\n end"
] |
[
"0.66599965",
"0.6531039",
"0.63882446",
"0.6354546",
"0.6209912",
"0.6093167",
"0.60481465",
"0.58934253",
"0.5852828",
"0.57763964",
"0.5716967",
"0.5696746",
"0.566648",
"0.5648507",
"0.5596756",
"0.5550032",
"0.55439866",
"0.5517233",
"0.550438",
"0.5496207",
"0.5445",
"0.5438135",
"0.5415599",
"0.53995234",
"0.53768414",
"0.53290296",
"0.5320317",
"0.5307164",
"0.5295398",
"0.5292823",
"0.5253607",
"0.52484393",
"0.523668",
"0.5228445",
"0.5219611",
"0.5202089",
"0.5201972",
"0.51942104",
"0.51929224",
"0.5188267",
"0.5179355",
"0.51718265",
"0.5157886",
"0.5145654",
"0.51358354",
"0.51330954",
"0.5111936",
"0.51078534",
"0.51063585",
"0.51009023",
"0.50985473",
"0.5092851",
"0.5092028",
"0.50893146",
"0.50877386",
"0.50830126",
"0.50711894",
"0.5057501",
"0.5054327",
"0.5046002",
"0.5044687",
"0.50394493",
"0.5035401",
"0.5033605",
"0.5033605",
"0.50334746",
"0.5023515",
"0.50181085",
"0.5014161",
"0.50135595",
"0.501315",
"0.49859408",
"0.4981738",
"0.49781245",
"0.49719942",
"0.49698958",
"0.49665457",
"0.49634427",
"0.4957285",
"0.4957285",
"0.49557596",
"0.49406892",
"0.4935171",
"0.49295714",
"0.49232477",
"0.49028802",
"0.48876098",
"0.48819813",
"0.48773128",
"0.48715052",
"0.48714295",
"0.48698536",
"0.48685172",
"0.48428154",
"0.48296657",
"0.48290014",
"0.48284128",
"0.48215574",
"0.48207492",
"0.4819323"
] |
0.541241
|
23
|
Append the INSERT SQL used in a MERGE
|
def _merge_insert_sql(sql, data)
sql << " THEN INSERT "
columns, values = _parse_insert_sql_args(data[:values])
_insert_columns_sql(sql, columns)
if override = data[:override]
sql << override
end
_insert_values_sql(sql, values)
end
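
For context, a minimal usage sketch of the clause this method appends (hedged: this assumes Sequel's merge API as supported on PostgreSQL 15+; the m1/m2 table and column names are hypothetical, and the optional data[:override] slot carries an OVERRIDING SYSTEM/USER VALUE clause):

# Hypothetical tables/columns; merge_insert supplies the values that reach _merge_insert_sql.
ds = DB[:m1].merge_using(:m2, i1: :i2).merge_insert(i1: :i2, a1: :a2)
ds.merge
# Roughly emits:
#   MERGE INTO m1 USING m2 ON (i1 = i2)
#   WHEN NOT MATCHED THEN INSERT (i1, a1) VALUES (i2, a2)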
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def insert_conflict_sql(sql)\n if opts = @opts[:insert_conflict]\n sql << \" ON CONFLICT\"\n\n if target = opts[:constraint] \n sql << \" ON CONSTRAINT \"\n identifier_append(sql, target)\n elsif target = opts[:target]\n sql << ' '\n identifier_append(sql, Array(target))\n if conflict_where = opts[:conflict_where]\n sql << \" WHERE \"\n literal_append(sql, conflict_where)\n end\n end\n\n if values = opts[:update]\n sql << \" DO UPDATE SET \"\n update_sql_values_hash(sql, values)\n if update_where = opts[:update_where]\n sql << \" WHERE \"\n literal_append(sql, update_where)\n end\n else\n sql << \" DO NOTHING\"\n end\n end\n end",
"def insert_into_sql(sql)\n sql << \" INTO \"\n if (f = @opts[:from]) && f.length == 1\n identifier_append(sql, server_version >= 90500 ? f.first : unaliased_identifier(f.first))\n else\n source_list_append(sql, f)\n end\n end",
"def build_insert_sql(insert) # :nodoc:\n if insert.skip_duplicates? || insert.update_duplicates?\n raise NotImplementedError, \"#{self.class} should define `build_insert_sql` to implement adapter-specific logic for handling duplicates during INSERT\"\n end\n\n \"INSERT #{insert.into} #{insert.values_list}\"\n end",
"def merge_statement(target, stage)\n <<-SQLMERGE\n begin transaction;\n\n delete from #{target}\n using #{stage}\n where #{target}.id = #{stage}.id;\n insert into #{target}\n select * from #{stage};\n\n end transaction;\n SQLMERGE\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def to_sql\n [@sql_insert, @sql_on_conflict, @sql_returning].reject(&:nil?).join(' ')\n end",
"def exec_insert(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"insert\", binds) { query(sql, binds) }\r\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def post_sql_statements( table_name, options ) # :nodoc:\n post_sql_statements = []\n if options[:on_duplicate_key_update]\n post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update] )\n end\n\n #custom user post_sql\n post_sql_statements << options[:post_sql] if options[:post_sql]\n\n #with rollup\n post_sql_statements << rollup_sql if options[:rollup]\n\n post_sql_statements\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def visit_Arel_Nodes_InsertStatement o, *a\n [\n \"INSERT INTO #{visit(o.relation).gsub(/\"/, '')}\",\n \"(#{o.columns.map { |x| x.name }.join ', '})\",\n \" VALUES (#{o.values.left.map { |value| value }.join ', '})\"\n ].compact.join ' '\n end",
"def to_bulk_insert_script\n s = TRANSACTION_START[db_type].dup\n s << \"\\n\"\n insert_command = rows.first.bulk_insert_str\n\n rows.each_slice(500) do |sliced_rows|\n s << insert_command\n s << sliced_rows.map {|row| row.bulk_insert_values_str(db_type) }.join(\",\\n\")\n s << \"#{SqlRow::STATEMENT_TERMINATOR[db_type]}\\n\"\n end\n\n s << \"#{TRANSACTION_END[db_type]}\\n\"\n s\n end",
"def to_inserts(args={})\n args[:table] ||= Pathname.new(@filename).basename.to_s.downcase.gsub(/\\W/, '_')\n args[:before] ||= @@defaults[:before]\n args[:after] ||= @@defaults[:after]\n insert_sql = args[:ignore] ? 'insert ignore' : 'insert'\n if args[:bulk]\n args[:before] += \"#{insert_sql} into #{args[:table]} values\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \" (%s)\"\n args[:row_glue] ||= \",\\n\"\n else\n args[:before] ||= \"\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \"#{insert_sql} into #{args[:table]} values(%s)\"\n args[:row_glue] ||= \";\\n\"\n end\n to_any args\n end",
"def insert_statement(statement)\n self.inserts << statement\n end",
"def insert_statement(statement)\n self.inserts << statement\n end",
"def multi_insert_sql(columns, values)\n values = values.map {|r| \"SELECT #{expression_list(r)}\" }.join(\" UNION ALL \")\n [\"#{insert_sql_base}#{source_list(@opts[:from])} (#{identifier_list(columns)}) #{values}\"]\n end",
"def post_sql_statements( table_name, options ) # :nodoc:\n post_sql_statements = []\n\n if supports_on_duplicate_key_update? && options[:on_duplicate_key_update]\n post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update], options[:model], options[:primary_key], options[:locking_column] )\n elsif logger && options[:on_duplicate_key_update]\n logger.warn \"Ignoring on_duplicate_key_update because it is not supported by the database.\"\n end\n\n # custom user post_sql\n post_sql_statements << options[:post_sql] if options[:post_sql]\n\n # with rollup\n post_sql_statements << rollup_sql if options[:rollup]\n\n post_sql_statements\n end",
"def _insert_dataset\n if upsert_plugin_upserting\n if postgres?\n super.insert_conflict(update: values_to_update, target: self.class.upsert_plugin_identifying_columns)\n elsif mysql?\n columns_to_update = values_to_update.keys - self.class.upsert_plugin_identifying_columns\n super.on_duplicate_key_update(*columns_to_update)\n else\n super\n end\n else\n super\n end\n end",
"def execute_insert(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}; conn.last_insert_rowid}\n end",
"def raw_sql(record)\n record.class.arel_table.create_insert.tap do |insert_manager|\n insert_manager.insert(insert_values(record))\n end.to_sql\n end",
"def generate_insert\n @binds = Array.new\n @insert_statement = \"insert into #{fully_qualified_table_name} (\"\n @insert_statement << column_details.keys.sort.map { |k| column_detail(k).column_name }.join(',')\n @insert_statement << ') values ('\n @insert_statement << column_details.keys.sort.map { |k|\n \":#{k}\"\n }.join(',')\n column_details.keys.sort.each { |k|\n if @column_values[k] == nil\n @binds.push [column_type_to_ruby_type(column_details[k]), nil]\n else\n @binds.push @column_values[k]\n end\n }\n @insert_statement << ')'\n @insert_statement\n end",
"def append_to_sql_diff( data_import_session, resulting_row )\n if resulting_row.kind_of?( ActiveRecord::Base )\n # Append also to the session log file:\n append_to_log_file(\n data_import_session,\n \"New #{ resulting_row.class.name } created, ID:#{ resulting_row.id }.\\r\\n\"\n )\n # Append/update the SQL DB-diff text:\n sql_diff_text_log << to_sql_insert( resulting_row, false )\n end\n end",
"def sql_insert(record)\n flds, vals = parse_fldsvalues(record)\n ph = vals.map{|x| placeholder }\n\n sql = %Q|insert into #{quoted_table}\n ( #{flds.join ','} )\n output inserted.#{quote_field id_fld}\n values( #{ph.join ','} );|\n\n [sql, vals]\n end",
"def statement\n [\n \"insert into\",\n @table,\n column_list,\n query_expression,\n ].compact.join(' ')\n end",
"def multi_insert_sql(columns, values)\n table = quote_identifier(@opts[:from].first)\n columns = literal(columns)\n values.map do |r|\n \"INSERT INTO #{table} #{columns} VALUES #{literal(r)}\"\n end\n end",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def insert_override_sql(sql)\n case opts[:override]\n when :system\n sql << \" OVERRIDING SYSTEM VALUE\"\n when :user\n sql << \" OVERRIDING USER VALUE\"\n end\n end",
"def multi_insert_sql_strategy\n :values\n end",
"def insertion_query\n [\"INSERT IGNORE INTO collectif SET collectif_nom = '#{@name}'\",\n \"collectif_client = '#{@provider}'\",\n \"collectif_annee = #{@year}\",\n \"collectif_facture = '#{@billing}'\",\n \"collectif_revues = '#{@journal_ids.join(',')}'\"\n ].join(', ')\n end",
"def insert_sql(*values)\n if values.empty?\n insert_default_values_sql\n else\n values = values[0] if values.size == 1\n \n # if hash or array with keys we need to transform the values\n if @transform && (values.is_a?(Hash) || (values.is_a?(Array) && values.keys))\n values = transform_save(values)\n end\n from = source_list(@opts[:from])\n\n case values\n when Array\n if values.empty?\n insert_default_values_sql\n else\n \"INSERT INTO #{from} VALUES #{literal(values)}\"\n end\n when Hash\n if values.empty?\n insert_default_values_sql\n else\n fl, vl = [], []\n values.each {|k, v| fl << literal(k.is_a?(String) ? k.to_sym : k); vl << literal(v)}\n \"INSERT INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})\"\n end\n when Dataset\n \"INSERT INTO #{from} #{literal(values)}\"\n else\n if values.respond_to?(:values)\n insert_sql(values.values)\n else\n \"INSERT INTO #{from} VALUES (#{literal(values)})\"\n end\n end\n end\n end",
"def modify_after_insert(conn, postgres_cmd, default_cmd)\n conn = ActiveRecord::Base.connection\n cmd = if conn.instance_values[\"config\"][:adapter].in? %w[postgresql postgres postgis]\n postgres_cmd\n else\n default_cmd\n end\n\n %i[enrollments organisations contacts addresses].each do |t|\n conn.execute \"#{cmd} flood_risk_engine_#{t};\"\n end\n end",
"def compound_dataset_sql_append(sql, ds)\n sql << '('\n super\n sql << ')'\n end",
"def sql sql\n @master.puts \"#{sql};\"\n end",
"def execute_insert(sql, opts=OPTS)\n execute(sql, opts.merge(:meth=>:autoid))\n end",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def insert_sql(*values)\n if values.size == 1 && (v = values[0]).is_a?(Sequel::Model) && !v.respond_to?(:sql_literal_append)\n Sequel::Deprecation.deprecate(\"Passing Sequel::Model instance argument to Sequel::Dataset#insert\", \"Pass model_instance.values or model_instance.to_hash as the argument instead\")\n super(v.to_hash)\n else\n super\n end\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def execute_insert(sql, opts={})\n synchronize(opts[:server]) do |conn|\n conn.execute(sql)\n insert_result(conn, opts[:table], opts[:values])\n end\n end",
"def insert_default_values_sql\n \"INSERT INTO #{source_list(@opts[:from])} DEFAULT VALUES\"\n end",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end",
"def to_script\n s = TRANSACTION_START[db_type].dup\n s << \"\\n\"\n rows.each {|r| s << r.insert_sql(db_type) }\n s << \"#{TRANSACTION_END[db_type]}\\n\"\n s\n end",
"def combine_multi_statements(total_sql)\n total_sql\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def merge_or_replace(mode:)\n target = replication.target_table\n stage = temporary_table_name\n sql = send(\"#{mode}_statement\", target, stage)\n ::RailsRedshiftReplicator.connection.exec sql\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def multiple_value_sets_insert_sql(table_name, column_names, options) # :nodoc:\n \"INSERT #{options[:ignore] ? 'IGNORE ':''}INTO #{table_name} (#{column_names.join(',')}) VALUES \"\n end",
"def addSQL(statement)\n \n if (@count % 250000 == 0)\n @filecount += 1\n self.clearSQL(@filecount)\n puts \"Now writing to insert-#{@filecount}.sql\"\n end\n\n File.open(\"data/insert-#{@filecount}.sql\", 'a') do |file|\n file.puts(statement)\n @count += 1\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n execute(sql, name)\n id_value\n end",
"def combine_multi_statements(total_sql)\n total_sql\n end",
"def insert_statement(model, properties, serial)\n statement = \"\"\n # Check if there is a serial property being set directly\n require_identity_insert = !properties.empty? && properties.any? { |property| property.serial? }\n set_identity_insert(model, statement, true) if require_identity_insert\n statement << super\n set_identity_insert(model, statement, false) if require_identity_insert\n statement\n end",
"def sql_literal_append(ds, sql)\n check_columns!\n sql << 'ROW'\n ds.literal_append(sql, values_at(*columns))\n if db_type\n sql << '::'\n ds.quote_schema_table_append(sql, db_type)\n end\n end",
"def to_insert(output, table, row)\n columns = @columns[table].map {|i| i[0] }.join(',')\n values = map_values(row, @columns[table])\n output << \"INSERT INTO #{table} (#{columns}) VALUES (#{values});\\n\"\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def version_inserted_rows\n if versioning_enabled\n logger.warn \"Inserting initial version for inserted rows in '#{name}'\"\n\n last_version = Time.now.to_i\n\n q_versioning = <<-EOF\n INSERT INTO #{versioning} \n SELECT #{fields.map(&:audit).join(', ')}, #{last_version} \n FROM #{audit} \n WHERE #{audit}.`_copied_at` IS NULL\n EOF\n\n q_audit_update = \"UPDATE #{audit} SET `_last_version`= #{last_version} WHERE `_copied_at` IS NULL\"\n\n db.query(q_versioning)\n db.query(q_audit_update)\n end\n end",
"def build_insert_set_cols(key)\n \"#{quote_column_name(key)} = EXCLUDED.#{quote_column_name(key)}\"\n end",
"def notice_sql(sql)\n return unless txn = NewRelic::Agent::Tracer.current_transaction\n\n current_segment = txn.current_segment\n return unless current_segment.is_a?(NewRelic::Agent::Transaction::DatastoreSegment)\n\n if current_segment.sql_statement\n current_segment.sql_statement.append_sql(sql)\n else\n current_segment._notice_sql(sql, self.opts, explainer_for(sql))\n end\n end",
"def on_conflict_sql(sql)\n @sql_on_conflict = sql\n self\n end",
"def sql_literal_append(ds, sql)\n sql << 'ROW'\n ds.literal_append(sql, to_a)\n if db_type\n sql << '::'\n ds.quote_schema_table_append(sql, db_type)\n end\n end",
"def run_insert\n raise DataFactory::NoInsertStatement unless @insert_statement\n\n stmt = db_interface.execute_sql(@insert_statement, *@binds)\n stmt.close\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n super\n\n exec_query('SELECT @@IDENTITY AS id')\n end",
"def <<(sql); execute((Array === sql) ? sql.to_sql : sql); end",
"def build_insert(data)\n fields = \"\"\n values = \"\"\n data.each do |k,v|\n fields += \"`#{escape_str_field(k)}`, \"\n values += escape_value(v)+\", \"\n end\n \"(\"+fields.chomp(', ')+\") VALUES (\"+values.chomp(', ')+\")\"\n end",
"def _insert_select_raw(ds)\n ds.insert_select(_insert_values)\n end",
"def _insert_select_raw(ds)\n ds.insert_select(_insert_values)\n end",
"def insert_sql(c, insert)\n\n\t\ttime = Time.now.to_s(:db)\n\n\t\tfirstname = remove_apostrophe(c.first_name)\n\t\tlastname = remove_apostrophe(c.last_name)\n\t\tcompany = remove_apostrophe(c.company)\n\t\tnotes = remove_apostrophe(c.notes)\n\t\temail = remove_apostrophe(c.email)\n\t\tdate_created = map_date(c.date_created)\n\t\tdate_modified = map_date(c.date_modified)\n\t\tphone = c.phone.gsub(/\\s+/, \"\")\n\n\t\tsql = cust = \"\"\n\n\t\tif insert == 1\n\n\t\t\tunallocated_staff_id = 34\n\n\t\t\tcust = \"('#{c.id}', '#{firstname}', '#{lastname}', '#{company}',\\\n\t\t\t'#{email}', '#{phone}', '#{c.store_credit}', '#{c.registration_ip_address}',\\\n\t\t\t'#{notes}', '#{date_created}', '#{date_modified}', '#{time}', '#{time}', '#{c.customer_group_id}', '#{unallocated_staff_id}')\"\n\n\t\t\tsql = \"INSERT INTO customers(id, firstname, lastname, company, email, phone,\\\n\t\t\tstore_credit, registration_ip_address, notes, date_created, date_modified,\\\n\t\t\tcreated_at, updated_at, cust_type_id, staff_id) VALUES #{cust}\"\n\t\telse\n\n\t\t\tsql = \"UPDATE customers SET firstname = '#{firstname}', lastname = '#{lastname}', company = '#{company}',\\\n\t\t\temail = '#{email}', phone = '#{phone}', store_credit = '#{c.store_credit}',\\\n\t\t\tregistration_ip_address = '#{c.registration_ip_address}', notes = '#{notes}', date_created = '#{date_created}',\\\n\t\t\tdate_modified = '#{date_modified}', updated_at = '#{time}', cust_type_id = '#{c.customer_group_id}' WHERE id = '#{c.id}'\"\n\n\n\t\tend\n\n ActiveRecord::Base.connection.execute(sql) \n\n\tend",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def create_sql_insert(mysql, json, source)\n fields = []\n values = []\n sql = nil\n\n fields << 'source'\n values << source\n\n fields << 'created_at'\n values << Time.now.strftime('%Y-%m-%d %H:%M:%S')\n\n if !json[\"posting_id\"].nil?\n fields << 'posting_id'\n values << json[\"posting_id\"]\n end\n\n if !json[\"common\"][\"position_title\"].nil?\n fields << 'position_title'\n values << json[\"common\"][\"position_title\"]\n end\n if !json[\"common\"][\"title\"].nil?\n fields << 'title'\n values << json[\"common\"][\"title\"]\n end\n if !json[\"common\"][\"competition_type\"].nil?\n fields << 'competition_type'\n values << json[\"common\"][\"competition_type\"]\n end\n if !json[\"common\"][\"employer\"].nil?\n fields << 'employer'\n values << json[\"common\"][\"employer\"]\n end\n if !json[\"common\"][\"employer_about\"].nil?\n fields << 'employer_about'\n values << json[\"common\"][\"employer_about\"]\n end\n if !json[\"common\"][\"category\"].nil?\n fields << 'category'\n values << json[\"common\"][\"category\"]\n end\n if !json[\"common\"][\"deadline\"].nil?\n fields << 'deadline'\n values << json[\"common\"][\"deadline\"]\n end\n if !json[\"common\"][\"salary\"].nil?\n fields << 'salary'\n values << json[\"common\"][\"salary\"]\n end\n if !json[\"common\"][\"salary_currency\"].nil?\n fields << 'salary_currency'\n values << json[\"common\"][\"salary_currency\"]\n end\n if !json[\"common\"][\"number_openings\"].nil?\n fields << 'number_openings'\n values << json[\"common\"][\"number_openings\"]\n end\n if !json[\"common\"][\"duty\"].nil?\n fields << 'duty'\n values << json[\"common\"][\"duty\"]\n end\n if !json[\"common\"][\"job_type\"].nil?\n fields << 'job_type'\n values << json[\"common\"][\"job_type\"]\n end\n if !json[\"common\"][\"probation_period\"].nil?\n fields << 'probation_period'\n values << json[\"common\"][\"probation_period\"]\n end\n\n if !json[\"common\"][\"functions\"].nil?\n fields << 'functions'\n values << json[\"common\"][\"functions\"]\n end\n\n if !json[\"common\"][\"minimum_education\"].nil?\n fields << 'minimum_education'\n values << json[\"common\"][\"minimum_education\"]\n end\n if !json[\"common\"][\"years_work_experience\"].nil?\n fields << 'years_work_experience'\n values << json[\"common\"][\"years_work_experience\"]\n end\n if !json[\"common\"][\"profession\"].nil?\n fields << 'profession'\n values << json[\"common\"][\"profession\"]\n end\n if !json[\"common\"][\"minimum_age\"].nil?\n fields << 'minimum_age'\n values << json[\"common\"][\"minimum_age\"]\n end\n\n if !json[\"common\"][\"competition_topic\"].nil?\n fields << 'competition_topic'\n values << json[\"common\"][\"competition_topic\"]\n end\n\n if !json[\"common\"][\"contact_address\"].nil?\n fields << 'contact_address'\n values << json[\"common\"][\"contact_address\"]\n end\n if !json[\"common\"][\"contact_phone\"].nil?\n fields << 'contact_phone'\n values << json[\"common\"][\"contact_phone\"]\n end\n if !json[\"common\"][\"contact_person\"].nil?\n fields << 'contact_person'\n values << json[\"common\"][\"contact_person\"]\n end\n\n if !json[\"common\"][\"additional_requirements\"].nil?\n fields << 'additional_requirements'\n values << json[\"common\"][\"additional_requirements\"]\n end\n if !json[\"common\"][\"for_more_information\"].nil?\n fields << 'for_more_information'\n values << json[\"common\"][\"for_more_information\"]\n end\n\n if !json[\"common\"][\"form_and_terms_of_decision\"].nil?\n fields << 'form_and_terms_of_decision'\n values << 
json[\"common\"][\"form_and_terms_of_decision\"]\n end\n\n if !json[\"common\"][\"closed_vacancy_job_number\"].nil?\n fields << 'closed_vacancy_job_number'\n values << json[\"common\"][\"closed_vacancy_job_number\"]\n end\n\n if !json[\"special_cases\"][\"computer_software\"].nil?\n fields << 'computer_software'\n value = \"total:#{json[\"special_cases\"][\"computer_software\"].length}\\n\"\n value += json[\"special_cases\"][\"computer_software\"].map{|x| \"type:#{x['name']} | level:#{x['level']}\"}.join(\"\\n\")\n values << value\n end\n\n if !json[\"special_cases\"][\"languages\"].nil?\n fields << 'languages'\n value = \"total:#{json[\"special_cases\"][\"languages\"].length}\\n\"\n value += json[\"special_cases\"][\"languages\"].map{|x| \"language:#{x['language']} | writing:#{x['writing']} | speaking:#{x['speaking']}\"}.join(\"\\n\")\n values << value\n end\n\n if !json[\"special_cases\"][\"competition_stages\"].nil?\n fields << 'competition_stages'\n values << json[\"special_cases\"][\"competition_stages\"].join(\"\\n\")\n end\n\n if !json[\"special_cases\"][\"mandatory_blocks\"].nil?\n fields << 'mandatory_blocks'\n values << json[\"special_cases\"][\"mandatory_blocks\"].join(\"\\n\")\n end\n\n\n if !fields.empty? && !values.empty?\n sql= \"insert into postings(\"\n sql << fields.join(', ')\n sql << \") values(\"\n sql << values.map{|x| \"\\\"#{mysql.escape(x.to_s)}\\\"\"}.join(', ')\n sql << \")\"\n end\n\n return sql\nend",
"def insert values\n if $VERBOSE\n warn <<-eowarn\ninsert (#{caller.first}) is deprecated and will be removed in ARel 3.0.0. Please\nswitch to `compile_insert`\n eowarn\n end\n @engine.connection.insert compile_insert(values).to_sql\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n table = sql.split(\" \", 4)[2]\n super || last_insert_id(table, sequence_name || default_sequence_name(table, pk))\n end",
"def sql_insert_into_select(insertion_table, selection, options = {})\n InsertIntoSelect.new insertion_table, selection, options\n end",
"def log_sql\n @opts[:log_sql]\n end",
"def copy_statement(table_name, options = {})\n format_options = replication.csv? ? \"CSV\" : \"GZIP DELIMITER ',' ESCAPE REMOVEQUOTES\"\n sql = <<-CS\n COPY #{table_name} from '#{import_file}' #{\"NOLOAD\" if options[:noload]}\n REGION '#{RailsRedshiftReplicator.s3_bucket_params[:region]}'\n CREDENTIALS 'aws_access_key_id=#{RailsRedshiftReplicator.aws_credentials[:key]};aws_secret_access_key=#{RailsRedshiftReplicator.aws_credentials[:secret]}'\n #{format_options}\n #{copy_options}\n CS\n sql.squish\n end",
"def <<(sql)\n execute((Array === sql) ? sql.to_sql : sql)\n end",
"def insert_ignore\n insert_conflict\n end",
"def execute(sql, name = nil) \n # Only skip select statements from logging \n unless /^(select|show|begin|commit)/i.match(sql.strip) \n\t\tFile.open( File.join(RAILS_ROOT, 'db', 'ddl.sql'),'a') {|f|\n\t\t\ttemp_sql = sql.gsub(\"\\n\",\"\") \n\t\t\ttemp_sql = temp_sql + ';' if adapter_name != 'IBM_DB2' or adapter_name != 'IBM_DB'\n\t\t\tf.puts temp_sql\n\t\t}\n end\n\t old_execute sql, name\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def prepare_sql\n @sql = 'insert into people (' + @params.join(', ') + ') values '\n\n people.each_with_index do |person, i|\n person = person['person']\n gender = person['gender']\n person['gender'] = Person.genders[gender.downcase]\n\n # Select the values at our params\n values = person.values_at(*@params).map(&replace_nil)\n joined = values.join(', ')\n\n @sql += '(' + joined + ')'\n @sql += ', ' unless i+1 == people.size\n end\n end",
"def _insert_select_raw(ds)\n if use_prepared_statements_for?(:insert_select)\n if ps = model.send(:prepared_insert_select, @values.keys)\n _set_prepared_statement_server(ps).call(@values)\n end\n else\n super\n end\n end",
"def generate_pg_insert_query(table_name, keys, rows)\n \"INSERT INTO #{table_name}(#{keys.map { |i| \"\\\"#{i}\\\"\" }.join(',')}) VALUES(#{keys.map { |i| rows[i] == nil ? 'NULL' : \"'\" + pg_conn.escape_string(rows[i]) + \"'\" }.join(',')});\\n\"\n end",
"def execute_insert(query)\n @connection.nil? and raise \"Attempting to query a connection that isn't open.\"\n\n if (@type == \"db2\")\n Models::Databases::SiteDatabase::Base.connection.execute(query)\n elsif (@type == \"bops\")\n Models::Databases::Bops::Base.connection.execute(query)\n else\n Models::Databases::Dyces::Base.connection.execute(query)\n end\n\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n insert_sql(sql, name, pk, id_value, sequence_name)\n end",
"def resolve_ids_habtm_sql(source, klass, param, param_class, insert_table)\n\n\t\ttemptable = resolve_ids_temptable_sql(source, klass, param, param_class)\n\n\t\tsource.connection.execute \"\n\t\t\tINSERT INTO #{insert_table} (#{klass.name.underscore + \"_id\"}, #{param}) \n\t\t\t\tSELECT object_id, param_id FROM #{temptable};\"\n\n\t\tresolve_ids_cleanup_sql(source, temptable)\n\n\t\treturn true\n\tend",
"def insert_select(*values)\n with_sql_first(insert_select_sql(*values))\n end",
"def add_lock!(sql, options)\n sql\n end",
"def db_insert(visit_id)\n \"INSERT INto image_datasets\n (rmr, series_description, path, timestamp, created_at, updated_at, visit_id, \n glob, rep_time, bold_reps, slices_per_volume, scanned_file, 'dicom_study_uid')\n VALUES ('#{@rmr_number}', '#{@series_description}', '#{@directory}', '#{@timestamp.to_s}', '#{DateTime.now}', \n '#{DateTime.now}', '#{visit_id}', '#{self.glob}', '#{@raw_image_files.first.rep_time}', \n '#{@raw_image_files.first.bold_reps}', '#{@raw_image_files.first.num_slices}', '#{@scanned_file}' )\"\n end",
"def _insert\n return super if model.cti_tables.length == 1\n model.cti_models.each do |m|\n v = {}\n m.cti_table_columns.each{|c| v[c] = @values[c] if @values.include?(c)}\n ds = use_server(m.cti_instance_dataset)\n if ds.supports_insert_select? && (h = ds.insert_select(v))\n @values.merge!(h)\n else\n nid = ds.insert(v)\n @values[primary_key] ||= nid\n end\n end\n db.dataset.supports_insert_select? ? nil : @values[primary_key]\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @logger.unknown(\"ODBCAdapter#pre_insert>\") if @trace\n @logger.unknown(\"args=[#{sql}|#{name}|#{pk}|#{id_value}|#{sequence_name}]\") if @trace\n @iiTable = get_table_name(sql)\n @logger.unknown(\"@iiTable=#{@iiTable}\") if @trace\n @iiCol = get_autounique_column(@iiTable)\n @logger.unknown(\"@iiCol=#{@iiCol}\") if @trace\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n remove_null_sequence_value_from_sql(sql, @iiCol)\n# rescue Exception => e\n# raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def insert values\n im = InsertManager.new @engine\n im.insert values\n @engine.connection.insert im.to_sql\n end",
"def prepared_insert(cols)\n cached_prepared_statement(:insert, prepared_columns(cols)){prepare_statement(dataset, :insert, prepared_statement_key_hash(cols))}\n end",
"def fast_insert(rows, base_cmd, end_cmd = '')\n RawDB.fast_insert(db, rows, base_cmd, end_cmd)\n end",
"def prepared_insert_select(cols)\n if dataset.supports_insert_select?\n cached_prepared_statement(:insert_select, prepared_columns(cols)){prepare_explicit_statement(naked.clone(:server=>dataset.opts.fetch(:server, :default)), :insert_select, prepared_statement_key_hash(cols))}\n end\n end",
"def dump_insert_multi(io, table_obj, rows)\n debug \"Inserting #{rows.length} into #{table_obj.name}.\"\n sqls = @export_db.insert_multi(\n table_obj.name,\n rows,\n replace_line_breaks: true,\n return_sql: true,\n keys: @keys\n )\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n\n rows.clear\n\n # Ensure garbage collection or we might start using A LOT of memory.\n GC.start\n end",
"def insert_statement(model, properties, identity_field)\n statement = \"INSERT INTO #{quote_name(model.storage_name(name))} \"\n\n if supports_default_values? && properties.empty?\n statement << 'DEFAULT VALUES'\n else\n statement << <<-SQL.compress_lines\n (#{properties.map { |property| quote_name(property.field) }.join(', ')})\n VALUES\n (#{(['?'] * properties.size).join(', ')})\n SQL\n end\n\n if supports_returning? && identity_field\n statement << \" RETURNING #{quote_name(identity_field.field)}\"\n end\n\n statement\n end",
"def dump_insert_multi(io, table_obj, rows)\n print \"Inserting #{rows.length} into #{table_obj.name}.\\n\" if @debug\n sqls = @args[:db].insert_multi(table_obj.name, rows, :return_sql => true, :keys => @keys)\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n \n rows.clear\n \n #Ensure garbage collection or we might start using A LOT of memory.\n GC.start\n end",
"def save\r\n # raise error if schema is not same between from and to.\r\n # so , create sql from record map\r\n #keys = get_schema.map{|v| v[0]}\r\n keys = @__stored_map__.keys\r\n sql = \"insert into #{self.class.table_name} (#{keys.join(',')}) values( \"\r\n\r\n keys.each_with_index do |key , index|\r\n v = @__stored_map__[key]\r\n sql << ',' if index != 0\r\n sql << (v.kind_of?(String) ? \"'#{v}'\" : v.to_s)\r\n end\r\n sql << \")\"\r\n puts sql if $DEBUG\r\n self.class.execute_sql(sql)\r\n end"
] |
[
"0.7337785",
"0.7198908",
"0.71126693",
"0.707527",
"0.68403673",
"0.6808274",
"0.6782001",
"0.65473086",
"0.64943826",
"0.6456124",
"0.6433498",
"0.6402454",
"0.6376985",
"0.63509864",
"0.6320931",
"0.6320931",
"0.626627",
"0.6218312",
"0.621048",
"0.6189745",
"0.61735564",
"0.6155588",
"0.6147588",
"0.61439776",
"0.61288124",
"0.6128083",
"0.60862947",
"0.60852754",
"0.60662794",
"0.6005256",
"0.6004235",
"0.5970602",
"0.5930564",
"0.59037924",
"0.5894897",
"0.5888416",
"0.5888416",
"0.58835214",
"0.5871042",
"0.5871042",
"0.5871042",
"0.58610386",
"0.58480424",
"0.58463925",
"0.5841273",
"0.5802794",
"0.57940346",
"0.57598734",
"0.5754201",
"0.5752139",
"0.5715526",
"0.5705432",
"0.56979203",
"0.56873643",
"0.5663675",
"0.56595767",
"0.5637779",
"0.5637779",
"0.56361496",
"0.5614633",
"0.5599738",
"0.5593827",
"0.55702776",
"0.5565475",
"0.5562561",
"0.55545646",
"0.55471873",
"0.5539758",
"0.5539758",
"0.5528175",
"0.5521033",
"0.55156577",
"0.55092233",
"0.5479357",
"0.54779077",
"0.5474635",
"0.54603374",
"0.5425346",
"0.542372",
"0.54187506",
"0.5417747",
"0.54112494",
"0.53893316",
"0.5388681",
"0.5368402",
"0.53537077",
"0.5353376",
"0.5353005",
"0.5342349",
"0.53378344",
"0.5335416",
"0.5333767",
"0.53229654",
"0.531929",
"0.531231",
"0.5306964",
"0.5289852",
"0.528714",
"0.5277533",
"0.52715456"
] |
0.80021787
|
0
|
Format TRUNCATE statement with PostgreSQL-specific options.
|
def _truncate_sql(table)
to = @opts[:truncate_opts] || OPTS
"TRUNCATE TABLE#{' ONLY' if to[:only]} #{table}#{' RESTART IDENTITY' if to[:restart]}#{' CASCADE' if to[:cascade]}"
end
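
A short usage sketch (hedged: the table name is hypothetical; Sequel's PostgreSQL Dataset#truncate accepts these options and stashes them in @opts[:truncate_opts] for this method to read):

# Hypothetical table; each option toggles one fragment of the generated statement.
DB[:accounts].truncate(only: true, restart: true, cascade: true)
# Roughly executes:
#   TRUNCATE TABLE ONLY "accounts" RESTART IDENTITY CASCADE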
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def notice_nosql_statement(nosql_statement)\n return unless record_sql?\n\n @nosql_statement = Database.truncate_query(nosql_statement)\n nil\n end",
"def truncate_table\n\t\t\t transaction { connection.execute(\"TRUNCATE TABLE #{quoted_table_name};\") }\n\t\t end",
"def exec_query_truncate_table\n execute_remote(query_truncate_table)\n end",
"def truncate(table_name, name = nil)\n execute \"TRUNCATE TABLE #{quote_table_name(table_name)}\", name\n end",
"def truncate_table(table = quoted_table_name)\n connection.execute(\"TRUNCATE TABLE #{table}\")\n end",
"def truncate\n connection.truncate(table_name)\n end",
"def mssql_truncate(table_name)\n execute \"TRUNCATE TABLE #{quote_table_name(table_name)}\", 'Truncate Tables'\n rescue => e\n if e.message =~ /Cannot truncate table .* because it is being referenced by a FOREIGN KEY constraint/\n execute \"DELETE FROM #{quote_table_name(table_name)}\", 'Truncate Tables with Delete'\n else\n raise\n end\n end",
"def truncate() end",
"def truncate_trait\n options[:style] = [options.try(:[], :style), 'min-width: 0;'].join(' ').squish\n end",
"def truncate(truncate_at, options = {})\n return dup unless length > truncate_at\n\n omission = options[:omission] || '...'\n length_with_room_for_omission = truncate_at - omission.length\n stop = \\\n if options[:separator]\n rindex(options[:separator], length_with_room_for_omission) || length_with_room_for_omission\n else\n length_with_room_for_omission\n end\n\n \"#{self[0, stop]}#{omission}\"\n end",
"def truncate(truncate_at, options = {})\n return dup unless length > truncate_at\n\n omission = options[:omission] || '...'\n length_with_room_for_omission = truncate_at - omission.length\n stop =\n if options[:separator]\n rindex(options[:separator], length_with_room_for_omission) || length_with_room_for_omission\n else\n length_with_room_for_omission\n end\n\n \"#{self[0, stop]}#{omission}\"\n end",
"def delete_from_sql(sql)\n sql << ' FROM '\n source_list_append(sql, @opts[:from][0..0])\n end",
"def cleanse(args)\n # Clean sql, need to add code to escape quotes and prevent deletions without conditions\n @sql\n end",
"def truncate(input, length = T.unsafe(nil), truncate_string = T.unsafe(nil)); end",
"def table_options_sql(options)\n\t sql = []\n\t sql << flag_option_sql(options, :parallel)\n\t sql << flag_option_sql(options, :logging)\n\t sql << flag_option_sql(options, :monitoring)\n\t sql << \"TABLESPACE #{quote_identifier(options[:tablespace])}\" if options[:tablespace]\n\t sql << compress_option_sql(options)\n\t sql << options[:options] if String === options[:options]\n\t sql.compact.join ' '\n\t end",
"def truncate_preserve(table_name_str)\n puts \"Truncating '#{table_name_str}' table (it may take a while):\"\n table_name = TableName.valueOf(table_name_str)\n\n if enabled?(table_name_str)\n puts 'Disabling table...'\n disable(table_name_str)\n end\n\n puts 'Truncating table...'\n @admin.truncateTable(table_name, true)\n end",
"def connection_configuration_sqls\n sqls = super\n sqls << \"SET DateStyle = 'ISO'\" if @use_iso_date_format\n sqls\n end",
"def truncate(truncate)\n self.query.truncate = truncate\n self\n end",
"def truncate(arg0)\n end",
"def to_truncated\n truncate_datetime(to)\n end",
"def sanitize_select_options(options)#:nodoc:\n o = options.dup\n select = o.delete :select\n o[:override_select] = select ? select_column_sql(select) : ' * '\n o\n end",
"def truncate!\n return if truncated?\n update_column :metadata, nil\n end",
"def trunc(num, type)\n numeric_cast([:trunc, :fp_trunc], num, type)\n end",
"def create_psql_cmd(query, db)\n \"su postgres -c \\\"psql -d #{db} -p #{@port} -q -t -c #{escaped_query(query)}\\\"\"\n end",
"def truncate(opts = OPTS)\n if opts.empty?\n super()\n else\n clone(:truncate_opts=>opts).truncate\n end\n end",
"def compact_pg_version\n pg_version.gsub('.','')\n end",
"def truncate(truncate_at, options = {})\n return dup unless length > truncate_at\n\n options[:omission] ||= '...'\n length_with_room_for_omission = truncate_at - options[:omission].length\n stop = \\\n if options[:separator]\n rindex(options[:separator], length_with_room_for_omission) || length_with_room_for_omission\n else\n length_with_room_for_omission\n end\n\n self[0...stop] + options[:omission]\n end",
"def truncate(text, options={})\n options[:length] ||= 30\n options[:omission] ||= \"...\"\n\n if text\n l = options[:length] - options[:omission].length\n chars = text\n stop = options[:separator] ? (chars.rindex(options[:separator], l) || l) : l\n (chars.length > options[:length] ? chars[0...stop] + options[:omission] : text).to_s\n end\n end",
"def prepared_statement_arg(v)\n case v\n when Numeric\n v.to_s\n when Date, Time\n literal(v).gsub(\"'\", '')\n else\n v\n end\n end",
"def truncate(p0) end",
"def truncate(p0) end",
"def delete_options_statement\n super\n end",
"def truncate(s, truncate_at, options = {})\r\n return s unless s.length > truncate_at\r\n omission = options[:omission] || '...'\r\n with_extra_room = truncate_at - omission.length\r\n stop = \\\r\n if options[:separator]\r\n rindex(options[:separator], with_extra_room) || with_extra_room\r\n else\r\n with_extra_room\r\n end\r\n \"#{s[0, stop]}#{omission}\"\r\n end",
"def format_sql_comment(comment)\n \" -- #{comment.to_s.gsub(/\\s+/, ' ')}\\n\"\n end",
"def truncate_all_tables(options = {})\n options[:verbose] ||= false\n EOL::Db.all_connections.uniq.each do |conn|\n count = 0\n conn.tables.each do |table|\n next if table == 'schema_migrations'\n count += 1\n if conn.respond_to? :with_master\n conn.with_master do\n truncate_table(conn, table)\n end\n else\n truncate_table(conn, table)\n end\n end\n if options[:verbose]\n puts \"-- Truncated #{count} tables in \" +\n conn.instance_eval { @config[:database] } +\n \".\"\n end\n end\n # EOL.forget_everything # expensive, but without it, would risk errors.\n end",
"def unprepare_sqlserver_statement(sql)\n if sql.starts_with?(SQLSERVER_STATEMENT_PREFIX)\n executesql = sql.from(SQLSERVER_STATEMENT_PREFIX.length)\n args = executesql.split(', ')\n unprepared_sql = args.shift.strip.match(SQLSERVER_NATIONAL_STRING_MATCHER)[1]\n unprepared_sql = Utils.unquote_string(unprepared_sql)\n args = args.from(args.length / 2)\n args.each_with_index do |arg, index|\n value = arg.match(SQLSERVER_PARAM_MATCHER)[1]\n unprepared_sql.sub! \"@#{index}\", value\n end\n unprepared_sql\n else\n sql\n end\n end",
"def delete_sql(opts = nil)\n opts = opts ? @opts.merge(opts) : @opts\n\n if opts[:group]\n raise Error::InvalidOperation, \"Grouped datasets cannot be deleted from\"\n elsif opts[:from].is_a?(Array) && opts[:from].size > 1\n raise Error::InvalidOperation, \"Joined datasets cannot be deleted from\"\n end\n\n sql = \"DELETE FROM #{source_list(opts[:from])}\"\n\n if where = opts[:where]\n sql << \" WHERE #{literal(where)}\"\n end\n\n sql\n end",
"def truncate(s, truncate_at, options = {})\n return s unless s.length > truncate_at\n omission = options[:omission] || '...'\n with_extra_room = truncate_at - omission.length\n stop = \\\n if options[:separator]\n rindex(options[:separator], with_extra_room) || with_extra_room\n else\n with_extra_room\n end\n \"#{s[0, stop]}#{omission}\"\n end",
"def to_postgres_string\n min_string = minutes.to_s\n sec_string = seconds.to_s\n \n min_string = \"0\" + min_string if minutes.to_s.length < 2\n sec_string = \"0\" + sec_string if seconds.to_s.length < 2\n \n return hours.to_s+\":\"+min_string+\":\"+sec_string\n end",
"def truncate\n end",
"def drop_schema_sql(name, opts = {})\n \"DROP SCHEMA #{quote_identifier(name)}\"\n end",
"def truncate(model_klass, table_name)\n model_klass.connection.truncate(model_klass.table_name)\n end",
"def truncate(namespace, set_name = nil, before_last_update = nil, options = {})\n policy = create_policy(options, Policy, default_info_policy)\n\n node = @cluster.random_node\n\n if set_name && !set_name.to_s.strip.empty?\n str_cmd = \"truncate:namespace=#{namespace}\"\n str_cmd << \";set=#{set_name}\" unless set_name.to_s.strip.empty?\n else\n if node.supports_feature?(Aerospike::Features::TRUNCATE_NAMESPACE)\n str_cmd = \"truncate-namespace:namespace=#{namespace}\"\n else\n str_cmd = \"truncate:namespace=#{namespace}\"\n end\n end\n\n if before_last_update\n lut_nanos = (before_last_update.to_f * 1_000_000_000.0).round\n str_cmd << \";lut=#{lut_nanos}\"\n elsif supports_feature?(Aerospike::Features::LUT_NOW)\n # Servers >= 4.3.1.4 require lut argument\n str_cmd << \";lut=now\"\n end\n\n response = send_info_command(policy, str_cmd, node).upcase\n return if response == \"OK\"\n raise Aerospike::Exceptions::Aerospike.new(Aerospike::ResultCode::SERVER_ERROR, \"Truncate failed: #{response}\")\n end",
"def pg_dump_args\n ['-x', '-O', if schema_only\n '-s'\n end, conf['database']].compact.join(' ')\n end",
"def truncate_formatted_filename f, unformatted_len, wid\n excess = unformatted_len - wid\n\n f = case @truncate_from\n\n when :right\n # FIXME: 2019-04-23 - do we need the control code at end ??\n f[0..wid - 3] + '$ '\n\n when :center\n\n # from central point calculate how much to remove in both directions\n center = unformatted_len / 2\n excess_half = excess / 2\n point = center + excess_half\n point1 = point - excess\n\n # remove text between point1 and point\n f[0..(point1 - 1)] + '$' + f[point + 2..-1] + ' '\n\n when :left\n\n # NOTE: we cannot remove the hint\n # for single hints we need to add extra space\n # there could be escape codes of varying length\n sindex = f.index(' ') || f.index('+')\n # 4 = 2 for literals, 2 to get ahead of sindex+1\n # FIXME crashing here, maybe there was a plus sign in place of space\n f[0..sindex + 0] + '<' + f[sindex + 3 + excess..-1] + ' '\n end\n return f\nend",
"def psql(args)\n \"psql -X -q #{args}\"\nend",
"def truncate(str, truncate_at, options = {})\n return str unless str.length > truncate_at\n\n options[:separator] ||= ' '\n stop = str.rindex(options[:separator], truncate_at) || truncate_at\n\n \"#{str[0, stop]}\"\nend",
"def truncate(text, *args)\n options = args.extract_options!\n unless args.empty?\n options[:length] = args[0] || 30\n options[:omission] = args[1] || \"...\"\n end\n options.reverse_merge!(:length => 30, :omission => \"...\")\n\n if text\n l = options[:length] - options[:omission].mb_chars.length\n chars = text.mb_chars\n (chars.length > options[:length] ? chars[0...l] + options[:omission] : text).to_s\n end\n end",
"def check_truncation_allowed!\n raise(InvalidOperation, \"Grouped datasets cannot be truncated\") if opts[:group]\n raise(InvalidOperation, \"Joined datasets cannot be truncated\") if opts[:join]\n end",
"def prepare_statement_sub(sql)\n sql.gsub(/\\$\\d+/, '?')\n end",
"def replace_statement(target, stage)\n <<-SQLREPLACE\n begin transaction;\n drop table #{target};\n alter table #{stage} rename to #{target};\n end transaction;\n SQLREPLACE\n end",
"def _format_sql(sql)\n sql = sql.delete '\"'\n sql.downcase\n end",
"def command_builder(format_options, query)\n verified_query = verify_query(query)\n sql_prefix, sql_postfix = \"\", \"\"\n if inspec.os.windows?\n sql_prefix = %{@'\\n#{format_options}\\n#{verified_query}\\nEXIT\\n'@ | }\n else\n sql_postfix = %{ <<'EOC'\\n#{format_options}\\n#{verified_query}\\nEXIT\\nEOC}\n end\n\n if @db_role.nil?\n %{#{sql_prefix}#{bin} \"#{user}\"/\"#{password}\"@#{host}:#{port}/#{@service}#{sql_postfix}}\n elsif @su_user.nil?\n %{#{sql_prefix}#{bin} \"#{user}\"/\"#{password}\"@#{host}:#{port}/#{@service} as #{@db_role}#{sql_postfix}}\n else\n %{su - #{@su_user} -c \"env ORACLE_SID=#{@service} #{bin} / as #{@db_role}#{sql_postfix}}\n end\n end",
"def fp_trunc(val, ty, name = \"\")\n Instruction.from_ptr(C.build_fp_trunc(self, val, LLVM::Type(ty), name))\n end",
"def prep_sql_for_run\n self.sql = sql.strip\n self.sql = sql[0..-2] if sql.last == ';'\n self.sql = sql.dup\n end",
"def parse_statement(statement)\n alter_argument = AlterArgument.new(statement)\n dsn = DSN.new(connection_details.database, alter_argument.table_name)\n\n \"#{command} #{all_options} #{dsn} #{alter_argument}\"\n end",
"def wikitext_truncate_and_strip(markup, options = {})\n squished = strip_tags(markup.w).squish # may have entities, eg. " etc\n truncated = truncate squished, options.merge(escape: false)\n if truncated == squished # string wasn't changed\n truncated.html_safe\n else # string was chopped\n omission = options[:omission] || '...'\n truncated.gsub! /&[^;]+?#{Regexp.escape omission}\\z/, omission\n truncated.html_safe\n end\n end",
"def to_sql_drop_namespace(db)\n if db.supports_schemas?\n db.send(:drop_schema_sql, self.name, {})\n else\n \"\"\n end\n end",
"def alter_materialized_view_reset_options(name, *args)\n options = args.extract_options!\n\n execute PostgreSQLMaterializedViewAlterer.new(self, name, {\n :reset_options => args\n }, options).to_sql\n end",
"def sql(options = {})\n options = normalize_options(options)\n ret = \"\"\n\n if exact_date?(options)\n\tret += \"#{column} = '#{self.from_date(options)}' \"\n else\n\tret += \"#{column} >= '#{self.from_date(options)}' AND \"\t\n\tret += \"#{column} <= '#{self.to_date(options)}' \"\t\n end\n\n raise \"not time selection supported yet '#{range(options)}' please use beginning and end.\" if from_time or to_time\n\n return nil if ret == \"\"\n return ret\n end",
"def truncate_db\n drop_table\n create_table\n end",
"def destroy_constraints_statement(table_name, constraint_name)\n <<-EOS.compress_lines\n ALTER TABLE #{quote_table_name(table_name)}\n DROP CONSTRAINT #{quote_constraint_name(constraint_name)}\n EOS\n end",
"def remove_monetize table_name, *args\n options = args.extract_options!\n Array(args).each do |name|\n remove_column table_name, \"#{name}_money\"\n remove_column table_name, \"#{name}_currency\"\n end\n remove_currency table_name, options unless has_money_columns?(table_name)\n end",
"def truncate_index(options = {})\n truncate(\n options[:value].join(I18n.t('support.array.words_connector')),\n length: INDEX_TRUNCATION_VALUE,\n omission: I18n.t('meta.search.index.truncated_field'),\n escape: false\n )\n end",
"def create_view_prefix_sql(name, options)\n create_view_sql_append_columns(\"CREATE #{'OR REPLACE 'if options[:replace]}VIEW #{quote_schema_table(name)}\", options[:columns])\n end",
"def shortened_tweet_truncator(tweet)\n\n if tweet.length <= 140\n tweet\n else\n word_substituter(tweet)[0..136] + \"...\"\n end\nend",
"def truncate_name(name, options={})\n options = {:length => NAME_MAX_LENGTH, :separator => ' '}.merge options\n h truncate(name, options)\n end",
"def to_sql\n @line.gsub(OUTPUT_REGEX, '')\n end",
"def update_table_sql(sql)\n sql << ' '\n source_list_append(sql, @opts[:from][0..0])\n end",
"def rollback_transaction_sql\n SQL_ROLLBACK\n end",
"def rollback_transaction_sql\n SQL_ROLLBACK\n end",
"def truncate(v, l = 30)\n s = v.to_s\n s.size > l ? s.first(l) + t('truncation', '...') : s\n end",
"def reset_sql_diff_text_log\n @sql_diff_text_log = ''\n end",
"def delete_statement(query)\n conditions_statement, bind_values = conditions_statement(query.conditions)\n\n statement = \"DELETE FROM #{quote_name(query.model.storage_name(name))}\"\n statement << \" WHERE #{conditions_statement}\" unless conditions_statement.blank?\n\n return statement, bind_values\n end",
"def clean_sql\n self.sql=SqlText.clean(sql) if sql.present?\n end",
"def truncate(table_name_str)\n puts \"Truncating '#{table_name_str}' table (it may take a while):\"\n table_name = TableName.valueOf(table_name_str)\n\n if enabled?(table_name_str)\n puts 'Disabling table...'\n disable(table_name_str)\n end\n\n puts 'Truncating table...'\n @admin.truncateTable(table_name, false)\n end",
"def clean_annuaire()\n puts \"TRUNCATE ALL USER RELATED TABLES\"\n [\n :last_uid, :telephone, :email, :relation_eleve, :ressource,\n :enseigne_dans_regroupement, :role_user, :profil_user, :user, :regroupement, :eleve_dans_regroupement\n ].each do |table|\n if table == :ressource\n truncate_ressource()\n else \n DB[table].truncate()\n end\n end\n\n create_super_admin_and_ressource_laclasse()\nend",
"def fix_dates\n Dir[File.join( @base_dir, \"**/*.sql\" )].each do |sql_dump|\n puts \"Fixing dates in #{sql_dump.split('.sql')[0]}...\"\n `ruby -p -i -e '$_.gsub!(/date,\\n/,\"varchar(30),\\n\")' #{sql_dump}`\n end\n end",
"def psql_db_dump_replacer__for_psql_db__sample_example\n [\n psql_db__sample_example,\n [\n \"/tmp/psql_db_original_dump\"\n ],\n [\n \"/tmp/database_dump\"\n ],\n \"ON_ERROR_STOP=off\",\n ]\n end",
"def shortened_tweet_truncator(tweet)\n if word_substituter(tweet).length <= 140\n word_substituter(tweet)\n else\n word_substituter(tweet)[0..136] << \"...\" \n end\nend",
"def TableTruncate\n begin\n unless getDBConn\n raise _(\"Please open a database before trying to truncate it.\")\n end\n\n tables = treeview_getSelection(@tv_tables)\n raise _(\"You have to select a table to truncate.\") if count(tables) <= 0\n\n # Confirm and truncate.\n tables.each do |table|\n table_ob = @dbconn.getTable(table[0])\n\n if msgbox(_(\"Question\"), sprintf(_(\"Do you want to truncate the table: %s?\"), table[0]), \"yesno\") == \"yes\"\n table_ob.truncate\n end\n end\n rescue => e\n knj_msgbox.error_exc(e)\n end\n\n @dbpage.TablesUpdate()\n end",
"def post_sql_statements( table_name, options ) # :nodoc:\n post_sql_statements = []\n if options[:on_duplicate_key_update]\n post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update] )\n end\n\n #custom user post_sql\n post_sql_statements << options[:post_sql] if options[:post_sql]\n\n #with rollup\n post_sql_statements << rollup_sql if options[:rollup]\n\n post_sql_statements\n end",
"def compress_option_sql(attrs)\n\t case value=attrs[:compress]\n\t when Fixnum, Integer then \"COMPRESS(#{value})\"\n\t else flag_option_sql attrs, :compress\n\t end\n end",
"def shortened_tweet_truncator(tweet)\n if word_substituter(tweet).length > 140\n word_substituter(tweet)[0..136] + '...'\n else\n tweet\n end\nend",
"def to_s\n regex = REPLACEMENT\n\n if Gitlab::Database.mysql?\n regex = Regexp.union(regex, MYSQL_REPLACEMENTS)\n end\n\n sql = @sql.gsub(regex, '?').gsub(CONSECUTIVE) do |match|\n \"#{match.count(',') + 1} values\"\n end\n\n # InfluxDB escapes double quotes upon output, so lets get rid of them\n # whenever we can.\n if Gitlab::Database.postgresql?\n sql = sql.delete('\"')\n end\n\n sql.tr(\"\\n\", ' ')\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end",
"def truncate(length, options = {})\n text = self.dup\n options[:omission] ||= \"...\"\n\n length_with_room_for_omission = length - options[:omission].mb_chars.length\n chars = text.mb_chars\n stop = options[:separator] ?\n (chars.rindex(options[:separator].mb_chars, length_with_room_for_omission) || length_with_room_for_omission) : length_with_room_for_omission\n\n (chars.length > length ? chars[0...stop] + options[:omission] : text).to_s\n end",
"def delete_queries\n [\n \"DROP TRIGGER tr_#{suffix} ON #{src_table};\",\n \"DROP FUNCTION fn_#{suffix}();\",\n \"DROP TRIGGER tr_#{suffix}_cleaner ON #{surveys_table};\",\n \"DROP FUNCTION fn_#{suffix}_cleaner();\"\n ]\n end",
"def create_table_suffix_sql(name, options)\n sql = String.new\n\n if inherits = options[:inherits]\n sql << \" INHERITS (#{Array(inherits).map{|t| quote_schema_table(t)}.join(', ')})\"\n end\n\n if partition_by = options[:partition_by]\n sql << \" PARTITION BY #{options[:partition_type]||'RANGE'} #{literal(Array(partition_by))}\"\n end\n\n if on_commit = options[:on_commit]\n raise(Error, \"can't provide :on_commit without :temp to create_table\") unless options[:temp]\n raise(Error, \"unsupported on_commit option: #{on_commit.inspect}\") unless ON_COMMIT.has_key?(on_commit)\n sql << \" ON COMMIT #{ON_COMMIT[on_commit]}\"\n end\n\n if tablespace = options[:tablespace]\n sql << \" TABLESPACE #{quote_identifier(tablespace)}\"\n end\n\n if server = options[:foreign]\n sql << \" SERVER #{quote_identifier(server)}\"\n if foreign_opts = options[:options]\n sql << \" OPTIONS (#{foreign_opts.map{|k, v| \"#{k} #{literal(v.to_s)}\"}.join(', ')})\"\n end\n end\n\n sql\n end",
"def truncate(symbol_or_array)\n symbol_or_array = [ symbol_or_array ] unless symbol_or_array.is_a? Array\n total_count = count = 0\n startt = Time.now\n\n symbol_or_array.each do |symbol|\n name = send(symbol).name\n logger.info \"(#{chunk_id}) Begining truncate of #{name}...\" if log? :basic\n case symbol\n when :entry_trigger then\n count += entry_trigger.positions.count\n entry_trigger.positions.clear\n when :entry_strategy then\n count += entry_strategy.positions.count\n entry_strategy.positions.clear\n when :exit_trigger then\n count += exit_trigger.positions.count\n exit_trigger.positions.clear\n when :exit_strategy then\n count += exit_strategy.positions.count\n exit_strategy.positions.clear\n when :scan then\n count += scan.positions.count\n scan.positions.clear\n else\n raise ArgumentError, \":truncate must take one or an array of the following: :entry_trigger, :entry_strategy, :exit_strategy or :scan\"\n end\n total_count += count\n end\n delta = Time.now - startt\n logger.info \"(#{chunk_id}) Truncated #{total_count} positions in #{Backtester.format_et(delta)}\" if log? :basic\n end",
"def delete_safe_times(volumes)\n volumes.keys.each do |volume_id|\n command = \"delete from results where volume_id=?\"\n options = [volume_id]\n volumes[volume_id].each do |time_window|\n command += \"\\n and access_date not between datetime(?) and datetime(?)\"\n options += [time_window[\"start\"].iso8601, time_window[\"end\"].iso8601]\n end\n command += \";\"\n command_literal = command\n options.each { |opt| command_literal.sub!(opt)} # Don't ever do this.\n puts \"Executing: #{command_literal}\\n\"\n @db.execute(command, *options)\n end\n end",
"def shortened_tweet_truncator(tweet) \n if word_substitutor(tweet).length > 140 \n word_substitutor(tweet)[0...137] + \"...\"\n else \n tweet\n end \nend",
"def to_maql_drop\n maql = \"\"\n [ attributes, facts ].each do |obj|\n maql += obj.to_maql_drop\n end\n maql += \"DROP {#{self.identifier}};\\n\"\n end",
"def drop_table_sql(name, options)\n \"DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_schema_table(name)}#{' CASCADE' if options[:cascade]}\"\n end",
"def setup_test_database\n connection = PG.connect(dbname: 'chitter_challenge_test')\n connection.exec(\"TRUNCATE peeps;\")\nend",
"def truncate(str, truncate_at, omission: '...', separator: nil)\r\n return str.dup unless str.length > truncate_at\r\n\r\n omission = omission || '...'\r\n length_with_room_for_omission = truncate_at - omission.length\r\n stop = \\\r\n if separator\r\n rindex(separator, length_with_room_for_omission) || length_with_room_for_omission\r\n else\r\n length_with_room_for_omission\r\n end\r\n\r\n \"#{str[0, stop]}#{omission}\"\r\n end",
"def awesome_truncate(text, length = 30, truncate_string = \"...\")\n return if text.nil?\n l = length - truncate_string.mb_chars.length\n text.mb_chars.length > length ? text[/\\A.{#{l}}\\w*\\;?/m][/.*[\\w\\;]/m] + truncate_string : text\nend",
"def prettify_pg_err(err, original_sql_query = nil)\n return err if err[/LINE \\d+/].nil?\n\n # LINE 2: ... -> err_line_num = 2\n err_line_num = err.match(/LINE (\\d+):/)[1].to_i\n # LINE 1: SELECT usr FROM users ORDER BY 1\n err_address_line = err.lines[1]\n\n sql_start_line_num = 3 if err.lines.length <= 3\n # error not always contains HINT\n sql_start_line_num ||= err.lines[3][/(HINT|DETAIL)/] ? 4 : 3\n sql_body_lines = if sql_start_line_num < err.lines.length\n err.lines[sql_start_line_num..-1]\n else\n original_sql_query&.lines\n end\n\n # this means original query is missing so it's nothing to prettify\n return err unless sql_body_lines\n\n # this is an SQL line with an error.\n # we need err_line to properly align the caret in the caret line\n # and to apply a full red colorizing schema on an SQL line with error\n err_line = sql_body_lines[err_line_num - 1]\n\n # colorizing keywords, strings and error line\n err_body = sql_body_lines.map do |ln|\n ln == err_line ? StringColorize.colorize_err(ln) : colorize_err_line(ln)\n end\n\n err_caret_line = extract_err_caret_line(err_address_line, err_line, sql_body_lines, err)\n err_body.insert(err_line_num, StringColorize.colorize_err(err_caret_line))\n\n err.lines[0..sql_start_line_num - 1].join + err_body.join\n end",
"def post_sql_statements( table_name, options ) # :nodoc:\n post_sql_statements = []\n\n if supports_on_duplicate_key_update? && options[:on_duplicate_key_update]\n post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update], options[:model], options[:primary_key], options[:locking_column] )\n elsif logger && options[:on_duplicate_key_update]\n logger.warn \"Ignoring on_duplicate_key_update because it is not supported by the database.\"\n end\n\n # custom user post_sql\n post_sql_statements << options[:post_sql] if options[:post_sql]\n\n # with rollup\n post_sql_statements << rollup_sql if options[:rollup]\n\n post_sql_statements\n end",
"def drop_table_sql(name)\n \"DROP TABLE #{quote_schema_table(name)}\"\n end"
] |
[
"0.5507991",
"0.54600996",
"0.5273651",
"0.5187143",
"0.50678164",
"0.4914534",
"0.48577395",
"0.48543635",
"0.4853718",
"0.4715146",
"0.47071767",
"0.46592757",
"0.46435788",
"0.46198285",
"0.45876056",
"0.45748812",
"0.4544344",
"0.45238018",
"0.4521532",
"0.45109248",
"0.44717032",
"0.44479775",
"0.44455972",
"0.444346",
"0.44433755",
"0.4426155",
"0.44174093",
"0.44046274",
"0.4404442",
"0.43866435",
"0.43862087",
"0.4383557",
"0.43819165",
"0.4374588",
"0.43726906",
"0.43726042",
"0.43665722",
"0.43619472",
"0.4359651",
"0.43351784",
"0.43314055",
"0.43117782",
"0.42809695",
"0.42778388",
"0.42602605",
"0.42380902",
"0.42269668",
"0.42217728",
"0.42193583",
"0.42175",
"0.4209354",
"0.4207485",
"0.41849414",
"0.41762963",
"0.4154979",
"0.41377082",
"0.41338807",
"0.41317543",
"0.4126806",
"0.4124688",
"0.41188025",
"0.41160887",
"0.41129708",
"0.41103208",
"0.41099337",
"0.4106414",
"0.4102732",
"0.4099391",
"0.40953517",
"0.4089104",
"0.4089104",
"0.40823913",
"0.4082293",
"0.40806565",
"0.40703514",
"0.40685594",
"0.40627223",
"0.4060736",
"0.40556836",
"0.4054845",
"0.40542805",
"0.40523738",
"0.40510896",
"0.40509802",
"0.4049801",
"0.40458724",
"0.40458304",
"0.4045241",
"0.40427637",
"0.403572",
"0.4032601",
"0.402924",
"0.40274906",
"0.40209365",
"0.40069214",
"0.40065777",
"0.40063837",
"0.40021226",
"0.4002091",
"0.39960006"
] |
0.6143214
|
0
|
Use from_self for aggregate dataset using VALUES.
|
def aggreate_dataset_use_from_self?
super || @opts[:values]
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def aggregate_values(rows)\n # Convert rows into hash where each key is a column name and the each\n # value is an array of values for that column\n cols = OrderedHash.new\n rows.each do |row|\n row.each do |k,v|\n cols[k] ||= []\n cols[k] << v\n end\n end\n\n # Loop through each column, applying an aggregate proc if one exists\n # to the array of column values. If a proc does not exist we take the\n # last value from the array.\n result = cols.inject(OrderedHash.new) do |hsh, (col, vals)|\n hsh[col] = if @aggregators[col]\n @aggregators[col].call(vals)\n else\n vals.last\n end\n hsh\n end\n\n Row[result]\n end",
"def transform_values!; end",
"def from_self(opts=OPTS)\n fs = {}\n @opts.keys.each{|k| fs[k] = nil unless non_sql_option?(k)}\n pr = proc do\n c = clone(fs).from(opts[:alias] ? as(opts[:alias], opts[:column_aliases]) : self)\n if cols = _columns\n c.send(:columns=, cols)\n end\n c\n end\n\n opts.empty? ? cached_dataset(:_from_self_ds, &pr) : pr.call\n end",
"def aggregate(value)\n @query_hash[AGGREGATE][value] = value\n self\n end",
"def with_values\n Fetcher.new(self).with_values\n end",
"def data\n @data ||= aggregate\n end",
"def build values, options\n adapter_class.new(values, options).execute\n end",
"def new_aggregate(type,name, value)\n Models::Aggregate.new(\n :type => type,\n :parent_name => assessment_group.display_name,\n :parent_type => 'AssessmentGroup',\n :name => name,\n :value => value\n )\n end",
"def _values\n _data.merge(kept_data)\n end",
"def values\n self\n end",
"def merge_inheritance\n relation.itself_only_value = true if other.itself_only_value.present?\n\n if other.cast_records_value.present?\n relation.cast_records_value += other.cast_records_value\n relation.cast_records_value.uniq!\n end\n end",
"def from(value)\n new(dataset.from(value))\n end",
"def data\n @data ||= aggregate(:single)\n end",
"def aggregateData(aggregator)\n @timeStamps = aggregator.aggregate(@timeStamps, ChartDirector::AggregateFirst)\n @highData = aggregator.aggregate(@highData, ChartDirector::AggregateMax)\n @lowData = aggregator.aggregate(@lowData, ChartDirector::AggregateMin)\n @openData = aggregator.aggregate(@openData, ChartDirector::AggregateFirst)\n @closeData = aggregator.aggregate(@closeData, ChartDirector::AggregateLast)\n @volData = aggregator.aggregate(@volData, ChartDirector::AggregateSum)\n end",
"def values(*) end",
"def assign_values(subview, value)\n unless value.first.is_a?(Array)\n value = [value]\n end\n\n value.each_with_index do |row,row_index|\n row.each_with_index do |val,col_index|\n subview.set(row_index, col_index, val.to_java(:double))\n end\n end\n end",
"def _refresh(dataset)\n super\n recalculate_values_hashes\n end",
"def emit_values(values)\n values.each do |value|\n emit_self(value)\n end\n end",
"def data_values\n if bare_data_set?\n [data]\n else\n data.map{ |set| set.is_a?(Hash) ? set[:values] : set }\n end\n end",
"def with_values\n @values = true\n self\n end",
"def values value_type = :formatted_value\n return @values unless @values.nil?\n\n @values = []\n while @rowset.next do\n @values << 1.upto(self.columns.size).map do |i|\n @rowset.getString i\n end\n end\n\n @values\n end",
"def push(values)\n values = Array(values)\n unless values.empty?\n @data.expand(values.count, @data.columns_count)\n [@data.rows_count, values.count].max.times { |index| (@data.values[index] ||= []).push values[index] }\n end\n self\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n if ds.joined_dataset?\n # raise Error, \"Using a joined dataset as a model dataset is not support, use from_self on the dataset to wrap it in a subquery\" # SEQUEL5\n Sequel::Deprecation.deprecate(\"Using a joined dataset as a Sequel::Model dataset\", respond_to?(:cti_base_model) ? \"Use the class_table_inheritance plugin :alias option in #{cti_base_model.inspect}\" : \"Call from_self on the dataset to wrap it in a subquery\")\n end\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def values(v)\n raise Error, \"Cannot provide an empty array for values\" if v.empty?\n @default_dataset.clone(:values=>v)\n end",
"def push(values)\n values = Array(values)\n unless values.empty?\n @data.expand(@data.rows_count, values.count)\n @data.values.push Array.new([@data.columns_count, values.count].max) { |index| values[index] }\n end\n self\n end",
"def values\n rows.map{|r| r.value}\n end",
"def merge_by_values(relation, other)\n other.cte.with_values.each do |name, expression|\n relation = if other.cte.materialized_key?(name)\n relation.with!.materialized(name => expression)\n elsif other.cte.not_materialized_key?(name)\n relation.with!.not_materialized(name => expression)\n else\n relation.with!(name => expression)\n end\n end\n\n relation\n end",
"def add_data_values\n # remove any previous accepted values so that we can keep a track of what has been updated\n sqlclean = \"select clear_datacolumn_accepted_values(#{id})\"\n\n datatype = Datatypehelper.find_by_name(import_data_type)\n\n # I would like to change this so that the SQL is in one function but it wasn't working\n # TODO: I will look at this again - SR\n if datatype.name == 'text'\n sql = \"select accept_text_datacolumn_values(#{id})\"\n else\n dataset = Dataset.find(dataset_id)\n comment = ''\n comment = dataset.title unless dataset.nil?\n sql = \"select accept_datacolumn_values(#{datatype.id}, #{id}, #{datagroup_id}, '#{comment}')\"\n end\n\n begin\n connection = ActiveRecord::Base.connection()\n connection.begin_db_transaction\n connection.execute(sqlclean)\n connection.execute(sql)\n\n connection.commit_db_transaction\n rescue StandardError\n connection.rollback_db_transaction\n raise\n end\n end",
"def apply(_aggregate)\n raise NotImplementedError\n end",
"def inject_values!\n each_adapter { |adapter| adapter.inject!(calculator) }\n nil\n end",
"def aggregate\n []\n end",
"def values() end",
"def inject_values!\n each_adapter(&:inject!)\n set_dispatchable_positions!\n inject_graph_values!\n end",
"def dataset\n DB[SQL, from: from_truncated, to: to_truncated, tick: tick]\n end",
"def convert_input_dataset(ds)\n case ds\n when Symbol, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, LiteralString\n self.simple_table = db.literal(ds).freeze\n ds = db.from(ds)\n when Dataset\n ds = ds.from_self(:alias=>ds.first_source) if ds.joined_dataset?\n\n self.simple_table = if ds.send(:simple_select_all?)\n ds.literal(ds.first_source_table).freeze\n end\n @db = ds.db\n else\n raise(Error, \"Model.set_dataset takes one of the following classes as an argument: Symbol, LiteralString, SQL::Identifier, SQL::QualifiedIdentifier, SQL::AliasedExpression, Dataset\")\n end\n\n set_dataset_row_proc(ds.clone(:model=>self))\n end",
"def self_values\n self_values = []\n database_field_names.each do |param| \n self_values << get_value_including_foreign_keys(self.send(param))\n end\n self_values\n end",
"def values\n end",
"def get_values(from_date, to_date)\n self.spc_values.select(\"work_date, seq, val1, val2, val3, val4, val5, x_val, r_val, x_usl, x_lsl, r_usl, r_lsl\").where(\"spc_item_id = ? and work_date between ? and ?\", self.id, from_date, to_date).order(\"work_date asc, seq asc\")\n end",
"def _refresh_set_values(values)\n ret = super\n load_typecast\n ret\n end",
"def value\n @value ||= values_for columns\n end",
"def values\n @values ||= Values.new(schema_result.to_h)\n end",
"def fetch\n if raw\n values = Array.wrap(super)\n (field =~ /_[a-z]$/) ? values : values.first\n else\n super\n end\n end",
"def _normalize( rows )\n rows.map do |row|\n h = row['_id']\n h['value'] = row['value'].to_i\n h\n end\n ensure\n rows.close\n end",
"def createTotalCustomerValueDataSet\n\t\t\n\tend",
"def from_values(values)\n obj = self.new\n set_values_from_values_to_object(values, obj)\n end",
"def dynamic_values\n if @dynamic_values.blank?\n\n if self.column.blank?\n raise \"dynamic_values column not defined for filter_param_id = #{self.id}\"\n end\n\n query_result = self.dynamic_values_query.try(:query_result, nil, nil,\n :sample_option => ::Query::Sample::REAL,\n :sql_string => \"SELECT `#{self.column}` FROM {table_name}\"\n )\n result_set = query_result.try(:result_set)\n if !result_set.nil?\n @dynamic_values = result_set.try(:results_array).collect do |row|\n result_set.value_at(row, self.column)\n end\n end\n end\n\n @dynamic_values ||= []\n end",
"def values(start_time, ndatapoints)\n requires :id, :granularity\n data = service.get_instrumentation_value(self.uris.find {|uri| uri['name'] == 'value_raw'}['uri'], start_time, ndatapoints, self.granularity).body\n data.map do |datum|\n Fog::Joyent::Analytics::Value.new(datum)\n end\n end",
"def add_values_rec(schema, table, t, query)\n if t.parent == nil\n t.data['values'] = exec(query)\n else\n t.parent.data['values'].each_with_index { |v, i|\n where = \"WHERE\"\n unless t.parent.foreign_keys.size == 0\n t.parent.foreign_keys.each { |x|\n if x['foreign_table_name'] == t.table_name\n foreign_col = x['foreign_column_name']\n col = x['column_name']\n parent_val = t.parent.data['values'][i][col]\n where = \"#{where} #{foreign_col} = '#{parent_val}'\"\n end\n }\n end\n query = \"SELECT * FROM #{schema}.#{table} #{where} LIMIT 1\";\n if t.data['values'].nil?\n t.data['values'] = Array.new\n end\n t.data['values'] << exec(query)[0]\n }\n end\n\n t.depends.each { |n|\n add_values_rec(schema, n.table_name, n, query)\n }\nend",
"def value\n self.send :\"#{_mapped_value_column}\"\n end",
"def values\n end",
"def values(coords)\n coords.collect { |coord| self[*coord] }\n end",
"def values\n self[:values]\n end",
"def metric_values(_metric, _from, _to)\n raise \"Not implemented\"\n end",
"def store_values\n self.parent.write_attribute(self.field_name, self.ids)\n end",
"def deep_transform_values!(&block)\n _deep_transform_values_in_object!(self, &block)\n end",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end",
"def inject_value_fields\n @composition.all_composite.each do |composite|\n mapping = composite.mapping\n if mapping.object_type.is_a?(MM::ValueType) and # Composite needs a ValueField\n !mapping.all_member.detect{|m| m.is_a?(MM::ValueField)} # And doesn't already have one\n trace :relational_columns, \"Adding value field for #{mapping.object_type.name}\"\n @constellation.ValueField(\n :new,\n parent: mapping,\n name: mapping.object_type.name+\" Value\",\n object_type: mapping.object_type\n )\n mapping.re_rank\n end\n end\n end",
"def _reduce_275(val, _values, result)\n result = @builder.call_method(nil, nil, val[0])\n \n result\nend",
"def value(entity)\n apply_mutations(_value(entity), entity)\n end",
"def _reduce_281(val, _values, result)\n result = @builder.call_method(nil, nil, val[0])\n\n result\nend",
"def select_values_sql(sql)\n sql << \"VALUES \"\n expression_list_append(sql, opts[:values])\n end",
"def to_data value\n if @to_data.nil?\n raise NoMethodError, \"#to_data not defined\"\n end\n \n @to_data.call value\n end",
"def aggregate\n #response = Result.collection.map_reduce(self.map_fn(), _reduce(), :raw => true, :out => {:inline => true}, :query => {:execution_id => id})\n response = Result.where(execution_id: id).map_reduce(self.map_fn(), self.query.reduce).out(inline: true).raw()\n results = response['results']\n if results\n self.aggregate_result = {}\n results.each do |result|\n result = prettify_generated_result(result) if self.query.generated? && result['value']['rereduced']\n self.aggregate_result[result['_id']] = result['value']\n end\n save!\n end\n end",
"def apply(aggregate)\n raise NotImplementedError\n end",
"def apply_rows\n return to_enum unless block_given?\n DataFrame.new(each.collect do |row| {:value => yield(row)} end, rownames: @rownames, colnames: [:value])\n end",
"def _reduce_281(val, _values, result)\n result = @builder.call_method(nil, nil, val[0])\n \n result\nend",
"def _reduce_281(val, _values, result)\n result = @builder.call_method(nil, nil, val[0])\n \n result\nend",
"def merge!(rel)\n return self if rel.nil?\n\n MULTI_VALUE_METHODS.each do |method|\n (@values[method] ||= []).concat(rel.values[method]).uniq! unless rel.values[method].nil?\n end\n\n MULTI_KEY_METHODS.each do |method|\n (@values[method] ||= {}).merge!(rel.values[method]) unless rel.values[method].nil?\n end\n\n SINGLE_VALUE_METHODS.each do |method|\n @values[method] = rel.values[method] unless rel.values[method].nil?\n end\n\n self\n end",
"def values_at(*) end",
"def transform_values! &block # :yields: value\n return enum_for(:transform_values!) unless block_given?\n each do |k, v|\n store k, (yield v)\n end\n self\n end",
"def _reduce_1(val, _values)\n Cat.new(val.first, val.last)\nend",
"def values\n raise NotImplementedError, \"#{__method__} has not been implemented for this #{name} index\"\n end",
"def flatten!\n self.class.attributes.keys.select { |k| k.end_with?('_data') }.each do |data_attr|\n reference_attr = data_attr[/(.+?)_data$/, 1]\n value = send(data_attr)\n next if value.nil?\n\n send(\"#{data_attr}=\", value)\n send(\"#{reference_attr})\", nil)\n end\n\n self\n end",
"def fill_aggregator\n @aggregator = {}\n Setting.where(often: true).each do |sset|\n @aggregator[sset.ident] = sset.value\n end\n end",
"def all\n\n @all ||= dataset.map { |item| self.new item }\n\n end",
"def measurement_values\n return @measurement_values unless @measurement_values.nil? \n return nil unless self.id\n \n @measurement_values = MetricMeasurementValues.new({:metric_id => self.id})\n return @measurement_values\n end",
"def data(options = {})\n add_required_columns(options[:required_columns])\n @rows ||= aggregate\n end",
"def _reduce_282(val, _values, result)\n result = @builder.call_method(nil, nil, val[0])\n\n result\nend",
"def aggregate\n @aggregate ||= klass.build.tap do |aggregate|\n aggregate.instance_variable_set(:@id, id)\n aggregate.instance_variable_set(:@local_version, version)\n aggregate.instance_variable_set(:@persisted_version, version)\n events.each { |event| EventApplicator.apply_event!(aggregate, event) }\n end\n end",
"def values=(collection)\n @values = collection\n end",
"def wrap_dataset(dataset)\n if relation.is_a?(Relation::Composite)\n relation.new(dataset).to_a\n else\n dataset\n end\n end",
"def _reduce_282(val, _values, result)\n result = @builder.call_method(nil, nil, val[0])\n \n result\nend",
"def _reduce_282(val, _values, result)\n result = @builder.call_method(nil, nil, val[0])\n \n result\nend",
"def _reduce_282(val, _values, result)\n result = @builder.call_method(nil, nil, val[0])\n \n result\nend",
"def values\n @data.values\n end",
"def _reduce_275(val, _values, result)\n result = val[0] << @builder.associate(nil, val[2], nil)\n \n result\nend",
"def _reduce_275(val, _values, result)\n result = val[0] << @builder.associate(nil, val[2], nil)\n \n result\nend",
"def _reduce_545(val, _values, result)\n result = @builder.accessible(val[0])\n\n result\nend",
"def data\n super.dup.merge(date: date)\n end",
"def aggregate\n klass.collection.group(\n :key => field_list,\n :cond => selector,\n :initial => { :count => 0 },\n :reduce => Javascript.aggregate\n )\n end",
"def build_values\n @elements = Core::CommandWatcher.report(@list)\n @elements.each_with_index do |(level, _, age), col|\n build_column_for(col + 1, age, level)\n end\n end",
"def values\n\n self.to_h.values\n end",
"def fill!(value_)\n raise TableLockedError if @parent\n @vals.fill(value_)\n self\n end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values; end",
"def values; end"
] |
[
"0.55833846",
"0.55151165",
"0.5500441",
"0.5476202",
"0.54460794",
"0.5347169",
"0.5315667",
"0.5272025",
"0.5245065",
"0.5163853",
"0.51637846",
"0.51460284",
"0.5136454",
"0.5136451",
"0.51340026",
"0.51255244",
"0.5118315",
"0.5113868",
"0.51118565",
"0.5096498",
"0.50929064",
"0.5090477",
"0.5083467",
"0.506313",
"0.50630605",
"0.50519264",
"0.5042918",
"0.5028553",
"0.5027803",
"0.5013575",
"0.5002425",
"0.5000152",
"0.49954534",
"0.49914992",
"0.49897417",
"0.49393764",
"0.49379575",
"0.4928545",
"0.4894591",
"0.48879683",
"0.48855615",
"0.48552045",
"0.48493618",
"0.48487377",
"0.48419788",
"0.48398525",
"0.4830998",
"0.48295465",
"0.48121142",
"0.47983897",
"0.47826424",
"0.47790697",
"0.4775963",
"0.47754738",
"0.47661725",
"0.47622555",
"0.47619292",
"0.47446665",
"0.47433424",
"0.4741385",
"0.47392723",
"0.47363403",
"0.47258043",
"0.4720546",
"0.47193715",
"0.4717015",
"0.4717015",
"0.47116587",
"0.47086707",
"0.4706893",
"0.4702721",
"0.47008908",
"0.46932846",
"0.46909228",
"0.4689086",
"0.46807456",
"0.46789122",
"0.46751532",
"0.46716547",
"0.4667144",
"0.4667127",
"0.46596268",
"0.46593344",
"0.46593344",
"0.46583655",
"0.46566534",
"0.46566534",
"0.46424142",
"0.46389332",
"0.463852",
"0.46376714",
"0.46353066",
"0.46263295",
"0.46191004",
"0.46191004",
"0.46191004",
"0.46191004",
"0.46191004",
"0.46191004",
"0.46191004"
] |
0.64051324
|
0
|
Allow truncation of multiple source tables.
|
def check_truncation_allowed!
raise(InvalidOperation, "Grouped datasets cannot be truncated") if opts[:group]
raise(InvalidOperation, "Joined datasets cannot be truncated") if opts[:join]
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def setup_target_table\n self.target_table = source_table if target_table.blank?\n end",
"def tables_for_sql\n @tables_for_sql ||= RailsRedshiftReplicator.replicable_target_tables.join(\",\")\n end",
"def _truncate_sql(table)\n to = @opts[:truncate_opts] || OPTS\n \"TRUNCATE TABLE#{' ONLY' if to[:only]} #{table}#{' RESTART IDENTITY' if to[:restart]}#{' CASCADE' if to[:cascade]}\"\n end",
"def truncate_all_tables(options = {})\n options[:verbose] ||= false\n EOL::Db.all_connections.uniq.each do |conn|\n count = 0\n conn.tables.each do |table|\n next if table == 'schema_migrations'\n count += 1\n if conn.respond_to? :with_master\n conn.with_master do\n truncate_table(conn, table)\n end\n else\n truncate_table(conn, table)\n end\n end\n if options[:verbose]\n puts \"-- Truncated #{count} tables in \" +\n conn.instance_eval { @config[:database] } +\n \".\"\n end\n end\n # EOL.forget_everything # expensive, but without it, would risk errors.\n end",
"def update_table_sql(sql)\n sql << ' '\n source_list_append(sql, @opts[:from][0..0])\n end",
"def copy_tables(table_names, from_db, to_db)\n return if table_names.empty?\n \n # For efficiency, turn off time consuming options.\n sql_connection.execute(\"set autocommit = 0;\")\n sql_connection.execute(\"set unique_checks = 0;\")\n sql_connection.execute(\"set foreign_key_checks = 0;\")\n\n from_escaped = sql_connection.identifier(from_db)\n to_escaped = sql_connection.identifier(to_db)\n\n table_names.each { |name| \n print \".\"\n # Think about whether we should drop/create/re-add triggers, or just truncate.\n tbl = sql_connection.identifier(name)\n\n to_create, to_autoincr = show_create_table_without_increment(to_db, name)\n from_create, from_autoincr = show_create_table_without_increment(from_db, name)\n\n if to_create.nil?\n # table doesn't exist, create it.\n op = :create\n sql_connection.execute(\"CREATE TABLE IF NOT EXISTS #{to_escaped}.#{tbl} LIKE #{from_escaped}.#{tbl}\")\n elsif from_create == to_create\n # table is the same, truncate it.\n op = :truncate\n sql_connection.execute(\"TRUNCATE TABLE #{to_escaped}.#{tbl}\")\n else\n # table is different, drop and create.\n op = :drop_and_create\n sql_connection.execute(\"DROP TABLE #{to_escaped}.#{tbl}\")\n sql_connection.execute(\"CREATE TABLE IF NOT EXISTS #{to_escaped}.#{tbl} LIKE #{from_escaped}.#{tbl}\")\n end\n\n if block_given?\n yield name, op\n end\n\n sql_connection.execute(\"INSERT INTO #{to_escaped}.#{tbl} SELECT * FROM #{from_escaped}.#{tbl}\")\n #\n # if from_create == to_create and from_autoincr != to_autoincr\n # puts \"Warning: set auto_increment not implemented yet.\"\n # For many purposes it won't matter because TRUNCATE TABLE\n # will reset auto_increment (see docs for TRUNCATE TABLE).\n # If it does matter then either implement this or\n # provide an option to drop the table.\n # end\n\n }\n\n sql_connection.execute(\"COMMIT;\")\n sql_connection.execute(\"set foreign_key_checks = 1;\")\n sql_connection.execute(\"set unique_checks = 1;\")\n sql_connection.execute(\"set autocommit = 1;\")\n end",
"def mssql_truncate(table_name)\n execute \"TRUNCATE TABLE #{quote_table_name(table_name)}\", 'Truncate Tables'\n rescue => e\n if e.message =~ /Cannot truncate table .* because it is being referenced by a FOREIGN KEY constraint/\n execute \"DELETE FROM #{quote_table_name(table_name)}\", 'Truncate Tables with Delete'\n else\n raise\n end\n end",
"def truncate_preserve(table_name_str)\n puts \"Truncating '#{table_name_str}' table (it may take a while):\"\n table_name = TableName.valueOf(table_name_str)\n\n if enabled?(table_name_str)\n puts 'Disabling table...'\n disable(table_name_str)\n end\n\n puts 'Truncating table...'\n @admin.truncateTable(table_name, true)\n end",
"def setup\n setup_connection\n\n unless tables.count == 0\n puts \"Aborting! Job already has tables attached. Setup requires a new job.\"\n return\n end\n source_tables = source_connection.tables\n source_tables = source_tables.reject { |table| \n exclude_table_names.include?(table) || table.starts_with?('tmp_') \n }\n\n source_tables.each do |table_name|\n copy_table_schema(table_name)\n end\n\n puts \"Done setup!\"\n puts \"Warning! Table settings have been guessed but you can do extra configuration such as setting the insert_only flag on tables to further increase speed of loading.\"\n end",
"def exec_query_truncate_table\n execute_remote(query_truncate_table)\n end",
"def copy_new_rows\n tables.each(&:copy_new_rows)\n end",
"def truncate(model_klass, table_name)\n model_klass.connection.truncate(model_klass.table_name)\n end",
"def truncate_table\n\t\t\t transaction { connection.execute(\"TRUNCATE TABLE #{quoted_table_name};\") }\n\t\t end",
"def modify_datasource(datasources)\n end",
"def copy_embedded_tables\n self.embed_tables.each do |t|\n sql_connection.select_rows(t.sql_name) do |rows, page, total_pages|\n Mongify::Status.publish('copy_embedded', :size => rows.count, :name => \"Embedding #{t.name} (#{page}/#{total_pages})\", :action => 'add')\n rows.each do |row|\n target_row = no_sql_connection.find_one(t.embed_in, {:pre_mongified_id => get_type_casted_value(t, t.embed_on, row)})\n next unless target_row.present?\n row, parent_row, unset_keys = t.translate(row, target_row)\n parent_row ||= {}\n parent_row.delete(\"_id\")\n parent_row.delete(t.name.to_s)\n unset_keys ||= {}\n row.delete(t.embed_on)\n row.merge!(fetch_reference_ids(t, row))\n row.delete('pre_mongified_id')\n save_function_call = t.embedded_as_object? ? '$set' : '$addToSet'\n no_sql_connection.update(t.embed_in, target_row['_id'], append_parent_object({save_function_call => {t.name => row}}, parent_row, unset_keys))\n Mongify::Status.publish('copy_embedded')\n end\n Mongify::Status.publish('copy_embedded', :action => 'finish')\n end\n end\n end",
"def remove_tables_from_publication(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} DROP TABLE #{safe_list(tables)}\")\n end",
"def resolve_ids_cleanup_sql(source, temptable)\n\t\tsource.connection.execute \"\n\t\tDELETE FROM unresolved_ids \n\t\t WHERE EXISTS (SELECT 1 FROM #{temptable} \n\t\t WHERE unresolved_id = unresolved_ids.id)\n\t\t AND source_id = #{source.id};\"\n\n\t\tsource.connection.execute \"DROP TABLE #{temptable}\"\n\tend",
"def clean_annuaire()\n puts \"TRUNCATE ALL USER RELATED TABLES\"\n [\n :last_uid, :telephone, :email, :relation_eleve, :ressource,\n :enseigne_dans_regroupement, :role_user, :profil_user, :user, :regroupement, :eleve_dans_regroupement\n ].each do |table|\n if table == :ressource\n truncate_ressource()\n else \n DB[table].truncate()\n end\n end\n\n create_super_admin_and_ressource_laclasse()\nend",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def set_publication_tables(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} SET TABLE #{safe_list(tables)}\")\n end",
"def truncate\n connection.truncate(table_name)\n end",
"def copy_polymorphic_tables\n self.polymorphic_tables.each do |t|\n polymorphic_id_col, polymorphic_type_col = \"#{t.polymorphic_as}_id\", \"#{t.polymorphic_as}_type\"\n sql_connection.select_rows(t.sql_name) do |rows, page, total_pages|\n Mongify::Status.publish('copy_polymorphic', :size => rows.count, :name => \"Polymorphicizing #{t.name}\", :action => 'add')\n rows.each do |row|\n\n #If no data is in the column, skip importing\n if (row[polymorphic_type_col])\n table_name = row[polymorphic_type_col].tableize\n new_id = no_sql_connection.get_id_using_pre_mongified_id(table_name, get_type_casted_value(t, polymorphic_id_col, row))\n end\n\n row = t.translate(row)\n row[polymorphic_id_col] = new_id if new_id\n row.merge!(fetch_reference_ids(t, row))\n row.delete('pre_mongified_id')\n\n if t.embedded? && table_name\n row.delete(polymorphic_id_col)\n row.delete(polymorphic_type_col)\n save_function_call = t.embedded_as_object? ? '$set' : '$addToSet'\n no_sql_connection.update(table_name, new_id, {save_function_call => {t.name => row}})\n else\n no_sql_connection.insert_into(t.name, row)\n end\n\n Mongify::Status.publish('copy_polymorphic')\n end\n Mongify::Status.publish('copy_polymorphic', :action => 'finish')\n end\n end\n end",
"def rebuild(table); end",
"def splice_table(*tables)\n table = tables.shift\n tables.each do |new_table|\n table.each_with_index do |row, i|\n table[i] = row + new_table[i]\n end\n end\n table\nend",
"def prepare_hgt_com_trsf_prkgrs_old()\n \n @conn.execute \\\n \"truncate table hgt_com_trsf_prkgrs\"\n \n puts \"hgt_com_trsf_prkgrs table truncated...\"\n \n #\n sql = \"select id,\n gene_id,\n TXSRC_ID,\n TXDST_ID,\n WEIGHT_TR_TX\n from HGT_COM_TRSF_TAXONS\"\n \n #puts \"sql: #{sql}\"\n \n \n tr_taxons = HgtComTrsfTaxon.find_by_sql(sql)\n \n tr_taxons.each {|tr|\n \n\n #debugging\n #next unless tr.gene_id == 111 and tr.txsrc_id == 768679 and tr.txdst_id == 374847\n \n #puts \"tr: #{tr.inspect}\"\n #puts \"tr.id: #{tr.id}, #{tr.gene_id}\"\n \n #for each chiteria\n (0..1).each {|crit|\n \n #for each criteria and\n #for each source and destination prok groups\n sql = \"select tg.PROK_GROUP_ID,\n tg.WEIGHT_PG\n from TAXON_GROUPS tg \n join PROK_GROUPS pg on pg.id = tg.PROK_GROUP_ID\n where tg.TAXON_ID = #{tr.txsrc_id} and\n pg.GROUP_CRITER_ID = #{crit}\"\n #puts \"sql: \\n #{sql}\"\n \n pg_src = TaxonGroup.find_by_sql(sql)\n \n \n sql = \"select tg.PROK_GROUP_ID,\n tg.WEIGHT_PG\n from TAXON_GROUPS tg \n join PROK_GROUPS pg on pg.id = tg.PROK_GROUP_ID\n where tg.TAXON_ID = #{tr.txdst_id} and\n pg.GROUP_CRITER_ID = #{crit}\"\n #puts \"sql: \\n #{sql}\"\n \n pg_dst = TaxonGroup.find_by_sql(sql)\n \n pg_src.each {|src|\n pg_dst.each {|dst|\n \n #puts \"src: #{src.inspect}\"\n #puts \"dst: #{dst.inspect}\"\n \n #insert alternative\n prkg = HgtComTrsfPrkgr.new \n prkg.gene_id = tr.gene_id\n prkg.hgt_com_trsf_taxon_id = tr.id\n prkg.pgsrc_id = src.prok_group_id\n prkg.pgdst_id = dst.prok_group_id\n prkg.weight_tr_pg = tr.weight_tr_tx * src.weight_pg * dst.weight_pg\n prkg.save\n \n #prkg.gene_id = tr.gene_id \n #prkg.save\n \n \n }\n }\n \n \n \n \n \n } \n }\n \n \n end",
"def TableTruncate\n begin\n unless getDBConn\n raise _(\"Please open a database before trying to truncate it.\")\n end\n\n tables = treeview_getSelection(@tv_tables)\n raise _(\"You have to select a table to truncate.\") if count(tables) <= 0\n\n # Confirm and truncate.\n tables.each do |table|\n table_ob = @dbconn.getTable(table[0])\n\n if msgbox(_(\"Question\"), sprintf(_(\"Do you want to truncate the table: %s?\"), table[0]), \"yesno\") == \"yes\"\n table_ob.truncate\n end\n end\n rescue => e\n knj_msgbox.error_exc(e)\n end\n\n @dbpage.TablesUpdate()\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def copy_to(db, args = {})\r\n data[\"tables\"].each do |table|\r\n table_args = nil\r\n table_args = args[\"tables\"][table[\"name\"].to_s] if args and args[\"tables\"] and args[\"tables\"][table[\"name\"].to_s]\r\n next if table_args and table_args[\"skip\"]\r\n table.delete(\"indexes\") if table.key?(\"indexes\") and args[\"skip_indexes\"]\r\n db.tables.create(table[\"name\"], table)\r\n \r\n limit_from = 0\r\n limit_incr = 1000\r\n \r\n loop do\r\n ins_arr = []\r\n q_rows = self.select(table[\"name\"], {}, {\"limit_from\" => limit_from, \"limit_to\" => limit_incr})\r\n while d_rows = q_rows.fetch\r\n col_args = nil\r\n \r\n if table_args and table_args[\"columns\"]\r\n d_rows.each do |col_name, col_data|\r\n col_args = table_args[\"columns\"][col_name.to_s] if table_args and table_args[\"columns\"]\r\n d_rows[col_name] = \"\" if col_args and col_args[\"empty\"]\r\n end\r\n end\r\n \r\n ins_arr << d_rows\r\n end\r\n \r\n break if ins_arr.empty?\r\n \r\n db.insert_multi(table[\"name\"], ins_arr)\r\n limit_from += limit_incr\r\n end\r\n end\r\n end",
"def stamp_new_rows\n tables.each(&:stamp_new_rows)\n end",
"def truncate(table_name_str)\n puts \"Truncating '#{table_name_str}' table (it may take a while):\"\n table_name = TableName.valueOf(table_name_str)\n\n if enabled?(table_name_str)\n puts 'Disabling table...'\n disable(table_name_str)\n end\n\n puts 'Truncating table...'\n @admin.truncateTable(table_name, false)\n end",
"def add_tables_to_publication(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} ADD TABLE #{safe_list(tables)}\")\n end",
"def additional_support_tables\n additional_tables.map { |scale| OVERLAY_TABLENAME % [scale, table_name] }\n end",
"def additional_support_tables\n additional_tables.map { |scale| OVERLAY_TABLENAME % [scale, table_name] }\n end",
"def create_side_table\n RailsRedshiftReplicator.connection.exec \"CREATE TABLE #{temporary_table_name} (LIKE #{replication.target_table})\"\n end",
"def clean\n tables.clean if tables\n end",
"def supports_combining_alter_table_ops?\n false\n end",
"def migrate_from(src_rs, condition = nil, opts = {})\r\n Rigrate.logger.info(\"start migration : source rs [#{src_rs.size}] ->target rs [#{rows.size}]\")\r\n mode = opts[:mode]\r\n condition = eval \"{#{condition.to_s.upcase}}\" unless condition.nil?\r\n\r\n if condition\r\n src_cols_idx = src_rs.column_idx(*condition.keys)\r\n tg_cols_idx = column_idx(*condition.values)\r\n else\r\n # delete line -> src_cols_idx = src_rs.column_idx(*src_rs.default_migration_condition)\r\n # suppose all primary key of target resultset can be found in src result, and in same column idx\r\n tg_cols_idx = column_idx(*default_migration_condition)\r\n src_cols_idx = tg_cols_idx\r\n end\r\n @rows = handle_rows(src_rs.rows, mode, src_cols_idx, tg_cols_idx)\r\n save!(condition)\r\n end",
"def supports_combining_alter_table_ops?\n true\n end",
"def copy_rows( field, \n table_struct, \n src_table_name = TABLE_NAME, \n dest_table_name = NEW_TABLE_NAME, \n num_rows = ROWS_PER_TRANSACTION)\n rows = grab_rows(field, src_table_name, num_rows)\n keys_for_delete = insert_rows(rows, field, table_struct, dest_table_name)\n keys_for_delete\nend",
"def alter_table_sql_list(table, operations, options=nil)\n\t return super(table, operations) unless Hash===options\n\t \n\t prologue = \"ALTER TABLE #{quote_schema_table(table)} \"\n\t sql = operations.map do |op|\n\t frag = alter_table_sql table, op\n\t raise ArgumentError unless frag.slice![0,prologue.length] == prologue\n\t frag\n\t end\n\t sql.push(table_options_sql(options)).join ' '\n end",
"def clear_sequence_setup(rep_prefix, table_name)\n sequence_table_name = \"#{rep_prefix}_sequences\"\n if tables.include?(sequence_table_name)\n trigger_name = \"#{rep_prefix}_#{table_name}_sequence\"\n trigger_row = select_one(<<-end_sql)\n select * from information_schema.triggers\n where trigger_schema = database()\n and trigger_name = '#{trigger_name}'\n end_sql\n if trigger_row\n execute \"DROP TRIGGER `#{trigger_name}`\"\n execute \"delete from #{sequence_table_name} where name = '#{table_name}'\"\n unless select_one(\"select * from #{sequence_table_name}\")\n # no more sequences left --> delete sequence table\n drop_table sequence_table_name.to_sym\n end\n end\n end\n end",
"def truncate!\n return if truncated?\n update_column :metadata, nil\n end",
"def table_name\n respond_to?(:first_source_alias) ? first_source_alias : super\n end",
"def specified_tables(opts)\n Hbacker.log.debug \"Import#specified_tables\"\n Hbacker.log.debug \"#{opts.inspect}\"\n begin\n opts = Hbacker.transform_keys_to_symbols(opts)\n\n @import_db.start_info(opts[:session_name], opts[:source_root], opts[:start_time], opts[:end_time], Time.now.utc)\n\n exported_table_names = @export_db.table_names(opts[:session_name], opts[:source_root])\n Hbacker.log.debug \"import.rb/specified_tables/exported_table_names: #{exported_table_names.inspect}\"\n if opts[:tables]\n # Only import the tables specified in opts[:tables]\n exported_table_names = exported_table_names & opts[:tables]\n if exported_table_names.length < opts[:tables].length\n Hbacker.log.debug \"opts[:tables]: #{opts[:tables].inspect} exported_table_names: #{exported_table_names}\"\n raise Thor::InvocationError, \"One or more of the tables requested does not exist in this backup\"\n end\n end\n exported_table_names.each do |table|\n source = \"#{opts[:source_root]}#{opts[:session_name]}/#{table}/\"\n \n wait_results = Hbacker.wait_for_hbacker_queue('queue_table_import', opts[:workers_watermark], opts[:workers_timeout])\n unless wait_results[:ok]\n msg = \"Hbacker::Import#specified_tables: Timeout (#{opts[:workers_timeout]}) \" +\n \" waiting for workers in queue < opts[:workers_timeout]\"\n Hbacker.log.error msg\n next\n end\n \n Hbacker.log.debug \"Calling queue_table_import_job(#{table}, #{source}, \" + \n \"#{opts[:session_name]}, #{opts[:import_session_name]}, #{opts[:timeout]}, \" +\n \"#{opts[:reiteration_time]}, #{opts[:mapred_max_jobs]}, #{opts[:restore_empty_tables]})\"\n \n self.queue_table_import_job(table, source, opts[:session_name], \n opts[:import_session_name], opts[:timeout], opts[:reiteration_time], \n opts[:mapred_max_jobs], opts[:restore_empty_tables])\n end\n rescue Exception => e\n Hbacker.log.error \"Hbacker::Import#specified_tables: EXCEPTION: #{e.inspect}\"\n Hbacker.log.error caller.join(\"\\n\")\n @import_db.end_info(opts[:session_name], opts[:source_root], Time.now.utc, {:info => \"#{e.class}: #{e.message}\"})\n raise ImportError, \"#{e.class}: #{e.message} #{e.backtrace}\"\n end\n @import_db.end_info(opts[:session_name], opts[:source_root], Time.now.utc)\n end",
"def union(target, opts = {})\r\n src_col_size = column_info.size\r\n target_col_size = target.column_info.size\r\n\r\n # TODO need type checking?\r\n\r\n if src_col_size > target_col_size\r\n target.rows = target.rows.map do |row|\r\n row.fill_with_nil(src_col_size - target_col_size)\r\n end\r\n elsif src_col_size < target_col_size\r\n target.rows = target.rows.map { |row| row[0...src_col_size] }\r\n end\r\n\r\n @rows += target.rows\r\n\r\n self\r\n end",
"def truncate() end",
"def source_list(source)\n if source.nil? || source.empty?\n raise Error, 'No source specified for query'\n end\n auto_alias_count = @opts[:num_dataset_sources] || 0\n m = source.map do |s|\n case s\n when Dataset\n auto_alias_count += 1\n s.to_table_reference(\"t#{auto_alias_count}\")\n else\n table_ref(s)\n end\n end\n m.join(COMMA_SEPARATOR)\n end",
"def extend_sql_avoiding_table_naming_clashes!(sql, addition)\r\n used_table_aliases = table_aliases_from_join_fragment(addition)\r\n old_table_aliases = table_aliases_from_join_fragment(sql)\r\n (used_table_aliases & old_table_aliases).each do |join_table_alias|\r\n i = 0\r\n begin\r\n i += 1\r\n new_alias = \"renamed_join_table_#{i}\"\r\n end until !used_table_aliases.include?(new_alias)\r\n convert_table_name_to_new_alias!(sql, join_table_alias, new_alias)\r\n end\r\n sql << \" #{addition} \"\r\n end",
"def prefix_table\n update unless @prefix_table\n @prefix_table\n end",
"def truncate_table(table = quoted_table_name)\n connection.execute(\"TRUNCATE TABLE #{table}\")\n end",
"def TruncateAllClicked\n unless getDBConn.conn\n msgbox(_(\"Warning\"), _(\"You need to open a database, before you can truncate its databases\"), \"warning\")\n return null\n end\n\n if msgbox(_(\"Question\"), _(\"Do you really want to truncate all databases on the current connection?\"), \"yesno\") != \"yes\"\n return null\n end\n\n begin\n dbs = getDBConn.GetDBs()\n\n dbs.each do |value|\n getDBConn.ChooseDB(value)\n tables = getDBConn.GetTables(value)\n\n tables.each do |table|\n getDBConn.TruncateTable(table[\"name\"])\n end\n end\n rescue => e\n msgbox(_(\"Warning\"), sprintf(_(\"An error occurred:\\n\\n%s\"), e.getMessage), \"warning\")\n end\n\n @dbpage.TablesUpdate()\n end",
"def delete_queries\n [\n \"DROP TRIGGER tr_#{suffix} ON #{src_table};\",\n \"DROP FUNCTION fn_#{suffix}();\",\n \"DROP TRIGGER tr_#{suffix}_cleaner ON #{surveys_table};\",\n \"DROP FUNCTION fn_#{suffix}_cleaner();\"\n ]\n end",
"def reset_tables(*args)\n return unless reset_tables?\n table_names = select_reset_tables(args)\n message \"==Resetting #{table_names.length} tables.\", :warn\n [table_names].flatten.compact.sort.each do |table_name|\n delete_all_table_records(table_name)\n end\n reset_common_tables\n run_domain_loader\n end",
"def merge_into(target)\n target_id = target.id\n # Find all the Entries attached to this name, that will need to be\n # reindexed after the merge\n entry_ids = entry_ids_to_index_on_update\n\n ids = EntryArtist.where(artist_id: self.id).pluck(:id)\n EntryArtist.where(artist_id: self.id).update_all({ artist_id: target_id })\n EntryArtist.where( id: ids ).each(&:update_bunny)\n\n ids = EntryAuthor.where(author_id: self.id).pluck(:id)\n EntryAuthor.where(author_id: self.id).update_all({ author_id: target_id })\n EntryAuthor.where( id: ids ).each(&:update_bunny)\n\n ids = EntryScribe.where(scribe_id: self.id).pluck(:id)\n EntryScribe.where(scribe_id: self.id).update_all({ scribe_id: target_id })\n EntryScribe.where( id: ids ).each(&:update_bunny)\n\n ids = SaleAgent.where(agent_id: self.id).pluck(:id)\n SaleAgent.where(agent_id: self.id).update_all({ agent_id: target_id })\n SaleAgent.where( id: ids ).each(&:update_bunny)\n\n ids = SourceAgent.where(agent_id: self.id).pluck(:id)\n SourceAgent.where(agent_id: self.id).update_all({ agent_id: target_id })\n SourceAgent.where( id: ids ).each(&:update_bunny)\n\n ids = Provenance.where(provenance_agent_id: self.id).pluck(:id)\n Provenance.where(provenance_agent_id: self.id).update_all({ provenance_agent_id: target_id })\n Provenance.where( id: ids ).each(&:update_bunny)\n\n ids = DericciLink.where(name_id: self.id).pluck(:id)\n DericciLink.where(name_id: self.id).update_all({ name_id: target_id })\n DericciLink.where( id: ids ).each(&:update_bunny)\n\n ids = DericciRecord.where(verified_id: self.id).pluck(:id)\n DericciRecord.where(verified_id: self.id).update_all({verified_id: target_id})\n DericciRecord.where( id: ids ).each(&:update_bunny)\n\n # update flags on the target\n target.is_artist ||= self.is_artist\n target.is_author ||= self.is_author\n target.is_scribe ||= self.is_scribe\n target.is_provenance_agent ||= self.is_provenance_agent\n\n target.save\n\n # but ... CAN't SAVE when name is BLANK (nil)\n # self.name = nil\n self.viaf_id = nil\n self.deleted = true\n self.save!\n\n # slice into managable chunks to avoid running out of space in mysql\n entry_ids.each_slice(200) do |slice|\n SDBMSS::IndexJob.perform_later(Entry.to_s, slice)\n end\n\n Name.update_counters(target.id,\n :authors_count => target.author_entries.where(deprecated: false, draft: false).count - target.authors_count,\n :artists_count => target.artist_entries.where(deprecated: false, draft: false).count - target.artists_count,\n :scribes_count => target.scribe_entries.where(deprecated: false, draft: false).count - target.scribes_count,\n :sale_agents_count => target.sale_entries.where(deprecated: false, draft: false).count - target.sale_agents_count,\n :provenance_count => target.provenance_entries.where(deprecated: false, draft: false).count - target.provenance_count,\n :source_agents_count => target.agent_sources.count - target.source_agents_count\n )\n end",
"def join_writer\n \"@join_table.create(@source.id,@destination.id)\"\n end",
"def supports_external_drop_constraints?() true; end",
"def update_audit_tables\n tables.each(&:update_audit_table)\n end",
"def test_insert_update_database_destination\n row = ETL::Row[:id => 1, :first_name => 'Bob', :last_name => 'Smith', :ssn => '111234444']\n row_needs_escape = ETL::Row[:id => 2, :first_name => \"Foo's\", :last_name => \"Bar\", :ssn => '000000000' ]\n row_needs_update = ETL::Row[:id => 1, :first_name => \"Sean\", :last_name => \"Toon\", :ssn => '000000000' ]\n control = ETL::Control.parse(File.dirname(__FILE__) +\n '/delimited_insert_update.ctl')\n\n Person.delete_all\n assert_equal 0, Person.count\n\n # First define a basic configuration to check defaults\n configuration = {\n :type => :insert_update_database,\n :target => :data_warehouse,\n :database => 'etl_unittest',\n :table => 'people',\n :buffer_size => 0\n }\n mapping = {\n :primarykey => [:id],\n :order => [:id, :first_name, :last_name, :ssn]\n }\n dest = ETL::Control::InsertUpdateDatabaseDestination.new(control, configuration, mapping)\n dest.write(row)\n dest.write(row_needs_escape)\n dest.write(row_needs_update)\n dest.close\n\n assert_equal 2, Person.find(:all).length\n end",
"def reset_db_peak_sequence\n ActiveRecord::Base.connection.tables.each do |t|\n ActiveRecord::Base.connection.reset_pk_sequence!(t)\n end\nend",
"def reset_db_peak_sequence\n ActiveRecord::Base.connection.tables.each do |t|\n ActiveRecord::Base.connection.reset_pk_sequence!(t)\n end\nend",
"def merge_or_replace(mode:)\n target = replication.target_table\n stage = temporary_table_name\n sql = send(\"#{mode}_statement\", target, stage)\n ::RailsRedshiftReplicator.connection.exec sql\n end",
"def create_target_table(context, &block)\n with_logging(:debug, \"Creating target table #{target} (from #{source})\") do\n context.execute \"CREATE TABLE #{target} LIKE #{source}\"\n yield if block_given?\n end\n end",
"def remove_max_length(table, *arr_cn_len)\n execute sql_remove_max_length(table, *arr_cn_len)\n end",
"def truncate_db\n drop_table\n create_table\n end",
"def destroy_all\n objs = target\n source.update_attribute(source_attribute, nil)\n objs.each(&:destroy)\n end",
"def copy_migrations\n [\n 'change_audit_log_pid_to_generic_file_id.rb',\n 'change_proxy_deposit_request_pid_to_generic_file_id.rb'\n ].each do |file|\n better_migration_template file\n end\n end",
"def check_tables_left\n if self.tables_left <= 6\n self.tables_left = (20..30).to_a.sample\n self.save\n end\n end",
"def supports_external_drop_constraints?() false; end",
"def clean_join_table\n # these 2 lines run pretty much the same sql, self.parses adds a where in clause\n self.parsers = []\n # OR\n # ActiveRecord::Base.connection.execute(\"DELETE FROM parsers_transformations WHERE transformation_id = #{id}\")\n end",
"def add_rows(reader, table_schemas_lookup,\n row_transformer, validator = nil, copy_options, add_new_data)\n copy_options = [] if copy_options.nil?\n # Remove new lines ensures that all row values have newlines removed.\n remove_new_lines = ::ETL::Transform::RemoveNewlines.new\n row_transformers = [remove_new_lines]\n\n row_transformers << row_transformer unless row_transformer.nil?\n\n # adding this at the end of the line to do the last transformation\n row_transformers << ::ETL::Redshift::NilStringRowTransformer.new(table_schemas_lookup, '*null_string*')\n copy_options << \"NULL AS '*null_string*'\"\n\n opts = { delimiter: @delimiter, tmp_dir: @tmp_dir }\n streamer = ::ETL::S3::CSVFilesUploadingStreamer.new(\n @bucket_manager, table_schemas_lookup.keys, @slices_s3_files, opts)\n\n rows_processed_map = {}\n table_schemas_lookup.keys.each do |t|\n rows_processed_map[t] = 0\n end\n has_rows = false\n\n begin\n reader.each_row do |row|\n values_lookup = transform_row(table_schemas_lookup, row_transformers, row)\n next if values_lookup.is_a? SkipRow\n\n values_lookup.each_pair do |table_name, row_arrays|\n tschema = table_schemas_lookup[table_name]\n row_arrays.each do |values_arr|\n csv_row = CSV::Row.new(tschema.columns.keys, values_arr)\n streamer.add_row(table_name, csv_row)\n rows_processed_map[table_name] += 1\n has_rows = true\n end\n end\n end\n\n if has_rows\n streamer.push_last\n table_schemas_lookup.each_pair do |t, tschema|\n rows_processed = rows_processed_map[t]\n next if rows_processed.zero?\n tmp_table = create_staging_table(tschema.schema, t)\n s3_prefix_path = streamer.parts_s3_folders[t]\n s3_path = \"#{@bucket}/#{s3_prefix_path}\"\n copy_from_s3(tmp_table, s3_path, copy_options)\n execute_staging_table_validation(validator, add_new_data, tmp_table, tschema, t)\n end\n end\n ensure\n # if we hit an exception while processing the inputs, we may still have open file handles\n # so go ahead and close them, then delete the files\n streamer.delete_files if streamer.csv_file_paths.count > 0\n if streamer.data_pushed && @skip_folder_delete.nil?\n @bucket_manager.delete_objects_with_prefix(streamer.s3_folder)\n end\n end\n highest_num_rows_processed = 0\n\n # might need to do something different but doing this for now.\n rows_processed_map.each_pair do |_key, value|\n highest_num_rows_processed = value if highest_num_rows_processed < value\n end\n highest_num_rows_processed\n end",
"def drop_table(klass)\n # Remove leftover data from some join tabkes.\n klass.relations.each do |rel|\n if rel.class.to_s == \"Og::JoinsMany\" and rel.join_table\n target_class = rel.target_class\n exec \"DELETE FROM #{rel.join_table}\"\n end\n end\n exec \"DROP TABLE #{klass.table}\"\n end",
"def post_sql_statements( table_name, options ) # :nodoc:\n post_sql_statements = []\n if options[:on_duplicate_key_update]\n post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update] )\n end\n\n #custom user post_sql\n post_sql_statements << options[:post_sql] if options[:post_sql]\n\n #with rollup\n post_sql_statements << rollup_sql if options[:rollup]\n\n post_sql_statements\n end",
"def merge_table_files\n merged_table_file = intermediate(\"Workbook tables\")\n worksheets do |name,xml_filename|\n log.info \"Merging table files for #{name}\"\n worksheet_table_file = input([name, \"Worksheet tables\"])\n worksheet_table_file.each_line do |line|\n merged_table_file.puts line\n end\n close worksheet_table_file\n end\n close merged_table_file\n end",
"def truncate(table_name, name = nil)\n execute \"TRUNCATE TABLE #{quote_table_name(table_name)}\", name\n end",
"def clear_sequence_setup(database, table)\n table_options = options(table)\n if table_options[:adjust_sequences]\n session.send(database).clear_sequence_setup(\n table_options[:rep_prefix], table\n )\n end\n end",
"def prepare_tables\n table_id = 0\n seen = {}\n\n sheets.each do |sheet|\n table_id += sheet.prepare_tables(table_id + 1, seen)\n end\n end",
"def prune_data_to_range(tables, keep_min_id, keep_max_id)\n reconnect(user: app_credentials[:user])\n tables.each do |t|\n output \"Cleaning up data, pruning to only keep range #{keep_min_id}-#{keep_max_id}\", t\n rows_deleted = 0\n [:asc, :desc].each {|direction| rows_deleted += delete_table_data_outside_range(t, keep_min_id, keep_max_id, direction)}\n output \"Done cleanup; #{rows_deleted} rows deleted\", t\n end\n end",
"def prune_data_to_range(tables, keep_min_id, keep_max_id)\n reconnect(user: app_credentials[:user])\n tables.each do |t|\n output \"Cleaning up data, pruning to only keep range #{keep_min_id}-#{keep_max_id}\", t\n rows_deleted = 0\n [:asc, :desc].each {|direction| rows_deleted += delete_table_data_outside_range(t, keep_min_id, keep_max_id, direction)}\n output \"Done cleanup; #{rows_deleted} rows deleted\", t\n end\n end",
"def truncate\r\n truncate_records\r\n redirect_to action: 'index'\r\n end",
"def tables\n [\n ]\n end",
"def cleanup!\n check_if_finished\n raise \"#{self.class}: instance not prepared before doing any move\" unless @prepared\n raise \"#{self.class}: not all data was iterated over\" unless @finished\n\n case @strategy\n when 'incremental'\n # notinh we need to do\n\n when 'rotate'\n postgres.execute(\"DROP TABLE #{@tmp_table_name}\")\n\n end\n end",
"def add_rows(reader, table_schemas_lookup, row_transformer, validator = nil, copy_options, add_new_data)\n # Remove new lines ensures that all row values have newlines removed.\n remove_new_lines = ::ETL::Transform::RemoveNewlines.new\n row_transformers = [remove_new_lines]\n row_transformers << row_transformer unless row_transformer.nil?\n\n csv_files = {}\n csv_file_paths = {}\n rows_processed_map = {}\n table_schemas_lookup.keys.each do |t|\n csv_file_paths[t] = temp_file(t)\n csv_files[t] = ::CSV.open(csv_file_paths[t], 'w', col_sep: @delimiter)\n rows_processed_map[t] = 0\n end\n\n begin\n reader.each_row do |row|\n values_lookup = transform_row(table_schemas_lookup, row_transformers, row)\n next if values_lookup.is_a? SkipRow\n\n values_lookup.each_pair do |table_name, row_arrays|\n tschema = table_schemas_lookup[table_name]\n row_arrays.each do |values_arr|\n csv_row = CSV::Row.new(tschema.columns.keys, values_arr)\n csv_files[table_name].add_row(csv_row)\n rows_processed_map[table_name] += 1\n end\n end\n end\n table_schemas_lookup.each_pair do |t, tschema|\n if rows_processed_map[t] == 0\n log.debug(\"table #{t} has zero rows no upload required\")\n next\n end\n\n csv_files[t].close\n local_file_path = csv_file_paths[t]\n tmp_table = create_staging_table(tschema.schema, t)\n copy_multiple_files_from_s3(tmp_table, local_file_path, copy_options)\n\n full_table = \"#{tschema.schema}.#{t}\"\n where_id_join = ''\n tschema.primary_key.each do |pk|\n where_id_join = if where_id_join == ''\n \"where #{full_table}.#{pk} = #{tmp_table}.#{pk}\"\n else\n \"#{where_id_join} and #{full_table}.#{pk} = #{tmp_table}.#{pk}\"\n end\n end\n\n validator.validate(t, tmp_table, tschema) if validator\n add_sql = add_new_data.build_sql(tmp_table, full_table, where_id_join: where_id_join)\n execute(add_sql)\n end\n end\n highest_num_rows_processed = 0\n\n # might need to do something different but doing this for now.\n rows_processed_map.each_pair do |_key, value|\n highest_num_rows_processed = value if highest_num_rows_processed < value\n end\n highest_num_rows_processed\n end",
"def undo_cleanup_concurrent_column_rename(table, old_column, new_column, type: nil, batch_column_name: :id)\n Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode!\n\n setup_renamed_column(__callee__, table, new_column, old_column, type, batch_column_name)\n\n with_lock_retries do\n install_bidirectional_triggers(table, old_column, new_column)\n end\n end",
"def delete_all\n objs = target\n source.update_attribute(source_attribute, nil)\n objs.each(&:delete)\n end",
"def adapt_source(source); end",
"def merge(source); end",
"def target_sql_mode; end",
"def flag_row_deltas\n tables.each(&:flag_row_deltas)\n end",
"def merge(*tables)\n from(default_table).merge(*tables)\n end",
"def move_rows( field, \n table_struct, \n src_table_name = TABLE_NAME, \n dest_table_name = NEW_TABLE_NAME, \n num_rows = ROWS_PER_TRANSACTION, \n max_rows = MAX_RECORDS, \n sleepy_time = SLEEP_TIME)\n iteration = 1\n count = 0\n\n # prime the pump vars for loop\n keys_for_delete = []\n\n if max_rows != 0 && max_rows < num_rows\n LOGGER.info \"Adjusting per row transaction due to maximum row limit. This move will only require one transaction.\"\n upper_bound = max_rows\n num_rows = max_rows \n else\n upper_bound = num_rows\n end\n\n lower_bound = 1 \n max_iterations = max_rows == 0 ? 0 : (max_rows.to_f/num_rows.to_f).ceil\n remaining_rows = max_rows % num_rows\n \n while ((iteration == 1 || !keys_for_delete.empty?) && (max_iterations == 0 || iteration <= max_iterations)) do\n # sleep if we need another iteration\n if iteration > 1\n LOGGER.info \"Sleeping for #{sleepy_time}\"\n sleep sleepy_time\n end\n\n LOGGER.debug \"upper_bound: #{upper_bound}\\nlower_bound: #{lower_bound}\\nnum_rows: #{num_rows}\\niteration: #{iteration}\\nmax_rows: #{max_rows}\\nmax_rows: #{max_iterations}\" \n \n LOGGER.info \"Starting move transactions iteration #{iteration} (records #{lower_bound} to #{upper_bound})\"\n\n LOGGER.info \"Copying up to #{num_rows} rows...\"\n keys_for_delete = copy_rows(field, table_struct, src_table_name, dest_table_name, num_rows)\n LOGGER.info \"...done\"\n\n if keys_for_delete.size > 0\n count += keys_for_delete.size\n LOGGER.info \"Deleting #{keys_for_delete.size} rows...\"\n delete_rows(keys_for_delete, src_table_name)\n LOGGER.info \"...done\\n\"\n else\n LOGGER.info \"No rows to delete. This could be a problem, but should just mean that it's last iteration.\"\n end\n\n # do calculations for next iterations\n iteration += 1\n\n upper_bound = (iteration * num_rows)\n lower_bound = upper_bound - num_rows + 1\n\n # this is the last iteration\n if remaining_rows != 0 && iteration == max_iterations\n LOGGER.info \"Last iteration, only a partial per transaction is needed.\"\n num_rows = remaining_rows\n upper_bound = max_rows\n end\n\n end\n \n count\nend",
"def initialize(table:, source_connection:)\n @table = table\n @table_name = table.target_table_name.downcase\n @optimised_for_table_name = ENV['POSTGRES_TO_REDSHIFT_OPTIMISED_FOR_TABLE']&.downcase&.strip || table_name\n @keys = Keys.new(source_connection: source_connection, tables: [table_name, optimised_for_table_name].uniq).all\n end",
"def truncate_all\n Content::Version.all.map(&:destroy)\n ContentKey::Version.all.map(&:destroy)\n Content.all.map(&:destroy)\n ContentKey.all.map(&:destroy)\n end",
"def merge(*sources); end",
"def truncate\n end",
"def post_sql_statements( table_name, options ) # :nodoc:\n post_sql_statements = []\n\n if supports_on_duplicate_key_update? && options[:on_duplicate_key_update]\n post_sql_statements << sql_for_on_duplicate_key_update( table_name, options[:on_duplicate_key_update], options[:model], options[:primary_key], options[:locking_column] )\n elsif logger && options[:on_duplicate_key_update]\n logger.warn \"Ignoring on_duplicate_key_update because it is not supported by the database.\"\n end\n\n # custom user post_sql\n post_sql_statements << options[:post_sql] if options[:post_sql]\n\n # with rollup\n post_sql_statements << rollup_sql if options[:rollup]\n\n post_sql_statements\n end",
"def tables\n raise 'SevenZip#tables should never be called'\n end",
"def create_tables\n x = 1\n table_count = (all_guests.length / table_size_limit.to_f).ceil\n while x <= table_count\n Table.create(table_number: x, table_size_limit: table_size_limit, event_id: id)\n x += 1\n end\n end",
"def delete_all_records_from_all_tables\n if Rails.env.production?\n raise \"deleting all records in production is not alllowed\"\n else\n Rake::Task[\"db:schema:load\"].invoke\n end\nend"
] |
[
"0.59406096",
"0.57482046",
"0.5635725",
"0.547253",
"0.53894407",
"0.53232217",
"0.53206587",
"0.52276355",
"0.5222187",
"0.52083164",
"0.52072746",
"0.5196061",
"0.5178575",
"0.5163621",
"0.51179594",
"0.51079535",
"0.5092086",
"0.5033839",
"0.5013097",
"0.5013097",
"0.5013097",
"0.5010149",
"0.49926242",
"0.49859637",
"0.49751058",
"0.49738875",
"0.4935098",
"0.49338323",
"0.49322513",
"0.4927672",
"0.49231273",
"0.49036956",
"0.48976645",
"0.4884428",
"0.4884428",
"0.48574963",
"0.48561323",
"0.48555604",
"0.48542336",
"0.48517877",
"0.48501223",
"0.48077047",
"0.48043048",
"0.48032087",
"0.4800046",
"0.47988385",
"0.47982186",
"0.47911254",
"0.47769487",
"0.47665814",
"0.47338513",
"0.47336063",
"0.47103435",
"0.47001004",
"0.46979612",
"0.4694605",
"0.46676782",
"0.46644244",
"0.46632767",
"0.46598294",
"0.46574682",
"0.46574682",
"0.46412474",
"0.46374545",
"0.46348673",
"0.46344528",
"0.46321684",
"0.46316966",
"0.46300465",
"0.462327",
"0.4621965",
"0.46174327",
"0.4616399",
"0.46152315",
"0.46060637",
"0.4595715",
"0.4593212",
"0.45902336",
"0.45857957",
"0.45857957",
"0.4580313",
"0.45768246",
"0.4576023",
"0.45719323",
"0.45586184",
"0.45501497",
"0.45385554",
"0.45271596",
"0.4513685",
"0.45124403",
"0.45056057",
"0.45008707",
"0.45007136",
"0.45005694",
"0.44959307",
"0.44869018",
"0.4486204",
"0.44828263",
"0.44800997",
"0.44798803"
] |
0.5296199
|
7
|
Only include the primary table in the main delete clause
|
def delete_from_sql(sql)
sql << ' FROM '
source_list_append(sql, @opts[:from][0..0])
end
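For context, a hedged usage sketch (not from the source; it assumes Sequel's mock adapter, and :items/:categories are hypothetical tables). Because delete_from_sql appends only the first entry of @opts[:from], a joined dataset still names a single primary table in the main delete clause:

require 'sequel'

# Minimal sketch, assuming Sequel's mock adapter emulating MySQL.
DB = Sequel.mock(host: 'mysql')
ds = DB[:items].join(:categories, id: :category_id)

# Only the primary table (:items) follows the main delete keyword; joined
# sources are emitted by other clauses. The exact SQL shape varies by adapter.
puts ds.delete_sql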
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def delete(table, ids) # abstract\n end",
"def prepared_delete\n # SEQUEL5: Remove\n cached_prepared_statement(:fixed, :delete){prepare_statement(filter(prepared_statement_key_array(primary_key)), :delete)}\n end",
"def delete(table, where)\n sql = \"delete from #{table} #{where_clause(where)};\"\n execute sql\n end",
"def _delete_without_checking\n # SEQUEL5: Remove\n if use_prepared_statements_for?(:delete)\n _set_prepared_statement_server(model.send(:prepared_delete)).call(pk_hash)\n else\n super\n end\n end",
"def delete\n table = self.class.to_s.pluralize.underscore\n\n DATABASE.execute(\"DELETE FROM #{table} WHERE id = #{@id};\")\n end",
"def delete\n table_name = self.class.to_s.pluralize.underscore\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def delete\n # Figure out the table's name from the object we're calling the method on.\n table_name = self.class.to_s.pluralize.underscore\n CONNECTION.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\") # need to see if this one will work, if not look up.\n end",
"def _delete_without_checking\n if sql = (m = model).fast_instance_delete_sql\n sql = sql.dup\n ds = use_server(m.dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_delete(sql)\n else\n _delete_dataset.delete \n end\n end",
"def _delete_without_checking\n if sql = (m = model).fast_instance_delete_sql\n sql = sql.dup\n ds = use_server(m.dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_delete(sql)\n else\n _delete_dataset.delete \n end\n end",
"def delete\n\n DB.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def delete table, keys = [], transaction_id: nil\n commit transaction_id: transaction_id do |c|\n c.delete table, keys\n end\n end",
"def db_remove\n \"DELETE\" + from_table_where + sql_match_conditions\n end",
"def delete\n table_name = self.class.to_s.pluralize.underscore\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def delete(table, where)\n self.query(@args[:db].delete(table, where, :return_sql => true))\n return nil\n end",
"def delete\n %w[\n hathi_isbn\n hathi_issn\n hathi_lccn\n hathi_oclc\n hathi_title\n hathi_enumc\n hathi_pubdate\n hathi_publisher\n hathi_sudoc\n hathi_related\n hathi_gd\n ].each do |tablename|\n sql = \"DELETE FROM #{tablename}\";\n q = @conn.prepare(sql);\n puts sql;\n q.execute();\n end\nend",
"def delete_by_primary_key(primary_key)\n query = \"DELETE FROM `#{@table_name}` WHERE \"+build_where({@primary_key => primary_key})\n\n begin\n queryresult = @mysql.query(query)\n rescue Exception => e\n @log.error(\"#{e}\")\n return false\n end\n\n expire_table_cache(get_all_related_tables)\n true\n end",
"def delete_records(table_name, condition)\n raise \"implement in subclass\"\n end",
"def delete\n CONNECTION.execute(\"DELETE FROM '#{tablename}' WHERE id = ?;\", @id)\n \"Deleted.\"\n end",
"def delete_records(records)\n if sql = @reflection.options[:delete_sql]\n records.each { |record| @owner.connection.delete(interpolate_and_sanitize_sql(sql, record)) }\n else\n\n relation = Arel::Table.new(@reflection.options[:join_table], arel_engine)\n \n relation.where(relation[@reflection.primary_key_name].eq(@owner.id).\n and(relation[@reflection.association_foreign_key].in(records.map { |x| x.id }.compact))\n ).delete\n end\n end",
"def delete\n binding.pry\n DATABASE.execute(\"DELETE FROM contents WHERE id = #{id}\")\n end",
"def delete_from_table(db,id,table_name)\n db.execute(\"DELETE FROM #{table_name} WHERE #{table_name}.id =#{id}\")\nend",
"def delete(sql, name = nil) end",
"def delete_all(condition=nil)\n if condition\n query(\"DELETE FROM #{SmartSession::SqlSession.table_name} WHERE #{condition}\")\n else\n query(\"DELETE FROM #{SmartSession::SqlSession.table_name}\")\n end\n end",
"def _delete_records\n @ids_to_delete.each do |table, ids|\n delete_from_table(table, ids)\n end\n end",
"def destroy_without_callbacks\r\n where_clause_terms = [self.class.primary_key, quoted_id].transpose.map do |pair| \r\n \"(#{connection.quote_column_name(pair[0])} = #{pair[1]})\"\r\n end\r\n where_clause = where_clause_terms.join(\" AND \")\r\n unless new_record?\r\n connection.delete(\r\n \"DELETE FROM #{self.class.quoted_table_name} \" +\r\n \"WHERE #{where_clause}\",\r\n \"#{self.class.name} Destroy\"\r\n )\r\n end\r\n freeze\r\n end",
"def delete (table_name, record_id)\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{record_id}\")\n end",
"def delete_sql(opts = nil)\n opts = opts ? @opts.merge(opts) : @opts\n\n if opts[:group]\n raise Error::InvalidOperation, \"Grouped datasets cannot be deleted from\"\n elsif opts[:from].is_a?(Array) && opts[:from].size > 1\n raise Error::InvalidOperation, \"Joined datasets cannot be deleted from\"\n end\n\n sql = \"DELETE FROM #{source_list(opts[:from])}\"\n\n if where = opts[:where]\n sql << \" WHERE #{literal(where)}\"\n end\n\n sql\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def delete_all(table)\n\t\t\t\ttable.clear\n\t\t\tend",
"def exec_delete(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"delete\", binds) { query(sql, binds) }\r\n end",
"def exec_delete(sql, name, binds)\n execute(sql, name, binds)\n end",
"def delete!\n self.update_columns(:deleted => true)\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def delete(arel, name = nil, binds = [])\n exec_delete(to_sql(arel), name, binds)\n end",
"def incomplete_delete\n incomplete.delete_all\n end",
"def delete; raise ActiveRecord::ReadOnlyRecord; end",
"def table_delete_query(table, values)\n query = \"delete from #{quote_table_name(table)}\"\n query << \" where (\" << quote_key_list(table) << \") = (\"\n query << primary_key_names(table).map do |key|\n quote_value(table, key, values[key])\n end.join(', ') << \")\"\n end",
"def deleted_query(records)\n keys = records.\n map { |m| record_key(m.key)[@klass.primary_key] }.\n reject(&:nil?)\n\n @klass.unscoped.where(@klass.primary_key => keys)\n end",
"def delete_table\n TinyDyno::Adapter.delete_table(table_name: self.table_name)\n end",
"def delete(other)\n other = coerce(other)\n delete_left(other).join(delete_right(other))\n end",
"def delete!(defer = false)\n delete_logic( defer, false ) \n end",
"def deleteForeignEntries(keys, main)\n keys.each { |tbl, lnks|\n if (lnks.empty?) then \n raise Puppet::DevError, \"Link missing foreign keys for #{tbl} in foreign_keys variable of vclresource child provider\"\n end\n if self.class.protectedHashKeys.include? tbl then\n # do nothing\n else\n if lnks[:recurse] == nil then\n lnks[:recurse] = []\n end\n lnks.each { |col, lnk|\n if self.class.protectedHashKeys.include? col then\n # do nothing\n elsif lnks[:recurse].include? col then \n if keys[tbl][col][:step][0].split('.')[0] == main then\n qry = \"DELETE FROM #{tbl} WHERE #{keys[tbl][col][:step][1]} NOT IN (SELECT #{keys[tbl][col][:step][0].split('.')[1]} FROM #{main})\"\n self.class.runQuery(qry)\n end\n deleteForeignEntries(lnks, tbl)\n end\n }\n end\n }\n end",
"def execute_delete!( activerecord_class )\n collector = @row_collectors[ activerecord_class.table_name ]\n # ASSERT: the only case in which we don't have a collector defined for the specified class\n # should be for the deletion against Swimmer:\n dup_rows = collector ? collector.duplicate_rows : [ @slave_swimmer ]\n\n process_text_log << \"Deleting #{ activerecord_class.name } #{ dup_rows.size } duplicates...\\r\\n\"\n sql_diff_text_log << \"\\r\\n-- Deletes for #{ activerecord_class.name }:\\r\\n\"\n is_ok = true\n begin\n dup_rows.each do |row|\n row.destroy # Build-up SQL-diff:\n sql_diff_text_log << to_sql_delete( row, false ) # no additional comment\n end\n rescue\n process_text_log << \"\\r\\n*** Swimmer Merge: exception caught!\\r\\n\"\n process_text_log << \"*** Phase '#{ activerecord_class.name } DELETE': #{ $!.to_s }\\r\\n\" if $!\n is_ok = false\n end\n is_ok\n end",
"def delete(key)\n in_transaction_wr\n @table.delete key\n end",
"def delete_table(table_id); delete(\"tables/#{table_id}\"); nil; end",
"def og_delete(store, cascade = true)\n pk_field = self.class.primary_key\n pk_field = self.class.ann(pk_field, :field) || pk_field\n pk = instance_variable_get(\"@#{pk_field}\")\n\n transaction_raise do |tx|\n if cascade && descendants = self.class.ann(:self, :descendants)\n descendants.each do |descendant|\n case descendant\n when ManyToMany\n # delete all entries in the join table pointing to current obj\n tx.exec \"DELETE FROM #{descendant.join_table} WHERE #{descendant.owner_key}=#{store.quote(pk)}\"\n when BelongsTo\n # delete all objecs belonging to the current obj\n descendant.owner_class.find(:where => \"#{descendant.foreign_key} = #{store.quote(pk)}\", :extra_condition => nil).each {|x| x.delete }\n end\n end\n end\n # delete obj itself\n tx.exec \"DELETE FROM #{self.class.table} WHERE #{pk_field}=#{store.quote(pk)}\"\n end\n end",
"def delete table\n table = table.to_sym\n @lookup = @lookup.reject { |k, v| k == table }\n @schema = @schema.reject { |k, v| k == table }\n nil\n end",
"def destroy\n DB.execute <<SQL\nDELETE FROM #{self.class.table}\nWHERE id = #{@hash['id']}\nSQL\n end",
"def delete_related!(*names)\n # Recurse on the statement.\n @statement.delete_related!(*names)\n end",
"def delete\n DATABASE.execute(\"DELETE FROM students WHERE id = #{@id}\")\n end",
"def test_emailers_restrict_delete\n\t\terr = assert_raises PG::ForeignKeyViolation do\n\t\t\tDB.exec(\"DELETE FROM people WHERE id=7\")\n\t\tend\n\t\tassert err.message.include? 'emailers_person_id_fkey'\n\tend",
"def delete_all(klass)\n sql = \"DELETE FROM #{klass.table}\"\n sql << \" WHERE ogtype='#{klass}'\" if klass.schema_inheritance? and not klass.schema_inheritance_root?\n exec sql\n end",
"def delete\n DATABASE.execute(\"DELETE from students WHERE id = #{id}\")\n end",
"def delete_key_statement(opts)\n opts = check_params(opts,[:statement_names])\n super(opts)\n end",
"def delete_using_sql(sql)\n join_from_sql(:USING, sql)\n end",
"def delete_using_sql(sql)\n join_from_sql(:USING, sql)\n end",
"def delete()\n sql = \"DELETE FROM transactions\n WHERE id = $1\"\n values = [@id]\n SqlRunner.run(sql, values)\nend",
"def destroy(table)\n end",
"def db_deleter(database, id)\n database.execute(\"DELETE FROM wine_cellar where id=#{id}\")\nend",
"def delete_statement(statement)\n self.deletes << statement\n end",
"def delete_statement(statement)\n self.deletes << statement\n end",
"def delete_statement(query)\n conditions_statement, bind_values = conditions_statement(query.conditions)\n\n statement = \"DELETE FROM #{quote_name(query.model.storage_name(name))}\"\n statement << \" WHERE #{conditions_statement}\" unless conditions_statement.blank?\n\n return statement, bind_values\n end",
"def delete_table(table)\r\n referenced_by = references(table)\r\n if !referenced_by.empty?\r\n puts \"unable to delete table \\'#{table}\\' because it is referenced by table(s):\"\r\n referenced_by.each{|table_name| puts \"#{table_name}\"}\r\n false\r\n elsif table_exists?(table)\r\n delete_table_cmd = \"DROP TABLE IF EXISTS #{table}\"\r\n @db.execute(delete_table_cmd)\r\n puts \"#{table} was deleted\"\r\n true\r\n end\r\n end",
"def delete_records where: {} , **args\n\t\tif args[:all] == true \n\t\t\twhere = {}\n\t\telse\n\t\t\twhere.merge!(args) if where.is_a?(Hash)\n\t\t\treturn 0 if where.empty?\n\t\tend\n orientdb.delete_records( self, where: where ).count\n\tend",
"def delete_all\n delete_if { |b| true }\n end",
"def delete\n \n end",
"def delete\n FC::DB.query(\"DELETE FROM #{self.class.table_name} WHERE id=#{@id.to_i}\") if @id\n end",
"def delete!\n uniq.both_e.uniq.bulk_job { |e| e.delete! }\n uniq.bulk_job { |e| e.delete! }\n end",
"def delete\n DB.exec(\"DELETE FROM line WHERE id = #{self.id};\")\n end",
"def delete()\n db = PG connect( {dbname: 'bounty_hunter',\n host: 'localhost'\n })\n sql = 'DELETE from bounty_hunter'\n db.prepare('delete_one', sql)\n db.exec_prepared('delete_one', value)\n db.close()\nend",
"def delete(*rest) end",
"def delete(record_id)\n CONNECTION.execute(\"DELETE FROM #{get_table_name} WHERE id = #{record_id}\")\n end",
"def delete\n execute_dui(delete_sql){|c| return c.affected_rows}\n end",
"def delete(defer = false)\n delete_logic( defer )\n end",
"def delete()\n\n db = PG.connect({dbname: \"pizza_shop\", host: \"localhost\"})\n sql = \"DELETE FROM pizza_orders WHERE id = $1\"\n values = [@id]\n db.prepare(\"Delete\", sql)\n db.exec_prepared(\"Delete\", values)\n db.close\n\n end",
"def delete()\n db = PG.connect({ dbname: 'Music_Collection', host: 'localhost'})\n sql = \n \"\n DELETE FROM Music_Collection where id = #{@id};\n \"\n db.exec(sql)\n db.close()\nend",
"def delete(table, table_hash, conditions = {})\n\t\t\t\tlen = table.length - 1\n\n\t\t\t\tlen.downto(0) { |i|\n\t\t\t\t del = true\n\t\t\t\t \n\t\t\t\t\tconditions.each do |key, val|\n\t\t\t\t\t\tif (table[i][table_hash[key]] != val)\n\t\t\t\t\t\t\tdel = false\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n \n\t\t\t\t\tif (del)\n\t\t\t\t\t\ttable.delete_at(i)\n\t\t\t\t\tend\n\t\t\t\t}\n\t\t\tend",
"def delete!\n delete_if { true }\n end",
"def delete\n raise \"'id' is not set.\" if @id == nil\n sql = \"DELETE FROM #{table} WHERE id=#{@id}\"\n Database.transaction(sql)\n @log.debug \"Record[#{self}] is deleted from Table[#{table}]\"\n end",
"def delete type\n return if type == :records\n @db.connect do\n TableUtil::delete_table @table_files \n Logger.<<(__FILE__,\"INFO\",\"Deleted files table for #{@source.name}\")\n end\n end",
"def delete_row(id)\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{id}\")\n end",
"def purge\n @db.execute( \"DELETE FROM #{TABLE_NAME};\" )\n end",
"def delete_table(table_name)\n data.delete(table_name)\n end",
"def dbdelete(table, condition, condition_name)\n if condition.kind_of?(Array) == false\n c = condition.to_s + \"=?\"\n else\n c = \"\"\n i = 0\n while i < condition.length\n c += condition[i].to_s + \"=?\"\n i += 1\n if i < condition.length\n c += \" AND \"\n end\n end\n end\n\n return db.execute(\"DELETE FROM #{table} WHERE #{c}\", condition_name)\nend",
"def delete(*ids)\r\n unless ids.is_a?(Array); raise \"*ids must be an Array\"; end\r\n ids = [ids.to_composite_ids] if not ids.first.is_a?(Array)\r\n where_clause = ids.map do |id_set|\r\n [primary_keys, id_set].transpose.map do |key, id|\r\n \"#{quoted_table_name}.#{connection.quote_column_name(key.to_s)}=#{sanitize(id)}\"\r\n end.join(\" AND \")\r\n end.join(\") OR (\")\r\n delete_all([ \"(#{where_clause})\" ])\r\n end",
"def delete_all\n begin\n db.execute \"TRUNCATE TABLE #{table}\"\n\n rescue SQLite3::Exception => e\n puts \"Database Exception occured\"\n puts e\n \n ensure\n db.close if db\n end\n end",
"def delete_record *rid\n db.delete_record rid\n end",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def delete_operations; end",
"def delete(model)\n id = model.primary_key_value\n store.delete(id, table: table_name)\n end",
"def test_stats_cascade_delete\n\t\tres = DB.exec(\"SELECT person_id FROM stats WHERE id=8\")\n\t\tassert_equal '5', res[0]['person_id']\n\t\tDB.exec(\"DELETE FROM people WHERE id=5\")\n\t\tres = DB.exec(\"SELECT person_id FROM stats WHERE id=8\")\n\t\tassert_equal 0, res.ntuples\n\tend",
"def exec_delete(sql, name, binds)\n execute to_sql(sql, binds), name\n self.affected_rows\n end",
"def delete_entry\n\t\t#delete product from cards\n\t\t\tstatement=@connection.prepare(\"delete from cards where card_no=?\")\n\t\t\tstatement.execute(@card_no)\n\t\t#delete product from inline_products\n\t\t\tstatement1=@connection.prepare(\"delete from inline_products where card_no=?\")\n\t\t\tstatement1.execute(@card_no)\n\tend",
"def delete!(*rest) end",
"def delete\n raise ActiveRecord::ReadOnlyRecord\n end",
"def delete\n raise ActiveRecord::ReadOnlyRecord\n end",
"def delete\n raise ActiveRecord::ReadOnlyRecord\n end",
"def delete_table instance_id, table_id\n tables.delete_table name: table_path(instance_id, table_id)\n end",
"def delete() # EXTENSION\n sql = \"DELETE FROM films WHERE id = $1\"\n values = [@id]\n SqlRunner.run(sql, values)\nend",
"def remove_from_graph\n # Ignores trying to delete nonexistent records\n connection.execute <<-EOS\n DELETE IGNORE FROM #{oqgraph_table_name} WHERE origid = #{self.send(self.class.from_key)} AND destid = #{self.send(self.class.to_key)};\n EOS\n end"
] |
[
"0.71483934",
"0.7042767",
"0.676697",
"0.6694531",
"0.66897273",
"0.66391444",
"0.6638879",
"0.6621127",
"0.6621127",
"0.6612789",
"0.6595038",
"0.65888524",
"0.6579451",
"0.6510613",
"0.6502417",
"0.64435554",
"0.6410139",
"0.6336354",
"0.6303365",
"0.63010603",
"0.6294832",
"0.6256122",
"0.62484235",
"0.624324",
"0.6242666",
"0.62103873",
"0.62044615",
"0.6197967",
"0.61925805",
"0.61817807",
"0.61540323",
"0.6153021",
"0.6139183",
"0.6138797",
"0.6129588",
"0.6127436",
"0.6127284",
"0.6121414",
"0.6113562",
"0.6108432",
"0.60998386",
"0.6092309",
"0.6085887",
"0.6083065",
"0.6054745",
"0.6052157",
"0.60512346",
"0.6016557",
"0.6009793",
"0.60057336",
"0.59948516",
"0.59869015",
"0.59610635",
"0.59607714",
"0.59548736",
"0.59547484",
"0.5952359",
"0.5947512",
"0.59307903",
"0.5923039",
"0.5923039",
"0.5922141",
"0.59214234",
"0.5916806",
"0.5905224",
"0.5903371",
"0.58928835",
"0.58873606",
"0.58822376",
"0.5866941",
"0.5866913",
"0.58526397",
"0.58523923",
"0.5846776",
"0.58298707",
"0.582909",
"0.58263195",
"0.5820641",
"0.58174586",
"0.58122784",
"0.58105576",
"0.580695",
"0.5806009",
"0.58000576",
"0.5798239",
"0.57971615",
"0.5795294",
"0.57926",
"0.5791272",
"0.5790879",
"0.57908446",
"0.5788587",
"0.57865465",
"0.5785522",
"0.57845175",
"0.57845175",
"0.57845175",
"0.57619727",
"0.5759822",
"0.57585144"
] |
0.59046894
|
65
|
Use USING to specify additional tables in a delete query
|
def delete_using_sql(sql)
join_from_sql(:USING, sql)
end
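A companion sketch for the USING path (again assuming Sequel's mock adapter, here emulating PostgreSQL; table names are hypothetical). On adapters that route joined deletes through delete_using_sql, the extra sources land in a USING clause instead of the DELETE target:

require 'sequel'

# Minimal sketch; the emitted SQL is illustrative, not guaranteed verbatim.
DB = Sequel.mock(host: 'postgres')
ds = DB[:items].join(:categories, id: :category_id)

# Roughly: DELETE FROM "items" USING "categories"
# WHERE ("categories"."id" = "items"."category_id")
puts ds.delete_sql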
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def delete_sql(opts = nil)\n opts = opts ? @opts.merge(opts) : @opts\n\n if opts[:group]\n raise Error::InvalidOperation, \"Grouped datasets cannot be deleted from\"\n elsif opts[:from].is_a?(Array) && opts[:from].size > 1\n raise Error::InvalidOperation, \"Joined datasets cannot be deleted from\"\n end\n\n sql = \"DELETE FROM #{source_list(opts[:from])}\"\n\n if where = opts[:where]\n sql << \" WHERE #{literal(where)}\"\n end\n\n sql\n end",
"def delete(table, ids) # abstract\n end",
"def delete table\n table = table.to_sym\n @lookup = @lookup.reject { |k, v| k == table }\n @schema = @schema.reject { |k, v| k == table }\n nil\n end",
"def drop_table(klass)\n # Remove leftover data from some join tabkes.\n klass.relations.each do |rel|\n if rel.class.to_s == \"Og::JoinsMany\" and rel.join_table\n target_class = rel.target_class\n exec \"DELETE FROM #{rel.join_table}\"\n end\n end\n exec \"DROP TABLE #{klass.table}\"\n end",
"def delete\n %w[\n hathi_isbn\n hathi_issn\n hathi_lccn\n hathi_oclc\n hathi_title\n hathi_enumc\n hathi_pubdate\n hathi_publisher\n hathi_sudoc\n hathi_related\n hathi_gd\n ].each do |tablename|\n sql = \"DELETE FROM #{tablename}\";\n q = @conn.prepare(sql);\n puts sql;\n q.execute();\n end\nend",
"def delete_queries\n [\n \"DROP TRIGGER tr_#{suffix} ON #{src_table};\",\n \"DROP FUNCTION fn_#{suffix}();\",\n \"DROP TRIGGER tr_#{suffix}_cleaner ON #{surveys_table};\",\n \"DROP FUNCTION fn_#{suffix}_cleaner();\"\n ]\n end",
"def db_remove\n \"DELETE\" + from_table_where + sql_match_conditions\n end",
"def delete_related!(*names)\n # Recurse on the statement.\n @statement.delete_related!(*names)\n end",
"def delete\n # Figure out the table's name from the object we're calling the method on.\n table_name = self.class.to_s.pluralize.underscore\n CONNECTION.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\") # need to see if this one will work, if not look up.\n end",
"def delete(sql, name = nil) end",
"def delete_from_sql(sql)\n sql << ' FROM '\n source_list_append(sql, @opts[:from][0..0])\n end",
"def delete table, keys = [], transaction_id: nil\n commit transaction_id: transaction_id do |c|\n c.delete table, keys\n end\n end",
"def delete\n table_name = self.class.to_s.pluralize.underscore\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def delete(table, where)\n sql = \"delete from #{table} #{where_clause(where)};\"\n execute sql\n end",
"def delete_from_table(db,id,table_name)\n db.execute(\"DELETE FROM #{table_name} WHERE #{table_name}.id =#{id}\")\nend",
"def remove_from_graph\n # Ignores trying to delete nonexistent records\n connection.execute <<-EOS\n DELETE IGNORE FROM #{oqgraph_table_name} WHERE origid = #{self.send(self.class.from_key)} AND destid = #{self.send(self.class.to_key)};\n EOS\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def delete\n table = self.class.to_s.pluralize.underscore\n\n DATABASE.execute(\"DELETE FROM #{table} WHERE id = #{@id};\")\n end",
"def delete_table\n TinyDyno::Adapter.delete_table(table_name: self.table_name)\n end",
"def exec_delete(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def delete_records(records)\n if sql = @reflection.options[:delete_sql]\n records.each { |record| @owner.connection.delete(interpolate_and_sanitize_sql(sql, record)) }\n else\n\n relation = Arel::Table.new(@reflection.options[:join_table], arel_engine)\n \n relation.where(relation[@reflection.primary_key_name].eq(@owner.id).\n and(relation[@reflection.association_foreign_key].in(records.map { |x| x.id }.compact))\n ).delete\n end\n end",
"def delete\n table_name = self.class.to_s.pluralize.underscore\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def deleteuser(del_id)\n db.execute(\"DELETE FROM users WHERE user_id=?\", del_id)\n db.execute(\"DELETE FROM listings WHERE user_id=?\", del_id)\nend",
"def exec_delete(sql, name, binds)\n execute(sql, name, binds)\n end",
"def delete_table(table)\r\n referenced_by = references(table)\r\n if !referenced_by.empty?\r\n puts \"unable to delete table \\'#{table}\\' because it is referenced by table(s):\"\r\n referenced_by.each{|table_name| puts \"#{table_name}\"}\r\n false\r\n elsif table_exists?(table)\r\n delete_table_cmd = \"DROP TABLE IF EXISTS #{table}\"\r\n @db.execute(delete_table_cmd)\r\n puts \"#{table} was deleted\"\r\n true\r\n end\r\n end",
"def drop_join_table(hash, options=OPTS)\n drop_table(join_table_name(hash, options), options)\n end",
"def exec_delete(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"delete\", binds) { query(sql, binds) }\r\n end",
"def delete_table(table_id); delete(\"tables/#{table_id}\"); nil; end",
"def destroy(table)\n end",
"def prepared_delete\n # SEQUEL5: Remove\n cached_prepared_statement(:fixed, :delete){prepare_statement(filter(prepared_statement_key_array(primary_key)), :delete)}\n end",
"def delete_related!(*names)\n # Nothing to do by default.\n end",
"def cleanup_tables\n klass_ = self.class.const_get(:DEFAULT_AR_CLASS)\n if klass_.connection.tables.include?('spatial_test')\n klass_.connection.drop_table(:spatial_test)\n end\n end",
"def delete(table, where)\n self.query(@args[:db].delete(table, where, :return_sql => true))\n return nil\n end",
"def delete_table\n table_name = self.to_s.pluralize.underscore\n DATABASE.execute(\"DROP TABLE #{table_name}\")\n end",
"def drop_prejoin_fact_table\r\n connection.drop_table(prejoined_table_name) if connection.tables.include?(prejoined_table_name)\r\n end",
"def resolve_ids_cleanup_sql(source, temptable)\n\t\tsource.connection.execute \"\n\t\tDELETE FROM unresolved_ids \n\t\t WHERE EXISTS (SELECT 1 FROM #{temptable} \n\t\t WHERE unresolved_id = unresolved_ids.id)\n\t\t AND source_id = #{source.id};\"\n\n\t\tsource.connection.execute \"DROP TABLE #{temptable}\"\n\tend",
"def delete\n\n DB.execute(\"DELETE FROM #{table_name} WHERE id = #{@id};\")\n end",
"def clean\n disable_referential_integrity do\n tables_cache.keys.reverse_each do |table|\n ActiveRecord::Base.connection.execute %(\n DELETE FROM #{table}\n )\n end\n end\n end",
"def delete_table(table_name)\n data.delete(table_name)\n end",
"def remove_tables_from_publication(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} DROP TABLE #{safe_list(tables)}\")\n end",
"def destroy\n DB.execute <<SQL\nDELETE FROM #{self.class.table}\nWHERE id = #{@hash['id']}\nSQL\n end",
"def delete(id, query_term)\n @conn.exec(\"DELETE FROM products WHERE id = '#{id}';\")\n puts \"DELETE\"\nend",
"def purge\n @db.execute( \"DELETE FROM #{TABLE_NAME};\" )\n end",
"def delete(arel, name = nil, binds = [])\n exec_delete(to_sql(arel), name, binds)\n end",
"def exec_delete(sql, name = nil, binds = [])\n result = without_prepared_statement?(binds) ? exec_no_cache(sql, name, binds) :\n exec_cache(sql, name, binds)\n affected = result.cmd_tuples\n result.clear\n affected\n end",
"def _delete_records\n @ids_to_delete.each do |table, ids|\n delete_from_table(table, ids)\n end\n end",
"def delete()\n db = PG connect( {dbname: 'bounty_hunter',\n host: 'localhost'\n })\n sql = 'DELETE from bounty_hunter'\n db.prepare('delete_one', sql)\n db.exec_prepared('delete_one', value)\n db.close()\nend",
"def clean\n transaction do\n connection.delete \"DELETE FROM line_items WHERE event_id IN (SELECT id FROM events WHERE subscription_id = #{id})\"\n connection.delete \"DELETE FROM account_items WHERE event_id IN (SELECT id FROM events WHERE subscription_id = #{id})\"\n connection.delete \"DELETE FROM tagged_items WHERE event_id IN (SELECT id FROM events WHERE subscription_id = #{id})\"\n\n connection.delete \"DELETE FROM buckets WHERE account_id IN (SELECT id FROM accounts WHERE subscription_id = #{id})\"\n connection.delete \"DELETE FROM statements WHERE account_id IN (SELECT id FROM accounts WHERE subscription_id = #{id})\"\n\n connection.delete \"DELETE FROM actors WHERE subscription_id = #{id}\"\n connection.delete \"DELETE FROM events WHERE subscription_id = #{id}\"\n connection.delete \"DELETE FROM accounts WHERE subscription_id = #{id}\"\n connection.delete \"DELETE FROM tags WHERE subscription_id = #{id}\"\n end\n end",
"def db_deleter(database, id)\n database.execute(\"DELETE FROM wine_cellar where id=#{id}\")\nend",
"def destroy!\n drop_ddl = tables.map(&:name).map do |t|\n \"drop table if exists #{t};\\n\"\n end.join\n ActiveRecord::Base.connection.execute(drop_ddl)\n end",
"def delete_all(klass)\n sql = \"DELETE FROM #{klass.table}\"\n sql << \" WHERE ogtype='#{klass}'\" if klass.schema_inheritance? and not klass.schema_inheritance_root?\n exec sql\n end",
"def exec_delete(sql, name = 'SQL', binds = [])\n log(sql, name, binds) do\n result = without_prepared_statement?(binds) ? exec_no_cache(sql) :\n exec_cache(sql, binds)\n affected = result.cmd_tuples\n result.clear\n affected\n end\n end",
"def drop_movies_table\n c = PGconn.new(:host => \"localhost\", :dbname => \"testdb\")\n c.exec \"DROP TABLE products;\"\n c.close\nend",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete_sql(sql, name = nil)\n result = execute(sql, name)\n result.cmd_tuples\n end",
"def delete(filters, user)\n if filters.empty?\n body = \"sql=DELETE FROM #{@id}\"\n connector.post \"https://www.googleapis.com/fusiontables/v2/query\", body\n else\n row_ids = query_row_ids(filters)\n row_ids.each do |row_id|\n body = \"sql=DELETE FROM #{@id} WHERE ROWID = '#{row_id}';\"\n connector.post \"https://www.googleapis.com/fusiontables/v2/query\", body\n end\n end\n end",
"def clean_join_table\n # these 2 lines run pretty much the same sql, self.parses adds a where in clause\n self.parsers = []\n # OR\n # ActiveRecord::Base.connection.execute(\"DELETE FROM parsers_transformations WHERE transformation_id = #{id}\")\n end",
"def test_delete_all_with_joins\n ReferenceCode.joins(:reference_type).where(:reference_type_id => 1).delete_all\n end",
"def delete_all\n neo4j_query(\"MATCH (n:`#{mapped_label_name}`) OPTIONAL MATCH (n)-[r]-() DELETE n,r\")\n end",
"def _delete_without_checking\n # SEQUEL5: Remove\n if use_prepared_statements_for?(:delete)\n _set_prepared_statement_server(model.send(:prepared_delete)).call(pk_hash)\n else\n super\n end\n end",
"def delete_indices(group_name, keys)\n self.class.connection.execute \"DELETE FROM #{index_table_name(group_name)} WHERE #{index_reader_sql(group_name)} AND #{KEY} IN (#{keys.map{|key| connection.quote(key)}.join(',')})\"\n end",
"def delete_all(condition=nil)\n if condition\n query(\"DELETE FROM #{SmartSession::SqlSession.table_name} WHERE #{condition}\")\n else\n query(\"DELETE FROM #{SmartSession::SqlSession.table_name}\")\n end\n end",
"def test_urls_cascade_delete\n\t\tres = DB.exec(\"SELECT person_id FROM urls WHERE id=8\")\n\t\tassert_equal '5', res[0]['person_id']\n\t\tDB.exec(\"DELETE FROM people WHERE id=5\")\n\t\tres = DB.exec(\"SELECT person_id FROM urls WHERE id=8\")\n\t\tassert_equal 0, res.ntuples\n\tend",
"def delete_connection(relation_name, other_object, bidi)\n return Relation.delete(relation_name, self.node, other_object.node, bidi)\n end",
"def drop_tables *table_names\n tables_to_affect = table_names.empty? ? all_tables : table_names\n\n each_table do |connection, table_name|\n connection.drop_table(table_name) if connection && drop_table?(table_name)\n end\n end",
"def delete_table instance_id, table_id\n tables.delete_table name: table_path(instance_id, table_id)\n end",
"def delete_db_post\n # Tell the user\n puts \"> Tar bort aliaset från databasen\".green\n\n # Connect to the database\n conn = PG.connect( dbname: DB_DATABASE_NAME, user: DB_USER, password: DB_PASSWORD )\n\n # Delete the account\n res = conn.exec \"DELETE FROM #{DB_ALIAS_TABLE} WHERE address = '#{$alias}' AND userid = '#{$email}'\" unless $simulate\n\n # Close the connection\n conn.close\nend",
"def unlink\n self.transaction do\n self.class.factory.model.connection.execute <<-SQL\n ALTER TABLE #{name} NO INHERIT #{self.class.factory.model.table_name};\n ALTER TABLE #{name} RENAME TO #{name}_unlinked;\n SQL\n self.destroy\n end\n end",
"def table_delete_query(table, values)\n query = \"delete from #{quote_table_name(table)}\"\n query << \" where (\" << quote_key_list(table) << \") = (\"\n query << primary_key_names(table).map do |key|\n quote_value(table, key, values[key])\n end.join(', ') << \")\"\n end",
"def dropUserTable(tableName)\n @conn.exec(\"DROP TABLE #{tableName}\")\n end",
"def delete_statement(query)\n conditions_statement, bind_values = conditions_statement(query.conditions)\n\n statement = \"DELETE FROM #{quote_name(query.model.storage_name(name))}\"\n statement << \" WHERE #{conditions_statement}\" unless conditions_statement.blank?\n\n return statement, bind_values\n end",
"def delete\n CONNECTION.execute(\"DELETE FROM '#{tablename}' WHERE id = ?;\", @id)\n \"Deleted.\"\n end",
"def delete_operations; end",
"def delete!(*rest) end",
"def delete_statements!(options={}, safety=true)\n\n unless !safety || options.keys.select {|x| [:subj, :pred, :obj].include?(x) }.size > 0\n raise Exception.new(\"You asked to delete all statements in the repository. Either give a subj/pred/obj qualifier, or set safety=false\")\n end\n\n # We have to use net/http, because curb has not yet implemented DELETE as of this writing.\n\n uri = URI.parse(self.uri + \"/statements?\" + self.class.get_parameterize(options.reject{|k,v|\n ![:subj, :pred, :obj, :context, :infer].include?(k)\n }))\n http = Net::HTTP.start(uri.host, uri.port)\n http.delete(uri.path)\n raise(SesameException.new(easy.body_str)) unless easy.response_code == 204\n end",
"def deleteForeignEntries(keys, main)\n keys.each { |tbl, lnks|\n if (lnks.empty?) then \n raise Puppet::DevError, \"Link missing foreign keys for #{tbl} in foreign_keys variable of vclresource child provider\"\n end\n if self.class.protectedHashKeys.include? tbl then\n # do nothing\n else\n if lnks[:recurse] == nil then\n lnks[:recurse] = []\n end\n lnks.each { |col, lnk|\n if self.class.protectedHashKeys.include? col then\n # do nothing\n elsif lnks[:recurse].include? col then \n if keys[tbl][col][:step][0].split('.')[0] == main then\n qry = \"DELETE FROM #{tbl} WHERE #{keys[tbl][col][:step][1]} NOT IN (SELECT #{keys[tbl][col][:step][0].split('.')[1]} FROM #{main})\"\n self.class.runQuery(qry)\n end\n deleteForeignEntries(lnks, tbl)\n end\n }\n end\n }\n end",
"def drop_table(*names)\n names.each {|n| execute(drop_table_sql(n))}\n end",
"def delete_all\n Neo.db.execute_query(\"#{initial_match} OPTIONAL MATCH (n0)-[r]-() DELETE n0,r\")\n end",
"def delete(*rest) end",
"def delete_unused_tags\n ActiveRecord::Base.connection.execute(\n \"delete from tags where id in(\n select t.id from tags as t left join posts_tags as pt on pt.tag_id = t.id where pt.tag_id is null\n )\"\n )\n end",
"def nuclear_option!\n query(\"MATCH (n) DETACH DELETE n\")\n end",
"def delete\n DB.exec(\"DELETE FROM users WHERE id = #{@id};\")\n # DB.exec(\"DELETE FROM checkouts WHERE user_id = #{@id};\") --> delete books from users checkout history, but does not delete the books from the database??\n end",
"def drop_table_statement(repository, model)\n \"DROP TABLE #{quote_table_name(model.storage_name(repository.name))}\"\n end",
"def delete_table instance_id, table_id\n execute do\n tables.delete_table(\n table_path(instance_id, table_id)\n )\n end\n end",
"def delete (table_name, record_id)\n DATABASE.execute(\"DELETE FROM #{table_name} WHERE id = #{record_id}\")\n end",
"def alias_delete(al)\n obj_delete al, Alias\n end",
"def og_delete(store, cascade = true)\n pk_field = self.class.primary_key\n pk_field = self.class.ann(pk_field, :field) || pk_field\n pk = instance_variable_get(\"@#{pk_field}\")\n\n transaction_raise do |tx|\n if cascade && descendants = self.class.ann(:self, :descendants)\n descendants.each do |descendant|\n case descendant\n when ManyToMany\n # delete all entries in the join table pointing to current obj\n tx.exec \"DELETE FROM #{descendant.join_table} WHERE #{descendant.owner_key}=#{store.quote(pk)}\"\n when BelongsTo\n # delete all objecs belonging to the current obj\n descendant.owner_class.find(:where => \"#{descendant.foreign_key} = #{store.quote(pk)}\", :extra_condition => nil).each {|x| x.delete }\n end\n end\n end\n # delete obj itself\n tx.exec \"DELETE FROM #{self.class.table} WHERE #{pk_field}=#{store.quote(pk)}\"\n end\n end",
"def test_stats_cascade_delete\n\t\tres = DB.exec(\"SELECT person_id FROM stats WHERE id=8\")\n\t\tassert_equal '5', res[0]['person_id']\n\t\tDB.exec(\"DELETE FROM people WHERE id=5\")\n\t\tres = DB.exec(\"SELECT person_id FROM stats WHERE id=8\")\n\t\tassert_equal 0, res.ntuples\n\tend",
"def delete(name, options = T.unsafe(nil)); end",
"def delete_plant_row(delete_name)\n CONNECTION.execute(\"DELETE FROM backyard WHERE name = '#{delete_name}';\")\nend",
"def deletes(objs)\n if !@args[:datarow]\n objs.each do |obj|\n self.delete(obj)\n end\n else\n tables = {}\n\n begin\n objs.each do |obj|\n next if obj.deleted?\n tablen = obj.table\n\n if !tables.key?(tablen)\n tables[tablen] = []\n end\n\n tables[tablen] << obj.id\n obj.delete if obj.respond_to?(:delete)\n\n #Remove from ID-cache.\n classname = obj.class.classname.to_sym\n @ids_cache[classname].delete(obj.id.to_i) if @ids_cache_should.key?(classname)\n\n #Unset any data on the object, so it seems deleted.\n obj.destroy\n end\n ensure\n #An exception may occur, and we should make sure, that objects that has gotten 'delete' called also are deleted from their tables.\n tables.each do |table, ids|\n ids.each_slice(1000) do |ids_slice|\n @args[:db].delete(table, {:id => ids_slice})\n end\n end\n end\n end\n end",
"def unlink_from(table)\n invalidate_cache\n remove_layers_from(table)\n end",
"def drop!(db, colls = nil)\n db.in_transaction do |conn|\n schema_tables(conn).each do |table|\n conn.exec \"DROP TABLE IF EXISTS #{table}\"\n end\n end\n end",
"def dropUserTable\n @conn.exec(\"DROPE users\")\n end",
"def delete_all(table)\n\t\t\t\ttable.clear\n\t\t\tend",
"def _delete_without_checking\n if sql = (m = model).fast_instance_delete_sql\n sql = sql.dup\n ds = use_server(m.dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_delete(sql)\n else\n _delete_dataset.delete \n end\n end",
"def _delete_without_checking\n if sql = (m = model).fast_instance_delete_sql\n sql = sql.dup\n ds = use_server(m.dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_delete(sql)\n else\n _delete_dataset.delete \n end\n end",
"def delete()\n db = PG.connect({ dbname: 'Music_Collection', host: 'localhost'})\n sql = \n \"\n DELETE FROM Music_Collection where id = #{@id};\n \"\n db.exec(sql)\n db.close()\nend",
"def remove_relation\n relation_scope.where(@id_key => deleted_remote_objects_ids)\n end"
] |
[
"0.63203835",
"0.6251078",
"0.6079405",
"0.60615814",
"0.60326934",
"0.6020745",
"0.6006609",
"0.59675026",
"0.5950655",
"0.5901165",
"0.58806187",
"0.5856725",
"0.5830617",
"0.58274776",
"0.57967013",
"0.5768996",
"0.5765135",
"0.5707902",
"0.56950194",
"0.5692454",
"0.56786674",
"0.5671857",
"0.5666665",
"0.56632435",
"0.5635317",
"0.55888534",
"0.5576118",
"0.55678666",
"0.55633247",
"0.55598897",
"0.554091",
"0.553892",
"0.55384046",
"0.55204314",
"0.55195755",
"0.5486672",
"0.5477912",
"0.5466738",
"0.5465186",
"0.5463077",
"0.54347175",
"0.5415458",
"0.5401805",
"0.53898513",
"0.5371698",
"0.53683877",
"0.5366389",
"0.53513134",
"0.5346844",
"0.53369236",
"0.53292006",
"0.5320657",
"0.5318202",
"0.5302388",
"0.5302388",
"0.53023875",
"0.52976084",
"0.5282453",
"0.5255124",
"0.5245254",
"0.5239338",
"0.5234436",
"0.5231526",
"0.5219462",
"0.5202884",
"0.52026665",
"0.5194842",
"0.51890975",
"0.51721627",
"0.5165298",
"0.516501",
"0.51637965",
"0.5161412",
"0.5160658",
"0.51553845",
"0.5155037",
"0.5153711",
"0.514872",
"0.51453245",
"0.51435226",
"0.5142655",
"0.5133892",
"0.5126721",
"0.51253563",
"0.5119776",
"0.51194453",
"0.5110425",
"0.51068854",
"0.51064926",
"0.5101088",
"0.5099125",
"0.5097126",
"0.509541",
"0.50914836",
"0.5091317",
"0.5090279",
"0.5090279",
"0.5070516",
"0.50693107"
] |
0.76211977
|
1
|
Add ON CONFLICT clause if it should be used
|
def insert_conflict_sql(sql)
if opts = @opts[:insert_conflict]
sql << " ON CONFLICT"
if target = opts[:constraint]
sql << " ON CONSTRAINT "
identifier_append(sql, target)
elsif target = opts[:target]
sql << ' '
identifier_append(sql, Array(target))
if conflict_where = opts[:conflict_where]
sql << " WHERE "
literal_append(sql, conflict_where)
end
end
if values = opts[:update]
sql << " DO UPDATE SET "
update_sql_values_hash(sql, values)
if update_where = opts[:update_where]
sql << " WHERE "
literal_append(sql, update_where)
end
else
sql << " DO NOTHING"
end
end
end
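The options this method consumes (:constraint, :target, :conflict_where, :update, :update_where) are set by Sequel's Dataset#insert_conflict. A short sketch of how they map onto the generated clause (the users table and its columns are hypothetical):

# Generates: INSERT ... ON CONFLICT (email) DO UPDATE SET name = excluded.name
# WHERE ("users"."active" IS TRUE)
DB[:users].insert_conflict(
  target: :email,
  update: { name: Sequel[:excluded][:name] },
  update_where: { Sequel[:users][:active] => true }
).insert(email: 'a@example.com', name: 'A')

# Omitting :update falls through to DO NOTHING, and passing :constraint
# instead of :target produces ON CONFLICT ON CONSTRAINT <name>.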
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def sql_for_on_duplicate_key_ignore( *args ) # :nodoc:\n arg = args.first\n conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash )\n \" ON CONFLICT #{conflict_target}DO NOTHING\"\n end",
"def sql_for_on_duplicate_key_ignore( table_name, *args ) # :nodoc:\n arg = args.first\n conflict_target = sql_for_conflict_target( arg ) if arg.is_a?( Hash )\n \" ON CONFLICT #{conflict_target}DO NOTHING\"\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( table_name, primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target or :constraint_name to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def on_conflict_sql(sql)\n @sql_on_conflict = sql\n self\n end",
"def on_conflict(column = nil)\n ::MultiInsert::Query::OnConflict.new(self, column)\n end",
"def insert_ignore\n insert_conflict\n end",
"def merge_conflict?; end",
"def non_sql_option?(key)\n super || key == :cursor || key == :insert_conflict\n end",
"def _insert_dataset\n if upsert_plugin_upserting\n if postgres?\n super.insert_conflict(update: values_to_update, target: self.class.upsert_plugin_identifying_columns)\n elsif mysql?\n columns_to_update = values_to_update.keys - self.class.upsert_plugin_identifying_columns\n super.on_duplicate_key_update(*columns_to_update)\n else\n super\n end\n else\n super\n end\n end",
"def conflicting_or_created_record\n conflict || create\n end",
"def merge_if_exists!\n t = merge_if_exists || self\n t.save!\n end",
"def conflict\n record = model.class.where(conditions).first\n if record\n conflict!\n record\n end\n end",
"def set_timestamp_to_now\n puts 'set_timestamp_to_now'\n db_conn.prepare 'set_timestamp_to_now', \"INSERT INTO #{TABLE} (id, updated_at) VALUES (#{ROW_KEY}, $1)\n ON CONFLICT(id) DO UPDATE SET updated_at = excluded.updated_at\"\n db_conn.exec_prepared 'set_timestamp_to_now', [Time.now]\nend",
"def sneaky_save(avoid_insert_conflict: nil)\n begin\n sneaky_create_or_update(avoid_insert_conflict: avoid_insert_conflict)\n rescue ActiveRecord::StatementInvalid\n false\n end\n end",
"def do_update(values)\n @query.on_conflict_sql(::MultiInsert::QueryBuilder.on_conflict_do_update(@column, values, @query.opts))\n end",
"def treat_reserved_as_conflict; end",
"def on_upsert\n #\n end",
"def add_conflict(entity, id, attr, o, c, p)\n logger.warn { \"Detected conflict on #{entity} #{attr}: [o, c, p] = #{[o, c, p].inspect}\" }\n\n conflicts.add(entity, id, attr, o, c, p)\n end",
"def conflict_clause(start_time)\n \"#{CachedAppeal.table_name}.updated_at < #{ActiveRecord::Base.connection.quote(start_time)}\"\n end",
"def supports_insert_conflict?\n server_version >= 90500\n end",
"def offer_to_overwrite_conflicts\n @host.overwrite = \"true\" if @host.errors.any? and @host.errors.are_all_conflicts?\n end",
"def update_with_conflict_validation(*args)\n update(*args)\n rescue ActiveRecord::StaleObjectError\n self.lock_version = lock_version_was\n errors.add :base, \"この記事は、あなたが編集中に他の人に更新されました。\"\n changes.except(:content, :updated_at).each do |title, values|\n errors.add title, \"was #{values.first}\"\n end\n false\n end",
"def build_insert_set_cols(key)\n \"#{quote_column_name(key)} = EXCLUDED.#{quote_column_name(key)}\"\n end",
"def _update_without_checking(columns)\n ds = _update_dataset\n lc = model.lock_column\n rows = ds.clone(ds.send(:default_server_opts, :sql=>ds.output(nil, [Sequel[:inserted][lc]]).update_sql(columns))).all\n values[lc] = rows.first[lc] unless rows.empty?\n rows.length\n end",
"def treat_reserved_as_conflict=(_arg0); end",
"def multi_insert_sql_strategy\n :union\n end",
"def _update_without_checking(columns)\n super(identifier_hash(columns))\n end",
"def merge_conflict?\n prefix == 'U'\n end",
"def mark_conflicted(filename)\n merge_state.mark_conflicted filename\n end",
"def save_detecting_duplicate_entry_constraint_violation\n begin\n save\n rescue ActiveRecord::StatementInvalid => e\n # Would that rails gave us the nested exception to check...\n if e.message =~ /.*[Dd]uplicate/\n errors.add_to_base(translate_with_theme('duplicate_entry_please_try_again'))\n false\n else\n raise e\n end\n end\n end",
"def _merge_insert_sql(sql, data)\n sql << \" THEN INSERT \"\n columns, values = _parse_insert_sql_args(data[:values])\n _insert_columns_sql(sql, columns)\n if override = data[:override]\n sql << override\n end\n _insert_values_sql(sql, values)\n end",
"def set_column_conflict!(column)\n @set_column_conflicts[:\"#{column}=\"] = @set_column_conflicts[\"#{column}=\"] = column.to_sym\n end",
"def with_pk!(pk)\n with_pk(pk) || raise(NoMatchingRow.new(dataset))\n end",
"def with_pk!(pk)\n with_pk(pk) || raise(NoMatchingRow.new(dataset))\n end",
"def successfull_merge_or_nothing(i,j)\n # try a merge and determinize inside a transaction on the ufds\n @ufds.transactional do\n merge_and_determinize(i, j)\n end\n end",
"def _update_without_checking(columns)\n if use_prepared_statements_for?(:update)\n _set_prepared_statement_server(model.send(:prepared_update, columns.keys)).call(Hash[columns].merge!(pk_hash))\n else\n super\n end\n end",
"def add_conflict(name, specifics)\n @conflicts << Requirement.parse(name, specifics)\n end",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def duplicate_key_update_error?(exception) # :nodoc:\n exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('duplicate key')\n end",
"def duplicate_key_update_error?(exception) # :nodoc:\n exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('duplicate key')\n end",
"def try_merge!(pin); end",
"def update\n return unless conditions_met?\n super\n end",
"def upsert(obj)\n collection, query, model = get_type_info(obj.dup)\n data_hash = model.merge(Wgit::Model.common_update_data)\n result = @client[collection].replace_one(query, data_hash, upsert: true)\n\n result.matched_count.zero?\n ensure\n @last_result = result\n end",
"def select_lock_sql(sql)\n @opts[:lock] == :update ? sql : super\n end",
"def import_cached_appeals(conflict_columns, columns)\n start_time = Time.now.utc\n\n values_to_cache = yield\n\n CachedAppeal.import(\n values_to_cache,\n on_duplicate_key_update: {\n conflict_target: conflict_columns,\n condition: conflict_clause(start_time),\n columns: columns\n }\n )\n\n values_to_cache\n end",
"def flush\n conn.transaction do\n buffer.flatten.each do |row|\n # check to see if this row's compound key constraint already exists\n # note that the compound key constraint may not utilize virtual fields\n next unless row_allowed?(row)\n\n # add any virtual fields\n add_virtuals!(row)\n \n key_names = []\n key_values = []\n @key_columns.each do |name|\n key_names << \"#{name}\"\n key_values << conn.quote(row[name]) # TODO: this is probably not database agnostic\n end\n\n names = []\n values = []\n (order - @key_columns).each do |name|\n names << \"#{name}\"\n values << conn.quote(row[name]) # TODO: this is probably not database agnostic\n end\n\n all_name_values = (key_names+names).zip(key_values+values)\n\n q = <<EOF\nMERGE INTO #{table_name} d \nUSING (SELECT #{all_name_values.collect {|c,v| \"#{v} #{c}\"}.join(',')} FROM DUAL) s\nON (#{map_src_to_dest(key_names,'s','d').join(' AND ')})\nWHEN MATCHED THEN \nUPDATE SET #{[map_src_to_dest(names,'s','d'), \"d.#{@update_ts_column}=CURRENT_TIMESTAMP\"].flatten.join(',')}\nWHEN NOT MATCHED THEN\nINSERT (#{all_name_values.collect {|c,v| 'd.'+c}.join(',')},d.#{@insert_ts_column})\nVALUES (#{all_name_values.collect {|c,v| 's.'+c}.join(',')},CURRENT_TIMESTAMP)\nEOF\n #q = \"INSERT INTO `#{table_name}` (#{names.join(',')}) VALUES (#{values.join(',')})\"\n ETL::Engine.logger.debug(\"Executing upsert: #{q}\")\n conn.insert(q, \"Upsert row #{current_row}\")\n @current_row += 1\n end\n buffer.clear\n end\n end",
"def upsert(sobject, field, attrs)\n upsert!(sobject, field, attrs)\n rescue *exceptions\n false\n end",
"def raises_uniqueness_violation?(&block)\n transaction(:savepoint=>:only, &block)\n false\n rescue unique_constraint_violation_class => e\n e\n end",
"def result(id, key, value)\n @pgsql.exec(\n 'INSERT INTO result (job, key, value) VALUES ($1, $2, $3) ON CONFLICT (job, key) DO UPDATE SET value = $3',\n [id, key, value]\n )\n end",
"def conflict?\n from_values.select{ |d,e| d != e }.any?\n end",
"def duplicate_key_update_error?(exception)# :nodoc:\n exception.is_a?(ActiveRecord::StatementInvalid) && exception.to_s.include?('Duplicate entry')\n end",
"def prepare_update_statement(o)\n if o.offset || has_group_by_and_having?(o) ||\n has_join_sources?(o) && has_limit_or_offset_or_orders?(o)\n super\n else\n o\n end\n end",
"def update_sql(sql, name = nil) #:nodoc:\n super\n self.affected_rows\n end",
"def update_without_callbacks\r\n where_clause_terms = [self.class.primary_key, quoted_id].transpose.map do |pair| \r\n \"(#{connection.quote_column_name(pair[0])} = #{pair[1]})\"\r\n end\r\n where_clause = where_clause_terms.join(\" AND \")\r\n connection.update(\r\n \"UPDATE #{self.class.quoted_table_name} \" +\r\n \"SET #{quoted_comma_pair_list(connection, attributes_with_quotes(false))} \" +\r\n \"WHERE #{where_clause}\",\r\n \"#{self.class.name} Update\"\r\n )\r\n return true\r\n end",
"def insert_or_update(uniq_keys, values_hash, tbl_name='main_table', opts={})\n all_field_names = values_hash.keys\n field_names_as_symbol_string = all_field_names.map{ |k| \":#{k}\" }.join(',') # need to appear as symbols\n sql_statement = \"INSERT INTO #{tbl_name} (#{format_field_names_as_string(all_field_names)}) VALUES (#{field_names_as_symbol_string})\"\n database.execute(sql_statement, values_hash)\n rescue SQLite3::ConstraintException => e\n unique_key_constraint = uniq_keys.map { |k| \"'#{k}'=:#{k}\" }.join(' AND ')\n update_keys = values_hash.keys\n update_keys -= uniq_keys if !opts[:update_unique_keys]\n update_sql = update_keys.map { |k| \"'#{k}'=:#{k}\" }.join(', ')\n sql_statement = \"UPDATE #{tbl_name} SET #{update_sql} WHERE #{unique_key_constraint}\"\n database.execute sql_statement, values_hash\n rescue SQLite3::SQLException => e\n puts \"Exception (#{e.inspect}) raised\" if verbose?\n case e.message\n when /no such table/\n create_table(tbl_name, all_field_names, uniq_keys)\n retry\n when /has no column/\n add_columns(tbl_name, all_field_names)\n retry\n else\n raise e\n end\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n sql = ' ON DUPLICATE KEY UPDATE '\n arg = args.first\n if arg.is_a?( Array )\n sql << sql_for_on_duplicate_key_update_as_array( table_name, arg )\n elsif arg.is_a?( Hash )\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, arg )\n elsif arg.is_a?( String )\n sql << arg\n else\n raise ArgumentError.new( \"Expected Array or Hash\" )\n end\n sql\n end",
"def sneaky_save!(avoid_insert_conflict: nil)\n sneaky_create_or_update(avoid_insert_conflict: avoid_insert_conflict)\n end",
"def merge_or_replace(mode:)\n target = replication.target_table\n stage = temporary_table_name\n sql = send(\"#{mode}_statement\", target, stage)\n ::RailsRedshiftReplicator.connection.exec sql\n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n\n selector_column_definitions = column_definitions.select { |cd| selector_keys.include?(cd.name) }\n setter_column_definitions = column_definitions.select { |cd| setter_keys.include?(cd.name) }\n update_column_definitions = setter_column_definitions.select { |cd| cd.name !~ CREATED_COL_REGEX && !options[\"ignore_on_update\"].include?(cd.name) }\n\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def generate_upsert_options\n if options.empty?\n ''\n else\n ' USING ' <<\n options.map do |key, value|\n serialized_value =\n case key\n when :consistency then value.to_s.upcase\n when :timestamp then (value.to_f * 1_000_000).to_i\n else value\n end\n \"#{key.to_s.upcase} #{serialized_value}\"\n end.join(' AND ')\n end\n end",
"def persist\n prepare do\n unless updates.empty?\n collection.find(selector).update(updates)\n conflicts.each_pair do |key, value|\n collection.find(selector).update({ key => value })\n end\n end\n end\n end",
"def duplicate_key_update_error?(exception)# :nodoc:\n exception.is_a?(ActiveRecord::JDBCError) && exception.to_s.include?('Duplicate entry')\n end",
"def finding_with_ambiguous_select?(select_clause)\n !select_clause && columns.size != 2\n end",
"def set_conflict\n @conflict = Conflict.find(params[:id])\n end",
"def retain_except_on_create\n data[:retain_except_on_create]\n end",
"def upsert_model(model)\n model_hash = model.to_hash\n columns_to_update = model_hash.keys.reject do |k|\n matching_attributes.include?(k) || skip_updating.include?(k)\n end\n upsert_options = { target: matching_attributes }\n unless columns_to_update.empty?\n update_clause = columns_to_update.map { |key| [ key.to_sym, \"excluded__#{key}\".to_sym ] }.to_h\n timestamps = update_timestamps(columns_to_update)\n upsert_options[:update] = update_clause.merge(timestamps) { |key, oldval, newval| oldval }\n end\n model_insert_clause = model_hash.merge(insert_timestamps) { |key, oldval, newval| oldval }\n\n inserted_id = model_class.dataset.insert_conflict(upsert_options).insert(model_insert_clause)\n # If model was not inserted, the above returns nil\n if inserted_id\n model.id = inserted_id\n end\n model.instance_variable_set(:@new, false)\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def merge_with(other, unique_id_col = 'id')\n raise \"unmergable objects\" if other.class.column_names != self.class.column_names || self.send(unique_id_col.to_sym) != other.send(unique_id_col.to_sym)\n\n column_names = self.class.column_names\n\n self.trackzored_columns.each do |tc|\n has_updated_by_col = column_names.include?(\"#{tc}_updated_by\")\n has_updated_at_col = column_names.include?(\"#{tc}_updated_at\")\n \n if has_updated_at_col\n self_time = self.send(\"#{tc}_updated_at\".to_sym)\n other_time = other.send(\"#{tc}_updated_at\".to_sym)\n else\n self_time = self.updated_at\n other_time = other.updated_at\n end\n\n if self_time.nil? || (!other_time.nil? && other_time > self_time)\n self.send(\"#{tc}_updated_at=\".to_sym, other_time) if has_updated_at_col\n self.send(\"#{tc}_updated_by=\".to_sym, other.send(\"#{tc}_updated_by\".to_sym)) if has_updated_by_col\n self.send(\"#{tc}=\".to_sym, other.send(tc.to_sym))\n end\n end\n\n if other.updated_at > self.updated_at\n (column_names - self.trackzored_columns - self.trackzor_maintained_columns).each do |c|\n self.send(\"#{c}=\".to_sym, other.send(c.to_sym))\n end\n end\n\n puts \"Merged #{self.send(unique_id_col.to_sym)}: #{self.changes.inspect}\" unless self.changes.empty?\n self.send(:update_without_callbacks)\n end",
"def update_except(hash, *except)\n update_restricted(hash, false, except.flatten)\n end",
"def _merge_when(hash, &block)\n hash[:conditions] = Sequel.virtual_row(&block) if block\n\n if merge_when = @opts[:merge_when]\n clone(:merge_when => (merge_when.dup << hash.freeze).freeze)\n else\n clone(:merge_when => [hash.freeze].freeze)\n end\n end",
"def check_column_conflicts\n mod = Sequel::Model\n columns.find_all{|c| mod.method_defined?(c)}.each{|c| get_column_conflict!(c)}\n columns.find_all{|c| mod.method_defined?(\"#{c}=\")}.each{|c| set_column_conflict!(c)}\n end",
"def merge!(with); end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def _use_insert_select?(ds)\n (!ds.opts[:select] || ds.opts[:returning]) && ds.supports_insert_select? \n end",
"def create!\n Upsert.logger.info \"[upsert] Creating or replacing database function #{name.inspect} on table #{table_name.inspect} for selector #{selector_keys.map(&:inspect).join(', ')} and setter #{setter_keys.map(&:inspect).join(', ')}\"\n first_try = true\n connection.execute(%{\n CREATE OR REPLACE FUNCTION #{name}(#{(selector_column_definitions.map(&:to_selector_arg) + setter_column_definitions.map(&:to_setter_arg) + hstore_delete_handlers.map(&:to_arg)).join(', ')}) RETURNS VOID AS\n $$\n DECLARE\n first_try INTEGER := 1;\n BEGIN\n LOOP\n -- first try to update the key\n UPDATE #{quoted_table_name} SET #{update_column_definitions.map(&:to_setter).join(', ')}\n WHERE #{selector_column_definitions.map(&:to_selector).join(' AND ') };\n IF found THEN\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n END IF;\n -- not there, so try to insert the key\n -- if someone else inserts the same key concurrently,\n -- we could get a unique-key failure\n BEGIN\n INSERT INTO #{quoted_table_name}(#{setter_column_definitions.map(&:quoted_name).join(', ')}) VALUES (#{setter_column_definitions.map(&:to_setter_value).join(', ')});\n #{hstore_delete_handlers.map(&:to_pgsql).join(' ')}\n RETURN;\n EXCEPTION WHEN unique_violation THEN\n -- seamusabshere 9/20/12 only retry once\n IF (first_try = 1) THEN\n first_try := 0;\n ELSE\n RETURN;\n END IF;\n -- Do nothing, and loop to try the UPDATE again.\n END;\n END LOOP;\n END;\n $$\n LANGUAGE plpgsql;\n })\n rescue\n if first_try and $!.message =~ /tuple concurrently updated/\n first_try = false\n retry\n else\n raise $!\n end\n end",
"def scope_with_scrooge?( sql )\n sql =~ scrooge_select_regex && \n columns_hash.has_key?(self.primary_key.to_s) &&\n sql !~ ScroogeRegexJoin\n end",
"def merge(r, equalities_resolved = false)\n if ::ActiveRecord::Relation === r && !equalities_resolved\n if self.table_name != r.table_name\n super(r.visited)\n else\n merge_resolving_duplicate_squeel_equalities(r)\n end\n else\n super(r)\n end\n end",
"def merge_entity(table_name, entity_values, options = {})\n if_match = \"*\"\n if_match = options[:if_match] if options[:if_match]\n\n uri = entities_uri(table_name,\n entity_values[:PartitionKey] || entity_values[\"PartitionKey\"],\n entity_values[:RowKey] || entity_values[\"RowKey\"], new_query(options))\n\n headers = { \"X-HTTP-Method\" => \"MERGE\" }\n headers[\"If-Match\"] = if_match || \"*\" unless options[:create_if_not_exists]\n\n body = Serialization.hash_to_json(entity_values)\n\n response = call(:post, uri, body, headers, options)\n response.headers[\"etag\"]\n rescue => e\n raise_with_response(e, response)\n end",
"def conflict(exception)\n # had to remove as gives away details of our db structure.\n # TODO: come up with solution to give further details\n if exception.is_a? ApiBase::Conflict\n message = exception.message || \"Conflict\"\n else\n message = \"Conflict\"\n end\n render status: :conflict, json: { message: message } \n end",
"def raise_if_conflicts # :nodoc:\n if has_conflicts?\n raise Gem::ConflictError.new self, conflicts\n end\n end",
"def merge_select_extra\n relation.select_extra_values.concat(other.select_extra_values).uniq! \\\n if other.select_extra_values.present?\n end",
"def set_column_value(c, v)\n if col = model.set_column_conflicts[c]\n self[col] = v\n else\n super\n end\n end",
"def with_pk!(pk)\n with_pk(pk) || raise(NoMatchingRow.new(self))\n end",
"def with_pk!(pk)\n with_pk(pk) || raise(NoMatchingRow.new(self))\n end",
"def conflicts\n if new_record?\n query = 'unit_id = ? AND daterange(start_at, end_at) && daterange(?, ?)'\n Reservation.where(query, unit_id, start_at, end_at)\n else\n query = 'id <> ? AND unit_id = ? AND daterange(start_at, end_at) && daterange(?, ?)'\n Reservation.where(query, id, unit_id, start_at, end_at)\n end\n end",
"def get_column_conflict!(column)\n @get_column_conflicts[column.to_sym] = @get_column_conflicts[column.to_s] = column.to_sym\n end",
"def generate_upsert_options\n if options.empty?\n ''\n else\n ' USING ' <<\n options.map do |key, value|\n serialized_value =\n case key\n when :timestamp then (value.to_f * 1_000_000).to_i\n else value\n end\n \"#{key.to_s.upcase} #{serialized_value}\"\n end.join(' AND ')\n end\n end",
"def transformed?(old_primary_key)\n @transformed ||= {}\n @transformed.has_key?(old_primary_key)\n end",
"def conflict(val)\n conflicts << val\n conflicts.dup\n end",
"def conflict(val)\n conflicts << val\n conflicts.dup\n end",
"def upsert(hash_key_value, range_key_value = nil, attrs = {}, conditions = {})\n optional_params = [range_key_value, attrs, conditions].compact\n if optional_params.first.is_a?(Hash)\n range_key_value = nil\n attrs, conditions = optional_params[0..1]\n else\n range_key_value = optional_params.first\n attrs, conditions = optional_params[1..2]\n end\n\n options = if range_key\n value_casted = TypeCasting.cast_field(range_key_value, attributes[range_key])\n value_dumped = Dumping.dump_field(value_casted, attributes[range_key])\n { range_key: value_dumped }\n else\n {}\n end\n\n options[:conditions] = conditions\n\n begin\n new_attrs = Dynamoid.adapter.update_item(table_name, hash_key_value, options) do |t|\n attrs.symbolize_keys.each do |k, v|\n value_casted = TypeCasting.cast_field(v, attributes[k])\n value_dumped = Dumping.dump_field(value_casted, attributes[k])\n t.set(k => value_dumped)\n end\n end\n attrs_undumped = Undumping.undump_attributes(new_attrs, attributes)\n new(attrs_undumped)\n rescue Dynamoid::Errors::ConditionalCheckFailedException\n end\n end",
"def upsert(kind, item)\n end",
"def update!(**args)\n @excludes_geometry_of = args[:excludes_geometry_of] if args.key?(:excludes_geometry_of)\n @includes_geometry_of = args[:includes_geometry_of] if args.key?(:includes_geometry_of)\n end",
"def verify_no_uncommitted_merge\n if !overwrite? && @working_changeset.parents.size > 1\n raise abort(\"outstanding uncommitted merges\")\n end\n end",
"def save_if_none_match\n status = orchio_put_if_none_match(to_json_direct) if valid?\n retval status\n end",
"def is_conflict?\n from_json\n (@order && @order.paid?).tap do |x|\n @error = true if x\n end\n end",
"def test_update_name_merge_with_misspellings\n login(\"rolf\")\n name1 = names(:lactarius_alpinus)\n name2 = names(:lactarius_alpigenes)\n name3 = names(:lactarius_kuehneri)\n name4 = names(:lactarius_subalpinus)\n\n # First: merge Y into X, where Y is misspelling of X\n name2.correct_spelling = name1\n name2.change_deprecated(true)\n name2.save\n assert_not(name1.correct_spelling)\n assert_not(name1.deprecated)\n assert(name2.correct_spelling == name1)\n assert(name2.deprecated)\n params = {\n id: name2.id,\n name: {\n text_name: name1.text_name,\n author: name1.author,\n rank: \"Species\",\n deprecated: \"true\"\n }\n }\n put(:update, params: params)\n assert_flash_success\n assert_redirected_to(name_path(name1.id))\n assert_no_emails\n assert_not(Name.exists?(name2.id))\n assert(name1.reload)\n assert_not(name1.correct_spelling)\n assert_not(name1.deprecated)\n\n # Second: merge Y into X, where X is misspelling of Y\n name1.correct_spelling = name3\n name1.change_deprecated(true)\n name1.save\n name3.correct_spelling = nil\n name3.change_deprecated(false)\n name3.save\n assert(name1.correct_spelling == name3)\n assert(name1.deprecated)\n assert_not(name3.correct_spelling)\n assert_not(name3.deprecated)\n params = {\n id: name3.id,\n name: {\n text_name: name1.text_name,\n author: name1.author,\n rank: \"Species\",\n deprecated: \"false\"\n }\n }\n put(:update, params: params)\n assert_flash_success\n assert_redirected_to(name_path(name1.id))\n assert_no_emails\n assert_not(Name.exists?(name3.id))\n assert(name1.reload)\n assert_not(name1.correct_spelling)\n assert(name1.deprecated)\n\n # Third: merge Y into X, where X is misspelling of Z\n name1.correct_spelling = Name.first\n name1.change_deprecated(true)\n name1.save\n name4.correct_spelling = nil\n name4.change_deprecated(false)\n name4.save\n assert(name1.correct_spelling)\n assert(name1.correct_spelling != name4)\n assert(name1.deprecated)\n assert_not(name4.correct_spelling)\n assert_not(name4.deprecated)\n params = {\n id: name4.id,\n name: {\n text_name: name1.text_name,\n author: name1.author,\n rank: \"Species\",\n deprecated: \"false\"\n }\n }\n put(:update, params: params)\n assert_flash_success\n assert_redirected_to(name_path(name1.id))\n assert_no_emails\n assert_not(Name.exists?(name4.id))\n assert(name1.reload)\n assert(name1.correct_spelling == Name.first)\n assert(name1.deprecated)\n end"
] |
[
"0.7435312",
"0.72036123",
"0.63911206",
"0.63625365",
"0.63076633",
"0.62139535",
"0.5894587",
"0.5893596",
"0.58595127",
"0.58219266",
"0.57686543",
"0.55642307",
"0.5488854",
"0.536428",
"0.5363308",
"0.53116924",
"0.5297403",
"0.5259442",
"0.52361304",
"0.5174694",
"0.513627",
"0.510325",
"0.50574076",
"0.50412685",
"0.4996603",
"0.4992127",
"0.49425632",
"0.4935061",
"0.48976666",
"0.4890792",
"0.48904324",
"0.48890966",
"0.4881189",
"0.48809123",
"0.48809123",
"0.48794547",
"0.48675367",
"0.48488045",
"0.48427373",
"0.48267514",
"0.48267514",
"0.48238656",
"0.48194277",
"0.48175967",
"0.48161376",
"0.48108342",
"0.4810034",
"0.4802159",
"0.48006892",
"0.4766713",
"0.475788",
"0.4743009",
"0.47343987",
"0.4729728",
"0.4728933",
"0.4725304",
"0.47108743",
"0.46788976",
"0.46698484",
"0.4664894",
"0.46551666",
"0.46549004",
"0.46343666",
"0.4614643",
"0.4591681",
"0.45914617",
"0.45910808",
"0.45868415",
"0.45868415",
"0.45868415",
"0.45742053",
"0.45672306",
"0.4567111",
"0.4561407",
"0.4545923",
"0.45447415",
"0.45447415",
"0.45343605",
"0.45217302",
"0.45154673",
"0.4513049",
"0.45077854",
"0.45058632",
"0.44648448",
"0.4435178",
"0.4431305",
"0.4431305",
"0.44287148",
"0.442149",
"0.44153872",
"0.44080248",
"0.44032824",
"0.44032824",
"0.43951476",
"0.43722865",
"0.436911",
"0.43414727",
"0.4340553",
"0.4329188",
"0.43278006"
] |
0.68958986
|
2
|
Include aliases when inserting into a single table on PostgreSQL 9.5+.
|
def insert_into_sql(sql)
sql << " INTO "
if (f = @opts[:from]) && f.length == 1
identifier_append(sql, server_version >= 90500 ? f.first : unaliased_identifier(f.first))
else
source_list_append(sql, f)
end
end
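A hedged sketch of why the alias is preserved: PostgreSQL 9.5+ accepts INSERT INTO "table" AS "alias", which lets an ON CONFLICT DO UPDATE clause refer to the target row by the alias. DB, the :items table, and its columns are hypothetical.

ds = DB.from(Sequel[:items].as(:i)) # hypothetical table aliased as "i"
ds.insert_conflict(target: :id, update: {qty: Sequel[:i][:qty] + 1}).
  insert(id: 1, qty: 1)
# On 9.5+ this renders INSERT INTO "items" AS "i" ... ON CONFLICT ("id")
# DO UPDATE SET "qty" = ("i"."qty" + 1); older servers get the table
# name with the alias stripped via unaliased_identifier.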
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def quote_identifier_append(sql, name)\n name = (table_mappings[name.to_sym] || name) if name.respond_to?(:to_sym)\n super(sql, name)\n end",
"def addTableAlias(theAlias)\n @metadata.addTableAlias(theAlias)\n end",
"def addColumnAlias(theAlias)\n @metadata.addColumnAlias(theAlias)\n end",
"def alias_column(pretty, original)\n self.column_aliases[pretty] = original\n end",
"def aliases!\n @schema.aliases!\n end",
"def get_aliases\n # Connect to the database\n conn = PG.connect( dbname: DB_DATABASE_NAME, user: DB_USER, password: DB_PASSWORD )\n\n w = ''\n\n if $alias || $email\n w = \" WHERE\"\n w << \" address LIKE '#{$alias}'\" if $alias\n w << \" AND\" if $alias && $email\n w << \" userid LIKE '#{$email}'\" if $email\n end\n\n # Insert the user into the correct table\n res = conn.exec \"SELECT * FROM #{DB_ALIAS_TABLE}#{w}\"\n\n # Close the connection\n conn.close\n\n return res\nend",
"def extend_sql_avoiding_table_naming_clashes!(sql, addition)\r\n used_table_aliases = table_aliases_from_join_fragment(addition)\r\n old_table_aliases = table_aliases_from_join_fragment(sql)\r\n (used_table_aliases & old_table_aliases).each do |join_table_alias|\r\n i = 0\r\n begin\r\n i += 1\r\n new_alias = \"renamed_join_table_#{i}\"\r\n end until !used_table_aliases.include?(new_alias)\r\n convert_table_name_to_new_alias!(sql, join_table_alias, new_alias)\r\n end\r\n sql << \" #{addition} \"\r\n end",
"def insert_facet_series(table, type, select = 'facet_id, series_id')\n $db.execute <<-SQL\n INSERT INTO destination.#{table}\n SELECT #{select}\n FROM facet_series\n WHERE type = '#{type}';\n SQL\nend",
"def graph_alias_columns(graph_aliases)\n gas = {}\n identifiers = graph_aliases.map do |col_alias, tc| \n table, column, value = Array(tc)\n column ||= col_alias\n gas[col_alias] = [table, column].freeze\n identifier = value || SQL::QualifiedIdentifier.new(table, column)\n identifier = SQL::AliasedExpression.new(identifier, col_alias) if value || column != col_alias\n identifier\n end\n [identifiers, gas]\n end",
"def table_alias\r\n @table_alias || from_table_name\r\n end",
"def table_alias_name(value)\n data.table_alias_name = value\n end",
"def add_piggy_back_sql_data!(reflection_name, prefix, table_alias, attributes, select, joins, conditions, join_type)\n ktn = table_name\n kpkey = primary_key\n reflection = reflections[reflection_name]\n atn = reflection.table_name\n attributes.each do |attr|\n if table_alias\n select << \", #{table_alias}.#{attr} AS #{prefix}_#{attr}\"\n else\n select << \", #{atn}.#{attr} AS #{prefix}_#{attr}\"\n end\n end\n fkey = reflection.primary_key_name\n fpkey = reflection.klass.primary_key\n\n case reflection.macro\n when :belongs_to\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fpkey}=#{ktn}.#{fkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fpkey}=#{ktn}.#{fkey} \"\n end\n when :has_one\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fkey}=#{ktn}.#{kpkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fkey}=#{ktn}.#{kpkey} \"\n end\n when :has_many\n raise \"piggy_back: aliasing not implemented for has_many\" if table_alias\n if reflection.options[:through]\n ttn = reflection.through_reflection.klass.table_name\n tkfkey = reflection.through_reflection.primary_key_name\n tafkey = reflection.source_reflection.primary_key_name\n\n through_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n source_conditions = reflection.through_reflection.options[:conditions] ?\n \" AND \" + reflection.through_reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{ttn} ON (#{ttn}.#{tkfkey}=#{ktn}.#{kpkey}#{through_conditions})\"\n joins << \" LEFT JOIN #{atn} ON (#{ttn}.#{tafkey}=#{atn}.#{fpkey}#{source_conditions}) \"\n else\n reflection_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{atn} ON (#{atn}.#{fkey}=#{ktn}.#{kpkey}#{reflection_conditions}) \"\n end\n else\n raise \"can't piggy back #{reflection.macro} on class #{klass}\"\n end\n end",
"def visit_Arel_Nodes_InsertStatement o, *a\n [\n \"INSERT INTO #{visit(o.relation).gsub(/\"/, '')}\",\n \"(#{o.columns.map { |x| x.name }.join ', '})\",\n \" VALUES (#{o.values.left.map { |value| value }.join ', '})\"\n ].compact.join ' '\n end",
"def use_any_aliases(custom_fields)\n custom_fields.map { |column_name,column_type| [column_name.gsub(/.*AS\\s+/, ''), column_type] }.to_h\n end",
"def column_alias_for(field)\n column_alias = +field\n column_alias.gsub!(/\\*/, \"all\")\n column_alias.gsub!(/\\W+/, \" \")\n column_alias.strip!\n column_alias.gsub!(/ +/, \"_\")\n @connection.table_alias_for(column_alias)\n end",
"def to_table_reference(table_alias=nil)\n \"(#{sql})#{\" #{quote_identifier(table_alias)}\" if table_alias}\"\n end",
"def add_alias(index, alias_name)\n post_aliases [{ :add => { :index => index, :alias => alias_name }}]\n end",
"def register_alias(string, symbol, extension_synonyms = T.unsafe(nil)); end",
"def copy_table_data(from, to, remaps = [])\n old = columns(from).collect(&:name)\n current = columns(to).collect(&:name)\n remapped_columns = remaps.collect {|c| c.first.to_s}.compact\n common = (current & old).sort - remapped_columns\n from_columns = common.collect {|c| \"`#{c}`\"}\n to_columns = common.collect {|c| \"`#{c}`\"}\n remaps.each do |remap|\n remap = [remap].flatten\n next if remap.length != 2\n from_columns << remap.first\n to_columns << remap.last\n end\n from_columns_to_s = from_columns.join(', ')\n to_columns_to_s = to_columns.join(', ')\n execute \"INSERT INTO #{to}(#{to_columns_to_s}) SELECT #{from_columns_to_s} FROM #{from}\"\n end",
"def alias_decls; end",
"def build_table_aliases(from)\n # for the targets\n returning({}) do |aliases|\n from.map(&:to_s).sort.map(&:to_sym).each_with_index do |plural, t_index|\n table = plural._as_class.table_name\n plural._as_class.columns.map(&:name).each_with_index do |field, f_index|\n aliases[\"#{table}.#{field}\"] = \"t#{t_index}_r#{f_index}\"\n end\n end\n end\n end",
"def generate_pg_insert_query(table_name, keys, rows)\n \"INSERT INTO #{table_name}(#{keys.map { |i| \"\\\"#{i}\\\"\" }.join(',')}) VALUES(#{keys.map { |i| rows[i] == nil ? 'NULL' : \"'\" + pg_conn.escape_string(rows[i]) + \"'\" }.join(',')});\\n\"\n end",
"def set_graph_aliases(graph_aliases)\n columns, graph_aliases = graph_alias_columns(graph_aliases)\n if graph = opts[:graph]\n select(*columns).clone(:graph => Hash[graph].merge!(:column_aliases=>graph_aliases.freeze).freeze)\n else\n Sequel::Deprecation.deprecate(\"Calling Dataset#set_graph_aliases before Dataset#graph\", \"Call Dataset#set_graph_aliases after Dataset#graph now\")\n select(*columns).clone(:graph_aliases=>graph_aliases.freeze) # SEQUEL5: Remove\n end\n end",
"def generate_insert\n @binds = Array.new\n @insert_statement = \"insert into #{fully_qualified_table_name} (\"\n @insert_statement << column_details.keys.sort.map { |k| column_detail(k).column_name }.join(',')\n @insert_statement << ') values ('\n @insert_statement << column_details.keys.sort.map { |k|\n \":#{k}\"\n }.join(',')\n column_details.keys.sort.each { |k|\n if @column_values[k] == nil\n @binds.push [column_type_to_ruby_type(column_details[k]), nil]\n else\n @binds.push @column_values[k]\n end\n }\n @insert_statement << ')'\n @insert_statement\n end",
"def alias(new, old)\n @values << new unless @values.include? new\n @aliases[new] = old\n end",
"def table_name\n respond_to?(:first_source_alias) ? first_source_alias : super\n end",
"def register_alias(type, shortcut); end",
"def to_insert(output, table, row)\n columns = @columns[table].map {|i| i[0] }.join(',')\n values = map_values(row, @columns[table])\n output << \"INSERT INTO #{table} (#{columns}) VALUES (#{values});\\n\"\n end",
"def table_alias\n @target_alias\n end",
"def add_select_into_table(new_table_name, sql_query)\n sql_query.sub(/FROM/i, \"INTO #{new_table_name} FROM\")\n end",
"def aliased_expression_sql(ae)\n \"#{literal(ae.expression)} AS #{quote_identifier(ae.aliaz)}\"\n end",
"def chooseTableAlias\n @metadata.chooseTableAlias\n end",
"def alias(name)\n @aliases << convert(name)\n end",
"def alias(name)\n @aliases << convert(name)\n end",
"def add_synonym(name, table_name, options = {})\n sql = \"CREATE\"\n if options[:force] == true\n sql << \" OR REPLACE\"\n end\n sql << \" SYNONYM #{quote_table_name(name)} FOR #{quote_table_name(table_name)}\"\n execute sql\n end",
"def chooseColumnAlias\n @metadata.chooseColumnAlias\n end",
"def alias_names; end",
"def add_select_into_table(new_table_name, sql_query)\r\n sql_query.sub(/FROM/i, \"INTO #{new_table_name} FROM\")\r\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\nend",
"def add_pseudo_columns(sql_for_columns)\n @sql_for_columns.merge! sql_for_columns\n @pseudo_column_names.merge sql_for_columns.keys.map(&:to_s)\n end",
"def insert_facet(table, type, second_type = nil)\n second_condition = second_type == nil ? nil : \"OR type = '#{second_type}'\"\n\n $db.execute <<-SQL\n INSERT INTO destination.#{table}\n SELECT id, name, created_at, updated_at\n FROM source.facets\n INNER JOIN (SELECT facet_id, type FROM source.facet_series)\n AS facet_series\n ON (facet_series.facet_id = source.facets.id)\n WHERE type = '#{type}' #{second_condition}\n GROUP BY name;\n SQL\nend",
"def aliased_name; end",
"def aliases; end",
"def aliases; end",
"def aliases; end",
"def store_alias(myAlias, fact)\n @bot.debug \"parsing and storing alias #{myAlias}\"\n query = config[:dbh].prepare \"SELECT id FROM aliases WHERE alias = ?\"\n query.execute myAlias\n if row = query.fetch\n query = config[:dbh].prepare \"UPDATE aliases SET original=? WHERE alias=?\";\n query.execute(fact, myAlias)\n else\n query = config[:dbh].prepare \"INSERT INTO aliases (original, alias) values(?, ?)\";\n query.execute(fact, myAlias)\n end\n return \"Aliasing #{myAlias} to #{fact}\"\n end",
"def alias_create(al)\n obj_create al, Alias\n end",
"def sql_insert(record)\n flds, vals = parse_fldsvalues(record)\n ph = vals.map{|x| placeholder }\n\n sql = %Q|insert into #{quoted_table}\n ( #{flds.join ','} )\n output inserted.#{quote_field id_fld}\n values( #{ph.join ','} );|\n\n [sql, vals]\n end",
"def add_alias( name )\n\t\t\t@aliases << name\n\t\tend",
"def values_for_insert\r\n values = []\r\n self.class.column_names.each do |col_name|\r\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\r\n end\r\n values.join(\", \")\r\nend",
"def col_names_for_insert\r\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\r\nend",
"def print_alias(*) end",
"def add_select_into_table(new_table_name, sql_query)\n \"CREATE TABLE #{new_table_name} \" + sql_query\n end",
"def add_alias as\n @display.print_alias as\n end",
"def alias(name)\n @aliases << normalize(name)\n nil\n end",
"def table_alias\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.alias\n end\n end",
"def add_alias(an_alias, context)\n new_attr = self.class.new(self.text, an_alias.new_name, self.rw,\n self.comment, self.singleton)\n\n new_attr.record_location an_alias.file\n new_attr.visibility = self.visibility\n new_attr.is_alias_for = self\n @aliases << new_attr\n context.add_attribute new_attr\n new_attr\n end",
"def row_sql_insert(table_name, table_struct)\n fields = get_fields(table_struct)\n\n sql = <<-EOF\n INSERT INTO `#{DBNAME}`.`#{table_name}` (\n #{fields.collect { |f| \"`#{f}`\" }.join(\", \")}\n )\n VALUES (\n #{fields.collect { |f| \"'%s'\" }.join(\", \")}\n );\n EOF\n\n sql\nend",
"def add_tables_to_publication(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} ADD TABLE #{safe_list(tables)}\")\n end",
"def quoted_identifier_append(sql, c)\n sql << '`%s`' % c\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def add_alias(an_alias, context)\n raise NotImplementedError\n end",
"def table(table)\n @table = table\n\n @aliased_table = @table.new(@table.table_name)\n end",
"def column_aliases(node)\n @name_and_alias_cache[node]\n end",
"def table_alias_name(*partition_key_values)\n return collect_first(*partition_key_values, &:table_alias_name)\n end",
"def db_query_transform__subquery query, tmp_table=\"resultset_table\"\n \"(#{query}) as #{tmp_table}\"\n end",
"def alias(c)\n aliases().push c\n end",
"def create_alias(new_name, old_type)\n register(validate_type_argument(old_type), name: new_name)\n end",
"def add_alias! key, to_key\n key = @schema.validate_new_key! key\n @values.delete key\n @schema.add_alias! key, to_key\n self\n end",
"def alias_entry(from, to)\n from = to_key(from)\n to = to_key(to)\n @aliases[from] = to\n clone_entry(from, to)\n end",
"def column_aliases\n @column_aliases ||= Hash.new\n end",
"def sql_literal_append(ds, sql)\n check_columns!\n sql << 'ROW'\n ds.literal_append(sql, values_at(*columns))\n if db_type\n sql << '::'\n ds.quote_schema_table_append(sql, db_type)\n end\n end",
"def add_select_into_table(new_table_name, sql_query)\n \"CREATE TABLE #{new_table_name} \" + sql_query\n end",
"def aliases=(_arg0); end",
"def aliases=(_arg0); end",
"def copy_into_sql(table, opts)\n sql = String.new\n sql << \"COPY #{literal(table)}\"\n if cols = opts[:columns]\n sql << literal(Array(cols))\n end\n sql << \" FROM STDIN\"\n if opts[:options] || opts[:format]\n sql << \" (\"\n sql << \"FORMAT #{opts[:format]}\" if opts[:format]\n sql << \"#{', ' if opts[:format]}#{opts[:options]}\" if opts[:options]\n sql << ')'\n end\n sql\n end",
"def make_insert(table, columns, fields, row)\n statement = \"INSERT INTO #{table['name']} (#{fields.join(',')}) VALUES (\"\n values = []\n fields.each do |field|\n values << make_val(row[field], columns[field])\n end\n statement << \"#{values.join(',')});\\n\"\n statement\n end",
"def join_table_alias\n final_reverse_edge[:alias]\n end",
"def multi_insert_sql(columns, values)\n table = quote_identifier(@opts[:from].first)\n columns = literal(columns)\n values.map do |r|\n \"INSERT INTO #{table} #{columns} VALUES #{literal(r)}\"\n end\n end",
"def insert_default_values_sql\n \"INSERT INTO #{source_list(@opts[:from])} DEFAULT VALUES\"\n end",
"def table_alias_for(table_name)\n table_name.gsub(/\\./, '_')\n end",
"def create_table_sql(name, generator, options)\n a, b = super(name, generator, options), table_options_sql(options)\n \"#{a}\\n#{b}\"\n\t end",
"def aliases\n end",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS (#{sql}) WITH DATA\"\n end",
"def create_table_as_sql(name, sql, options)\n \"#{create_table_prefix_sql(name, options)} AS #{sql}\"\n end",
"def insert_conflict_sql(sql)\n if opts = @opts[:insert_conflict]\n sql << \" ON CONFLICT\"\n\n if target = opts[:constraint] \n sql << \" ON CONSTRAINT \"\n identifier_append(sql, target)\n elsif target = opts[:target]\n sql << ' '\n identifier_append(sql, Array(target))\n if conflict_where = opts[:conflict_where]\n sql << \" WHERE \"\n literal_append(sql, conflict_where)\n end\n end\n\n if values = opts[:update]\n sql << \" DO UPDATE SET \"\n update_sql_values_hash(sql, values)\n if update_where = opts[:update_where]\n sql << \" WHERE \"\n literal_append(sql, update_where)\n end\n else\n sql << \" DO NOTHING\"\n end\n end\n end",
"def create_aggregates(db: EventSourcery::Postgres.config.event_store_database,\n table_name: EventSourcery::Postgres.config.aggregates_table_name)\n db.create_table(table_name) do\n uuid :aggregate_id, primary_key: true\n column :version, :bigint, default: 1\n end\n end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n #\n end",
"def is_alias?; end",
"def col_names_for_insert\n self.class.column_names.delete_if do |col|\n col == \"id\"\n end.join(\", \")\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n end",
"def col_names_for_insert\n self.class.column_names.delete_if {|col| col == \"id\"}.join(\", \")\n end"
] |
[
"0.6124453",
"0.60013545",
"0.59401596",
"0.5806555",
"0.5685734",
"0.5633251",
"0.55919856",
"0.5528682",
"0.55130064",
"0.54824096",
"0.54548544",
"0.5448377",
"0.54266876",
"0.5383294",
"0.53749365",
"0.5368234",
"0.5361845",
"0.5356271",
"0.53260094",
"0.5319536",
"0.5317014",
"0.5310612",
"0.5307571",
"0.52993107",
"0.52934206",
"0.5288413",
"0.52874357",
"0.52865666",
"0.52758586",
"0.52677953",
"0.52669907",
"0.52301776",
"0.5221117",
"0.5221117",
"0.52196753",
"0.52189755",
"0.5199691",
"0.51914114",
"0.51696825",
"0.51536757",
"0.5148003",
"0.51398087",
"0.5133202",
"0.5133202",
"0.5133202",
"0.51306665",
"0.5128663",
"0.5126336",
"0.51225716",
"0.5119254",
"0.50815105",
"0.5072077",
"0.5071638",
"0.50537014",
"0.50481665",
"0.50470227",
"0.5046146",
"0.5045905",
"0.5036209",
"0.5034558",
"0.5031236",
"0.5031176",
"0.50120115",
"0.5007731",
"0.49996346",
"0.49973023",
"0.49965414",
"0.49961397",
"0.4993278",
"0.49894914",
"0.497072",
"0.4970544",
"0.4965039",
"0.49627495",
"0.49627495",
"0.4961784",
"0.49554688",
"0.4939453",
"0.4936666",
"0.49364713",
"0.49346182",
"0.49273002",
"0.49216956",
"0.49173826",
"0.49163088",
"0.49117538",
"0.49111307",
"0.48990473",
"0.48961145",
"0.4889168",
"0.48877394",
"0.48869228",
"0.4886178",
"0.4886178",
"0.4886178",
"0.4886178",
"0.4886178",
"0.4886178",
"0.4886178",
"0.4886178"
] |
0.5712637
|
4
|
Return the primary key to use for RETURNING in an INSERT statement
|
def insert_pk
  if (f = opts[:from]) && !f.empty?
    case t = f.first
    when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier
      if pk = db.primary_key(t)
        Sequel::SQL::Identifier.new(pk)
      end
    end
  end
end
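A short sketch of where the result is used: the PostgreSQL adapter appends RETURNING <pk> to single-table inserts, so Dataset#insert can hand back the new primary key value directly. DB and the :items table are hypothetical.

id = DB[:items].insert(name: 'widget') # assumes an "id" primary key
# Emits: INSERT INTO "items" ("name") VALUES ('widget') RETURNING "id"
# and returns the "id" of the freshly inserted row.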
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def autoincrementing_primary_key\n primary_key\n end",
"def autoincrementing_primary_key\n primary_key\n end",
"def insert_pk\n if (f = opts[:from]) && !f.empty?\n case t = f.first\n when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier\n if pk = db.primary_key(t)\n pk\n end\n end\n end\n end",
"def primary_key(table_name)\n pk_and_sequence_for(table_name)[0]\n rescue\n nil\n end",
"def primary_key\n @primary_key ||= :id\n end",
"def primary_key\n @primary_key\n end",
"def primary_key\n @primary_key || 'id'\n end",
"def pkey\n table = self.class.table_name\n key = get_primary_key_values.first\n return key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key\n self.class.primary_key\n end",
"def primary_key(table_name)\n 'id' # table.primary_key || 'id'\n end",
"def primary_key(table)\n pk_and_sequence = pk_and_sequence_for(table)\n pk_and_sequence && pk_and_sequence.first\n end",
"def primary_key\n @primary_key ||= @klass.primary_key.to_s\n end",
"def last_insert_row_id\n @database.insert_id\n end",
"def primary_key\n select(&:primary_key?)\n end",
"def primary_key\n self[:primary_key]\n end",
"def orchestrate_primary_key\n id\n end",
"def primary_key(table_name)\n pk_and_sequence = pk_and_sequence_for(table_name)\n pk_and_sequence && pk_and_sequence.first\n end",
"def primary_key\n '_id'\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n identity = select_value(\"SELECT scope_identity()\")\n if identity.class == System::DBNull\n nil\n else\n System::Convert.to_int32(identity)\n end\n end",
"def primary_key\n self.class.primary_key == :id ? id : @saved_attributes[self.class.primary_key]\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key\n \"#{quoted_table_name}.#{model_class.send :primary_key}\"\n end",
"def primary_key(table_name)\n pk = super\n\n if pk == CockroachDBAdapter::DEFAULT_PRIMARY_KEY\n nil\n else\n pk\n end\n end",
"def last_insert_id\n @connection.sql(\"SELECT @@IDENTITY\")\n unless @connection.cmd_fail?\n id = @connection.top_row_result.rows.first.first\n if id\n id = id.to_i\n id = nil if id == 0\n end\n else\n id = nil\n end\n id\n end",
"def primary_key_name\n @primary_key_name ||= @connection.schema[@table_name.to_s][:primary_key]\n end",
"def last_insert_id(table, sequence_name) #:nodoc:\n Integer(select_value(\"SELECT currval('#{sequence_name}')\"))\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def pk_from_insert_sql(sql)\n sql[/into\\s+([^\\(]*).*values\\s*\\(/i]\n primary_key($1.strip) if $1\n end",
"def insert_id\n @insert_id\n end",
"def to_param\n self.primary_key\n end",
"def insert_sql(sql, name = nil, pri_key = nil, id_value = nil, sequence_name = nil)\n unless pri_key\n table_ref = extract_table_ref_from_insert_sql(sql)\n pri_key = primary_key(table_ref) if table_ref\n end\n\n if pri_key\n select_value(\"#{sql} RETURNING #{quote_column_name(pri_key)}\")\n else\n super\n end\n end",
"def last_insert_row_id\n SQLite::API.last_insert_row_id( @handle )\n end",
"def generate_primary_key\n self[self.class.primary_key] ||= self.class.new_primary_key(10_000)\n end",
"def last_insert_id\n @connection.identity_val_local\n end",
"def primary_key\n 'id'\n end",
"def primary_key\n send( self.class.primary_key )\n end",
"def primary_key(value=nil)\n self.primary_key = value unless value.nil?\n \n @primary_key ||= :id\n end",
"def key\n get_primary_key_value_map[self.class.table_name]\n end",
"def association_primary_key(klass = nil)\n active_record.primary_key\n end",
"def last_insert_id(sequence_name)\n r = exec_query(\"SELECT currval('#{sequence_name}')\", 'SQL')\n Integer(r.rows.first.first)\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def primary_key\n @attributes[self.primary_key_attribute]\n end",
"def returning_clause(serial)\n \" RETURNING #{quote_name(serial.field)} INTO :insert_id\"\n end",
"def last_insert_id(conn, opts=OPTS)\n statement(conn) do |stmt|\n rs = stmt.executeQuery('SELECT last_insert_rowid()')\n rs.next\n rs.getLong(1)\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n super\n\n exec_query('SELECT @@IDENTITY AS id')\n end",
"def last_insert_id(conn, opts={})\n stmt = conn.createStatement\n begin\n sql = opts[:prepared] ? 'SELECT @@IDENTITY' : 'SELECT SCOPE_IDENTITY()'\n rs = log_yield(sql){stmt.executeQuery(sql)}\n rs.next\n rs.getInt(1)\n ensure\n stmt.close\n end\n end",
"def key\n stores_foreign_key? ? foreign_key : primary_key\n end",
"def primary_key\n self[:primary_key] ||= self[:model].primary_key\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def _insert\n ds = _insert_dataset\n if _use_insert_select?(ds) && (h = _insert_select_raw(ds))\n _save_set_values(h)\n nil\n else\n iid = _insert_raw(ds)\n # if we have a regular primary key and it's not set in @values,\n # we assume it's the last inserted id\n if (pk = autoincrementing_primary_key) && pk.is_a?(Symbol) && !(vals = @values)[pk]\n vals[pk] = iid\n end\n pk\n end\n end",
"def primary_key\n fail NotImplementedError\n end",
"def last_insert_id(conn, opts=OPTS)\n statement(conn) do |stmt|\n sql = opts[:prepared] ? 'SELECT @@IDENTITY' : 'SELECT SCOPE_IDENTITY()'\n rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)}\n rs.next\n rs.getLong(1)\n end\n end",
"def last_insert_id(conn, opts=OPTS)\n statement(conn) do |stmt|\n sql = opts[:prepared] ? 'SELECT @@IDENTITY' : 'SELECT SCOPE_IDENTITY()'\n rs = log_connection_yield(sql, conn){stmt.executeQuery(sql)}\n rs.next\n rs.getLong(1)\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def primary_key(table)\n t = dataset.send(:input_identifier, table)\n @primary_keys.fetch(t) do\n pk = fetch(\"SELECT RDB$FIELD_NAME FROM RDB$INDEX_SEGMENTS NATURAL JOIN RDB$RELATION_CONSTRAINTS WHERE RDB$CONSTRAINT_TYPE = 'PRIMARY KEY' AND RDB$RELATION_NAME = ?\", t).single_value\n @primary_keys[t] = dataset.send(:output_identifier, pk.rstrip) if pk\n end\n end",
"def id\n @properties[self.class.primary_key].to_i unless new_record?\n end",
"def find_primary_key(table)\n query = %q{\n SELECT column_name\n FROM information_schema.table_constraints tc\n INNER JOIN\n information_schema.key_column_usage kcu\n ON tc.constraint_name = kcu.constraint_name\n WHERE constraint_type = 'PRIMARY KEY'\n AND tc.table_catalog = 'reaktor'\n AND tc.table_schema = 'public'\n AND tc.table_name = ?\n ORDER BY ordinal_position;\n }\n\n sth = $dbh_pg.prepare(query)\n begin\n sth.execute(table.to_s)\n rescue\n $stderr.puts \"### Error in #{__FILE__} on line #{__LINE__}. See errorlog\"\n Log.write_log('error', \"Could not find primary key. Message: #{$!}. query: #{get_query_string(sth)}\")\n raise\n exit\n end\n pk = []\n while row = sth.fetch\n pk << row[0]\n end\n return pk\nend",
"def primary_key table\n return nil unless table\n table = self[table]\n pk = table.column_names.find{ |c| table[c].primary_key? }\n end",
"def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])\n exec_insert(to_sql(arel), name, binds)\n retval = last_inserted_id(nil)\n retval = id_value if retval == 0\n return retval\n end",
"def primary_key(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n value = _select_pk_ds.where_single_value(Sequel[:pg_class][:oid] => regclass_oid(table, opts))\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def id_for_update\n { self.class.primary_key.to_s => __id }\n end",
"def primary_key(table_name) #:nodoc:\r\n sql = \"SELECT COLUMN_NAME FROM (EXECUTE PROCEDURE sp_GetBestRowIdentifier( NULL, NULL, '#{table_name}', NULL, FALSE)) as gbri\"\r\n rs = select(sql)\r\n if !rs.nil? and !rs[0].nil?\r\n strip_or_self(rs[0]['COLUMN_NAME'])\r\n else\r\n nil\r\n end\r\n end",
"def primary_key\n cached_fetch(:primary_key){associated_class.primary_key || raise(Error, \"no primary key specified for #{associated_class.inspect}\")}\n end",
"def primary_key\n return @primary_key if @primary_key\n return 'id' if @id\n \n candidates = @columns.find_all { |col| col.unique }.map { |col| col.name }\n return 'id' if candidates.include? 'id'\n candidates.find { |c| c =~ eval(\"/^#{@name}.*id$/i\") } ||\n candidates.find { |c| c =~ eval(\"/^#{singularize}.*id$/i\") } ||\n candidates.find { |c| c =~ eval(\"/^#{pluralize}.*id$/i\") } ||\n candidates.first\n end",
"def inserted_id\n inserted_ids.first\n end",
"def primary_key_constraint_sql_fragment(_)\n 'PRIMARY KEY'\n end",
"def save\n result = DB.exec(\"INSERT INTO books (name, author) VALUES ('#{@name}', '#{@author}') RETURNING id;\")\n @id = result.first().fetch(\"id\").to_i\nend",
"def primary_key\n self[:primary_key] ||= associated_class.primary_key\n end",
"def original_primary_key\n if sourceRD.naturalKey == \"id\"\n \"original_id\"\n else\n sourceRD.naturalKey\n end\n end",
"def primary_key\n primary_key = attributes.find { |a| a.primary_key? }\n error(\"Unable to locate primary key for #{self.name}, attributes => #{attributes.collect { |a| a.name }}\") unless primary_key\n primary_key\n end",
"def primary_key_attribute\n :id\n end",
"def quoted_primary_key\n @quoted_primary_key ||= connection.quote_column_name(primary_key)\n end",
"def to_key\n new_record? ? nil : [ self.send(self.class.primary_key) ]\n end",
"def returning_id\n @sql_returning = ::MultiInsert::QueryBuilder.returning([:id])\n @returning_flat = true\n self\n end",
"def primary_key\n return @primary_key if @primary_key\n @primary_key = dimension_table.to_s.camelize.constantize.primary_key.to_sym\n rescue NameError => e\n ETL::Engine.logger.debug \"couldn't get primary_key from dimension model class, using default :id\"\n @primary_key = :id\n end",
"def primary_key_value(obj)\n obj.pk\n end",
"def primary_key(table_name, opts=OPTS)\n quoted_table = quote_schema_table(table_name)\n Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}\n out_identifier, in_identifier = identifier_convertors(opts)\n schema, table = schema_or_current_and_table(table_name, opts)\n dataset = metadata_dataset.\n select(:kc__column_name).\n from(Sequel.as(:information_schema__key_column_usage, 'kc')).\n join(Sequel.as(:information_schema__table_constraints, 'tc'),\n [:table_name, :table_schema, :constraint_name]).\n where(:kc__table_name => in_identifier.call(table),\n :kc__table_schema => schema,\n :tc__constraint_type => 'PRIMARY KEY')\n value = dataset.map do |row|\n out_identifier.call(row.delete(:column_name))\n end\n value = case value.size\n when 0 then nil\n when 1 then value.first\n else value\n end\n Sequel.synchronize{@primary_keys[quoted_table] = value}\n end",
"def last_insert_id(conn, opts={})\n nil\n end",
"def last_insert_id(conn, opts={})\n nil\n end",
"def last_insert_id(klass)\n # return last insert id\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n table = sql.split(\" \", 4)[2]\n super || last_insert_id(table, sequence_name || default_sequence_name(table, pk))\n end",
"def full_primary_key(klass)\n \"#{klass.quoted_table_name}.#{klass.quoted_primary_key}\"\n end",
"def primary_key_type\n \"integer PRIMARY KEY\"\n end",
"def default_primary_key\n model_name.foreign_key.to_s\n end",
"def primary_key_value\n send(self.class.primary_key)\n end",
"def to_key\nnew_record? ? nil : [ self.send(self.class.primary_key) ]\nend",
"def qualified_primary_key\n cached_fetch(:qualified_primary_key){qualify_cur(primary_key)}\n end",
"def primary_key\n @resource_options.fetch :primary_key, :\"#{singular_resource_name}_id\"\n end",
"def insert\n DATABASE.execute(\"INSERT INTO students (name, age, github) VALUES (?, ?, ?)\", @name, @age, @github)\n @id = DATABASE.last_insert_row_id\n end",
"def id\n read_attribute(self.class.primary_key)\n end",
"def id\n read_attribute(self.class.primary_key)\n end",
"def insert()\n query = \"INSERT INTO artists (art_name, art_photo) VALUES ($1, $2) RETURNING art_id\"\n @art_id = DbHelper.run_sql_return_first_row_column_value(query, [@art_name, @art_photo], 'art_id').to_i;\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def primary_key(table_name)\n stmt = @connection.primary_keys(native_case(table_name.to_s))\n result = stmt.fetch_all || []\n stmt.drop unless stmt.nil?\n result[0] && result[0][3]\n end",
"def id\n attributes[self.class.primary_key]\n end",
"def primary_key_sequence(table, opts=OPTS)\n quoted_table = quote_schema_table(table)\n Sequel.synchronize{return @primary_key_sequences[quoted_table] if @primary_key_sequences.has_key?(quoted_table)}\n cond = {Sequel[:t][:oid] => regclass_oid(table, opts)}\n value = if pks = _select_serial_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], pks[:sequence]))\n elsif pks = _select_custom_sequence_ds.first(cond)\n literal(SQL::QualifiedIdentifier.new(pks[:schema], LiteralString.new(pks[:sequence])))\n end\n\n Sequel.synchronize{@primary_key_sequences[quoted_table] = value} if value\n end",
"def primary_key(table_name)\n table_name = table_name.to_s\n\n @primary_keys ||= {}\n @primary_keys[table_name] ||= if @registration[:primary_key].present?\n @registration[:primary_key].call(@connection, table_name)\n else\n @connection.primary_key(table_name)\n end\n end"
] |
[
"0.8037107",
"0.8037107",
"0.7785286",
"0.76749134",
"0.7557144",
"0.7552393",
"0.7490722",
"0.74085295",
"0.73910064",
"0.73910064",
"0.73910064",
"0.7290003",
"0.7267558",
"0.7257927",
"0.7231899",
"0.7226765",
"0.72228605",
"0.72121257",
"0.7200792",
"0.71392804",
"0.713349",
"0.71058714",
"0.7081722",
"0.7081722",
"0.7078666",
"0.70726657",
"0.7064209",
"0.7052648",
"0.7052558",
"0.7052558",
"0.70486814",
"0.7044583",
"0.7016705",
"0.70128375",
"0.7010586",
"0.70029",
"0.6987416",
"0.698403",
"0.6909567",
"0.6846428",
"0.6838702",
"0.68159205",
"0.6790847",
"0.6790318",
"0.67851585",
"0.6769333",
"0.67453504",
"0.6726429",
"0.6719869",
"0.6719409",
"0.67115945",
"0.67115945",
"0.67006224",
"0.666792",
"0.666792",
"0.66678214",
"0.66678214",
"0.6661558",
"0.6651481",
"0.6647675",
"0.6632858",
"0.66300803",
"0.662353",
"0.6622535",
"0.6622143",
"0.66169596",
"0.6609725",
"0.6601476",
"0.659863",
"0.6595215",
"0.6589193",
"0.6580815",
"0.65680546",
"0.6545072",
"0.65386176",
"0.6529045",
"0.6519027",
"0.65145105",
"0.64674634",
"0.6462866",
"0.6451762",
"0.6451762",
"0.64482075",
"0.64315546",
"0.64287394",
"0.64281577",
"0.64273363",
"0.63149357",
"0.6310703",
"0.62892133",
"0.6288388",
"0.62710917",
"0.62665033",
"0.62665033",
"0.6259369",
"0.6251434",
"0.6250028",
"0.6242317",
"0.6240564",
"0.6240541"
] |
0.7425948
|
7
|
Support OVERRIDING SYSTEM|USER VALUE in insert statements
|
def insert_override_sql(sql)
  # Append the PostgreSQL OVERRIDING clause when the dataset was cloned
  # with an :override option (:system or :user); otherwise emit nothing.
  case opts[:override]
  when :system
    sql << " OVERRIDING SYSTEM VALUE"
  when :user
    sql << " OVERRIDING USER VALUE"
  end
end
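
A brief usage sketch (hedged: the table and column names are hypothetical, and it assumes the Sequel PostgreSQL adapter, where overriding_system_value/overriding_user_value clone the dataset with the :override option this hook reads):

# Assumes a PostgreSQL 10+ identity column, e.g.:
#   CREATE TABLE items (id int GENERATED ALWAYS AS IDENTITY, name text)

# Force an explicit id past the GENERATED ALWAYS restriction:
DB[:items].overriding_system_value.insert(:id=>1, :name=>'a')
# INSERT INTO "items" ("id", "name") OVERRIDING SYSTEM VALUE VALUES (1, 'a')

# Copy rows while letting the identity column regenerate, ignoring source ids:
DB[:items].overriding_user_value.insert([:id, :name], DB[:old_items].select(:id, :name))
# INSERT INTO "items" ("id", "name") OVERRIDING USER VALUE SELECT "id", "name" FROM "old_items"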
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def login_failure(user=nil)\n blank(\n la_ip: ::GDO::Net::GDT_IP.current,\n la_user: user == nil ? nil : user.get_id,\n ).insert\n end",
"def add_user(db, user_name)\r\nadd_user = '\r\n\r\n\tINSERT INTO users \r\n\r\n\t(name, cache, expected_income, actual_income, expenses, month)\r\n\tVALUES (?,0,0,0,0,1)'\r\n\tdb.execute(add_user, [user_name]) \r\nend",
"def user_side_create(user)\n user_data = user.for_db\n row_data = map_to_row!(user_data)\n execute_sql(:create, :user) { table.insert(row_data) }\n end",
"def user(value)\n merge(leuser: value.to_s)\n end",
"def insert_key_data_for_user(d)\n if d['name'] == 'pivotal' && config[:skip_pivotal]\n ui.warn \"Skipping pivotal user.\"\n return\n end\n ui.msg \"Updating key data for user[#{d['name']}]\"\n new_id = if config[:skip_ids]\n db[:users].where(:username => d['name']).first[:id]\n else\n d['id']\n end\n Chef::Log.debug(\"Found user id for #{d['name']}: #{new_id}\")\n upsert_key_record(key_record_for_db(d, new_id))\n end",
"def user(value)\n merge(gadruser: value.to_s)\n end",
"def user(value)\n merge(grcuser: value.to_s)\n end",
"def user_assign(value)\n forget_value(\"user\")\n assign value,\"user\"\n end",
"def user(value)\n merge(rvuser: value.to_s)\n end",
"def define_user\n case new_resource.im_install_mode\n when 'admin'\n user = if new_resource.user.nil?\n 'root'\n else\n unless im_user_exists_unix?(new_resource.user)\n Chef::Log.fatal \"User Name provided #{new_resource.user}, does not exist\"\n raise \"User Verification 1: User Name provided #{new_resource.user}, does not exist\"\n end\n new_resource.user\n end\n user\n when 'nonAdmin', 'group'\n user = if new_resource.user.nil?\n Chef::Log.fatal \"User Name not provided! Please provide the user that should be used to install your product\"\n raise \"User Name not provided! Please provide the user that should be used to install your product\"\n else\n unless im_user_exists_unix?(new_resource.user)\n Chef::Log.fatal \"User Name provided #{new_resource.user}, does not exist\"\n raise \"User Verification 1: User Name provided #{new_resource.user}, does not exist\"\n end\n new_resource.user\n end\n user\n end\nend",
"def define_user\n case new_resource.im_install_mode\n when 'admin'\n user = if new_resource.user.nil?\n 'root'\n else\n unless im_user_exists_unix?(new_resource.user)\n Chef::Log.fatal \"User Name provided #{new_resource.user}, does not exist\"\n raise \"User Verification 1: User Name provided #{new_resource.user}, does not exist\"\n end\n new_resource.user\n end\n user\n when 'nonAdmin', 'group'\n user = if new_resource.user.nil?\n Chef::Log.fatal \"User Name not provided! Please provide the user that should be used to install your product\"\n raise \"User Name not provided! Please provide the user that should be used to install your product\"\n else\n unless im_user_exists_unix?(new_resource.user)\n Chef::Log.fatal \"User Name provided #{new_resource.user}, does not exist\"\n raise \"User Verification 1: User Name provided #{new_resource.user}, does not exist\"\n end\n new_resource.user\n end\n user\n end\nend",
"def insert_user(user, pass)\n print_status(\"Attempting New User Insertion for '#{user}' with password '#{pass}'.....\")\n begin\n print_caution(\"Before Update: \")\n get_passwords\n # Insert to mysql.user where shit is stored\n query = @db_connection.query(\"INSERT INTO mysql.user (Host,User,Password,Select_priv,Insert_priv,Update_priv,Delete_priv,Create_priv,Drop_priv,Reload_priv,Shutdown_priv,Process_priv,File_priv,Grant_priv,References_priv,Index_priv,Alter_priv,Show_db_priv,Super_priv,Create_tmp_table_priv,Lock_tables_priv,Execute_priv,Repl_slave_priv,Repl_client_priv,Create_view_priv,Show_view_priv,Create_routine_priv,Alter_routine_priv,Create_user_priv,ssl_type,ssl_cipher,x509_issuer,x509_subject,max_questions,max_updates,max_connections,max_user_connections) VALUES('%','#{user}',PASSWORD('#{pass}'),'Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y');\")\n # Insert into mysql.db for GRANT overrides....working?\n query = @db_connection.query(\"INSERT INTO mysql.db (Host,Db,User,Select_priv,Insert_priv,Update_priv,Delete_priv,Create_priv,Drop_priv,Grant_priv,References_priv,Index_priv,Alter_priv,Create_tmp_table_priv,Lock_tables_priv,Create_view_priv,Show_view_priv,Create_routine_priv,Alter_routine_priv,Execute_priv) VALUES('%','test','#{user}','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y');\")\n query = @db_connection.query('FLUSH PRIVILEGES;')\n print_status(\"After Update: \")\n get_passwords\n print_line(\"\")\n print_status(\"Try logging in with new account credentials to confirm success...\")\n print_caution(\"If issues found, its likely do to GRANT Insertion not working properly!\")\n print_caution(\"Try CREATE method if this is the case....\")\n rescue Mysql::Error => e\n print_error(\"Problem with New User Insertion!\")\n print_error(\"#{e}\")\n end\n end",
"def before_create_save(record)\n do_save_logic(record)\n record.usr_id = session[:usr_id]\n end",
"def sysuser_\n RequestStore[:current_user] = User.system_user\nend",
"def before_create_save(record)\n record.usr_id = session[:usr_id]\n end",
"def before_create_save(record)\n record.usr_id = session[:usr_id]\n end",
"def before_create_save(record)\n record.usr_id = session[:usr_id]\n end",
"def add user, pin = nil\n command = aqhbci <<-CMD\n adduser \\\n --tokentype=#{user.tokentype} \\\n --context=#{user.context} \\\n --bank=#{user.bank} \\\n --user=#{user.userid} \\\n --server=#{user.server} \\\n --username=#{user.name} \\\n --hbciversion=#{user.hbciversion}\n CMD\n stdin, stdout, stderr, wait_thr = Open3.popen3(command.strip)\n success = wait_thr.value.success?\n\n if pin && success\n with_secure_pin user, pin do |f|\n sysid_command = aqhbci(\"getsysid --user=#{user.userid}\", \"--pinfile=#{f.path.strip}\").strip\n stdin, stdout, stderr, wait_thr = Open3.popen3(sysid_command)\n wait_thr.join\n success = success && wait_thr.value.success?\n end\n end\n return success\n end",
"def generate_user_id\n # ap(generate_user_id: {})\n @user_data.insert({}).to_s\n end",
"def _user_add f = {}\n\t\tf[:salt] \t\t= _random_string 5\n\t\tf[:created] \t= Time.now\n\n\t\trequire \"digest/sha1\"\n\t\tf[:pawd] \t\t= Digest::SHA1.hexdigest(f[:pawd] + f[:salt])\n\n\t\t_throw L[:'the user is existing'] if _user? f[:name]\n\n\t\tDB[:_user].insert(f)\n\t\tuid = DB[:_user].filter(:name => f[:name]).get(:uid)\n\t\tuid ? uid : 0\n\tend",
"def identity_create\n # Potential threat of overlap\n identity = Identity.create(uid:rand(100000000..9999999999), provider: 'registration')\n identity.user_id = resource.id\n identity.name = params['user']['name'] #Looks very ugly\n identity.email = resource.email\n identity.save\n end",
"def system_user\n User.find_by('first_name = ? AND last_name = ?', 'system', 'user')\n end",
"def user_assign(value)\n forget_value(\"user\")\n assign(value,\"user\")\n end",
"def new_user?(seminar,user)\n user = $app_ids[seminar][user].to_s\n user[0] == 48\nend",
"def user(value)\n merge(user: value.to_s)\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def post_insert(sql, name, pk, id_value, sequence_name)\n if @iiEnabled\n begin\n @connection.do(enable_identity_insert(@iiTable, false))\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned off\"\n end\n end\n end",
"def create\n @systemuser = Systemuser.new(params[:systemuser])\n @systemuser.name = params[:systemuser][:name]\n \n if params[:systemuser][:group_id] == 4\n @systemuser.ro = 1\n else\n @systemuser.ro = 0\n end\n \n @systemuser.enabled = 1\n\n respond_to do |format|\n if @systemuser.save\n format.html { redirect_to systemusers_path, notice: 'Wizard user was successfully created.' }\n format.json { render json: @systemuser, status: :created, location: @systemuser }\n else\n format.html { render action: \"new\" }\n format.json { render json: @systemuser.errors, status: :unprocessable_entity }\n end\n end\n end",
"def system?\n id == User::SYSTEM_USER_ID\n end",
"def system?\n id == User::SYSTEM_USER_ID\n end",
"def set_sys_user\n @sys_user = Sys::User.find(params[:id])\n end",
"def set_sys_user\n @sys_user = Sys::User.find(params[:id])\n end",
"def set_UserID2(value)\n set_input(\"UserID2\", value)\n end",
"def get_system_user\n User.where('first_name = ? AND last_name = ?', 'system', 'user').first\n end",
"def user_assign(value)\n forget_value('user')\n assign value, 'user'\n end",
"def register_ip(user_ip)\r\n # Checking database for existing record\r\n {\r\n :ip => user_ip,\r\n :time => Time.now # Check date format compability\r\n }\r\n # Inserting or updating a record\r\n end",
"def impersonate_sql_user(service_instance,verbose)\n\n # Print the current user\n blah = session.sys.config.getuid if verbose == \"true\"\n print_status(\"Current user: #{blah}\") if verbose == \"true\"\n\n # Define target user/pid\n targetuser = \"\"\n targetpid = \"\"\n\n # Identify SQL Server service processes\n print_status(\"Searching for sqlservr.exe processes not running as SYSTEM...\")\n session.sys.process.get_processes().each do |x|\n\n # Search for all sqlservr.exe processes\n if ( x['name'] == \"sqlservr.exe\" and x['user'] != \"NT AUTHORITY\\\\SYSTEM\")\n\n # Found one\n print_good(\"Found \\\"#{x['user']}\\\" running sqlservr.exe process #{x['pid']}\")\n\n # Define target pid / user\n if x['user'] =~ /NT SERVICE/ then\n if x['user'] == \"NT SERVICE\\\\MSSQL$#{service_instance}\" then\n targetuser = \"NT SERVICE\\\\MSSQL$#{service_instance}\"\n targetpid = x['pid']\n end\n else\n targetuser = x['user']\n targetpid = x['pid']\n end\n end\n end\n\n # Attempt to migrate to target sqlservr.exe process\n if targetuser == \"\" then\n print_error(\"Unable to find sqlservr.exe process not running as SYSTEM\")\n return 0\n else\n begin\n # Migrating works, but I can't rev2self after its complete\n print_status(\"Attempting to migrate to process #{targetpid}...\")\n session.core.migrate(targetpid.to_i)\n\n # Statusing\n blah = session.sys.config.getuid if verbose == \"true\"\n print_status(\"Current user: #{blah}\") if verbose == \"true\"\n print_good(\"Successfully migrated to sqlservr.exe process #{targetpid}\")\n return 1\n rescue\n print_error(\"Unable to migrate to sqlservr.exe process #{targetpid}\")\n return 0\n end\n end\n end",
"def find_or_create_user(user_descr, keys = nil)\n debug \"central find_or_create_user: '#{user_descr.inspect}'\"\n raise 'Method not implemented because the Central Manager just need to pass the same requisition to the other' \\\n ' brokers and create the concatenated results'\n end",
"def add_user(db, user_name)\n db.execute(\"INSERT INTO users (user_name) VALUES (?),\" [user_name])\nend",
"def system_user\n # By convention, the first user is always the system user.\n User.find_by_id(1)\n end",
"def insertConUser(idUser,idChannel)\n \n begin\n query = \"INSERT INTO `#{DB_NAME}`.`#{USER_LIST_IN_CHAN_TABBLE}` (`user_id_user`, `channel_id_channel`) \n VALUES (?, ?)\"\n \n self.connect unless self.connected? # => connect to the DB server if not connected\n \n sth = @dbh.prepare(query)\n\n sth.execute(idUser,idChannel)\n sth.finish\n rescue DBI::DatabaseError => e\n puts \"An error occurred\"\n puts \"Error code: #{e.err}\"\n puts \"Error message: #{e.errstr}\"\n @dbh.rollback\n rescue Exception => e \n puts \"error!!! -> : #{e.to_s}\"\n \n ensure\n # disconnect from server\n @dbh.disconnect if @connected\n @connected=false\n end\n end",
"def save_user ops\n ops.each do |op|\n username = get_technician_name(self.jid)\n op.associate(:technician, username)\n end\n end",
"def addUser(stat,typ,email,pwhash)\n @conn.exec_prepared(\"add_user\",[stat,typ,email,pwhash])\n end",
"def givemesystem\n\n # Statusing\n print_status(\"Checking if user is SYSTEM...\")\n\n # Check if user is system\n if session.sys.config.getuid == \"NT AUTHORITY\\\\SYSTEM\"\n print_status(\"User is SYSTEM\")\n return 1\n else\n # Attempt to get LocalSystem privileges\n print_error(\"User is NOT SYSTEM\")\n print_status(\"Attempting to get SYSTEM privileges...\")\n system_status = session.priv.getsystem\n if system_status[0]\n print_good(\"Success!, user is now SYSTEM\")\n return 1\n else\n print_error(\"Unable to obtained SYSTEM privileges\")\n return 0\n end\n end\n end",
"def user_check(resource)\n return true unless self[:name] == \"user\"\n return true unless self[:unless_system_user]\n\n resource[:audit] = :uid\n\n return false if system_users.include?(resource[:name])\n\n current_values = resource.retrieve_resource\n current_values[resource.property(:uid)] > self[:unless_system_user]\n end",
"def current_user_for_segmentation\n User.first_or_create!(first_name: \"John\", last_name: \"Doe\")\n end",
"def uid=(p0) end",
"def substitute_current_user(sql)\n sql\n .gsub(':current_user_preference', current_user&.user_preference&.id&.to_s || 'NULL')\n .gsub(':current_user', current_user&.id&.to_s || 'NULL')\n end",
"def add_system(db, system)\r\n\tdb.execute(\"INSERT OR IGNORE INTO systems (name) VALUES (?)\", [system])\r\nend",
"def create_user(db, user_name)\n\tdb.execute(\"INSERT INTO users (user_name) VALUES (?)\", [user_name])\n\tputs \"added new user\"\nend",
"def create_user(resource)\n session = Puppet::NetDev::CE::Device.session\n\n set_user_xml = '<rpc><edit-config><target><running/></target><default-operation>merge</default-operation><error-option>rollback-on-error</error-option><config><aaa xmlns=\"http://www.huawei.com/netconf/vrp\" content-version=\"1.0\" format-version=\"1.0\"><lam><users><user operation=\"merge\"><userName>' + (resource[:name]).to_s + '</userName>'\n\n if resource[:password]\n set_user_xml += '<password>' + (resource[:password]).to_s + '</password>'\n end\n\n set_user_xml += '</user></users></lam></aaa></config></edit-config></rpc>'\n\n session.rpc.do_config(set_user_xml)\n end",
"def set_transaction_variables\n sessionFactory = HibernateUtil.getSessionFactory(\"serverconf\")\n dialect = sessionFactory.getDialect\n if dialect.class == Java::EeRiaXroadCommonDb::CustomPostgreSQLDialect\n # If we are running on top of Postgres, the name of the logged-in\n # user must be made available within the transaction, for use\n # when updating the history table.\n # The value of user_name will go out of scope when the transaction\n # ends.\n query = @session.createSQLQuery(\n \"SET LOCAL xroad.user_name='#{current_user.name}'\")\n query.executeUpdate()\n end\n end",
"def sys_user_params\n params.require(:sys_user).permit(:login_name, :name)\n end",
"def pre_insert(sql, name, pk, id_value, sequence_name)\n @iiTable = get_table_name(sql)\n @iiCol = get_autounique_column(@iiTable)\n @iiEnabled = false\n\n if @iiCol != nil\n if query_contains_autounique_col(sql, @iiCol)\n begin\n @connection.do(enable_identity_insert(@iiTable, true))\n @iiEnabled = true\n rescue Exception => e\n raise ActiveRecordError, \"IDENTITY_INSERT could not be turned on\"\n end\n end\n end\n end",
"def set_default_exuid\n self.exuid ||=\n Base32.encode(SecureRandom.random_bytes(16)).downcase.sub(/=*$/, '')\n end",
"def set_UserID1(value)\n set_input(\"UserID1\", value)\n end",
"def add_perms_to_user (user, perms)\n perms.each { |v|\n exec_sql \"INSERT INTO u_apis_perms(u_api_id, u_perm_id) VALUES('#{user}', '#{v}')\"\n }\nend",
"def get_user_id(user)\n if user.id < 10\n return \"USR0\" + user.id.to_s\n else\n return \"USR\" + user.id.to_s\n end\n end",
"def when_current_user_is(user)\n current_user =\n case user\n when Symbol then create :user, user\n when User then user\n when nil then nil\n else raise ArgumentError, 'Invalid user type'\n end\n set_current_user current_user\nend",
"def qreg(firstname,lastname,username,devmake,devmodel,devmac,devagent)\n conn = PGconn.connect(\"localhost\",5432,'','',CONFIG['setup']['db']['dbname'],CONFIG['setup']['db']['user'],CONFIG['setup']['db']['password'])\n begin\n t = Time.now \n insert_time = t.strftime(\"%Y-%m-%d %H:%M:%S\")\n $ins = conn.exec(\"insert into maestro_reg values ('#{firstname}','#{lastname}','#{username}','#{devmake}','#{devmodel}','#{devmac}','#{insert_time}','#{devagent}');\");\n rescue\n puts \"insert failed\"\n else\n puts \"insert ok\"\n # // puts \"'#{firstname}','#{lastname}','#{username}','#{devmake}','#{devmodel}','#{devmac}','#{devprob}','#{insert_time}','#{devagent}'\"\n end\n conn.close()\nend",
"def create_developer_user(developer_instance, user_params)\n user = User.new(user_params)\n developer_instance.transaction do\n user.add_role(:developer)\n user.save!\n developer_instance.user = user\n developer_instance.save!\n return {\n developer: developer_instance,\n status: true\n }\n end\n {}\n end",
"def created_by=(user)\n write_attribute(:created_by, user.id) unless user.nil? or user.id.nil?\n end",
"def storeUser(param) \n @Handle.execute(\"INSERT INTO User(email,name,surname,nickname)\" +\n \" values ('#{param.Email}','#{param.Name}','#{param.Surname}','#{param.Nickname}')\")\n end",
"def genrateuid\n\n res1=@con.query(\"select uid from user order by uid desc limit 1\")\n row10=res1.fetch_row\n if row10.nil?\n @id=100\n \n else\n no=row10[0].to_i\n @id=no+1\n end\n\n res2=@con.prepare(\"insert into user values(?,?)\")\n res2.execute(@id,@name)\n end",
"def set_system_user\n @system_user = SystemUser.find(params[:id])\n end",
"def admin_create_work_user_ivars(allow_nil_user=true)\n if @curr_user.admin?\n work_user_id = (params[:work_user] || session[:work_user_id] || 0).to_i\n @work_user = if work_user_id > 0\n User.find(work_user_id)\n elsif allow_nil_user\n nil\n else\n User.find(:first, :order => 'name asc')\n end\n session[:work_user_id] = @work_user ? @work_user.id : nil\n else\n @work_user = session[:work_user_id] = nil\n end\n end",
"def create_user\n user new_resource.user do\n comment \"Service user for #{new_resource.name}\"\n gid new_resource.group if new_resource.group\n home new_resource.home\n shell '/bin/false'\n system true\n uid new_resource.uid\n end\n end",
"def user_column\n IdMethods::USER_COLUMN\n end",
"def register_user(email, username, password_digest, rank, username_downcase)\n $db.execute(\"INSERT INTO users (email, username, password_digest, rank, username_downcase) VALUES (?, ?, ?, ?, ?)\", email, username, password_digest, rank, username_downcase)\nend",
"def user_type; end",
"def when_current_user_is(user)\n current_user =\n case user\n when :anyone, :anybody then create(:user)\n when Symbol then create(:user, user)\n when User, nil then user\n else raise ArgumentError, 'Invalid user type'\n end\n set_current_user(current_user)\nend",
"def user_id; 1; end",
"def user_provider=(_arg0); end",
"def user_id=(value)\n if value == @defaults['userId']\n @values.delete 'userId' if @values.key? 'userId'\n else\n @values['userId'] = value\n end\n end",
"def test_add_mixedup_user\n num_users0 = count_users\n proto = User.new('oauth_id' => '566213105', 'name' => 'Avilay Parekh')\n proto.oauth_id = SecureRandom.uuid\n new_user = @ds.add_or_get_user(proto)\n user = get_user(proto.oauth_id)\n assert(user === new_user)\n assert_equal(num_users0 + 1, count_users)\n assert_equal(user.id, @ds.user_id)\n delete_user(user.oauth_id)\n end",
"def set_create_user_fields\n user_id = AuditModule.get_current_user.uid\n self.created_by = user_id\n self.updated_by = user_id\n end",
"def register_user\n user_data = @view.display_registration \n new_user = User.new(user_data[0], user_data[1], user_data[2], user_data[3]) \n access = user_data[2].to_i\n case access\n when 1\n @client.save_info(new_user)\n login_user\n when 2\n @seller.save_info(new_user)\n login_user\n when 10\n @admin.save_info(new_user)\n login_user \n end \n end",
"def running_as_normaluser?\n\tKitchenplan::Log.debug \"#{self.class} : Running as superuser? UID = #{Process.uid} != 0?\"\n\tProcess.uid != 0\n end",
"def running_as_normaluser?\n\tKitchenplan::Log.debug \"#{self.class} : Running as superuser? UID = #{Process.uid} != 0?\"\n\tProcess.uid != 0\n end",
"def create_user(db, first_name, last_name, age, body_weight, gender)\n\tdb.execute(\"INSERT INTO athlete (first_name, last_name, age, body_weight, gender, gym_trips) VALUES (?,?,?,?,?,?)\", [first_name, last_name, age, body_weight, gender, 0 ])\nend",
"def os_user\n @os_user\n end",
"def prepareInsertUserStatement\n @conn.prepare(\"insert_user\", \"insert into users (id, name) values ($1, $2)\")\n end",
"def insertUser(email,password,lastname,firstname,birthdate,uuid,fbid,access_token,access_token_expiration)\n begin\n success= false\n query = \"INSERT INTO `#{DB_NAME}`.`#{USER_TABLE}` (`email`, `password`, `last_name`, \n `first_name`, `birthdate`, `uuid`,\n `facebook_id`, `access_token`, \n `access_token_expiration`) \n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)\"\n \n self.connect unless self.connected? # => connect to the DB server if not connected\n sth =@dbh.prepare(query)\n sth.execute(email,password,lastname,firstname,birthdate,uuid,fbid,access_token,access_token_expiration)\n sth.finish\n @dbh.commit\n \n success = true\n rescue DBI::DatabaseError => e\n puts \"An error occurred\"\n puts \"Error code: #{e.err}\"\n puts \"Error message: #{e.errstr}\"\n @dbh.rollback\n rescue Exception => e \n puts \"error!!! -> #{e.to_s}\"\n ensure\n # disconnect from server\n @dbh.disconnect if @connected\n @connected=false\n end\n \n return success\n \n end",
"def overriding_user_value\n clone(:override=>:user)\n end",
"def test_get_mixedup_user\n proto = User.new('oauth_id' => '566213105', 'name' => 'Avilay Parekh') \n num_users0 = count_users\n proto.name = 'I An Other'\n assert_raises(RuntimeError) { @ds.add_or_get_user(proto) }\n assert_equal(num_users0, count_users)\n assert_nil(@ds.user_id) \n proto.name = 'Avilay Parekh'\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def add_user_rights_row\n @organization = Organization.find(params[:organization_id])\n @new_ur_identity = Identity.find_or_create(params[:new_ur_identity_id])\n @user_rights = user_rights(@organization.id)\n end",
"def create_user(details)\n puts \"Checking for #{details[:user_name]} user...\"\n\n db_user = User.where(:user_name => details[:user_name]).first\n\n if db_user.blank?\n db_user = User.create(details)\n db_user.creator_id = db_user.id\n db_user.updater_id = db_user.id\n\n db_user.skip_user_name_exclusion_list = true\n\n db_user.save!\n\n db_user.skip_user_name_exclusion_list = false\n\n puts \"... #{details[:user_name]} user created.\"\n else\n puts \"... #{details[:user_name]} user already exists.\"\n end\nend",
"def create_user(db, name, weight, exercise, progression)\n db.execute(\"INSERT INTO user (name, weight, exercise, progression) VALUES (?, ?, ?, ?)\", [name, weight, exercise, progression])\nend",
"def user_id\n raise \"Implement in Client or Advocate\"\n end",
"def create_user (db, first_name, last_name, age, location, social_media,time,date)\n\tdb.execute(\"INSERT INTO users (first_name, last_name, age, location, social_media, time, date) values (?,?,?,?,?,?,?)\",[first_name,last_name,age,location,social_media,time,date])\nend",
"def user\n @user ||= User.find_by_user_key('system')\n return @user if @user\n @user = User.create!(Devise.authentication_keys.first => 'system')\n end",
"def prepareInsertUserStatement(tableName)\n @conn.prepare(\"insert_user\", \"insert into #{tableName} (course_id, name, slug, course_site, instructors, partners, homepage, counter, url_photo, summary) values ($1, $2, $3, $4, $5, $6, $7, $8, $9,$10)\")\n end",
"def create_user(db, first_name, last_name)\n db.execute(\"INSERT INTO users (first_name, last_name) VALUES (?, ?)\", [first_name, last_name])\nend",
"def canonical_user_identities(opennebula_user)\n fail 'User object not provided!' unless opennebula_user\n identities = []\n\n identities << opennebula_user['TEMPLATE/KRB_PRINCIPAL']\n identities << opennebula_user['TEMPLATE/X509_DN'].split('|') if opennebula_user['TEMPLATE/X509_DN']\n identities << opennebula_user['NAME']\n identities << opennebula_user['ID'].to_s\n identities.flatten!\n identities.compact!\n\n Egi::Fedcloud::Vmhound::Log.debug \"[#{self.class}] Assigning identities #{identities.inspect} \" \\\n \"to user #{opennebula_user['ID'].inspect}\"\n identities\n end",
"def add_user(user)\r\n\t\tsend(\"ADC\",\"FL N=#{user.name} F=#{user.safenick}\")\r\n\t\tsend(\"ADC\",\"AL N=#{user.name} F=#{user.safenick}\")\r\n\t\t## XXX changes recorded locally by ADD msg back\r\n\t\treturn 1\r\n\tend",
"def sys_user_params\n params.require(:sys_user).permit(:login_name, :first_name, :last_name, :full_name, :email, :phone, :locked_flag, :locked_time, :locked_until_at, :last_login_at, :status)\n end",
"def fill_up_user_create\n # if self.class.column_names.include? 'updated_by_id'\n # if UserInfo.current_user_id\n # # if updated_by_id && updated_by_id != UserInfo.current_user_id\n # # logger.info \"NOTICE create - self.updated_by_id is different: #{updated_by_id}/#{UserInfo.current_user_id}\"\n # # end\n # # self.updated_by_id = UserInfo.current_user_id\n # end\n # end\n\n # return true if !self.class.column_names.include? 'created_by_id'\n\n return true if !UserInfo.current_user_id\n\n # if created_by_id && created_by_id != UserInfo.current_user_id\n # logger.info \"NOTICE create - self.created_by_id is different: #{created_by_id}/#{UserInfo.current_user_id}\"\n # end\n # self.created_by_id = UserInfo.current_user_id\n true\n end",
"def edit_or_create_user(struct)\n struct.remapkeys!\n if struct.has_key? :user and struct.has_key? :pass\n rt = RT_Client.new(:user => struct[:user], :pass => struct[:pass])\n struct.delete(:user)\n struct.delete(:pass)\n else\n rt = RT_Client.new\n end\n val = rt.edit_or_create_user(struct)\n rt = nil\n val\n end",
"def assign_token(user)\n\t\t0\n\tend"
] |
[
"0.5437121",
"0.54056925",
"0.53587854",
"0.5357034",
"0.53318524",
"0.5330959",
"0.5315511",
"0.5309238",
"0.52904433",
"0.5285804",
"0.5285804",
"0.5282915",
"0.5274498",
"0.52641934",
"0.5258501",
"0.5258501",
"0.5258501",
"0.5237348",
"0.5202416",
"0.5189946",
"0.518118",
"0.5169351",
"0.5167947",
"0.5118656",
"0.51083136",
"0.51045805",
"0.51045805",
"0.5096902",
"0.5076608",
"0.5076608",
"0.50706035",
"0.5050331",
"0.50477266",
"0.5047029",
"0.50355816",
"0.5019012",
"0.5015937",
"0.5013773",
"0.5012875",
"0.50048256",
"0.50035334",
"0.49921378",
"0.49851748",
"0.4983401",
"0.49827614",
"0.49821284",
"0.49711907",
"0.4964694",
"0.49618515",
"0.49607357",
"0.49603617",
"0.49592534",
"0.49584454",
"0.495673",
"0.49531874",
"0.4950953",
"0.49504519",
"0.4950257",
"0.49471325",
"0.49471268",
"0.4946229",
"0.49423036",
"0.4941647",
"0.49355307",
"0.49345976",
"0.49278927",
"0.49264002",
"0.49220315",
"0.49217784",
"0.49192",
"0.49186462",
"0.49171326",
"0.4916714",
"0.49158478",
"0.49092245",
"0.49063897",
"0.490315",
"0.49015808",
"0.49015808",
"0.48996174",
"0.4896341",
"0.48933157",
"0.48893744",
"0.48875508",
"0.48860544",
"0.48752725",
"0.48684654",
"0.4867382",
"0.48664254",
"0.48643824",
"0.48606077",
"0.4856866",
"0.48568508",
"0.48560423",
"0.4855848",
"0.48515365",
"0.48507947",
"0.483949",
"0.48375845",
"0.48278812"
] |
0.6451863
|
0
|
For multiple-table support, PostgreSQL requires at least two FROM tables; joins are also allowed.
|
def join_from_sql(type, sql)
  # All FROM tables after the first (the update/delete target) are emitted
  # after the given keyword (:FROM for UPDATE, :USING for DELETE), followed
  # by any explicit JOIN clauses.
  if (from = @opts[:from][1..-1]).empty?
    raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join]
  else
    sql << ' ' << type.to_s << ' '
    source_list_append(sql, from)
    select_join_sql(sql)
  end
end
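
A sketch of what this enables on the dataset side (hypothetical table names; it assumes Sequel's PostgreSQL adapter, which passes :FROM to this hook for updates and :USING for deletes, so the join condition lands in the WHERE clause):

# Two FROM tables: the first is the update/delete target, the rest follow the keyword.
DB.from(:items, :prices).where(Sequel[:items][:id]=>Sequel[:prices][:item_id]).update(:flagged=>true)
# UPDATE "items" SET "flagged" = true FROM "prices" WHERE ("items"."id" = "prices"."item_id")

DB.from(:items, :prices).where(Sequel[:items][:id]=>Sequel[:prices][:item_id]).delete
# DELETE FROM "items" USING "prices" WHERE ("items"."id" = "prices"."item_id")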
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def join_tables(db)\n db.execute(\"SELECT users.user_name, platform.platform_name, music.artist, music.song, music.explicit_lyrics FROM music JOIN users ON music.user_id = users.id JOIN platform ON music.platform_id = platform.id\")\nend",
"def join(*args)\n\t\tif args.count > 1\n\t\t\tjoins = args.map { |arg| \"INNER JOIN #{arg} ON #{arg}.#{table}_id = #{table}.id\"}.join(\" \")\n\t\t\trows = connection.execute <<-SQL \n\t\t\t\tSELECT * FROM #{table} #{joins};\n\t\t\tSQL\n\t\telse\n\t\t\tcase args.first\n\t\t\twhen String\n\t\t\t\trows = connection.execute <<-SQL\n\t\t\t\t\tSELECT * FROM #{table} #{BlocRecord::Utility.sql_strings(args.first)};\n\t\t\t\tSQL\n\t\t\twhen Symbol\n\t\t\t\trows = connection.execute <<-SQL \n\t\t\t\t\tSELECT * FROM #{table}\n\t\t\t\t\tINNER JOIN #{args.first} ON #{arg.first}.#{table}_id = #{table}.id;\n\t\t\t\tSQL\n\t\t\twhen Hash \n\t\t\t\t#extract the options from the hash\n\t\t\t\tsecond_table = args[0].keys.first \n\t\t\t\tthird_table = args[0].keys.first\n\t\t\t\trows = connection.execute <<-SQL \n\t\t\t\t\tSELECT * FROM #{table}\n\t\t\t\t\tINNER JOIN #{second_table} ON #{second_table}.#{table}_id = #{table}.id\n\t\t\t\t\tINNER JOIN #{third_table} ON #{third_table}.#{second_table}_id = #{second_table}.id;\n\t\t\t\tSQL\n\n\t\t\tend \n\t\tend\n\t\trows_to_array(rows)\n\tend",
"def join_from_sql(type, sql)\n if(from = @opts[:from][1..-1]).empty?\n raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join]\n else\n sql << SPACE << type.to_s << SPACE\n source_list_append(sql, from)\n select_join_sql(sql)\n end\n end",
"def relation_method\n :join\n end",
"def test_cross_join_syntactic_sugar\n assert_sql 'SELECT * FROM `t1` CROSS JOIN `t2`', 'SELECT * FROM t1, t2'\n assert_sql 'SELECT * FROM `t1` CROSS JOIN `t2` CROSS JOIN `t3`', 'SELECT * FROM t1, t2, t3'\n end",
"def join_table(type, table, expr=nil, options=OPTS, &block)\n if expr.is_a?(SQL::AliasedExpression) && expr.expression.is_a?(Array) && !expr.expression.empty? && expr.expression.all?\n options = options.merge(:join_using=>true)\n end\n super\n end",
"def custom_sql\n # \"SELECT employees.first_name, stores.name FROM employees, stores WHERE employees.store_id = stores.id;\"\n\n \"SELECT employees,first_name, stores.name FROM employees JOIN stores ON stores.id = employees.store_id;\"\nend",
"def join_table(type, table, *args, &block)\n if table.is_a?(Class) && table < Sequel::Model\n Sequel::Deprecation.deprecate(\"Passing Sequel::Model class to a dataset join method\", \"Pass the model's table name or dataset as the first argument instead\")\n if table.dataset.simple_select_all?\n super(type, table.table_name, *args, &block)\n else\n super(type, table.dataset, *args, &block)\n end\n else\n super\n end\n end",
"def needs_join_table(table_name1, type, table_name2, clause, join_name = nil)\n join_name ||= \"#{table_name1}=#{type}=#{table_name2}\"\n @needed_join_tables[join_name] ||= {}\n @needed_join_tables[join_name][table] ||= begin\n # define join for this part ('table' = unique for each part)\n\n # don't add to list of tables, just get unique alias name\n second_table = get_alias(table_name2)\n\n # create join\n first_table = table(table_name1)\n\n @join_tables[first_table] ||= []\n @join_tables[first_table] << \"#{type} JOIN #{second_table} ON #{clause.gsub('TABLE1',first_table).gsub('TABLE2',second_table)}\"\n second_table\n end\n end",
"def joins(tables,options={})\n # now check for dot notiation\n dot_notation = tables.split \".\"\n parent_table = nil\n options = {}\n dot_notation.each do |j_table|\n options[:from] = parent_table unless parent_table.nil?\n options[:alias] = j_table unless parent_table.nil?\n join(j_table,options)\n parent_table = j_table\n end\n\n self\n end",
"def inner_polymorphic_join(target, options = {})\n options[:on] ||= table_name\n options[:on_table_name] ||= connection.quote_table_name(options[:on])\n options[:target_table] ||= connection.quote_table_name(target.to_s.pluralize)\n options[:as] ||= \"owner\"\n postgres = ::ActiveRecord::Base.connection.adapter_name == \"PostgreSQL\"\n \"INNER JOIN #{options[:target_table]} ON #{options[:target_table]}.id = #{options[:on_table_name]}.#{options[:as]}_id AND \" +\n \"#{options[:on_table_name]}.#{options[:as]}_type = #{postgres ? \"E\" : \"\"}'#{target.to_s.camelize}'\"\n end",
"def create_table_joins(klass)\n if join_tables = klass.ann(:self, :join_tables)\n for info in join_tables\n begin\n # UGGLY hack!\n key_type = klass.ann(:oid, :sql).split(\" \").first\n create_join_table_sql(info, key_type).each do |sql|\n exec(sql, false)\n end\n debug \"Created join table '#{info[:table]}'.\" if $DBG\n rescue Object => ex\n if table_already_exists_exception? ex\n debug \"Join table already exists\" if $DBG\n else\n raise\n end\n end\n end\n end\n end",
"def join(*tables)\n from(default_table).join(*tables)\n end",
"def table_aliases_from_join_fragment(sql)\r\n return [] if sql.blank?\r\n return sql.scan(/JOIN\\s+(`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+)(?:\\s+(?:AS\\s+)?(`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+))?/i).collect do |name|\r\n ((name[1] =~ /^ON$/i) ? name[0] : (name[1] || name[0])).gsub(/^[`\"\\[]?(.*)[`\"\\]]?$/, '\\1')\r\n end\r\n end",
"def table_list(our_join = join, our_tables = tables)\n flatten_joins([model.table_name] + our_join + our_tables, false).uniq\n end",
"def default_join_table_qualification\n :symbol\n end",
"def test_join1\n\t\tr = prep(\"program foo;\\nfoo(A,B) :- bar(A,B);\\n\")\n\t\t\n\t\t# set up schema table's predicate\n\t\t## require 'ruby-debug'; debugger\n\t\tterm_schema = @terms.schema_of\n\t\tterm_pred = Predicate.new(false,@terms.name,@terms,term_schema.variables)\n\t\tterm_pred.set(r, \"global\", \"r3\", 1)\n\t\t\n\t\tsj = ScanJoin.new(r, term_pred, @preds.schema_of)\t\n\t\tts = TupleSet.new(\"pred\", *@preds.tuples)\n\t\tres = sj.evaluate(ts)\n\n\t\tassert_equal(2, res.tups.length)\n\tend",
"def def_many_to_many(opts)\n one_through_one = opts[:type] == :one_through_one\n left = (opts[:left_key] ||= opts.default_left_key)\n lcks = opts[:left_keys] = Array(left)\n right = (opts[:right_key] ||= opts.default_right_key)\n rcks = opts[:right_keys] = Array(right)\n left_pk = (opts[:left_primary_key] ||= self.primary_key)\n opts[:eager_loader_key] = left_pk unless opts.has_key?(:eager_loader_key)\n lcpks = opts[:left_primary_keys] = Array(left_pk)\n lpkc = opts[:left_primary_key_column] ||= left_pk\n lpkcs = opts[:left_primary_key_columns] ||= Array(lpkc)\n raise(Error, \"mismatched number of left keys: #{lcks.inspect} vs #{lcpks.inspect}\") unless lcks.length == lcpks.length\n if opts[:right_primary_key]\n rcpks = Array(opts[:right_primary_key])\n raise(Error, \"mismatched number of right keys: #{rcks.inspect} vs #{rcpks.inspect}\") unless rcks.length == rcpks.length\n end\n opts[:uses_left_composite_keys] = lcks.length > 1\n opts[:uses_right_composite_keys] = rcks.length > 1\n opts[:cartesian_product_number] ||= one_through_one ? 0 : 1\n join_table = (opts[:join_table] ||= opts.default_join_table)\n opts[:left_key_alias] ||= opts.default_associated_key_alias\n opts[:graph_join_table_join_type] ||= opts[:graph_join_type]\n opts[:after_load].unshift(:array_uniq!) if opts[:uniq]\n opts[:dataset] ||= opts.association_dataset_proc\n opts[:eager_loader] ||= opts.method(:default_eager_loader)\n \n join_type = opts[:graph_join_type]\n select = opts[:graph_select]\n use_only_conditions = opts.include?(:graph_only_conditions)\n only_conditions = opts[:graph_only_conditions]\n conditions = opts[:graph_conditions]\n graph_block = opts[:graph_block]\n graph_jt_conds = opts[:graph_join_table_conditions] = opts.fetch(:graph_join_table_conditions, []).to_a\n use_jt_only_conditions = opts.include?(:graph_join_table_only_conditions)\n jt_only_conditions = opts[:graph_join_table_only_conditions]\n jt_join_type = opts[:graph_join_table_join_type]\n jt_graph_block = opts[:graph_join_table_block]\n opts[:eager_grapher] ||= proc do |eo|\n ds = eo[:self]\n egls = eo[:limit_strategy]\n if egls && egls != :ruby\n associated_key_array = opts.associated_key_array\n orig_egds = egds = eager_graph_dataset(opts, eo)\n egds = egds.\n inner_join(join_table, rcks.zip(opts.right_primary_keys) + graph_jt_conds, :qualify=>:deep).\n select_all(egds.first_source).\n select_append(*associated_key_array)\n egds = opts.apply_eager_graph_limit_strategy(egls, egds)\n ds.graph(egds, associated_key_array.map(&:alias).zip(lpkcs) + conditions, :qualify=>:deep, :table_alias=>eo[:table_alias], :implicit_qualifier=>eo[:implicit_qualifier], :join_type=>eo[:join_type]||join_type, :from_self_alias=>eo[:from_self_alias], :join_only=>eo[:join_only], :select=>select||orig_egds.columns, &graph_block)\n else\n ds = ds.graph(join_table, use_jt_only_conditions ? jt_only_conditions : lcks.zip(lpkcs) + graph_jt_conds, :select=>false, :table_alias=>ds.unused_table_alias(join_table, [eo[:table_alias]]), :join_type=>eo[:join_type]||jt_join_type, :join_only=>eo[:join_only], :implicit_qualifier=>eo[:implicit_qualifier], :qualify=>:deep, :from_self_alias=>eo[:from_self_alias], &jt_graph_block)\n ds.graph(eager_graph_dataset(opts, eo), use_only_conditions ? 
only_conditions : opts.right_primary_keys.zip(rcks) + conditions, :select=>select, :table_alias=>eo[:table_alias], :qualify=>:deep, :join_type=>eo[:join_type]||join_type, :join_only=>eo[:join_only], &graph_block)\n end\n end\n \n return if opts[:read_only]\n \n if one_through_one\n opts[:setter] ||= proc do |o|\n h = {}\n lh = lcks.zip(lcpks.map{|k| get_column_value(k)})\n jtds = _join_table_dataset(opts).where(lh)\n\n checked_transaction do\n current = jtds.first\n\n if o\n new_values = []\n rcks.zip(opts.right_primary_key_methods).each{|k, pk| new_values << (h[k] = o.get_column_value(pk))}\n end\n\n if current\n current_values = rcks.map{|k| current[k]}\n jtds = jtds.where(rcks.zip(current_values))\n if o\n if current_values != new_values\n jtds.update(h)\n end\n else\n jtds.delete\n end\n elsif o\n lh.each{|k,v| h[k] = v}\n jtds.insert(h)\n end\n end\n end\n opts[:_setter] = proc{|o| set_one_through_one_associated_object(opts, o)}\n else \n opts[:adder] ||= proc do |o|\n h = {}\n lcks.zip(lcpks).each{|k, pk| h[k] = get_column_value(pk)}\n rcks.zip(opts.right_primary_key_methods).each{|k, pk| h[k] = o.get_column_value(pk)}\n _join_table_dataset(opts).insert(h)\n end\n\n opts[:remover] ||= proc do |o|\n _join_table_dataset(opts).where(lcks.zip(lcpks.map{|k| get_column_value(k)}) + rcks.zip(opts.right_primary_key_methods.map{|k| o.get_column_value(k)})).delete\n end\n\n opts[:clearer] ||= proc do\n _join_table_dataset(opts).where(lcks.zip(lcpks.map{|k| get_column_value(k)})).delete\n end\n end\n end",
"def supports_modifying_joins?\n true\n end",
"def _join_table_dataset(opts)\n ds = model.db.from(opts.join_table_source)\n opts[:join_table_block] ? opts[:join_table_block].call(ds) : ds\n end",
"def print_join\r\n if get_tables.size < 2\r\n puts \"you do not have enough tables to join\"\r\n return nil\r\n end\r\n to_join = []\r\n puts \"choose 2 of these tables to join\"\r\n print_table_names\r\n until to_join.length == 2\r\n puts \"what is your first table you would like to join?\" if to_join.length == 0\r\n puts \"what is your second table you would like to join?\" if to_join.length == 1\r\n answer = gets.chomp\r\n to_join << answer if table_exists?(answer)\r\n end\r\n if references(to_join[1]).include?(to_join[0])\r\n execute = @db.execute(\"SELECT * FROM #{to_join[0]} JOIN #{to_join[1]}\r\n ON #{to_join[0]}.#{to_join[1]}_id = #{to_join[1]}.id ;\")\r\n print_execute_titles(execute)\r\n print_execute(execute)\r\n elsif references(to_join[0]).include?(to_join[1])\r\n execute = @db.execute(\"SELECT * FROM #{to_join[1]} JOIN #{to_join[0]}\r\n ON #{to_join[1]}.#{to_join[0]}_id = #{to_join[0]}.id ;\")\r\n print_execute_titles(execute)\r\n print_execute(execute)\r\n else\r\n puts \"Those two tables are incompatible and cannot be joined\"\r\n end\r\n end",
"def relation_by_sql_form\n # Nothing to do here\n end",
"def add_piggy_back_sql_data!(reflection_name, prefix, table_alias, attributes, select, joins, conditions, join_type)\n ktn = table_name\n kpkey = primary_key\n reflection = reflections[reflection_name]\n atn = reflection.table_name\n attributes.each do |attr|\n if table_alias\n select << \", #{table_alias}.#{attr} AS #{prefix}_#{attr}\"\n else\n select << \", #{atn}.#{attr} AS #{prefix}_#{attr}\"\n end\n end\n fkey = reflection.primary_key_name\n fpkey = reflection.klass.primary_key\n\n case reflection.macro\n when :belongs_to\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fpkey}=#{ktn}.#{fkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fpkey}=#{ktn}.#{fkey} \"\n end\n when :has_one\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fkey}=#{ktn}.#{kpkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fkey}=#{ktn}.#{kpkey} \"\n end\n when :has_many\n raise \"piggy_back: aliasing not implemented for has_many\" if table_alias\n if reflection.options[:through]\n ttn = reflection.through_reflection.klass.table_name\n tkfkey = reflection.through_reflection.primary_key_name\n tafkey = reflection.source_reflection.primary_key_name\n\n through_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n source_conditions = reflection.through_reflection.options[:conditions] ?\n \" AND \" + reflection.through_reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{ttn} ON (#{ttn}.#{tkfkey}=#{ktn}.#{kpkey}#{through_conditions})\"\n joins << \" LEFT JOIN #{atn} ON (#{ttn}.#{tafkey}=#{atn}.#{fpkey}#{source_conditions}) \"\n else\n reflection_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{atn} ON (#{atn}.#{fkey}=#{ktn}.#{kpkey}#{reflection_conditions}) \"\n end\n else\n raise \"can't piggy back #{reflection.macro} on class #{klass}\"\n end\n end",
"def tables_from(db=current_database)\n end",
"def tables_with_referential_integrity\n schemas_and_tables = select_rows <<-SQL.strip_heredoc\n SELECT s.name, o.name\n FROM sys.foreign_keys i\n INNER JOIN sys.objects o ON i.parent_object_id = o.OBJECT_ID\n INNER JOIN sys.schemas s ON o.schema_id = s.schema_id\n SQL\n schemas_and_tables.map do |schema_table|\n schema, table = schema_table\n \"#{SQLServer::Utils.quoted_raw(schema)}.#{SQLServer::Utils.quoted_raw(table)}\"\n end\n end",
"def join(other, *exps, join_type: :inner)\n unless other.is_a?(Table)\n raise UserError, 'need other table as first argument to join'\n end\n unless JOIN_TYPES.include?(join_type)\n raise UserError, \"join_type may only be: #{JOIN_TYPES.join(', ')}\"\n end\n\n # These may be needed for outer joins.\n self_row_nils = headers.map { |h| [h, nil] }.to_h\n other_row_nils = other.headers.map { |h| [h, nil] }.to_h\n join_exp, other_common_heads =\n build_join_expression(exps, other, join_type)\n ev = Evaluator.new\n result = empty_dup\n other_rows = other.rows\n other_row_matches = Array.new(other_rows.size, false)\n rows.each do |self_row|\n self_row_matched = false\n other_rows.each_with_index do |other_row, k|\n # Same as other_row, but with keys that are common with self and equal\n # in value, removed, so the output table need not repeat them.\n locals = build_locals_hash(row_a: self_row, row_b: other_row)\n matches = ev.evaluate(join_exp, locals: locals)\n next unless matches\n\n self_row_matched = other_row_matches[k] = true\n out_row = build_out_row(row_a: self_row, row_b: other_row,\n common_heads: other_common_heads,\n type: join_type)\n result << out_row\n end\n next unless [:left, :full].include?(join_type)\n next if self_row_matched\n\n result << build_out_row(row_a: self_row,\n row_b: other_row_nils,\n type: join_type)\n end\n if [:right, :full].include?(join_type)\n other_rows.each_with_index do |other_row, k|\n next if other_row_matches[k]\n\n result << build_out_row(row_a: self_row_nils,\n row_b: other_row,\n type: join_type)\n end\n end\n result.normalize_boundaries\n result\n end",
"def construct_simple_join_sql(num)\n connection = klass.connection\n key_value_table = klass.table_name\n\n main_table = definition.klass.table_name\n main_table_pk, value_table_fk_main = reflection_keys(definition.reflection_by_name(definition.klass, relation))\n\n join_sql = \"\\n INNER JOIN #{connection.quote_table_name(key_value_table)} #{key_value_table}_#{num} ON (#{connection.quote_table_name(main_table)}.#{connection.quote_column_name(main_table_pk)} = #{key_value_table}_#{num}.#{connection.quote_column_name(value_table_fk_main)})\"\n return join_sql\n end",
"def create_join_excluded_tbl(preserve_null_pk = true)\n if @excluded_join_tbl.nil?\n join_list =join_list()\n cross_join_from = ''\n full_join_from = ''\n satisfied_tbl = create_satisfied_tbl()\n 0.upto(join_list.count-1) do |i|\n join = join_list.find{|j| j['id'] ==i }\n l_rel_list = join['l_rel_list']\n quals = join['quals']\n q = ReverseParseTree.whereClauseConst(quals)\n has_quals = (not join['quals'].nil?)\n join_type = ReverseParseTree.joinTypeConvert(join['jointype'].to_s, has_quals)\n\n r_rel = join['r_rel_list'][0]\n l_arg = (i==0 ? \"#{l_rel_list[0].relname} #{l_rel_list[0].relalias}\" : \"\")\n # for efficiency only change the last join to cross join\n if i == join_list.count-1\n cross_join_from = cross_join_from + \"#{l_arg} CROSS JOIN #{r_rel.relname} #{r_rel.relalias} \"\n else\n cross_join_from = cross_join_from + \"#{l_arg} #{join_type} #{r_rel.relname} #{r_rel.relalias} on #{q} \"\n end\n # full_join_from = full_join_from + \"#{l_arg} CROSS JOIN #{r_rel.relname} #{r_rel.relalias} on #{q}\"\n end\n @excluded_join_tbl = \"#{@table}_join_excluded\"\n # renamed_pk_col = @pk_full_list.map { |pk| \"#{pk['col']} as #{pk['alias']}_pk\" }.join(', ')\n \n if preserve_null_pk\n renamed_pk_col = @pk_full_list.map { |pk| \"#{pk['col']} as #{pk['alias']}_pk\" }.join(', ')\n else\n renamed_pk_col = @pk_full_list.map do |pk|\n pkcol = @all_cols.find{|col| col.colname == pk['colname'] and col.relname==pk['relname']}\n \"COALESCE(#{pkcol.select_name},#{pkcol.null_replacement}) as #{pkcol.colalias}_pk\"\n end.join(',')\n end\n targetListReplacement = \"#{renamed_pk_col},#{@all_cols_select}\"\n query = ReverseParseTree.reverseAndreplace(@parseTree, targetListReplacement, '')\n old_from = from_query()\n # cross join\n all_cols_renamed()\n cross_join_query = query.gsub(/#{old_from}/i,cross_join_from)\n # pk_join_satisfied_tbl = @pk_full_list.map { |pk| \"t.#{pk['alias']}_pk = s.#{pk['alias']}_pk\" }.join(' AND ')\n # pk_not_in_satisfied_tbl = @pk_full_list.map { |pk| \"s.#{pk['alias']}_pk is null\" }.join(' OR ')\n\n create_tbl_query = \"select * from #{satisfied_tbl} where 1=2\"\n create_tbl_query = QueryBuilder.create_tbl(@excluded_join_tbl, '', create_tbl_query)\n DBConn.exec(create_tbl_query)\n # limit to 1000 rows due to resource limitation\n cross_join_query = \"with cross_join as (#{cross_join_query} limit 1000) INSERT INTO #{@excluded_join_tbl} select * from (select t.* from cross_join as t except select * from #{satisfied_tbl}) as tmp\"\n puts cross_join_query\n DBConn.exec(cross_join_query)\n\n # unless preserve_null_pk\n # pk = @pk_full_list.map { |pk| \"#{pk['alias']}_pk\" }.join(',')\n # DBConn.update_null_columns(@excluded_join_tbl,pk)\n # end\n # # full join\n # full_join_query = query.gsub(old_from,full_join_from)\n # full_join_query = \"(#{full_join_query} except select #{@all_cols_renamed} from #{satisfied_tbl})\"\n # full_join_query = \"INSERT INTO #{@excluded_tbl} #{full_join_query}\"\n # DBConn.exec(query)\n end\n return @excluded_join_tbl\n end",
"def on_table?; @on_table; end",
"def readers_join_table\n self.class.readers_join_table\n end",
"def fat_record_joins\n joins = \" LEFT JOIN active_sources AS obj_sources ON semantic_relations.object_id = obj_sources.id AND semantic_relations.object_type = 'TaliaCore::ActiveSource'\"\n joins << \" LEFT JOIN semantic_properties AS obj_props ON semantic_relations.object_id = obj_props.id AND semantic_relations.object_type = 'TaliaCore::SemanticProperty'\"\n joins << \" LEFT JOIN active_sources AS subject_sources ON semantic_relations.subject_id = subject_sources.id\"\n joins\n end",
"def tables_and_joins\n sql = \"#{fact_class.table_name}\"\n cube_class.dimensions_hierarchies.each do |dimension_name, hierarchy_name|\n dimension_table_name = fact_class.dimension_class(dimension_name).table_name\n sql += \" LEFT JOIN #{dimension_table_name} as #{dimension_name}\"\n sql += \" ON #{fact_class.table_name}.\"\n sql += \"#{fact_class.dimension_relationships[dimension_name].foreign_key}\"\n sql += \" = #{dimension_name}.id\\n\"\n end\n sql\n end",
"def add_join_table( id_left, table_left, id_right, table_right, name = nil, &block )\n name ||= [ table_left, table_right ].sort.join( '_' )\n add_table name do\n foreign_key id_left, table_left\n foreign_key id_right, table_right\n primary_key [ id_left, id_right ]\n unique [ id_right, id_left ]\n instance_eval &block if block\n end\n end",
"def schema_ds_join(table_name, opts)\n [:information_schema__columns, {:table_catalog=>:table_catalog,\n :table_schema => :table_schema, :table_name => :table_name} , :c]\n end",
"def join_people_by_relationship_type\n\n sql = <<EOF\nselect supers.id, supers.name, subs.id, subs.name\nfrom person_associations pa\ninner join people as supers\n on supers.id = pa.source_id\ninner join people as subs\n on subs.id = pa.sink_id\nand association_type = 'direct_reporting'\norder by supers.id\nEOF\n\n r = ActiveRecord::Base.connection.execute(sql)\n end",
"def join_table_source\n cached_fetch(:join_table_source){split_join_table_alias[0]}\n end",
"def schema\n execute(<<-eosql).collect { |row| row[0] }.collect { |t| table_schema(t) }\nSELECT rdb$relation_name FROM rdb$relations WHERE rdb$system_flag != 1\neosql\n end",
"def auto_assoc(opts = {})\n except = opts[:except] || []\n\n assocs = db.schema_parse_associations(table_name)\n relations = process_join_tables(assocs)\n\n relations.each do |row|\n src_tbl = row[:src_tbl]\n src_col = row[:src_col]\n if src_tbl == table_name && ! (src_col & except).empty?\n # TODO enable except for *_to_many\n next\n end\n src_uniq = row[:src_uniq]\n src_cardinality = cardinality(src_uniq)\n\n join_tbl = row[:join_tbl]\n\n dst_tbl = row[:dst_tbl]\n dst_col = row[:dst_col]\n dst_uniq = row[:dst_uniq]\n dst_cardinality = cardinality(dst_uniq)\n\n TABLE_MODELS.wait_all(src_tbl, dst_tbl) do |src_cls, dst_cls|\n self_ref = src_cls == dst_cls\n\n src = self_ref ? :child : underscore(src_cls.name).to_sym\n src = src_uniq ? singularize(src).to_sym : pluralize(src).to_sym\n\n dst = self_ref ? :parent : underscore(dst_cls.name).to_sym\n dst = dst_uniq ? singularize(dst).to_sym : pluralize(dst).to_sym\n\n if join_tbl\n left_col = row[:left_col]\n right_col = row[:right_col]\n send :many_to_many, src, :class => src_cls, :join_table => join_tbl,\n :left_key => left_col, :left_primary_key => dst_col,\n :right_key => right_col, :right_primary_key => src_col\n else\n # TODO name overrides\n\n if self == dst_cls\n # dst holds the foreign key -> one_to_*\n meth = dst_cardinality + '_to_' + src_cardinality\n send meth, src, :class => src_cls, :key => src_col, :primary_key => dst_col\n end\n\n if self == src_cls\n # src holds the foreign key -> *_to_one\n meth = src_cardinality + '_to_' + dst_cardinality\n\n # one_to_one requires to swap pk and fk\n src_col, dst_col = dst_col, src_col if src_uniq\n send meth, dst, :class => dst_cls, :key => src_col, :primary_key => dst_col\n end\n end\n\n end\n end\n end",
"def join(table, field1, field2, join_type = 'INNER JOIN')\n @join = \" #{join_type} #{table} ON #{@from}.#{field1}=#{table}.#{field2}\"\n\n self\n end",
"def custom_sql\n \"SELECT bookings.full_name, suites.number FROM bookings LEFT OUTER JOIN suites ON bookings.suite_id = suites.id;\"\nend",
"def tables(opts=OPTS, &block)\n pg_class_relname(['r', 'p'], opts, &block)\n end",
"def join_rows(rows)\n return @join_rows if defined? @join_rows\n\n conn = @model.connection\n join_table = conn.quote_table_name @ref.join_table\n assoc_fkey = conn.quote_column_name @ref.association_foreign_key\n fkey = conn.quote_column_name @ref.foreign_key\n quoted_ids = rows.map { |r| conn.quote r.send @ref.active_record_primary_key }\n\n @join_rows = conn.\n exec_query(\"SELECT #{fkey}, #{assoc_fkey} FROM #{join_table} WHERE #{fkey} IN (#{quoted_ids.join ','})\").\n rows\n end",
"def extend_sql_avoiding_table_naming_clashes!(sql, addition)\r\n used_table_aliases = table_aliases_from_join_fragment(addition)\r\n old_table_aliases = table_aliases_from_join_fragment(sql)\r\n (used_table_aliases & old_table_aliases).each do |join_table_alias|\r\n i = 0\r\n begin\r\n i += 1\r\n new_alias = \"renamed_join_table_#{i}\"\r\n end until !used_table_aliases.include?(new_alias)\r\n convert_table_name_to_new_alias!(sql, join_table_alias, new_alias)\r\n end\r\n sql << \" #{addition} \"\r\n end",
"def fetch_join_rows(rows)\n conn = @model.connection\n join_table = conn.quote_table_name @ref.join_table\n assoc_fkey = conn.quote_column_name @ref.association_foreign_key\n fkey = conn.quote_column_name @ref.foreign_key\n quoted_ids = rows.map { |row|\n begin\n id = row.send @ref.active_record_primary_key\n rescue NoMethodError => e\n raise MissingColumnError.new(row, e.name)\n end\n conn.quote id\n }\n\n quoted_ids.any? ? conn.\n exec_query(\"SELECT #{fkey}, #{assoc_fkey} FROM #{join_table} WHERE #{fkey} IN (#{quoted_ids.join ','})\").\n rows : []\n end",
"def many_to_many_associated_tables\n @many_to_many_associated_tables\n end",
"def join_query(*queries, *tables, join_type=nil, *conditions)\n\tquery = \"select \"\n\tqueries.each do |statement|\n\t\tquery += \" #{statement}\"\n\tend\n\n\tquery += \" from\"\n\n\ttables.each do |table|\n\t\tquery += \"#{table}\"\n\tend\n\n\tquery += \"#{join_type}\" if join_type != nil\n\n\tif join_type == \"INNER\"\n\t\tquery += \"INNER JOIN\"\n\t\tconditions.each do |condition|\n\t\t\tquery += \"ON\"\n\t\t\tquery += \" #{condition}\"\n\t\tend\n\tend\n\n\tif join_type == \"CROSS\"\n\t\tquery += \"CROSS JOIN\"\n\t\tquery += conditions[0]\n\tend\n\n\treturn query\n\nend",
"def test_003\n\n target_sql = \"select d.id as id,\nd.taxnumber as taxnumber,\nd.social_security_type as social_security_type,\nd.taxnumber_exemption as taxnumber_exemption\nfrom distributors d\nleft join distributor_addons da on (d.id = da.distributor_id)\nwhere d.id = (11,12,13,14,15,16)\norder by d.id\"\n \n @sql.select do\n d :id, :taxnumber, :social_security_type, :taxnumber_exemption\n end\n\n @sql.from(distributors: 'd') do\n left_join distributor_addons: 'da', on: 'd.id = da.distributor_id'\n end\n\n dist_ids = [11,12,13,14,15,16]\n \n @sql.where do\n d id: dist_ids\n end\n\n @sql.order('d.id')\n\n assert_equal @sql.to_s, target_sql\n end",
"def subquery(definition, other_definition, conditions)\n validate_definition_instance(definition)\n validate_definition_instance(other_definition)\n [conditions].flatten.each { |c| validate_node_or_attribute(c) }\n\n current_model = definition.model\n #current_table = definition.table\n current_joins = definition.joins\n\n other_table = other_definition.table\n other_model = other_definition.model\n #other_joins = other_definition.joins\n\n # build an exist subquery to apply conditions that\n # refer to another table\n\n subquery = other_definition.table\n\n # add conditions to subquery\n [conditions].flatten.each do |c|\n subquery = subquery.where(c)\n end\n\n # add joins that provide other table access to current table\n\n\n which_joins = current_joins\n join_paths_index = nil\n join_path_current_index = nil\n join_path_other_index = nil\n which_joins.each_with_index do |item, index|\n join_path_current_index = item.find_index { |j| j[:join] == current_model }\n join_path_other_index = item.find_index { |j| j[:join] == other_model }\n if !join_path_current_index.nil? && !join_path_other_index.nil?\n join_paths_index = index\n break\n end\n end\n\n first_index = [join_path_current_index, join_path_other_index].min\n last_index = [join_path_current_index, join_path_other_index].max\n relevant_joins = which_joins[join_paths_index][first_index..last_index]\n\n\n relevant_joins.each do |j|\n join_table = j[:join]\n join_condition = j[:on]\n\n # assume this is an arel_table if it doesn't respond to .arel_table\n arel_table = join_table.respond_to?(:arel_table) ? join_table.arel_table : join_table\n\n if arel_table.name == other_table.name && !join_condition.nil?\n # add join as condition if this is the main table in the subquery\n subquery = subquery.where(join_condition)\n elsif arel_table.name != other_table.name && !join_condition.nil?\n # add full join if this is not the main table in the subquery\n subquery = subquery.join(arel_table).on(join_condition)\n end\n\n end\n\n subquery.project(1).exists\n end",
"def add_joins!(sql, options, scope = :auto)\r\n scope = scope(:find) if :auto == scope\r\n join = (scope && scope[:joins]) || options[:joins]\r\n return if join.blank?\r\n extend_sql_avoiding_table_naming_clashes!(sql, scope && scope[:joins])\r\n extend_sql_avoiding_table_naming_clashes!(sql, options[:joins])\r\n end",
"def build_join_expression(exps, other, type)\n return ['true', []] if type == :cross\n\n a_heads = headers\n b_heads = other.headers\n common_heads = a_heads & b_heads\n b_common_heads = []\n if exps.empty?\n if common_heads.empty?\n msg = \"#{type}-join with no common column names needs join expression\"\n raise UserError, msg\n else\n # A Natural join on all common heads\n common_heads.each do |h|\n ensure_common_types!(self_h: h, other_h: h, other: other)\n end\n nat_exp = common_heads.map { |h| \"(#{h}_a == #{h}_b)\" }.join(' && ')\n [nat_exp, common_heads]\n end\n else\n # We have join expressions to evaluate\n and_conds = []\n partial_result = nil\n last_sym = nil\n exps.each do |exp|\n case exp\n when Symbol\n case exp.to_s.clean\n when /\\A(?<sy>.*)_a\\z/\n a_head = Regexp.last_match[:sy].to_sym\n unless a_heads.include?(a_head)\n raise UserError, \"no column '#{a_head}' in table\"\n end\n\n if partial_result\n # Second of a pair\n ensure_common_types!(self_h: a_head,\n other_h: last_sym,\n other: other)\n partial_result << \"#{a_head}_a)\"\n and_conds << partial_result\n partial_result = nil\n else\n # First of a pair of _a or _b\n partial_result = +\"(#{a_head}_a == \"\n end\n last_sym = a_head\n when /\\A(?<sy>.*)_b\\z/\n b_head = Regexp.last_match[:sy].to_sym\n unless b_heads.include?(b_head)\n raise UserError, \"no column '#{b_head}' in second table\"\n end\n\n if partial_result\n # Second of a pair\n ensure_common_types!(self_h: last_sym,\n other_h: b_head,\n other: other)\n partial_result << \"#{b_head}_b)\"\n and_conds << partial_result\n partial_result = nil\n else\n # First of a pair of _a or _b\n partial_result = +\"(#{b_head}_b == \"\n end\n b_common_heads << b_head\n last_sym = b_head\n else\n # No modifier, so must be one of the common columns\n unless partial_result.nil?\n # We were expecting the second of a modified pair, but got an\n # unmodified symbol instead.\n msg =\n \"follow '#{last_sym}' by qualified exp from the other table\"\n raise UserError, msg\n end\n # We have an unqualified symbol that must appear in both tables\n unless common_heads.include?(exp)\n msg = \"unqualified column '#{exp}' must occur in both tables\"\n raise UserError, msg\n end\n ensure_common_types!(self_h: exp, other_h: exp, other: other)\n and_conds << \"(#{exp}_a == #{exp}_b)\"\n b_common_heads << exp\n end\n when String\n # We have a string expression in which all column references must be\n # qualified.\n and_conds << \"(#{exp})\"\n else\n msg = \"invalid join expression '#{exp}' of class #{exp.class}\"\n raise UserError, msg\n end\n end\n [and_conds.join(' && '), b_common_heads]\n end\n end",
"def references_eager_loaded_tables?(options)\n joined_tables = joined_tables(options)\n include_eager_order?(options, nil, joined_tables) || include_eager_conditions?(options, nil, joined_tables)\n end",
"def def_many_through_many(opts)\n one_through_many = opts[:type] == :one_through_many\n opts[:read_only] = true\n opts[:after_load].unshift(:array_uniq!) if opts[:uniq]\n opts[:cartesian_product_number] ||= one_through_many ? 0 : 2\n opts[:through] = opts[:through].map do |e|\n case e\n when Array\n raise(Error, \"array elements of the through option/argument for many_through_many associations must have at least three elements\") unless e.length == 3\n {:table=>e[0], :left=>e[1], :right=>e[2]}\n when Hash\n raise(Error, \"hash elements of the through option/argument for many_through_many associations must contain :table, :left, and :right keys\") unless e[:table] && e[:left] && e[:right]\n e\n else\n raise(Error, \"the through option/argument for many_through_many associations must be an enumerable of arrays or hashes\")\n end\n end\n\n left_key = opts[:left_key] = opts[:through].first[:left]\n opts[:left_keys] = Array(left_key)\n opts[:uses_left_composite_keys] = left_key.is_a?(Array)\n left_pk = (opts[:left_primary_key] ||= self.primary_key)\n raise(Error, \"no primary key specified for #{inspect}\") unless left_pk\n opts[:eager_loader_key] = left_pk unless opts.has_key?(:eager_loader_key)\n opts[:left_primary_keys] = Array(left_pk)\n lpkc = opts[:left_primary_key_column] ||= left_pk\n lpkcs = opts[:left_primary_key_columns] ||= Array(lpkc)\n opts[:dataset] ||= opts.association_dataset_proc\n\n opts[:left_key_alias] ||= opts.default_associated_key_alias\n opts[:eager_loader] ||= opts.method(:default_eager_loader)\n\n join_type = opts[:graph_join_type]\n select = opts[:graph_select]\n graph_block = opts[:graph_block]\n only_conditions = opts[:graph_only_conditions]\n use_only_conditions = opts.include?(:graph_only_conditions)\n conditions = opts[:graph_conditions]\n opts[:eager_grapher] ||= proc do |eo|\n ds = eo[:self]\n iq = eo[:implicit_qualifier]\n egls = eo[:limit_strategy]\n if egls && egls != :ruby\n associated_key_array = opts.associated_key_array\n orig_egds = egds = eager_graph_dataset(opts, eo)\n opts.reverse_edges.each{|t| egds = egds.join(t[:table], Array(t[:left]).zip(Array(t[:right])), :table_alias=>t[:alias], :qualify=>:deep)}\n ft = opts.final_reverse_edge\n egds = egds.join(ft[:table], Array(ft[:left]).zip(Array(ft[:right])), :table_alias=>ft[:alias], :qualify=>:deep).\n select_all(egds.first_source).\n select_append(*associated_key_array)\n egds = opts.apply_eager_graph_limit_strategy(egls, egds)\n ds.graph(egds, associated_key_array.map(&:alias).zip(Array(lpkcs)) + conditions, :qualify=>:deep, :table_alias=>eo[:table_alias], :implicit_qualifier=>iq, :join_type=>eo[:join_type]||join_type, :join_only=>eo[:join_only], :from_self_alias=>eo[:from_self_alias], :select=>select||orig_egds.columns, &graph_block)\n else\n opts.edges.each do |t|\n ds = ds.graph(t[:table], t.fetch(:only_conditions, (Array(t[:right]).zip(Array(t[:left])) + t[:conditions])), :select=>false, :table_alias=>ds.unused_table_alias(t[:table]), :join_type=>eo[:join_type]||t[:join_type], :join_only=>eo[:join_only], :qualify=>:deep, :implicit_qualifier=>iq, :from_self_alias=>eo[:from_self_alias], &t[:block])\n iq = nil\n end\n fe = opts.final_edge\n ds.graph(opts.associated_class.dataset, use_only_conditions ? only_conditions : (Array(opts.right_primary_key).zip(Array(fe[:left])) + conditions), :select=>select, :table_alias=>eo[:table_alias], :qualify=>:deep, :join_type=>eo[:join_type]||join_type, :join_only=>eo[:join_only], &graph_block)\n end\n end\n end",
"def authors_join_table\n self.class.authors_join_table\n end",
"def build_tables\n @db.exec(%q[\n CREATE TABLE IF NOT EXISTS breeds(\n id serial NOT NULL PRIMARY KEY,\n breed varchar(30),\n price integer\n )])\n\n @db.exec(%q[\n CREATE TABLE IF NOT EXISTS puppies(\n id serial NOT NULL PRIMARY KEY,\n breed varchar(30),\n name varchar(30),\n age integer,\n created_at timestamp NOT NULL DEFAULT current_timestamp\n )])\n\n @db.exec(%q[\n CREATE TABLE IF NOT EXISTS requests(\n id serial NOT NULL PRIMARY KEY,\n breed text,\n status text,\n created_at timestamp NOT NULL DEFAULT current_timestamp\n )])\n end",
"def to_sql\n @join.association_join.gsub(/::ts_join_alias::/,\n \"#{@reflection.klass.connection.quote_table_name(@join.parent.aliased_table_name)}\"\n )\n end",
"def build_query_parts\n # Always include the following columns:\n select_clause = ['DISTINCT(Person.' + _(:id, :person) + ') as person_id'] # person id\n select_clause += ['Person.' + _(:first_name, :person) + ' as First_Name'] # first name\n select_clause += ['Person.' + _(:last_name, :person) + ' as Last_Name'] # last name\n \n # Always include the person table\n tables = ['Person']\n tables_clause = Person.table_name + ' as Person'\n # Always include the campus involvements table\n tables << 'CampusInvolvement'\n tables_clause += \" LEFT JOIN #{CampusInvolvement.table_name} as CampusInvolvement on Person.#{_(:id, :person)} = CampusInvolvement.#{_(:person_id, :campus_involvement)} AND CampusInvolvement.end_date IS NULL\"\n # Always include the ministry involvements table\n tables << 'MinistryInvolvement'\n tables_clause += \" LEFT JOIN #{MinistryInvolvement.table_name} as MinistryInvolvement on Person.#{_(:id, :person)} = MinistryInvolvement.#{_(:person_id, :ministry_involvement)} AND MinistryInvolvement.end_date IS NULL\"\n # Always include the current address\n tables << 'CurrentAddress'\n tables_clause += \" LEFT JOIN #{CurrentAddress.table_name} as CurrentAddress on Person.#{_(:id, :person)} = CurrentAddress.#{_(:person_id, :address)} AND #{_(:address_type, :address)} = 'current'\"\n # Hooks to support different schemas\n tables += build_query_parts_custom_tables if self.respond_to?(:build_query_parts_custom_tables)\n tables_clause += build_query_parts_custom_tables_clause if self.respond_to?(:build_query_parts_custom_tables_clause)\n\n columns.each do |column|\n raise inspect if column.nil? # If something goes wrong, we want good information\n # Add table to table clause\n table_name = column.from_clause.constantize.table_name if column.from_clause.present?\n unless !column.from_clause.present? || tables.include?(column.from_clause)\n tables << column.from_clause\n source_model = (column.source_model.to_s.empty? ? 'Person' : column.source_model).constantize\n source_column = column.source_column.to_s.empty? ? 'id' : column.source_column\n foreign_key = column.foreign_key.to_s.empty? ? 'person_id' : column.foreign_key\n source_table_name = source_model.table_name\n join_on_left = \"#{source_model}.#{_(source_column.to_sym, source_model.name.downcase.to_sym)}\"\n join_on_right = \"#{column.from_clause}.#{_(foreign_key.to_sym, column.from_clause.underscore.to_sym)}\"\n tables_clause += \" LEFT JOIN #{table_name} as #{column.from_clause} on #{join_on_left} = #{join_on_right}\"\n tables_clause += \" AND \" + column.join_clause unless column.join_clause.blank?\n end\n \n # Don't add id, first name or last name here because we added them earlier\n unless ['id','first_name','last_name'].include?(column.select_clause)\n # Add column to select clause\n unless column.select_clause.first == '('\n select_clause << \"#{column.from_clause}.#{_(column.select_clause, column.from_clause.underscore)} as #{column.safe_name}\"\n else\n select_clause << \"#{column.select_clause} as #{column.safe_name}\"\n end\n end\n end\n self.select_clause = select_clause.join(', ')\n self.tables_clause = tables_clause\n return tables\n end",
"def select_name_and_series_subgenres_of_authors\n \"SELECT authors.name, subgenres.name\n FROM series\n INNER JOIN authors\n ON series.author_id = authors.id\n INNER JOIN subgenres\n ON series.subgenre_id = subgenres.id\"\nend",
"def non_join_table_name?(key)\n (key.in?(NonJoinTableNames) || is_selection_type(key))\n end",
"def test_002\n target_sql = \"select shipments.number as shipment_number,\nvariants.sku as sku,\nvariants.price as price,\nvariants.weight as weight,\nvariants.height as height,\nvariants.width as width,\nvariants.depth as length,\nproducts.description as product_description,\norders.number as order_number\nfrom orders\njoin shipments on (shipments.order_id = orders.id)\njoin line_items on (line_items.order_id = orders.id)\njoin variants on (line_items.variant_id = variants.id)\njoin products on (variants.product_id = products.id)\njoin state_events on (state_events.stateful_id = orders.id and state_events.name = 'payment' and state_events.stateful_type = 'Order' and state_events.next_state in ('paid','credit_owed'))\nwhere orders.state = 'complete' and orders.shipment_state = 'ready' and state_events.created_at >= '2012-10-01' and state_events.created_at <= '2015-03-08' and shipments.warehouse_id = 28\"\n \n @sql.select do\n shipments number: 'shipment_number'\n variants :sku, :price, :weight, :height, :width, depth: 'length'\n products description: 'product_description'\n orders number: 'order_number'\n end\n \n @sql.from :orders do\n join :shipments, on: 'shipments.order_id = orders.id'\n join :line_items, on: 'line_items.order_id = orders.id'\n join :variants, on: 'line_items.variant_id = variants.id'\n join :products, on: 'variants.product_id = products.id'\n join :state_events do\n state_events stateful_id: :'orders.id',\n name: 'payment',\n stateful_type: 'Order',\n next_state: %w(paid credit_owed)\n end\n end\n\n begin_date = '2012-10-01'\n end_date = '2015-03-08'\n warehouse_id = 28\n \n @sql.where do\n orders state: 'complete', shipment_state: 'ready'\n \n con '>=' do\n state_events created_at: begin_date\n end\n con '<=' do\n state_events created_at: end_date\n end\n \n shipments warehouse_id: warehouse_id\n end\n\n assert_equal @sql.to_s, target_sql\n end",
"def and_relation(relation)\n q = all\n raise \"incompatible FROM clauses: #{q.to_sql}; #{relation.to_sql}\" if !q.from_clause.empty? && q.from_clause != relation.from_clause\n raise \"incompatible GROUP BY clauses: #{q.to_sql}; #{relation.to_sql}\" if !q.group_values.empty? && q.group_values != relation.group_values\n\n q = q.select(q.select_values + relation.select_values) if !relation.select_values.empty?\n q = q.from(relation.from_clause.value) if !relation.from_clause.empty?\n q = q.joins(relation.joins_values + q.joins_values) if relation.joins_values.present?\n q = q.where(relation.where_clause.ast) if relation.where_clause.present?\n q = q.group(relation.group_values) if relation.group_values.present?\n q = q.order(relation.order_values) if relation.order_values.present? && !relation.reordering_value\n q = q.reorder(relation.order_values) if relation.order_values.present? && relation.reordering_value\n q\n end",
"def create_database\n $conn.exec(\"\"\"\n CREATE TABLE users (\n id SERIAL NOT NULL,\n name varchar(255) NOT NULL,\n created_at timestamp NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE UNIQUE INDEX user_names ON users (name);\n CREATE TABLE blathers (\n id SERIAL NOT NULL,\n text varchar(141) NOT NULL,\n created_at timestamp NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (id)\n );\n CREATE TABLE blathers_mentioned_users (\n blather_id integer NOT NULL,\n user_id integer NOT NULL,\n PRIMARY KEY (blather_id, user_id)\n );\n \"\"\")\nend",
"def use_scrooge?(model_class, original_sql)\n original_sql =~ select_regexp(model_class.table_name) &&\n model_class.columns_hash.has_key?(model_class.primary_key) &&\n original_sql !~ ScroogeRegexJoin\n end",
"def select!\n # Ensure we can never be destructive by nilifying :update.\n Mao.sql(with_options(:update => nil).sql) do |pg_result|\n if @options[:join]\n other = Mao.query(@options[:join][0])\n pg_result.map {|result|\n Mao.normalize_join_result(result, self, other)\n }\n else\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n end\n end\n end",
"def tables_for_sql\n @tables_for_sql ||= RailsRedshiftReplicator.replicable_target_tables.join(\",\")\n end",
"def has_joins?\n !@options[:joins].blank?\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def create_users_products\n @db.create_join_table(:user_id => :users, :product_id => :products) do\n end\n end",
"def relation_table(model)\n validate_model(model)\n model.arel_table\n end",
"def table_name_prefix; end",
"def tables\n [\n ]\n end",
"def joins\n []\n end",
"def create_join_table!(hash, options=OPTS)\n drop_table?(join_table_name(hash, options))\n create_join_table(hash, options)\n end",
"def create_join_table?(hash, options=OPTS)\n if supports_create_table_if_not_exists? && options[:no_index]\n create_join_table(hash, options.merge(:if_not_exists=>true))\n elsif !table_exists?(join_table_name(hash, options))\n create_join_table(hash, options)\n end\n end",
"def join_using_clause_using_sql_append(sql, using_columns)\n if using_columns.is_a?(SQL::AliasedExpression)\n super(sql, using_columns.expression)\n sql << ' AS '\n identifier_append(sql, using_columns.alias)\n else\n super\n end\n end",
"def admins_join_table\n self.class.admins_join_table\n end",
"def only(*columns)\n columns = columns.flatten\n\n if columns.length == 1 and columns[0].is_a?(Hash)\n unless @options[:join]\n raise ArgumentError, \"#only with a Hash must be used only after #join\"\n end\n\n other = Mao.query(@options[:join][0])\n columns = columns[0]\n columns.each do |table, table_columns|\n unless table_columns.is_a? Array\n raise ArgumentError, \"#{table_columns.inspect} is not an Array\"\n end\n\n if table == @table\n table_columns.each do |column|\n check_column(column, @table, @col_types)\n end\n elsif table == other.table\n table_columns.each do |column|\n check_column(column, other.table, other.col_types)\n end\n else\n raise ArgumentError, \"#{table} is not a column in this query\"\n end\n end\n else\n columns.each do |column|\n check_column(column, @table, @col_types)\n end\n end\n\n with_options(:only => columns)\n end",
"def joins\n @joins\n end",
"def writers_join_table\n self.class.writers_join_table\n end",
"def multi_query_builder\n\n query = \"\n SELECT\n #{select_arr.join(\",\\n\\t\")}\n FROM \\t#{groups.first.parent_table}\n #{pk_join_arr.join(\"\\n\")}\n #{fk_join_arr.join(\"\\n\")};\"\n\n return query\n end",
"def join_dependency\n @join_dependency ||= (\n build_join_dependency(\n Arel::SelectManager.new(table.engine, table),\n joins_values\n ) && @join_dependency\n )\n end",
"def build_inheritances_joins(arel, types)\n columns = Hash.new{ |h, k| h[k] = [] }\n base_on_key = model.arel_table[primary_key]\n base_attributes = model.attribute_names\n\n # Iterate over each casted dependent calculating the columns\n types.each.with_index do |model, idx|\n join_table = model.arel_table.alias(\"\\\"i_#{idx}\\\"\")\n arel.outer_join(join_table).on(base_on_key.eq(join_table[primary_key]))\n (model.attribute_names - base_attributes).each do |column|\n columns[column] << join_table\n end\n end\n\n # Return the list of needed columns\n columns.default_proc = nil\n columns\n end",
"def assign_join association = nil\n @table_alias = association ? \"#{association.aliased_table_name}.\" : \"\"\n end",
"def to_sql\n sql = @primary_table.to_sql\n for join in @joins\n sql << \" \" << join.to_sql\n end\n sql\n end",
"def tables; ActiveRecord::Base.connection.tables; end",
"def select_name_and_series_subgenres_of_authors\n \"SELECT authors.name, subgenres.name FROM authors INNER JOIN series ON authors.id=series.author_id INNER JOIN subgenres ON series.subgenre_id = subgenres.id; \"\nend",
"def left_joins_by_alias\n {}\n end",
"def tables(query)\n SqlAssess::Parsers::Tables.new(query).tables.map do |table|\n if table.key?(:join_type)\n table[:table][:table].remove('`')\n else\n table[:table].remove('`')\n end\n end\n end",
"def construct_join_sql(key_relation, num)\n join_sql = \"\"\n connection = klass.connection\n key = key_relation.to_s.singularize.to_sym\n\n key_table = definition.reflection_by_name(klass, key).table_name\n value_table = klass.table_name.to_s\n\n value_table_fk_key, key_table_pk = reflection_keys(definition.reflection_by_name(klass, key))\n\n main_reflection = definition.reflection_by_name(definition.klass, relation)\n if main_reflection\n main_table = definition.klass.table_name\n main_table_pk, value_table_fk_main = reflection_keys(definition.reflection_by_name(definition.klass, relation))\n\n join_sql = \"\\n INNER JOIN #{connection.quote_table_name(value_table)} #{value_table}_#{num} ON (#{main_table}.#{main_table_pk} = #{value_table}_#{num}.#{value_table_fk_main})\"\n value_table = \" #{value_table}_#{num}\"\n end\n join_sql += \"\\n INNER JOIN #{connection.quote_table_name(key_table)} #{key_table}_#{num} ON (#{key_table}_#{num}.#{key_table_pk} = #{value_table}.#{value_table_fk_key}) \"\n\n return join_sql\n end",
"def get_has_many_through_relation_query_sql(relation,query)\n fields = build_select_fields(relation[:table_fields].split(','),'a')\n\n qry = \"SELECT #{fields},`c`.`#{relation[:this_key]}` AS `_table_key` FROM `#{relation[:table]}` AS `a`, `#{relation[:link_table]}` AS `b`, `#{@table_name}` AS `c` WHERE (`a`.`#{relation[:table_key]}` = `b`.`#{relation[:link_field]}` AND `b`.`#{relation[:link_key]}` = `c`.`#{relation[:this_key]}`)\"\n where = build_where_ns(query,'c')\n qry += \" AND #{where}\" unless where.length == 0\n qry\n end",
"def has_one_through(name, through_name, source_name)\n\n define_method(name) do\n through_options = self.class.assoc_options[through_name]\n source_options = through_options.model_class.assoc_options[source_name]\n\n source_table = source_options.table_name\n through_table = through_options.table_name\n source_foreign_key = source_options.foreign_key\n source_primary_key = source_options.primary_key\n through_foreign_key = through_options.foreign_key\n through_primary_key = through_options.primary_key\n\n query = <<-SQL\n SELECT\n #{source_table}.*\n FROM\n #{through_table}\n JOIN\n #{source_table}\n ON\n #{through_table}.#{source_foreign_key} = #{source_table}.#{source_primary_key}\n WHERE\n #{through_table}.#{through_primary_key} = ?\n SQL\n\n column = DBConnection.execute(query, self.send(through_foreign_key))\n source_options.model_class.parse_all(column).first\n end\n\n end",
"def supports_combining_alter_table_ops?\n false\n end",
"def join_table_alias\n final_reverse_edge[:alias]\n end",
"def query_table\n raise StandardError, 'The query is not defined yet' if query.nil?\n return query.arel_table if relation_query?(query)\n @query_table\n end",
"def query_table\n raise StandardError, 'The query is not defined yet' if query.nil?\n return query.arel_table if relation_query?(query)\n @query_table\n end",
"def def_many_to_many(opts)\n super\n def_association_pks_getter(opts) do\n _join_table_dataset(opts).filter(opts[:left_key]=>send(opts[:left_primary_key])).select_map(opts[:right_key])\n end\n def_association_pks_setter(opts) do |pks|\n checked_transaction do\n ds = _join_table_dataset(opts).filter(opts[:left_key]=>send(opts[:left_primary_key]))\n ds.exclude(opts[:right_key]=>pks).delete\n pks -= ds.select_map(opts[:right_key])\n pks.each{|pk| ds.insert(opts[:left_key]=>send(opts[:left_primary_key]), opts[:right_key]=>pk)}\n end\n end\n end",
"def handle_joins fields, select = nil\n ret = select || scoped\n fields.each do |qualified_field|\n assoc, foreign_table, field = parse_field(qualified_field)\n ret = ret.joins(join_string(assoc, foreign_table)) if assoc\n end\n ret\n end",
"def sql opts = EMPTY_HASH\n opts = options.merge(opts)\n\n join_tables = BELONGS_TO.map{|x| x.to_s.pluralize} + [ ContentStatus.table_name ]\n join_tables.uniq!\n\n tables = join_tables.dup\n clauses = [ ]\n\n @model_class = Content::Version if self.latest || self.versions\n\n connection = model_class.connection\n\n t = :updater_users\n tables << \"#{User.table_name} AS #{t}\"\n join_tables << t\n\n t = :creator_users\n tables << \"#{User.table_name} AS #{t}\"\n join_tables << t\n\n unless (version_list_name = params[:version_list_name]).blank?\n @model_class = Content::Version\n tables << \n (t1 = VersionListName.table_name) << \n (t2 = VersionList.table_name) <<\n (t3 = VersionListContent.table_name)\n clauses << \n \"#{t1}.name = #{connection.quote(version_list_name)}\" <<\n \"#{t3}.version_list_id = #{t1}.version_list_id\"\n else\n version_list_name = nil\n end\n\n unless (version_list_id = params[:version_list_id]).blank?\n version_list_id = version_list_id.to_i\n @model_class = Content::Version\n tables <<\n (t3 = VersionListContent.table_name)\n clauses << \n \"#{t3}.version_list_id = #{connection.quote(version_list_id)}\"\n else\n version_list_id = nil\n end\n\n\n table_name = \n opts[:table_name] || \n model_class.table_name\n\n if version_list_name || version_list_id \n tables << \n (t3 = VersionListContent.table_name)\n clauses << \n \"#{t3}.content_version_id = contents.id\"\n end\n\n select_table = \"contents\"\n if self.latest || self.versions\n select_table = 'cv'\n end\n select_values = \"#{select_table}.*\"\n\n order_by = Content.order_by\n order_by = order_by.split('), (').join(\"),\\n (\") \n order_by.gsub!(/\\.id = /, \".id = #{select_table}.\")\n order_by << \",\\n version DESC\" if self.versions\n \n if opts[:count]\n select_values = \"COUNT(#{select_values})\"\n order_by = nil\n end\n\n\n ##################################################################\n # Generate SQL\n #\n\n sql = ''\n\n if self.latest || self.versions\n sql << \"SELECT #{select_values} FROM #{model_class.table_name} AS cv\"\n sql << \"\\nWHERE cv.id IN (\\n\"\n case \n when self.latest\n select_values = 'MAX(contents.id)'\n when self.versions\n select_values = 'contents.id'\n end\n end\n\n \n sql << <<\"END\"\nSELECT #{select_values}\nFROM #{table_name} AS contents, #{tables.uniq * ', '}, content_types\nWHERE\n #{join_tables.map{|x| \"(#{x}.id = contents.#{x.to_s.singularize}_id)\"} * \"\\nAND \"}\nAND (content_types.id = content_keys.content_type_id)\nEND\n\n # Join clauses:\n# pp opts\n clauses << opts[:conditions] unless opts[:conditions].blank?\n unless clauses.empty?\n sql << \"\\nAND \" << (clauses.map{| x | \"(#{x})\"} * \"\\nAND \")\n end\n\n # Search clauses:\n unless (where = sql_where_clauses(opts)).empty?\n sql << \"\\nAND \" << where\n end\n\n if self.latest || self.versions\n sql << \"\\nAND contents.content_id = cv.content_id\" \n sql << \"\\n)\"\n end\n\n # Ordering:\n if order_by\n sql << \"\\nORDER BY\\n \" << order_by\n end\n\n # Limit:\n if x = (opts[:limit])\n sql << \"\\nLIMIT #{x}\"\n end\n\n if opts[:dump_sql] # || true\n $stderr.puts \" params = #{params.inspect}\"\n $stderr.puts \" sql =\\n #{sql}\"\n # raise \"LKSDJFLKSJDF\"\n end\n\n sql\n end",
"def supports_combining_alter_table_ops?\n true\n end",
"def cross_join(other)\n join(other, join_type: :cross)\n end",
"def validate_schema\n all_cols1 = @db1.column_names(@table1)\n all_cols2 = @db2.column_names(@table2)\n if all_cols1 != all_cols2\n raise \"Columns do not match, please use full coopy toolbox\"\n end\n\n key_cols1 = @db1.primary_key(@table1)\n key_cols2 = @db2.primary_key(@table2)\n if key_cols1 != key_cols2\n raise \"Primary keys do not match, please use full coopy toolbox\"\n end\n end"
] |
[
"0.6400713",
"0.63369375",
"0.6314119",
"0.6087215",
"0.6051302",
"0.60380393",
"0.5940293",
"0.5855021",
"0.58443964",
"0.5838234",
"0.5803235",
"0.57699335",
"0.57361025",
"0.57264787",
"0.5725699",
"0.57225955",
"0.5708174",
"0.5697971",
"0.5676513",
"0.5663057",
"0.56616735",
"0.5660461",
"0.5607606",
"0.56002647",
"0.55824083",
"0.55683887",
"0.55239177",
"0.55234724",
"0.5515184",
"0.5512941",
"0.5500676",
"0.5500322",
"0.5487994",
"0.5479884",
"0.54473",
"0.54271877",
"0.5417401",
"0.53904283",
"0.53898156",
"0.5380186",
"0.5376314",
"0.53740036",
"0.53693664",
"0.5367103",
"0.536321",
"0.533996",
"0.5334149",
"0.5322726",
"0.5321972",
"0.5315295",
"0.5304277",
"0.52903885",
"0.527203",
"0.52685624",
"0.52671754",
"0.52667916",
"0.52642137",
"0.5263035",
"0.5256763",
"0.5251828",
"0.52502245",
"0.52501655",
"0.5237988",
"0.52353877",
"0.5234353",
"0.5228784",
"0.52262944",
"0.52228284",
"0.52006924",
"0.5182846",
"0.51751137",
"0.5170653",
"0.5168744",
"0.51641464",
"0.5157204",
"0.5149505",
"0.51461816",
"0.51452833",
"0.5138452",
"0.51367414",
"0.512581",
"0.5120025",
"0.5118041",
"0.51028234",
"0.5089117",
"0.5079576",
"0.5077478",
"0.50669163",
"0.50646776",
"0.5044577",
"0.50426483",
"0.50384855",
"0.5031264",
"0.5031264",
"0.5030037",
"0.502874",
"0.50188303",
"0.50176704",
"0.5008577",
"0.5000845"
] |
0.6261544
|
3
|
Support table aliases for USING columns
|
# Append the SQL for a join's USING clause. When the USING column list is
# wrapped in an SQL::AliasedExpression, also emit "AS alias" after the
# parenthesized column list, so the join's USING columns can be aliased.
def join_using_clause_using_sql_append(sql, using_columns)
  if using_columns.is_a?(SQL::AliasedExpression)
    super(sql, using_columns.expression)
    sql << ' AS '
    identifier_append(sql, using_columns.alias)
  else
    super
  end
end
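
A minimal usage sketch of the behavior this method enables, assuming a Sequel version whose postgres adapter supports USING-clause aliases (the table, column, and alias names here are illustrative):

require 'sequel'

# A mock connection lets us inspect the generated SQL without a live server.
DB = Sequel.connect('mock://postgres')

# Wrapping the USING column list in an aliased expression makes the
# overridden join_using_clause_using_sql_append emit "AS t1" after it.
ds = DB[:a].join(:b, Sequel.as([:c], :t1))
ds.sql
# => SELECT * FROM "a" INNER JOIN "b" USING ("c") AS "t1"
#    (exact quoting depends on the adapter's identifier settings)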
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def table_alias\r\n @table_alias || from_table_name\r\n end",
"def build_table_aliases(from)\n # for the targets\n returning({}) do |aliases|\n from.map(&:to_s).sort.map(&:to_sym).each_with_index do |plural, t_index|\n table = plural._as_class.table_name\n plural._as_class.columns.map(&:name).each_with_index do |field, f_index|\n aliases[\"#{table}.#{field}\"] = \"t#{t_index}_r#{f_index}\"\n end\n end\n end\n end",
"def table_alias\n @target_alias\n end",
"def join_table_alias\n cached_fetch(:join_table_alias) do\n s, a = split_join_table_alias\n a || s\n end\n end",
"def join_table_alias\n final_reverse_edge[:alias]\n end",
"def table_alias\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.alias\n end\n end",
"def column_alias_for(field)\n column_alias = +field\n column_alias.gsub!(/\\*/, \"all\")\n column_alias.gsub!(/\\W+/, \" \")\n column_alias.strip!\n column_alias.gsub!(/ +/, \"_\")\n @connection.table_alias_for(column_alias)\n end",
"def graph_alias_columns(graph_aliases)\n gas = {}\n identifiers = graph_aliases.map do |col_alias, tc| \n table, column, value = Array(tc)\n column ||= col_alias\n gas[col_alias] = [table, column].freeze\n identifier = value || SQL::QualifiedIdentifier.new(table, column)\n identifier = SQL::AliasedExpression.new(identifier, col_alias) if value || column != col_alias\n identifier\n end\n [identifiers, gas]\n end",
"def chooseTableAlias\n @metadata.chooseTableAlias\n end",
"def chooseColumnAlias\n @metadata.chooseColumnAlias\n end",
"def aliases!\n @schema.aliases!\n end",
"def column_aliases(node)\n @name_and_alias_cache[node]\n end",
"def split_join_table_alias\n associated_class.dataset.split_alias(self[:join_table])\n end",
"def addTableAlias(theAlias)\n @metadata.addTableAlias(theAlias)\n end",
"def target_alias\n @model.table_name\n end",
"def alias_decls; end",
"def table_alias_for(table_name)\n table_name.gsub(/\\./, '_')\n end",
"def aliases; end",
"def aliases; end",
"def aliases; end",
"def schema_ds_join(table_name, opts)\n [:information_schema__columns, {:table_catalog=>:table_catalog,\n :table_schema => :table_schema, :table_name => :table_name} , :c]\n end",
"def to_table_reference(table_alias=nil)\n \"(#{sql})#{\" #{quote_identifier(table_alias)}\" if table_alias}\"\n end",
"def extend_sql_avoiding_table_naming_clashes!(sql, addition)\r\n used_table_aliases = table_aliases_from_join_fragment(addition)\r\n old_table_aliases = table_aliases_from_join_fragment(sql)\r\n (used_table_aliases & old_table_aliases).each do |join_table_alias|\r\n i = 0\r\n begin\r\n i += 1\r\n new_alias = \"renamed_join_table_#{i}\"\r\n end until !used_table_aliases.include?(new_alias)\r\n convert_table_name_to_new_alias!(sql, join_table_alias, new_alias)\r\n end\r\n sql << \" #{addition} \"\r\n end",
"def aliased_name; end",
"def table_name\n respond_to?(:first_source_alias) ? first_source_alias : super\n end",
"def alias_names; end",
"def set_graph_aliases(graph_aliases)\n columns, graph_aliases = graph_alias_columns(graph_aliases)\n if graph = opts[:graph]\n select(*columns).clone(:graph => Hash[graph].merge!(:column_aliases=>graph_aliases.freeze).freeze)\n else\n Sequel::Deprecation.deprecate(\"Calling Dataset#set_graph_aliases before Dataset#graph\", \"Call Dataset#set_graph_aliases after Dataset#graph now\")\n select(*columns).clone(:graph_aliases=>graph_aliases.freeze) # SEQUEL5: Remove\n end\n end",
"def addColumnAlias(theAlias)\n @metadata.addColumnAlias(theAlias)\n end",
"def assign_join association = nil\n @table_alias = association ? \"#{association.aliased_table_name}.\" : \"\"\n end",
"def aliases\n end",
"def aliased_table_name_for_with_sqlserver_support(name,suffix=nil)\n if !parent.table_joins.blank? && parent.table_joins.to_s.downcase =~ %r{join(\\s+\\w+)?\\s+#{Regexp.escape(active_record.connection.quote_table_name(name.downcase))}\\son}i\n @join_dependency.table_aliases[name] += 1\n end\n unless @join_dependency.table_aliases[name].zero?\n # if the table name has been used, then use an alias\n name = active_record.connection.table_alias_for \"#{pluralize(reflection.name)}_#{parent_table_name}#{suffix}\"\n table_index = @join_dependency.table_aliases[name]\n @join_dependency.table_aliases[name] += 1\n name = name[0..active_record.connection.table_alias_length-3] + \"_#{table_index+1}\" if table_index > 0\n else\n @join_dependency.table_aliases[name] += 1\n end\n name\n end",
"def column_aliases\n @column_aliases ||= Hash.new\n end",
"def table_alias_name(value)\n data.table_alias_name = value\n end",
"def table_aliases_from_join_fragment(sql)\r\n return [] if sql.blank?\r\n return sql.scan(/JOIN\\s+(`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+)(?:\\s+(?:AS\\s+)?(`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+))?/i).collect do |name|\r\n ((name[1] =~ /^ON$/i) ? name[0] : (name[1] || name[0])).gsub(/^[`\"\\[]?(.*)[`\"\\]]?$/, '\\1')\r\n end\r\n end",
"def column_aliases\n if @table_expr.is_a?(AliasedExpression)\n @table_expr.columns\n end\n end",
"def aliases\n\n end",
"def alias_column(pretty, original)\n self.column_aliases[pretty] = original\n end",
"def get_alias(use_name, table_name = nil, avoid_alias = true)\n table_name ||= use_name\n\n base = use_name[0..1]\n list = (@unique_alias[base] ||= [])\n list2 = @table_alias[use_name] ||= []\n if avoid_alias && !@tables.include?(table_name)\n alias_name = use_name\n elsif @tables.include?(use_name)\n # links, li1, li2, li3\n alias_name = \"#{base}#{list.size}\"\n else\n # ob1, obj2, objects\n alias_name = \"#{base}#{list.size + 1}\"\n end\n\n # We add to both because @table_alias[use_name] is used in table(use_name)\n # and @table_alias[use_name]\n list << alias_name\n list2 << alias_name\n alias_name\n end",
"def convert_table_name_to_new_alias!(sql, old_table_name, new_alias)\r\n regex = Regexp.new(\"(?:(?:JOIN|AS)?\\\\s+|\\\\()[`\\\"\\\\[]?#{old_table_name}[`\\\"\\\\]]?(?:\\\\s+(?:AS\\\\s+)?(?:`[^`]+`|\\\"[^\\\"]+\\\"|\\\\[[^\\\\]]+\\\\]|\\\\S+)|\\\\.|\\\\s)\", Regexp::IGNORECASE)\r\n sql.gsub!(regex) do |match|\r\n prefix = (match =~ /^\\(/) ? '(' : ''\r\n suffix = match.gsub(/^.*?(\\s+ON|.)$/i, '\\1')\r\n if test = match.match(/^JOIN\\s+(?:`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+)(\\s+(?:AS\\s+)?(?:`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+))/i) and !(test.captures.first =~ /^ ON$/i)\r\n # If the table name is already aliased within this match, don't replace it\r\n result = match\r\n else\r\n replacement = \"JOIN #{old_table_name} AS #{new_alias}\" if match =~ /^JOIN\\s/i\r\n replacement = \"AS #{new_alias}\" if match =~ /^AS\\s/i\r\n replacement = \" #{new_alias}\" unless match =~ /^(JOIN|AS)\\s/i\r\n result = \"#{prefix}#{replacement}#{suffix}\"\r\n end\r\n result\r\n end\r\n end",
"def alias(name)\n Column.new(jcolumn.as(name))\n end",
"def join_alias(join)\r\n table_name = join.model_class.table_name\r\n new_alias = table_name\r\n if @join_aliases[table_name]\r\n new_alias = \"#{pluralize(join.reflection)}_#{join.parent.model_class.table_name}\"\r\n if @join_aliases[table_name].include? new_alias\r\n new_alias += '1'\r\n while @join_aliases[table_name].include? new_alias\r\n new_alias = new_alias.succ\r\n end\r\n end\r\n end\r\n (@join_aliases[table_name] ||= []) << new_alias\r\n return new_alias\r\n end",
"def default_join_table_qualification\n :symbol\n end",
"def table_alias_length\n 31\n end",
"def to_sql\n @join.association_join.gsub(/::ts_join_alias::/,\n \"#{@reflection.klass.connection.quote_table_name(@join.parent.aliased_table_name)}\"\n )\n end",
"def alias_of; end",
"def v(o)\n case o\n when Symbol\n t, column, aliaz = Sequel.split_symbol(o)\n if t\n o\n elsif aliaz\n SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(@table, SQL::Identifier.new(column)), aliaz)\n else\n SQL::QualifiedIdentifier.new(@table, o)\n end\n when SQL::Identifier\n SQL::QualifiedIdentifier.new(@table, o)\n when SQL::QualifiedIdentifier, SQL::JoinClause\n # Return these directly, so we don't accidentally qualify symbols in them.\n o\n else\n super\n end\n end",
"def v(o)\n case o\n when Symbol\n t, column, aliaz = Sequel.split_symbol(o)\n if t\n o\n elsif aliaz\n SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(@table, SQL::Identifier.new(column)), aliaz)\n else\n SQL::QualifiedIdentifier.new(@table, o)\n end\n when SQL::Identifier\n SQL::QualifiedIdentifier.new(@table, o)\n when SQL::QualifiedIdentifier, SQL::JoinClause\n # Return these directly, so we don't accidentally qualify symbols in them.\n o\n else\n super\n end\n end",
"def alias(name, visibility, original_name, original_exec, original_mod)\n Rubinius.primitive :methodtable_alias\n raise PrimitiveFailure, \"MethodTable#alias primitive failed\"\n end",
"def calculate_reverse_edge_aliases(reverse_edges)\n aliases = [associated_class.table_name]\n reverse_edges.each do |e|\n table_alias = e[:table]\n if aliases.include?(table_alias)\n i = 0\n table_alias = loop do\n ta = :\"#{table_alias}_#{i}\"\n break ta unless aliases.include?(ta)\n i += 1\n end\n end\n aliases.push(e[:alias] = table_alias)\n end\n end",
"def aliases(other)\n common_names(other).each_with_object({}) { |name, aliases|\n left, right = fetch(name), other.fetch(name)\n aliases[name] = :\"#{name}_#{right}\" if left != right\n }\n end",
"def aliases=(_arg0); end",
"def aliases=(_arg0); end",
"def symbol_to_column_ref(sym)\n c_table, column, c_alias = split_symbol(sym)\n \"#{\"#{quote_identifier(c_table)}.\" if c_table}#{quote_identifier(column)}#{\" AS #{quote_identifier(c_alias)}\" if c_alias}\"\n end",
"def find_alias(associations)\n if BabySqueel::ActiveRecord::VersionHelper.at_least_6_1?\n # construct_tables! got removed by rails\n # https://github.com/rails/rails/commit/590b045ee2c0906ff162e6658a184afb201865d7\n #\n # construct_tables_for_association! is a method from the polyamorous (ransack) gem\n join_root = join_dependency.send(:join_root)\n join_root.each_children do |parent, child|\n join_dependency.construct_tables_for_association!(parent, child)\n end\n else\n # If we tell join_dependency to construct its tables, Active Record\n # handles building the correct aliases and attaching them to its\n # JoinDepenencies.\n join_dependency.send(:construct_tables!, join_dependency.send(:join_root))\n end\n\n join_association = find_join_association(associations)\n join_association.table\n end",
"def translation_columns_for_select(columns, options = {})\n tname = options[:translation_table_alias] || translation_table_name\n join_name = options[:join_alias] || translation_join_alias\n columns.collect {|column| \"#{tname}.#{column} as #{join_name}_#{column}\" }.join(',')\n end",
"def aliases\n short ? [name, short] : [name]\n end",
"def columns_aliases_and_tables_for_properties(properties)\n columns_by_property, aliased_columns, tables = super\n unless @restricted_to_types && @restricted_to_types.length == 1\n aliased_columns << @aliased_type_column\n tables << self.class.type_column_table unless tables.include?(self.class.type_column_table)\n end\n return columns_by_property, aliased_columns, tables\n end",
"def is_alias?; end",
"def metaalias(to, from)\n metaclass.instance_eval{alias_method to, from}\n end",
"def aliased_expression_sql(ae)\n \"#{literal(ae.expression)} AS #{quote_identifier(ae.aliaz)}\"\n end",
"def as(alias_name)\n \"#{self} as #{alias_name}\".to_sym\n end",
"def as(alias_name)\n \"#{self} as #{alias_name}\".to_sym\n end",
"def column_definitions(table_name)\r\n query <<-end_sql\r\n SELECT a.attname, format_type(a.atttypid, a.atttypmod), d.adsrc, a.attnotnull, c.consrc\r\n FROM pg_attribute a LEFT JOIN pg_attrdef d\r\n ON a.attrelid = d.adrelid AND a.attnum = d.adnum\r\n LEFT JOIN pg_constraint c ON a.attrelid = c.conrelid AND \r\n c.contype = 'c' AND c.conkey[1] = a.attnum\r\n WHERE a.attrelid = '#{table_name}'::regclass\r\n AND a.attnum > 0 AND NOT a.attisdropped\r\n ORDER BY a.attnum\r\n end_sql\r\n end",
"def arel_table\n @arel_table ||= begin\n t= Arel::Table.new(table)\n t.table_alias = alias_name if alias_name != table\n t\n end\n end",
"def relation_alias(embed)\n relation_aliases[embed]\n end",
"def use_any_aliases(custom_fields)\n custom_fields.map { |column_name,column_type| [column_name.gsub(/.*AS\\s+/, ''), column_type] }.to_h\n end",
"def quote_identifier_append(sql, name)\n name = (table_mappings[name.to_sym] || name) if name.respond_to?(:to_sym)\n super(sql, name)\n end",
"def aliases_for attributes\n attributes.each do |attr, nicks|\n [nicks].flatten.each do |nick|\n self.class_eval(\"alias #{nick} #{attr}\n alias #{nick}= #{attr}=\")\n end\n end\n end",
"def table(table)\n @table = table\n\n @aliased_table = @table.new(@table.table_name)\n end",
"def full_name\n \"#{@table_alias}#{name}\"\n end",
"def add_table(use_name, table_name = nil, avoid_alias = true, type = nil, &block)\n alias_name = get_alias(use_name, table_name, avoid_alias)\n add_alias_to_tables(table_name || use_name, alias_name, type, &block)\n end",
"def ref_table(method, source, col) \n (method ?\n source.reflect_on_association(method).klass :\n source).table_name\n end",
"def table_name\n \"#{Dynamoid::Config.namespace}_index_\" + source.table_name.sub(\"#{Dynamoid::Config.namespace}_\", '').singularize + \"_#{name.collect(&:to_s).collect(&:pluralize).join('_and_')}\"\n end",
"def join_table_source\n cached_fetch(:join_table_source){split_join_table_alias[0]}\n end",
"def rename(aliases)\n new(relation.rename(aliases))\n end",
"def scoped_table(name)\n table(name).as(\"#{property.name}_#{name}\")\n end",
"def aliasing_hash_aliases\n @aliases ||= {}\n end",
"def aliases\n @aliases ||= FIELD_ALIASES.dup\n end",
"def left_joins_by_alias\n {}\n end",
"def preserve_query_aliases\n class << self\n # I have to do the interesting hack below instead of using alias_method\n # because there's some sort of weirdness going on with how __all binds\n # to all in Ruby 2.0.\n __all = self.instance_method(:all)\n\n define_method(:__all) do\n __all.bind(self).call\n end\n\n # From ActiveRecord::Querying\n delegate :find, :take, :take!, :first, :first!, :last, :last!, :exists?, :any?, :many?, :to => :__all\n delegate :first_or_create, :first_or_create!, :first_or_initialize, :to => :__all\n delegate :find_or_create_by, :find_or_create_by!, :find_or_initialize_by, :to => :__all\n delegate :find_by, :find_by!, :to => :__all\n delegate :destroy, :destroy_all, :delete, :delete_all, :update, :update_all, :to => :__all\n delegate :find_each, :find_in_batches, :to => :__all\n delegate :select, :group, :order, :except, :reorder, :limit, :offset, :joins,\n :where, :preload, :eager_load, :includes, :from, :lock, :readonly,\n :having, :create_with, :uniq, :distinct, :references, :none, :unscope, :to => :__all\n delegate :count, :average, :minimum, :maximum, :sum, :calculate, :pluck, :ids, :to => :__all\n end\n end",
"def find_compatible_table_alias( clause, parent_query )\n alias_ = false\n\n # Sanity check. Only IN queries use the JOIN syntax .\n return alias_ if parent_query['operator'].blank? || 'IN' != parent_query['operator']\n\n # Since we're only checking IN queries, we're only concerned with OR relations.\n return alias_ if parent_query['relation'].blank? || 'OR' != parent_query['relation']\n\n compatible_operators = [ 'IN' ]\n parent_query.each do |sibling|\n next if !sibling.is_a?(Array) || !is_first_order_clause(sibling)\n next if sibling['alias'].blank? || sibling['operator'].blank?\n # The sibling must both have compatible operator to share its alias.\n if compatible_operators.include? sibling['operator'].upcase\n alias_ = sibling['alias']\n break\n end\n end\n\n alias_\n end",
"def include_table(ref, use_index = nil)\n parts = ref.split(' ')\n if parts.size == 3\n parts.delete_at(1)\n end\n @data.table_ids.merge(parts)\n\n if parts.size == 2\n tbl_name, tbl_alias = parts\n else\n tbl_name = tbl_alias = parts.first\n end\n return SqlStmtLib::SqlTable.new(ref, tbl_name, tbl_alias, use_index)\n end",
"def table_name_prefix; end",
"def add_piggy_back_sql_data!(reflection_name, prefix, table_alias, attributes, select, joins, conditions, join_type)\n ktn = table_name\n kpkey = primary_key\n reflection = reflections[reflection_name]\n atn = reflection.table_name\n attributes.each do |attr|\n if table_alias\n select << \", #{table_alias}.#{attr} AS #{prefix}_#{attr}\"\n else\n select << \", #{atn}.#{attr} AS #{prefix}_#{attr}\"\n end\n end\n fkey = reflection.primary_key_name\n fpkey = reflection.klass.primary_key\n\n case reflection.macro\n when :belongs_to\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fpkey}=#{ktn}.#{fkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fpkey}=#{ktn}.#{fkey} \"\n end\n when :has_one\n if table_alias\n joins << \" #{join_type} JOIN #{atn} #{table_alias} ON #{table_alias}.#{fkey}=#{ktn}.#{kpkey} \"\n else\n joins << \" #{join_type} JOIN #{atn} ON #{atn}.#{fkey}=#{ktn}.#{kpkey} \"\n end\n when :has_many\n raise \"piggy_back: aliasing not implemented for has_many\" if table_alias\n if reflection.options[:through]\n ttn = reflection.through_reflection.klass.table_name\n tkfkey = reflection.through_reflection.primary_key_name\n tafkey = reflection.source_reflection.primary_key_name\n\n through_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n source_conditions = reflection.through_reflection.options[:conditions] ?\n \" AND \" + reflection.through_reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{ttn} ON (#{ttn}.#{tkfkey}=#{ktn}.#{kpkey}#{through_conditions})\"\n joins << \" LEFT JOIN #{atn} ON (#{ttn}.#{tafkey}=#{atn}.#{fpkey}#{source_conditions}) \"\n else\n reflection_conditions = reflection.options[:conditions] ?\n \" AND \" + reflection.options[:conditions] : \"\"\n\n joins << \" LEFT JOIN #{atn} ON (#{atn}.#{fkey}=#{ktn}.#{kpkey}#{reflection_conditions}) \"\n end\n else\n raise \"can't piggy back #{reflection.macro} on class #{klass}\"\n end\n end",
"def alias(alias_name)\n clone.alias! alias_name\n end",
"def reference_columns(excluded_supertypes) #:nodoc:\n trace :columns, \"Reference Columns for #{name}\" do\n\n if absorbed_via and\n # If this is not a subtype, or is a subtype that has its own identification, use the id.\n (all_type_inheritance_as_subtype.size == 0 ||\n all_type_inheritance_as_subtype.detect{|ti| ti.provides_identification })\n rc = absorbed_via.from.reference_columns(excluded_supertypes)\n # The absorbed_via reference gets skipped here, and also in object_type.rb\n trace :columns, \"Skipping #{absorbed_via}\"\n absorbed_mirror ||= absorbed_via.reversed\n rc.each{|col| col.prepend(absorbed_mirror)}\n return rc\n end\n\n # REVISIT: Should have built preferred_identifier_references\n preferred_identifier.role_sequence.all_role_ref.map do |role_ref|\n # REVISIT: Should index references by to_role:\n ref = references_from.detect {|ref| ref.to_role == role_ref.role}\n\n raise \"reference for role #{role_ref.describe} not found on #{name} in #{references_from.size} references:\\n\\t#{references_from.map(&:to_s)*\"\\n\\t\"}\" unless ref\n\n ref.columns({})\n end.flatten\n end\n end",
"def join_table(type, table, expr=nil, options=OPTS, &block)\n if expr.is_a?(SQL::AliasedExpression) && expr.expression.is_a?(Array) && !expr.expression.empty? && expr.expression.all?\n options = options.merge(:join_using=>true)\n end\n super\n end",
"def attr_alias(name)\n name = name.intern\n if synonym = self.class.attr_alias(name)\n return synonym\n else\n return name\n end\n end",
"def using(key, &block)\n if @type == :natural then cmd = 'JOIN '\n elsif @type == :left then cmd = 'LEFT JOIN '\n elsif @type == :right then cmd = 'RIGHT JOIN '\n end\n key = key.to_s\n using_string = \"#{@base_klass.table_name}.#{key} = \"\n using_string << \"#{@join_klass.table_name}.#{key}\"\n @string = \"\\n\" << cmd << @join_klass.table_name << ' ON (' << using_string << ') '\n @clause_parser.append_join(self)\n yield @clause_parser # use extended clause parser for inner block argument\n end",
"def relation_aliases\n @relation_aliases ||= relations.inject(HashWithIndifferentAccess.new) do |h, (k, v)|\n store_as = Mongoid::Compatibility::Version.mongoid7_or_newer? ? v.store_as : v[:store_as]\n h[store_as || k] = k\n h\n end\n end",
"def columns_for_distinct(columns, orders) #:nodoc:\n order_columns = orders.reject(&:blank?).map{ |s|\n # Convert Arel node to string\n s = s.to_sql unless s.is_a?(String)\n # Remove any ASC/DESC modifiers\n s.gsub(/\\s+(?:ASC|DESC)\\b/i, '')\n .gsub(/\\s+NULLS\\s+(?:FIRST|LAST)\\b/i, '')\n }.reject(&:blank?).map.with_index { |column, i| \"#{column} AS alias_#{i}\" }\n\n [super, *order_columns].join(', ')\n end",
"def columns(table_name, name = nil) end",
"def columns_for_distinct(columns, orders)\n # Lifted from the default Postgres implementation\n order_columns = orders.map{ |s|\n # Convert Arel node to string\n s = s.to_sql unless s.is_a?(String)\n # Remove any ASC/DESC modifiers\n s.gsub(/\\s+(ASC|DESC)\\s*(NULLS\\s+(FIRST|LAST)\\s*)?/i, '')\n }.reject(&:blank?).map.with_index { |column, i| \"#{column} AS alias_#{i}\" }\n\n [super, *order_columns].join(', ')\n end",
"def table_alias_length\n @table_alias_length ||= 63\n end",
"def db_query_transform__subquery query, tmp_table=\"resultset_table\"\n \"(#{query}) as #{tmp_table}\"\n end",
"def column_all_sql(ca)\n \"#{quote_identifier(ca.table)}.*\"\n end",
"def table_alias_name(*partition_key_values)\n return collect_first(*partition_key_values, &:table_alias_name)\n end",
"def default_associated_key_alias\n self[:uses_left_composite_keys] ? (0...self[:through].first[:left].length).map{|i| :\"x_foreign_key_#{i}_x\"} : :x_foreign_key_x\n end",
"def columns_for_distinct(columns, orders) #:nodoc:\n order_columns = orders.reject(&:blank?).map{ |s|\n # Convert Arel node to string\n s = s.to_sql unless s.is_a?(String)\n # Remove any ASC/DESC modifiers\n s.gsub(/\\s+(?:ASC|DESC)\\b/i, '')\n .gsub(/\\s+NULLS\\s+(?:FIRST|LAST)\\b/i, '')\n }.reject(&:blank?).map.with_index { |column, i| \"#{column} AS alias_#{i}\" }\n\n (order_columns << super).join(', ')\n end",
"def alias_view(to, from, node = nil)\n trait[:alias_view] || trait(:alias_view => {})\n trait[:alias_view][to.to_s] = node ? [from.to_s, node] : from.to_s\n end"
] |
[
"0.73351204",
"0.68861693",
"0.6870926",
"0.6854861",
"0.6820711",
"0.6785581",
"0.6688088",
"0.66173035",
"0.6586291",
"0.65842116",
"0.6562811",
"0.6548786",
"0.65157646",
"0.64271873",
"0.63693255",
"0.63578135",
"0.6356919",
"0.6354602",
"0.6354602",
"0.6354602",
"0.6329644",
"0.6324852",
"0.6320547",
"0.6307218",
"0.6301448",
"0.6296561",
"0.62472534",
"0.62343955",
"0.62156135",
"0.6188894",
"0.6183736",
"0.6172649",
"0.61663294",
"0.61531377",
"0.61462927",
"0.6125778",
"0.6118975",
"0.6021609",
"0.59912246",
"0.5990813",
"0.58525914",
"0.58516103",
"0.58147204",
"0.58025163",
"0.57437557",
"0.57409817",
"0.57409817",
"0.5735618",
"0.57304215",
"0.5689596",
"0.56636435",
"0.56636435",
"0.56356305",
"0.56336457",
"0.5622611",
"0.5617771",
"0.5594107",
"0.55920774",
"0.55841076",
"0.55687135",
"0.55685663",
"0.55685663",
"0.5554542",
"0.5531325",
"0.55292886",
"0.551545",
"0.54761994",
"0.5457323",
"0.54561204",
"0.54547906",
"0.5447902",
"0.5409634",
"0.5396271",
"0.5396088",
"0.5373804",
"0.5371839",
"0.53699625",
"0.5369114",
"0.5361891",
"0.5361001",
"0.5352581",
"0.53334564",
"0.5332819",
"0.5316876",
"0.5316565",
"0.5312228",
"0.5303451",
"0.53028154",
"0.5299505",
"0.5297925",
"0.5285501",
"0.52737284",
"0.52653867",
"0.52565426",
"0.5243585",
"0.5234031",
"0.52285564",
"0.5210943",
"0.52025914",
"0.51998293"
] |
0.66456246
|
7
|
Use a generic blob quoting method, hopefully overridden by one of the subadapters
|
def literal_blob_append(sql, v)
sql << "'" << v.gsub(/[\000-\037\047\134\177-\377]/n){|b| "\\#{("%o" % b[0..1].unpack("C")[0]).rjust(3, '0')}"} << "'"
end
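
As a quick illustration of what this octal escaping produces, here is a minimal standalone sketch of the same transformation outside any adapter class; escape_blob is an invented name for illustration, not part of Sequel's API.

# Hypothetical helper mirroring the gsub above: control bytes, the
# single quote (\047), the backslash (\134), and high bytes become a
# backslash followed by a three-digit octal code.
def escape_blob(v)
  v.b.gsub(/[\000-\037\047\134\177-\377]/n) do |b|
    "\\#{format('%o', b.unpack1('C')).rjust(3, '0')}"
  end
end

escape_blob("a'b\\c\x00") # => "a\\047b\\134c\\000" (inspect form)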
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def quote(value, column = nil)\n case value\n when String, ActiveSupport::Multibyte::Chars\n value_S = value.to_s\n if column && column.type == :binary && column.class.respond_to?(:string_to_binary)\n \"'#{column.class.string_to_binary(value_S)}'\"\n else\n super(value, column)\n end\n else\n super(value, column)\n end\n end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote(value); end",
"def blob(s)\n if s.is_a?(SQL::Blob)\n s\n else\n SQL::Blob.new(s)\n end\n end",
"def quoted_string; end",
"def quote(value, column = nil) #:nodoc:\n if value.kind_of?(String) && column && column.sql_type =~ /^xml$/\n \"xml '#{quote_string(value)}'\"\n elsif value.kind_of?(Numeric) && column && column.sql_type =~ /^money$/\n # Not truly string input, so doesn't require (or allow) escape string syntax.\n \"'#{value.to_s}'\"\n elsif value.kind_of?(String) && column && column.sql_type =~ /^bit/\n case value\n when /^[01]*$/\n \"B'#{value}'\" # Bit-string notation\n when /^[0-9A-F]*$/i\n \"X'#{value}'\" # Hexadecimal notation\n end\n elsif column && column.sql_type =~ /^datetime$/\n if (not value.nil?) && (value.acts_like?(:date) || value.acts_like?(:time))\n \"CONVERT(datetime,'#{quoted_date(value)}',120)\"\n else\n # Fixtures#insert_fixtures sets fields like +updated_now+ to a String instance (\"Time.now.to_s(:db)\")\n super\n end\n elsif column && column.sql_type =~ /^boolean$/\n \"'#{value ? 1 : 0}'\"\n elsif value.class.to_s == 'System::Byte[]' && column && column.sql_type =~ /^binary$/\n \"CONVERT(varbinary(max),'0x#{bytes_to_string(value)}',1)\" \n else\n super\n end\n end",
"def literal_blob_append(sql, v)\n sql << \"'\" << db.synchronize(@opts[:server]){|c| c.escape_bytea(v)} << \"'\"\n end",
"def quotes; end",
"def blob; end",
"def blob; end",
"def quote(value, column = nil) #:nodoc:\n return super unless column\n\n case value\n when Float\n return super unless value.infinite? && column.type == :datetime\n \"'#{value.to_s.downcase}'\"\n when Numeric\n return super unless column.sql_type == 'money'\n # Not truly string input, so doesn't require (or allow) escape string syntax.\n \"'#{value}'\"\n when String\n case column.sql_type\n when 'bytea' then \"E'#{escape_bytea(value)}'::bytea\"\n when 'xml' then \"xml '#{quote_string(value)}'\"\n when /^bit/\n case value\n when /^[01]*$/ then \"B'#{value}'\" # Bit-string notation\n when /^[0-9A-F]*$/i then \"X'#{value}'\" # Hexadecimal notation\n end\n else\n super\n end\n else\n super\n end\n end",
"def blob(s)\n SQL::Blob.new(s)\n end",
"def store_blob(object,field_name,blob)\n super #=> returns blob[:tempfile]\n end",
"def my_quote\n\tend",
"def quotes\n end",
"def literal_blob_append(sql, v)\n if v.empty?\n sql << \"''\"\n else\n sql << \"x'#{v.unpack('H*').first}'\"\n end\n end",
"def typecast_value_blob(value)\n value.is_a?(Sequel::SQL::Blob) ? value : Sequel::SQL::Blob.new(value)\n end",
"def quote(name)\n raise NotImplementedError\n end",
"def quote(character: T.unsafe(nil)); end",
"def quote(character: T.unsafe(nil)); end",
"def quote(character: T.unsafe(nil)); end",
"def set_quote\n \n end",
"def store_blob(blob)\n super #=> returns blob[:tempfile]\n end",
"def as_you_like_it_quote; end",
"def quote_and_escape(value)\n case value\n when \"NULL\"\n value\n else\n value = value.gsub(/\\\\/, ARRAY_ESCAPE)\n value.gsub!(/\"/,\"\\\\\\\"\")\n \"\\\"#{value}\\\"\"\n end\n end",
"def convert_type(v)\n case v\n when Java::NetSourceforgeJtdsJdbc::ClobImpl\n convert_type(v.getSubString(1, v.length))\n else\n super\n end\n end",
"def type_literal_generic_file(column)\n :blob\n end",
"def quote(string)\n\t\t\tstring.nil? ? \"NULL\" : \"'#{super}'\"\n\t\tend",
"def quote(value, column = nil)\n if value.kind_of?(GeoRuby::SimpleFeatures::Geometry)\n \"'#{value.as_hex_ewkb}'\"\n else\n original_quote(value,column)\n end\n end",
"def quote(value, column = nil)\n if value.kind_of?(GeoRuby::SimpleFeatures::Geometry)\n \"'#{value.as_hex_ewkb}'\"\n else\n original_quote(value,column)\n end\n end",
"def quote(value, column = nil)\n if value.kind_of?(GeoRuby::SimpleFeatures::Geometry)\n \"'#{value.as_hex_ewkb}'\"\n else\n original_quote(value,column)\n end\n end",
"def quote(value)\n sanity_check\n @handle.quote(value)\n end",
"def quote_string( s ) #:nodoc:\n @connection.escape( s )\n end",
"def quote(value, column = nil)\n if value.kind_of?(GeoRuby::SimpleFeatures::Geometry)\n \"GeomFromWKB(0x#{value.as_hex_wkb},#{value.srid})\"\n else\n original_quote(value,column)\n end\n end",
"def force_quotes?() @force_quotes end",
"def to_blob; end",
"def to_blob; end",
"def to_blob; end",
"def to_blob; end",
"def to_blob; end",
"def to_blob; end",
"def hamlet_quote; end"
] |
[
"0.6421807",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.64217883",
"0.6249342",
"0.62267023",
"0.61998266",
"0.6136577",
"0.6121026",
"0.60496074",
"0.6015192",
"0.6015192",
"0.5928459",
"0.58845276",
"0.5868109",
"0.5847851",
"0.5818831",
"0.5719846",
"0.56805617",
"0.56579536",
"0.56303865",
"0.56295395",
"0.56295395",
"0.56238735",
"0.5567255",
"0.5546605",
"0.55390126",
"0.5487305",
"0.5481843",
"0.5479986",
"0.5433299",
"0.5433299",
"0.5432572",
"0.542038",
"0.5410178",
"0.53879106",
"0.5374121",
"0.53693616",
"0.53693616",
"0.53693616",
"0.53693616",
"0.53693616",
"0.53693616",
"0.5361755"
] |
0.61955583
|
63
|
PostgreSQL uses FALSE for false values
|
def literal_false
'false'
end
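
A short usage sketch, assuming a Sequel connection to PostgreSQL; the connection URL below is hypothetical. Database#literal exposes the keyword form this method produces, alongside its literal_true counterpart.

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical URL
DB.literal(false) # => "false"
DB.literal(true)  # => "true"

Other adapters may literalize booleans differently (as quoted characters or 0/1), which is why the PostgreSQL adapter overrides the generic method.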
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def db_boolean(val)\n if adapter == :postgres\n val ? 'TRUE' : 'FALSE'\n else\n val ? '1' : '0'\n end\n end",
"def to_bool() false end",
"def sql_boolean\n BooleanExpression.new(:NOOP, self)\n end",
"def sql_boolean\n BooleanExpression.new(:NOOP, self)\n end",
"def sql_boolean\n self\n end",
"def to_bool() true end",
"def boolean cols\n decode_values :boolean, cols\n end",
"def literal_false\n BOOL_FALSE\n end",
"def column_value_boolean\n case Utilities.adapter\n when 'mysql2', 'postgresql'\n column_value ? \"true\" : \"false\"\n when 'sqlite3', 'sqlserver'\n column_value ? \"1\" : \"0\"\n end\n end",
"def typecast_value_boolean(value)\n case value\n when false, 0, \"0\", /\\Af(alse)?\\z/i, /\\Ano?\\z/i\n false\n else\n blank_object?(value) ? nil : true\n end\n end",
"def boolify(val)\n\nend",
"def type_literal_generic_boolean(column)\n :boolean\n end",
"def typecast_value_boolean(opts={});true;end",
"def boolean_expr safe_column_name\n safe_column_name\n end",
"def value_to_boolean(value)\n value = get_param(value) if value.is_a?(Symbol)\n ActiveRecord::ConnectionAdapters::Column.value_to_boolean(value)\n end",
"def query_yields_boolean?\n false\n end",
"def booleans cols\n decode_values :boolean, cols, true\n end",
"def prologify\n \"false\"\n end",
"def to_bool(value)\n ! FALSE_VALUES.include?(value)\n end",
"def to_b\n self != \"false\"\n end",
"def cast_to_boolean(input)\n ActiveRecord::Type::Boolean.new.type_cast_from_user(input)\n end",
"def to_bool\n true\n end",
"def parse_bool() false end",
"def parse_bool() false end",
"def munge_boolean(value)\n case value\n when true, 'true', :true\n 'true'\n when false, 'false', :false\n 'false'\n else\n raise \"Invalid value for munge_boolean #{value}\"\n end\n end",
"def boolean(value)\n value ? true : false\n end",
"def on_false(node)\n :false\n end",
"def supports_is_true?\n false\n end",
"def false_true\n alternatives false, true\n end",
"def to_bool str\r\n ActiveRecord::Type::Boolean.new.type_cast_from_user(str)\r\n end",
"def query_yields_boolean?\n false\n end",
"def boolean\n Util.from_bytes :boolean, value\n end",
"def get_boolean value #:nodoc:\n # some exceptions\n value = false if value == :low or value == 0 or value == nil or value == :off or value == :ground or value == :gnd\n !! value # double invert value in to boolean form\n end",
"def to_boo string\n\t\tActiveRecord::Type::Boolean.new.cast(string)\n\tend",
"def boolean_type\n 'Boolean'\n end",
"def to_boolean(value)\n case value\n when :true, 'true'\n true\n else\n false\n end\n end",
"def true?\n !false?\n end",
"def to_bool; self; end",
"def to_bool; self; end",
"def literal_true\n 'true'\n end",
"def false \n \"false\" \n end",
"def false?\n value.nil? || value == false\n end",
"def convert_to_boolean(value)\n value != 'no'\n end",
"def format_boolean(val, istruct)\n return istruct.nil_text if val.nil?\n\n val ? istruct.true_text : istruct.false_text\n end",
"def format_boolean(val, istruct)\n return istruct.nil_text if val.nil?\n\n val ? istruct.true_text : istruct.false_text\n end",
"def value_to_boolean(value)\n if value.is_a?(String) && value.blank?\n nil\n else\n TRUE_VALUES.include?(value)\n end\n end",
"def literal_true\n BOOL_TRUE\n end",
"def clean_up_boolean(_, q_val)\n %w(t true).include? q_val\n end",
"def get_boolean_value\n\t\tend",
"def to_bool\n if (self.to_bool == 1)\n puts \"TRUE\"\n elsif (self.to_bool == 0)\n puts \"FALSE\"\n elsif (self.to_bool == -1)\n puts \"NaN\"\n end\nend",
"def sql_boolean\n BooleanExpression.new(self.op, *self.args)\n end",
"def to_b(value)\n [0,false,nil].include?(value) ? false : true\n end",
"def value_to_boolean(value)\n return value if value==true || value==false\n case value.to_s.downcase\n when \"true\", \"t\", \"1\" then true\n else false\n end\n end",
"def value_to_boolean(value)\n if value.is_a?(String) && value.empty?\n nil\n else\n TRUE_VALUES.include?(value)\n end\n end",
"def fix_booleans\n Post.where.not(published: 't').update_all(\"published = 'f'\")\n end",
"def to_boolean(val)\n val && (val.to_s.match(/(true|t|yes|y|1)$/i) != nil)\n end",
"def value_if_false=(value)\n @value_if_false = value\n end",
"def to_bool\n return false if self.downcase == \"false\"\n return true\n end",
"def sql_true\n Rails.env.production? ? \"true\" : \"'t'\"\n end",
"def type_literal_generic_trueclass(column)\n :boolean\n end",
"def to_bool\n true\n end",
"def coerce_bool(value)\n case value\n when nil, false then false\n when Numeric then !value.zero?\n else\n true\n end\n end",
"def boolean(**props)\n transform(type: :boolean, **props) do |value|\n !!value\n end\n end",
"def value_if_false\n return @value_if_false\n end",
"def to_bool(value)\n value.to_s.downcase == 'true' ? true : false\n end",
"def to_bool\n !!self\n end",
"def unquoted_false\n 'f'\n end",
"def boolean_default_true(value)\n value.nil? || value\n end",
"def nullify(bool)\n bool ? bool : nil\n end",
"def post_process_boolean( val )\n\t\t\treturn TRUE_VALUES.include?( val.to_s.downcase )\n\t\tend",
"def parse_bool() true end",
"def boolean(arg)\n case arg\n when 'true'\n 1\n when 'false'\n 0\n when nil\n 0\n end\n end",
"def conditionally_false\n\t\t!self\n\tend",
"def boolean_value(object)\n\treturn object if object.is_a?(PRBool)\n\treturn PRBool.new(false) if object.is_a?(PRNil)\n\tif object.is_a?(PRNumber) then\n\t\treturn PRBool.new(object._value != 0)\n\tend\n\treturn PRBool.new(true)\nend",
"def schema_column_type(db_type)\n Sequel::Mysql2.convert_tinyint_to_bool && db_type == 'tinyint(1)' ? :boolean : super\n end",
"def value?\n return !self.flag?\n end",
"def false_values\n [false, 'false', :false, :no, 'no', :undef, nil, :absent]\n end",
"def bool_conv(value)\n value == HEX_FALSE ? false : true\n end",
"def convert_to_boolean(input)\n case input\n when false, 0, '0', 'false', 'no', nil then false\n else\n true\n end\n end",
"def booleanish_to_boolean(arguments, ddl)\n arguments.keys.each do |key|\n if ddl[:input].keys.include?(key)\n if ddl[:input][key][:type] == :boolean\n arguments[key] = true if arguments[key] == \"true\"\n arguments[key] = true if arguments[key] == \"yes\"\n arguments[key] = true if arguments[key] == \"1\"\n arguments[key] = false if arguments[key] == \"false\"\n arguments[key] = false if arguments[key] == \"no\"\n arguments[key] = false if arguments[key] == \"0\"\n end\n end\n end\n rescue\n true\n end",
"def single_value?\n return false\n end",
"def to_boolean(str)\n !(str == 'false' || str == '0')\nend",
"def boolean?\n !to_bool.nil?\n end",
"def tpl_boolean; @original; end",
"def boolean?(column)\n column.type == :boolean\n end",
"def convert_smallint_to_bool\n opts.has_key?(:convert_smallint_to_bool) ? opts[:convert_smallint_to_bool] : db.convert_smallint_to_bool\n end",
"def rs_cassandra_bool_config(config_val)\n if config_val.is_a?(String) then\n return config_val\n elsif config_val then\n return \"true\"\n else\n return \"false\"\n end\nend",
"def schema_column_type(db_type)\n convert_tinyint_to_bool && db_type =~ /\\Atinyint\\(1\\)/ ? :boolean : super\n end",
"def boolean(name)\n (p = first(:conditions => { :name => name })) ? p.value != 'f' : true\n end",
"def boolean\n map ->(primitive) { primitive.to_bool }\n end",
"def schema_column_type(db_type)\n if convert_smallint_to_bool && db_type =~ /smallint/i \n :boolean\n else\n super\n end\n end",
"def boolean_to_string(b)\n if b \n \"true\"\n else\n \"false\"\n end\nend",
"def false?(val)\n FALSE_VALUES.include? val\n end",
"def true \n \"true\" \n end",
"def to_boolean(value)\n [\"true\", \"1\", \"yes\"].include?(value.to_s) ? true : false\n end",
"def true_false(data)\n if data\n \"Evaluates: TRUE\"\n else\n \"Evaluates: FALSE\"\n end\nend",
"def to_b\n return true if self =~ BOOLEAN_REGEX\n false\n end",
"def to_boolean(value)\n [\"true\", \"1\", \"yes\"].include?(value.to_s) ? true : false\n end",
"def false_value?(val)\n EnvHelpers::Utils.false_value?(val)\n end",
"def get_boolean_value(field_name)\n\t\tend"
] |
[
"0.7533041",
"0.73353285",
"0.71181154",
"0.71181154",
"0.70923305",
"0.69954467",
"0.68945616",
"0.68405515",
"0.676199",
"0.6748788",
"0.67112726",
"0.6697309",
"0.6685205",
"0.66401607",
"0.66356605",
"0.6620472",
"0.660391",
"0.6573759",
"0.65710753",
"0.6537756",
"0.65251374",
"0.65144014",
"0.65061957",
"0.65061957",
"0.6499423",
"0.64966613",
"0.6493506",
"0.6482165",
"0.6461766",
"0.64512134",
"0.64457506",
"0.644174",
"0.6430926",
"0.6411498",
"0.63587517",
"0.63581145",
"0.6346933",
"0.633908",
"0.633908",
"0.63340724",
"0.632658",
"0.6313394",
"0.63096905",
"0.6309681",
"0.6309681",
"0.62827903",
"0.6281098",
"0.62687266",
"0.626688",
"0.6258082",
"0.6246117",
"0.62354726",
"0.62246686",
"0.62086195",
"0.6208567",
"0.6206546",
"0.6195475",
"0.6192438",
"0.61685133",
"0.61639124",
"0.6160562",
"0.61567736",
"0.6156133",
"0.6148994",
"0.6143569",
"0.6129888",
"0.6121414",
"0.6119687",
"0.61196244",
"0.61051303",
"0.61029077",
"0.60998476",
"0.60942465",
"0.6091629",
"0.6078521",
"0.60644585",
"0.60532534",
"0.60440934",
"0.6044006",
"0.60374373",
"0.60364366",
"0.60347825",
"0.60338825",
"0.60280913",
"0.60209054",
"0.60101557",
"0.6004102",
"0.600342",
"0.60019493",
"0.5994007",
"0.5957199",
"0.5949148",
"0.59484375",
"0.5947269",
"0.5944843",
"0.5938179",
"0.5932382",
"0.59295577",
"0.5922401",
"0.59211326"
] |
0.68685883
|
7
|
PostgreSQL quotes NaN and Infinity.
|
def literal_float(value)
if value.finite?
super
elsif value.nan?
"'NaN'"
elsif value.infinite? == 1
"'Infinity'"
else
"'-Infinity'"
end
end
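
To make the three branches concrete: Float#infinite? returns 1 for positive infinity, -1 for negative infinity, and nil for finite values, which is what the branching above relies on. A usage sketch with a hypothetical PostgreSQL connection:

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical URL
DB.literal(1.5)              # => "1.5" (finite floats pass through)
DB.literal(Float::NAN)       # => "'NaN'"
DB.literal(Float::INFINITY)  # => "'Infinity'"
DB.literal(-Float::INFINITY) # => "'-Infinity'"

PostgreSQL accepts these quoted spellings for floating-point columns, so non-finite values survive the round trip instead of becoming invalid bare literals.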
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def allow_nan?()\n #This is a stub, used for indexing\n end",
"def ∞ℂ; ::Float::INFINITY_COMPLEX; end",
"def nan\n BigDecimal('0')/BigDecimal('0')\n end",
"def handle_nan(result)\n result.nan? ? 0.0 : result\n end",
"def handle_nan(result)\n result.nan? ? 0.0 : result\n end",
"def nan?() end",
"def infinite_timestamp_value(value)\n case convert_infinite_timestamps\n when :nil\n nil\n when :string\n value\n else\n value == 'infinity' ? PLUS_INFINITY : MINUS_INFINITY\n end\n end",
"def nan?\n end",
"def test_NaN\n\n x = 0.0 / 0.0 # => NaN\n\n assert_raises RangeError do\n ln(x)\n end\n\n end",
"def sql_quote(value)\n value = value.to_s.downcase if %i[nil null NULL].include?(value)\n # noinspection RubyMismatchedReturnType\n case value\n when nil, 'nil', 'null', 'NULL' then nil\n when /^-?\\d+$/ then value.to_i\n when SQL_NUMBER then value.to_f\n when String, Symbol then \"'#{value}'\"\n else value\n end\n end",
"def checked(value)\n value = zero() if value.respond_to?(:nan?) && value.nan?\n value = zero() if value.respond_to?(:infinity?) && value.infinity?\n value = zero() if value.nil?\n value = value.to_f if value.is_a? BigDecimal\n\n value\n end",
"def format_value_to_null(_value)\n 'null'\n end",
"def inspect\n if @positive \n \"INFINITY\"\n else\n \"-INFINITY\"\n end\n end",
"def numeric?; float?; end",
"def sql_valuify\n nil? ? 'NULL' : \"'#{to_s.gsub(/\\\\/, '\\&\\&').gsub(/'/, \"''\").gsub(/\\t/, \"\\\\t\").gsub(/\\r/, \"\\\\r\").gsub(/\\n/, \"\\\\n\")}'\"\n end",
"def finite?\n @special != 'i' && !self.nan?\n end",
"def escape_value(v) \n if v.nil?\n # Nulls (nil)\n return \"NULL\"\n elsif v.kind_of? Integer or v.kind_of? Float\n # Integers / Floats\n return v.to_s\n else\n # Everything Else\n return \"'#{@mysql.escape(v.to_s)}'\" \n end\n end",
"def sql_number\n NumericExpression.new(:NOOP, self)\n end",
"def sql_number\n NumericExpression.new(:NOOP, self)\n end",
"def json_formatter\n Proc.new do |f,h|\n h.keys.each do |k|\n if h[k].class == Float\n h[k] = nil if (h[k].nan? or h[k].infinite?)\n end\n end\n f.puts h.to_json\n end\n end",
"def nan?(value)\n value.is_a?(Float) && value.nan?\n end",
"def quote(value, column = nil) #:nodoc:\n return super unless column\n\n case value\n when Float\n return super unless value.infinite? && column.type == :datetime\n \"'#{value.to_s.downcase}'\"\n when Numeric\n return super unless column.sql_type == 'money'\n # Not truly string input, so doesn't require (or allow) escape string syntax.\n \"'#{value}'\"\n when String\n case column.sql_type\n when 'bytea' then \"E'#{escape_bytea(value)}'::bytea\"\n when 'xml' then \"xml '#{quote_string(value)}'\"\n when /^bit/\n case value\n when /^[01]*$/ then \"B'#{value}'\" # Bit-string notation\n when /^[0-9A-F]*$/i then \"X'#{value}'\" # Hexadecimal notation\n end\n else\n super\n end\n else\n super\n end\n end",
"def singleq2null\n self.gsub(\"'\", '%00%27')\n end",
"def nan?(str)\n str !~ /^\\s*[+-]?((\\d+_?)*\\d+(\\.(\\d+_?)*\\d+)?|\\.(\\d+_?)*\\d+)(\\s*|([eE][+-]?(\\d+_?)*\\d+)\\s*)$/\n end",
"def quote_with_negative_one(value, column = nil)\n result = quote_without_negative_one(value, column)\n (value.is_a?(TrueClass) and result == '1') ? '-1' : result\n end",
"def nan?\n @special == 'n'\n end",
"def infinity?\n @infinity == true\n end",
"def test_infinity\n\n x = 1.0 / 0.0 # => Infinity\n\n assert_raises RangeError do\n ln(x)\n end\n\n end",
"def to_f\n if isFloat? then\n value = exp.to_f\n return value\n elsif exp == \"Infinity\" then return @@MAX_VALUE\n elsif exp == \"-Infinity\" then return @@MIN_VALUE\n else\n raise \"Non-float conversion in expression: \" + exp\n end\n end",
"def explicit_value\n return if value.nil?\n value < 0 ? value : \"+#{value.to_s}\"\n end",
"def normalize_scalar_property_value(value)\n return \"NaN\" if value.kind_of?(Float) && value.nan?\n\n case value\n when true, false, nil then value\n when ActiveSupport::Duration then value.to_i\n when Numeric then value\n when String then value.strip\n when Symbol then value.to_s.strip\n when Time then value.getutc.strftime(\"%Y-%m-%dT%H:%M:%S\")\n when IPAddr then value.to_s\n when FLOAT_INFINITY then \"+infinity\"\n when -FLOAT_INFINITY then \"-infinity\"\n when Array then\n out = value.map { |e| normalize_scalar_property_value(e) }\n out = :invalid_property_value if out.detect { |e| e == :invalid_property_value }\n out\n else :invalid_property_value\n end\n end",
"def quoted_value(options = {}) \n options = normalize_options(options)\n col = find_column(@column,options)\n return ActiveRecord::Base.quote_value(value) unless col \n return ActiveRecord::Base.quote_value(col.type_cast(value)) if col.number?\n return ActiveRecord::Base.quote_value(value)\n end",
"def process_nil(exp)\n return \"Qnil\"\n end",
"def round_nil_safe(value, n = 1)\n value.round(n) if value\nend",
"def type_literal_generic_float(column)\n :\"double precision\"\n end",
"def operator\n @operator ||= case [min, max]\n when [0, 0] then ''\n when [0, 1] then '?'\n when [1, Infinity] then '+'\n else\n [min, max].map {|n| n == 0 || n == Infinity ? '' : n.to_s }.join('*')\n end\n end",
"def operator\n @operator ||= case [min, max]\n when [0, 0] then ''\n when [0, 1] then '?'\n when [1, Infinity] then '+'\n else\n [min, max].map {|n| n == 0 || n == Infinity ? '' : n.to_s }.join('*')\n end\n end",
"def sql_strings(value)\n case value\n when String\n \"'#{value}'\"\n when Numeric\n value.to_s\n else\n \"null\"\n end\n end",
"def null_or_value(value)\n return \"NULL\" if value.empty?\n # Escape any single quotes to encure values returned do not not cause\n # issues with the SQL insert statement\n return \"'#{value.gsub(\"'\", \"\\\\\\\\'\")}'\"\nend",
"def nanp_format\n strfphone(NANP_FORMAT)\n end",
"def handle_float(float, lineno_column)\n Literal.new float.to_f\n end",
"def na\n field_fetch('NA')\n end",
"def process_nil(exp)\n return \"NULL\"\n end",
"def quote_numeric(value)\n value.to_s\n end",
"def to_numeric_or_nil\n self == 0 ? nil : self\n end",
"def round_nil_safe(value, n = 1)\n value.round(n) if value\n end",
"def test_do_not_raise_when_int_is_not_wider_than_64bit\n value = 9223372036854775807\n assert_equal \"'9223372036854775807'\", @conn.quote(value)\n\n value = -9223372036854775808\n assert_equal \"'-9223372036854775808'\", @conn.quote(value)\n end",
"def to_s\n @string || case\n when @object.nan? then 'NaN'\n when @object.infinite? then @object.to_s[0...-'inity'.length].upcase\n else @object.to_s\n end\n end",
"def nan?\n @special._equal?(2)\n end",
"def sql_quoted(value)\n Arel::Nodes.build_quoted(value)\n end",
"def infinity\n @infinity ||= Point.new(self, :infinity)\n end",
"def sql_null_to_blank\n self.map {|v| \"IFNULL(#{v}, '')\" }\n end",
"def quote(value)\n case value\n when NilClass then\n \"NULL\"\n when TrueClass then\n \"TRUE\"\n when FalseClass then\n \"FALSE\"\n when Float, Fixnum, Bignum then\n value.to_s\n # BigDecimals need to be output in a non-normalized form and quoted.\n when BigDecimal then\n value.to_s('F')\n else\n value.inspect\n end\n end",
"def replace_nil\n lambda { |val| val.nil? ? 'NULL' : val }\n end",
"def normalize_qvalue( qvalue )\n\t\treturn Q_DEFAULT unless qvalue\n\t\tqvalue = Float( qvalue.to_s.sub(/q=/, '') ) unless qvalue.is_a?( Float )\n\n\t\tif qvalue > Q_MAX\n\t\t\tself.log.notice \"Squishing invalid qvalue %p to %0.1f\" %\n\t\t\t\t[ qvalue, Q_DEFAULT ]\n\t\t\treturn Q_DEFAULT\n\t\tend\n\n\t\treturn qvalue\n\tend",
"def non_negative_float(value, epsilon: nil)\n result = to_float(value) or return\n result unless epsilon ? (result < -epsilon) : result.negative?\n end",
"def numeric(x)\n true if Float(x) rescue false\n end",
"def replace_nil_with_nan\n hydrate_array\n @outcome_array.each { |row| row[row.index(nil)] = \"nan\" if row.include?(nil) } \n print \"#{@outcome_array} \\n\"\nend",
"def to_d\n @object.respond_to?(:to_d) ? @object.to_d : BigDecimal(@object.to_s)\n rescue FloatDomainError\n ::Float::NAN\n end",
"def to_d\n @object.respond_to?(:to_d) ? @object.to_d : BigDecimal(@object.to_s)\n rescue FloatDomainError\n ::Float::NAN\n end",
"def ∅?; self.real.zero?; end",
"def item_to_s(item)\n if item == INFINITY\n \"*\"\n elsif item.is_a?(Numeric)\n sprintf(\"%0.2f\", item)\n else\n item.to_s\n end\n end",
"def format_if_result_is_zero(result)\n if result == 0 || result == 0.0\n result = 0\n end\n return result\nend",
"def test_format_decimal_nil\n assert_equal \"-\", Spout::Helpers::TableFormatting.format_number(nil, :decimal)\n end",
"def numeric(input)\n return Float(input) != nil rescue false\n end",
"def test_native_float_insert_manual_vs_automatic\n correct_value = \"0012345678901234567890.0123456789\".to_f\n\n connection.add_column \"test_models\", \"wealth\", :float\n\n # Do a manual insertion\n connection.transaction {\n connection.execute \"insert into test_models (id, wealth) values (#{generate_id}, 12345678901234567890.0123456789)\"\n }\n\n # SELECT\n row = TestModel.first\n assert_kind_of Float, row.wealth\n\n # If this assert fails, that means the SELECT is broken!\n assert_equal correct_value, row.wealth\n\n # Reset to old state\n TestModel.delete_all\n\n # Now use the Rails insertion\n TestModel.create wealth: BigDecimal(\"12345678901234567890.0123456789\")\n\n # SELECT\n row = TestModel.first\n assert_kind_of Float, row.wealth\n\n # If these asserts fail, that means the INSERT (create function, or cast to SQL) is broken!\n assert_equal correct_value, row.wealth\n end",
"def to_sparql(**options)\n \"isNumeric(\" + operands.first.to_sparql(**options) + \")\"\n end",
"def force_numeric?(column)\n (column.nil? || [:integer, :float, :decimal].include?(column.type))\n end",
"def test_NilClass_InstanceMethod_to_f\n\t\tassert_equal(0.0, nil.to_f)\n\tend",
"def finite?() end",
"def to_s\n if (@value)\n if (@value.to_i == @value)\n @value.to_i.to_s\n else\n\n float_digits = get_minimum_float\n\n rounded_value = \"%.#{float_digits}f\" % [@value.to_f]\n\n if Rational(rounded_value) != @value\n rounded_value += \"…\"\n end\n\n rounded_value\n end\n else\n raise CalculationError, 'Value of operand is not set'\n end\n end",
"def serialize(object)\n return nil if object.blank?\n numeric(object) rescue object\n end",
"def sanitize_js_pct(inJSPct)\n\t if inJSPct < 0.0\n\t\t 0.0\n\t end\n\t if inJSPct > 1.0 and inJSPct < 2.0\n\t\t 1.0\n\t end\n\t if inJSPct > 100.0\n\t\t 100.0\n\t end\n\t if inJSPct > 1.0\n\t\t inJSPct / 100.0\n\t else\n\t\t inJSPct\n\t end\n end",
"def numeric_empty(num)\n false\n end",
"def casted_value\n case qtype.name\n when 'date' then date_value\n when 'time' then time_value\n when 'datetime' then datetime_value\n when 'integer' then value.blank? ? nil : value.to_i\n when 'decimal' then value.blank? ? nil : value.to_f\n else value.blank? ? nil : value\n end\n end",
"def to_float(value)\n return Float(value) rescue Float::NAN\n end",
"def parse_null_expression\n return ScopedSearch::QueryLanguage::AST::OperatorNode.new(next_token, [parse_value])\n end",
"def replace_null(value)\n return 'U' if value.nil?\n\n value\nend",
"def to_s\n \"#{( \"%.f\" % @value )}#{'%'} \"\n end",
"def numeric?(object)\n true if Float(object) rescue false\nend",
"def sanitize_value(value)\n value == \"-\" ? nil : value\n end",
"def positive(from: T.unsafe(nil), to: T.unsafe(nil)); end",
"def to_sparql(**options)\n \"COALESCE(#{operands.to_sparql(delimiter: ', ', **options)})\"\n end",
"def format_math(el, opts = T.unsafe(nil)); end",
"def numeric?\n false\n end",
"def test_native_decimal_insert_manual_vs_automatic\n correct_value = '0012345678901234567890.0123456789'.to_d\n\n Person.delete_all\n Person.connection.add_column \"people\", \"wealth\", :decimal, :precision => '30', :scale => '10'\n Person.reset_column_information\n\n # Do a manual insertion\n if current_adapter?(:OracleAdapter)\n Person.connection.execute \"insert into people (id, wealth) values (people_seq.nextval, 12345678901234567890.0123456789)\"\n elsif current_adapter?(:OpenBaseAdapter) || (current_adapter?(:MysqlAdapter) && Mysql.client_version < 50003) #before mysql 5.0.3 decimals stored as strings\n Person.connection.execute \"insert into people (wealth) values ('12345678901234567890.0123456789')\"\n else\n Person.connection.execute \"insert into people (wealth) values (12345678901234567890.0123456789)\"\n end\n\n # SELECT\n row = Person.find(:first)\n assert_kind_of BigDecimal, row.wealth\n\n # If this assert fails, that means the SELECT is broken!\n unless current_adapter?(:SQLite3Adapter)\n assert_equal correct_value, row.wealth\n end\n\n # Reset to old state\n Person.delete_all\n\n # Now use the Rails insertion\n assert_nothing_raised { Person.create :wealth => BigDecimal.new(\"12345678901234567890.0123456789\") }\n\n # SELECT\n row = Person.find(:first)\n assert_kind_of BigDecimal, row.wealth\n\n # If these asserts fail, that means the INSERT (create function, or cast to SQL) is broken!\n unless current_adapter?(:SQLite3Adapter)\n assert_equal correct_value, row.wealth\n end\n\n # Reset to old state\n Person.connection.del_column \"people\", \"wealth\" rescue nil\n Person.reset_column_information\n end",
"def to\n @to == Float::INFINITY ? nil : @to\n end",
"def test_do_not_raise_when_raise_int_wider_than_64bit_is_false\n ActiveRecord.raise_int_wider_than_64bit = false\n value = 9223372036854775807 + 1\n assert_equal \"'9223372036854775808'\", @conn.quote(value)\n ActiveRecord.raise_int_wider_than_64bit = @raise_int_wider_than_64bit\n end",
"def canonicalize\n # Can't use simple %f transformation due to special requirements from\n # N3 tests in representation\n @string = case\n when @object.nan? then 'NaN'\n when @object.infinite? then @object.to_s[0...-'inity'.length].upcase\n when @object.zero? then '0.0E0'\n else\n i, f, e = ('%.16E' % @object.to_f).split(/[\\.E]/)\n f.sub!(/0*$/, '') # remove any trailing zeroes\n f = '0' if f.empty? # ...but there must be a digit to the right of the decimal point\n e.sub!(/^\\+?0+(\\d)$/, '\\1') # remove the optional leading '+' sign and any extra leading zeroes\n \"#{i}.#{f}E#{e}\"\n end unless @object.nil?\n self\n end",
"def my_parse_int(string)\n string.respond_to?(:to_i) ? string.to_i : \"NaN\"\nend",
"def casted_minimum\n minimum.blank? ? nil : (qtype_name == \"decimal\" ? minimum : minimum.to_i)\n end",
"def convert_float( text_value )\r\n (!text_value.empty?) ? text_value.to_f : 0.0\r\nend",
"def numeric?\n !!Float(self) rescue false\n end",
"def drop_last_value\n if @value.to_i.to_s.length > 1\n if (@value > 0)\n @value = (@value.to_i / 10).to_r\n else\n # Negative truncations need to be rounded UP\n @value = (@value.to_i / 10 + 1).to_r\n end\n else\n @value = 0\n end\n end",
"def compose_not_gteq_node(node, value)\n compose_gteq_node(node, value).not\n end",
"def qvaluestring\n\t\t# 3 digit precision, trim excess zeros\n\t\treturn sprintf( \"q=%0.3f\", self.qvalue ).gsub(/0{1,2}$/, '')\n\tend",
"def log\n Double.new(Math.log(self.to_f))\n rescue Math::DomainError\n Double.new(::Float::NAN)\n end",
"def lenient_value_coercions; end",
"def finite?\n @special._equal?(0)\n end",
"def clean_value(value)\n case value\n when Symbol then value.to_s\n when TrueClass then 1\n when FalseClass then 0\n when /^\\d+$/ then value.to_i # Integer\n when /^\\d+(\\.\\d+)?([eE]\\d+)?$/ then value.to_f # Float\n when blank? then nil\n else value\n end \n end"
] |
[
"0.675201",
"0.6434023",
"0.63592994",
"0.62690026",
"0.62690026",
"0.5924871",
"0.5916958",
"0.57710654",
"0.5722384",
"0.5592909",
"0.5577168",
"0.555948",
"0.54728085",
"0.54715145",
"0.5422187",
"0.5416626",
"0.5352652",
"0.5331749",
"0.5331749",
"0.53267884",
"0.5315962",
"0.52731097",
"0.52723604",
"0.52685463",
"0.52299106",
"0.5220378",
"0.51652515",
"0.5129672",
"0.5031552",
"0.501504",
"0.5010682",
"0.49830064",
"0.49370977",
"0.49152303",
"0.4905504",
"0.48632237",
"0.48632237",
"0.4861619",
"0.4859092",
"0.48534665",
"0.48494053",
"0.48429713",
"0.48167413",
"0.4808453",
"0.48079735",
"0.47963673",
"0.4766599",
"0.47609308",
"0.47357687",
"0.4706895",
"0.4704932",
"0.46829268",
"0.4681014",
"0.46765998",
"0.46691534",
"0.46579993",
"0.46469712",
"0.46453995",
"0.46350822",
"0.46350822",
"0.4630366",
"0.46143043",
"0.46126914",
"0.46038145",
"0.46029064",
"0.4598564",
"0.45889285",
"0.4588186",
"0.4576123",
"0.45705494",
"0.45636854",
"0.4545783",
"0.454192",
"0.4505959",
"0.45046088",
"0.44985855",
"0.44969133",
"0.4493188",
"0.44917083",
"0.44888747",
"0.448552",
"0.44824818",
"0.4473631",
"0.44669822",
"0.4449816",
"0.44476694",
"0.44401857",
"0.443783",
"0.4437653",
"0.44345507",
"0.4424708",
"0.4423434",
"0.44231504",
"0.44231367",
"0.44200698",
"0.4418468",
"0.44128227",
"0.4407734",
"0.43997782",
"0.43945485"
] |
0.6937389
|
0
|
Handle Ruby integers outside the PostgreSQL bigint range specially.
|
def literal_integer(v)
if v > 9223372036854775807 || v < -9223372036854775808
literal_integer_outside_bigint_range(v)
else
v.to_s
end
end
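
The two magic numbers are the signed 64-bit bounds (2**63 - 1 and -2**63). A standalone sketch of the same range check, using an invented helper name for illustration:

BIGINT_MAX = 2**63 - 1 # 9223372036854775807
BIGINT_MIN = -2**63    # -9223372036854775808

# Hypothetical helper mirroring the guard above.
def in_bigint_range?(v)
  v.between?(BIGINT_MIN, BIGINT_MAX)
end

in_bigint_range?(BIGINT_MAX)     # => true, literalized as "9223372036854775807"
in_bigint_range?(BIGINT_MAX + 1) # => false, handed off to literal_integer_outside_bigint_range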
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def literal_integer_outside_bigint_range(v)\n raise IntegerOutsideBigintRange, \"attempt to literalize Ruby integer outside PostgreSQL bigint range: #{v}\"\n end",
"def test_do_not_raise_when_int_is_not_wider_than_64bit\n value = 9223372036854775807\n assert_equal \"'9223372036854775807'\", @conn.quote(value)\n\n value = -9223372036854775808\n assert_equal \"'-9223372036854775808'\", @conn.quote(value)\n end",
"def test_do_not_raise_when_raise_int_wider_than_64bit_is_false\n ActiveRecord.raise_int_wider_than_64bit = false\n value = 9223372036854775807 + 1\n assert_equal \"'9223372036854775808'\", @conn.quote(value)\n ActiveRecord.raise_int_wider_than_64bit = @raise_int_wider_than_64bit\n end",
"def type_literal_generic_bignum_symbol(column)\n :bigint\n end",
"def integer_to_sql(limit)\n return 'integer' if limit.nil?\n case limit\n when 1..2 then 'smallint'\n when 3..4 then 'integer'\n when 5..8 then 'bigint'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a NUMERIC with PRECISION 0 instead.\")\n end\n end",
"def xtest_int_literal_big\n check(C::IntLiteral, <<-EOS)\n |10000000000\n EOS\n end",
"def type_literal_generic_bignum(column)\n column[:serial] ? :bigserial : super\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super unless type.to_s == 'integer'\n return 'integer' unless limit\n\n case limit\n when 1, 2; 'smallint'\n when 3, 4; 'integer'\n when 5..8; 'bigint'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super unless type.to_s == 'integer'\n return 'integer' unless limit\n\n case limit\n when 1..8; 'integer'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n end",
"def net_unpack_bigint(net_int)\n sum = 0\n net_int.chars.reverse.each_with_index do |c, i|\n if i.zero?\n sum = net_unpack_int(c)\n else\n sum += net_unpack_int(c) * ((NET_MAX_INT + 1)**i)\n end\n end\n sum\nend",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super unless type.to_s == 'integer'\n \n if limit.nil? || limit == 4\n 'integer'\n elsif limit < 4\n 'smallint'\n else\n 'bigint'\n end\n end",
"def to_i(base=10) end",
"def bind_hugeint(i, value)\n case value\n when Integer\n bind_varchar(i, value.to_s)\n else\n raise(ArgumentError, \"2nd argument `#{value}` must be Integer.\")\n end\n end",
"def type_literal_generic_fixnum(column)\n type_literal_generic_integer(column)\n end",
"def type_literal_generic_bigdecimal(column)\n type_literal_generic_numeric(column)\n end",
"def to_i\n 9999\n end",
"def type_literal_generic_integer(column)\n :integer\n end",
"def handle_int(int, lineno_column)\n Literal.new int.to_i\n end",
"def format_int(n)\n return - 2**31 if n < -2**31\n return 2**31 - 1 if n > 2**31 - 1\n n\nend",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n case type.to_s\n when 'integer'\n return 'integer' unless limit\n\n case limit\n when 1, 2; 'smallint'\n when nil, 3, 4; 'integer'\n when 5..8; 'bigint'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n else\n super\n end\n end",
"def make_signed_int64(high, low); end",
"def to_int(*) end",
"def type_literal_generic_bignum_symbol(column)\n column[:serial] ? :bigserial : super\n end",
"def to_int() end",
"def to_int() end",
"def append_hugeint(value)\n lower, upper = integer_to_hugeint(value)\n _append_hugeint(lower, upper)\n end",
"def _match_class_max_Integer\n 9223372036854775807\n end",
"def int64()\n # Read an unsigned value, then convert it to signed\n val = _uint64(\"int64\")\n\n val >= 2**63 ? -(2**64 - val): val\n end",
"def _match_class_convert_Integer(value)\n value = super\n value if value <= _match_class_max_Integer\n end",
"def cast_tinyint_integer?(field)\n field.length != 1\n end",
"def cast_numeric(arg, sql_type = nil)\n cast(arg, sql_type || Integer).sql_number\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def net_pack_int(int)\n net_error \"#{__method__}: '#{int}' is too low allowed range #{NET_MIN_INT}-#{NET_MAX_INT}\" if int < NET_MIN_INT\n net_error \"#{__method__}: '#{int}' is too high allowed range #{NET_MIN_INT}-#{NET_MAX_INT}\" if int > NET_MAX_INT\n int += NET_INT_OFFSET\n int.chr\nend",
"def int64(val)\n raise ArgumentError, \"int64() requires an Integer argument\" \\\n unless val.is_a?(Integer)\n raise RangeError, \"argument to int64() must be in the range \" +\n \"-2**63 <= arg <= 2**63-1\" \\\n unless val >= -2**63 && val <= 2**63-1\n\n # Convert val to an unsigned equivalent\n val += 2**64 if val < 0;\n\n self.uint64(val)\n end",
"def DISABLED_test_non_negative_integers\n (0..1000).each do |digit|\n assert_tokenises_as digit.to_s, IntegerToken.new(digit)\n end\n assert_tokenises_as '0 ', IntegerToken.new(0)\n assert_tokenises_as ' 0', IntegerToken.new(0)\n end",
"def DISABLED_test_negative_integers\n (-1000..-1).each do |digit|\n assert_tokenises_as digit.to_s, IntegerToken.new(digit)\n end\n assert_tokenises_as '-1 ', IntegerToken.new(-1)\n assert_tokenises_as ' -1', IntegerToken.new(-1)\n end",
"def typecast_value_integer(value)\n value.to_i\n end",
"def big_numbers(array_of_integers)\n # TODO\nend",
"def bignumeric?\n type == \"BIGNUMERIC\"\n end",
"def on_numeric(n)\n n\n end",
"def on_numeric(n)\n n\n end",
"def Integer(p0) end",
"def isIntConstant(i)\n if i == \"0\" or i.to_i > 0\n return (i.to_i >= 0 and i.to_i < 2**15) # 2^15 is maximum integer in JACK\n end\n\n return false\nend",
"def net_pack_bigint(int, size)\n sum = ''\n div = size - 1\n (size - 1).times do\n buf = int / ((NET_MAX_INT + 1)**div)\n sum += net_pack_int(buf)\n int = int % ((NET_MAX_INT + 1)**div)\n div -= 1\n end\n sum += net_pack_int(int)\n # TODO: check reminder and so on\n # throw and error when int is too big for size\n int /= NET_MAX_INT\n sum\nend",
"def test_IntegerLiterals_sample02\n assert_equal(\"Fixnum\", 1_000_000_000.class.to_s)\n end",
"def initialize(value)\n if value.is_a?(self.class)\n @value = value.value\n return\n end\n\n unless value.bson_int32?\n raise RangeError.new(\"#{value} cannot be stored in 32 bits\")\n end\n @value = value.freeze\n end",
"def bson_int64?\n (MIN_64BIT <= self) && (self <= MAX_64BIT)\n end",
"def _nonnegative_int(value)\n return 1 if value.nil? || !value.is_a?(Integer) || value < 0\n value\n end",
"def pack_db value\r\n pack_data(\r\n if value > -10\r\n value * 10000\r\n else\r\n (-1 * Math.log10((value / 10).abs) * 200000) - 100000\r\n end\r\n )\r\nend",
"def integer(i, n)\n limit = 2**n - 1\n return [i].pack('C') if i < limit\n\n bytes = []\n bytes.push limit unless n.zero?\n\n i -= limit\n while (i >= 128)\n bytes.push((i % 128) + 128)\n i /= 128\n end\n\n bytes.push i\n bytes.pack('C*')\n end",
"def is_real_int(value)\n value.ceil == value.floor\n end",
"def write_number number\n if number >= MIN_INTEGER && number <= MAX_INTEGER #check valid range for 29 bits\n write_integer number\n else #overflow to a double\n write_double number \n end\n end",
"def post_process_integer( val )\n\t\t\treturn Integer( val.to_s )\n\t\tend",
"def is_uinteger?(); @type == GRT_UINTEGER; end",
"def integer?() end",
"def to_int\n end",
"def number(digits: T.unsafe(nil)); end",
"def set_integer!(value)\n @objects = nil\n @memory = nil\n\n if value < 0\n self[:type] = :negative_integer\n self[:values][:i64] = value\n else\n self[:type] = :positive_integer\n self[:values][:u64] = value\n end\n end",
"def adjust(type)\n case type\n when :Byte, :byte, :b, :B\n primitive_max = 2**7 - 1\n primitive_min = -2**7\n when :Short, :short, :s, :S\n primitive_max = 2**15 - 1\n primitive_min = -2**15\n when :Integer, :Int, :int, :i, :I\n primitive_max = 2**31 - 1\n primitive_min = -2**31\n when :Long, :long, :l, :L\n primitive_max = 2**63 - 1\n primitive_min = -2**63\n when :Nibble, :nibble, :n, :N\n primitive_max = 2**4 - 1\n primitive_min = -2**4\n else\n primitive_max = 2**31 - 1\n primitive_min = -2**31\n end\n self < -primitive_max ? -1 * (-self & primitive_max) : self\n self > primitive_min ? (self & primitive_max) : self\n end",
"def integer(i, n)\n limit = 2**n - 1\n return [i].pack('C') if (i < limit)\n\n bytes = []\n bytes.push limit if !n.zero?\n\n i -= limit\n while (i >= 128) do\n bytes.push((i % 128) + 128)\n i = i / 128\n end\n\n bytes.push i\n bytes.pack('C*')\n end",
"def to_i; end",
"def to_i; end",
"def to_i; end",
"def to_i; end",
"def to_i; end",
"def to_i; end",
"def rb_integer(from: T.unsafe(nil), to: T.unsafe(nil)); end",
"def integer(i, n)\n limit = 2**n - 1\n return [i].pack('C') if (i < limit)\n\n bytes = []\n bytes.push limit if !n.zero?\n\n i -= limit\n q = 1\n\n while (q > 0) do\n q, r = i.divmod(128)\n r += 128 if (q > 0)\n i = q\n\n bytes.push(r)\n end\n\n bytes.pack('C*')\n end",
"def to_i() end",
"def to_i() end",
"def to_i() end",
"def to_i() end",
"def to_i() end",
"def to_integer(input); end",
"def class\n Fixnum\n end",
"def numeric_literal\n hex_integer_literal || octal_integer_literal || decimal_literal\n end",
"def typecast_value_integer(value)\n Integer(value.is_a?(String) ? value.sub(LEADING_ZERO_RE, LEADING_ZERO_REP) : value)\n end",
"def handle_integer_conversion(id) \n (Integer(id) rescue nil).nil? || id.to_i.to_s != id.to_s ? id : id.to_i\n end",
"def require_integer(value)\n if value.is_a?(Integer) != true\n log_error_and_raise(\"expected integer value, got #{value.class}, #{value.inspect}\")\n end\n end",
"def integer(limit = nil)\n Validation.new { |d| d.is_a?(Integer) && in_range?(d, limit) }\n end",
"def integer?\n type == \"INTEGER\" || type == \"INT64\"\n end",
"def binary(digits: T.unsafe(nil)); end",
"def promotion_code(digits: T.unsafe(nil)); end",
"def test_decode_small_big_integer()\n input = [131, 110, 5, 0, 5, 228, 183, 122, 4]\n expected = 19238740997\n\n stream = Erlang::StreamEmulator.new(input)\n actual = Erlang::decode(stream)\n\n assert_equal(expected, actual)\n end",
"def convertRawEntry(num)\n @num = num\n if @num > 8192 || @num < -8192\n puts \"Please enter a number between -8192 and 8192\"\n else\n @num = @num + 8192\n end\n return @num\nend",
"def bson_int32?\n (MIN_32BIT <= self) && (self <= MAX_32BIT)\n end",
"def diminished_radix_complement(int)\n if int > UID_MAX\n int - UINT\n else\n int\n end\n end",
"def cast_numeric(sql_type = nil)\n cast(sql_type || :integer).sql_number\n end",
"def coerce_integer(value, _options = {})\n value = value.to_s\n return unless value =~ /\\A0|[1-9]\\d*\\z/\n\n value.to_i\n end",
"def read_bignum; end",
"def to_i(*) end",
"def make_signed_int32(long); end",
"def to_i\n in_native.to_i\n end",
"def test_encode_integer\n\n # Fixnum\n #\n #assert_equal( \"\\x02\\x02\\x96\\x46\", -27_066.to_ber )\n #assert_equal( \"\\x02\\x02\\xFF\\x7F\", -129.to_ber )\n #assert_equal( \"\\x02\\x01\\x80\", -128.to_ber )\n #assert_equal( \"\\x02\\x01\\xFF\", -1.to_ber )\n\n assert_equal( \"\\x02\\x01\\x00\", 0.to_ber )\n assert_equal( \"\\x02\\x01\\x01\", 1.to_ber )\n assert_equal( \"\\x02\\x01\\x7F\", 127.to_ber )\n assert_equal( \"\\x02\\x01\\x80\", 128.to_ber )\n assert_equal( \"\\x02\\x01\\xFF\", 255.to_ber )\n\n assert_equal( \"\\x02\\x02\\x01\\x00\", 256.to_ber )\n assert_equal( \"\\x02\\x02\\xFF\\xFF\", 65535.to_ber )\n\n assert_equal( \"\\x02\\x03\\x01\\x00\\x00\", 65536.to_ber )\n assert_equal( \"\\x02\\x03\\xFF\\xFF\\xFF\", 16_777_215.to_ber )\n\n assert_equal( \"\\x02\\x04\\x01\\x00\\x00\\x00\", 0x01000000.to_ber )\n assert_equal( \"\\x02\\x04\\x3F\\xFF\\xFF\\xFF\", 0x3FFFFFFF.to_ber )\n\n # Bignum\n #\n assert_equal( \"\\x02\\x04\\x4F\\xFF\\xFF\\xFF\", 0x4FFFFFFF.to_ber )\n #assert_equal( \"\\x02\\x05\\x00\\xFF\\xFF\\xFF\\xFF\", 0xFFFFFFFF.to_ber )\n end",
"def validate_non_negative_integer(source,value)\n if not value.kind_of?(Integer) or value < 0\n raise ArgumentError, \"#{source} must be a non-negative integer\"\n end\n value\n end",
"def any_int(*options)\n any_number(*options).to_i\n end",
"def integer?(value)\n value.is_a?(Integer)\n end",
"def convert_to_signed_binary(binary)\n binary_int = binary.to_i(2)\n if binary_int >= 2**15\n return binary_int - 2**16\n else\n return binary_int\n end\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super if defined?(::Jdbc::H2) || type.to_s != 'integer' || limit == nil\n\n type\n end"
] |
[
"0.8245709",
"0.6992368",
"0.66835093",
"0.6479705",
"0.64078766",
"0.6316349",
"0.63103896",
"0.62356555",
"0.6216988",
"0.6173353",
"0.6133337",
"0.6091662",
"0.6076664",
"0.6071689",
"0.6012722",
"0.596731",
"0.5948034",
"0.5931467",
"0.5915999",
"0.58987486",
"0.5872905",
"0.584481",
"0.5836269",
"0.582913",
"0.582913",
"0.580716",
"0.5772173",
"0.57621723",
"0.57569975",
"0.5726644",
"0.570816",
"0.57081175",
"0.57081175",
"0.5691661",
"0.5679879",
"0.5667418",
"0.5631532",
"0.5629615",
"0.5629328",
"0.5628407",
"0.56253755",
"0.56253755",
"0.5621552",
"0.55785453",
"0.5570159",
"0.556534",
"0.5545537",
"0.55350477",
"0.5532288",
"0.5517664",
"0.5514932",
"0.5503856",
"0.5498756",
"0.5493155",
"0.545977",
"0.54426616",
"0.54362106",
"0.543154",
"0.5426584",
"0.54223216",
"0.54215485",
"0.5405371",
"0.5405371",
"0.5405371",
"0.5405371",
"0.5405371",
"0.5405371",
"0.5381514",
"0.53767675",
"0.5375682",
"0.5375682",
"0.5375682",
"0.5375682",
"0.5375682",
"0.536691",
"0.53635406",
"0.53514695",
"0.5342303",
"0.53403074",
"0.533963",
"0.53241754",
"0.5314614",
"0.53049326",
"0.5304249",
"0.53022593",
"0.52983975",
"0.5296989",
"0.52960545",
"0.5281646",
"0.52811456",
"0.5278807",
"0.52708834",
"0.5269982",
"0.526855",
"0.5259084",
"0.52508694",
"0.5245829",
"0.5244305",
"0.5233318",
"0.52318543"
] |
0.7218923
|
1
|
Raise IntegerOutsideBigintRange when attempting to literalize a Ruby integer outside the PostgreSQL bigint range, so PostgreSQL doesn't treat the value as numeric.
|
def literal_integer_outside_bigint_range(v)
raise IntegerOutsideBigintRange, "attempt to literalize Ruby integer outside PostgreSQL bigint range: #{v}"
end
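
A sketch of how calling code might observe this error, again with a hypothetical connection URL; in recent Sequel the class is namespaced under Sequel::Postgres, so adjust the constant if your version places it elsewhere.

require 'sequel'

DB = Sequel.connect('postgres://localhost/example') # hypothetical URL
begin
  DB.literal(2**64) # well outside the signed 64-bit bigint range
rescue Sequel::Postgres::IntegerOutsideBigintRange => e
  warn "refusing to literalize: #{e.message}"
end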
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def literal_integer(v)\n if v > 9223372036854775807 || v < -9223372036854775808\n literal_integer_outside_bigint_range(v)\n else\n v.to_s\n end\n end",
"def test_do_not_raise_when_int_is_not_wider_than_64bit\n value = 9223372036854775807\n assert_equal \"'9223372036854775807'\", @conn.quote(value)\n\n value = -9223372036854775808\n assert_equal \"'-9223372036854775808'\", @conn.quote(value)\n end",
"def test_do_not_raise_when_raise_int_wider_than_64bit_is_false\n ActiveRecord.raise_int_wider_than_64bit = false\n value = 9223372036854775807 + 1\n assert_equal \"'9223372036854775808'\", @conn.quote(value)\n ActiveRecord.raise_int_wider_than_64bit = @raise_int_wider_than_64bit\n end",
"def bind_hugeint(i, value)\n case value\n when Integer\n bind_varchar(i, value.to_s)\n else\n raise(ArgumentError, \"2nd argument `#{value}` must be Integer.\")\n end\n end",
"def xtest_int_literal_big\n check(C::IntLiteral, <<-EOS)\n |10000000000\n EOS\n end",
"def type_literal_generic_bignum_symbol(column)\n :bigint\n end",
"def type_literal_generic_bigdecimal(column)\n type_literal_generic_numeric(column)\n end",
"def handle_int(int, lineno_column)\n Literal.new int.to_i\n end",
"def type_literal_generic_bignum(column)\n column[:serial] ? :bigserial : super\n end",
"def type_literal_generic_fixnum(column)\n type_literal_generic_integer(column)\n end",
"def integer_to_sql(limit)\n return 'integer' if limit.nil?\n case limit\n when 1..2 then 'smallint'\n when 3..4 then 'integer'\n when 5..8 then 'bigint'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a NUMERIC with PRECISION 0 instead.\")\n end\n end",
"def cast_numeric(arg, sql_type = nil)\n cast(arg, sql_type || Integer).sql_number\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super unless type.to_s == 'integer'\n return 'integer' unless limit\n\n case limit\n when 1..8; 'integer'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n end",
"def type_literal_generic_integer(column)\n :integer\n end",
"def append_hugeint(value)\n lower, upper = integer_to_hugeint(value)\n _append_hugeint(lower, upper)\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super unless type.to_s == 'integer'\n return 'integer' unless limit\n\n case limit\n when 1, 2; 'smallint'\n when 3, 4; 'integer'\n when 5..8; 'bigint'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n end",
"def to_i\n 9999\n end",
"def net_unpack_bigint(net_int)\n sum = 0\n net_int.chars.reverse.each_with_index do |c, i|\n if i.zero?\n sum = net_unpack_int(c)\n else\n sum += net_unpack_int(c) * ((NET_MAX_INT + 1)**i)\n end\n end\n sum\nend",
"def type_literal_generic_bignum_symbol(column)\n column[:serial] ? :bigserial : super\n end",
"def bind_hugeint_internal(index, value)\n lower, upper = integer_to_hugeint(value)\n _bind_hugeint(index, lower, upper)\n end",
"def initialize(value)\n if value.is_a?(self.class)\n @value = value.value\n return\n end\n\n unless value.bson_int32?\n raise RangeError.new(\"#{value} cannot be stored in 32 bits\")\n end\n @value = value.freeze\n end",
"def int64(val)\n raise ArgumentError, \"int64() requires an Integer argument\" \\\n unless val.is_a?(Integer)\n raise RangeError, \"argument to int64() must be in the range \" +\n \"-2**63 <= arg <= 2**63-1\" \\\n unless val >= -2**63 && val <= 2**63-1\n\n # Convert val to an unsigned equivalent\n val += 2**64 if val < 0;\n\n self.uint64(val)\n end",
"def cast_to_int(s, max_num_size = DEFAULT_MAX_NUM_SIZE)\n data = s.htb\n raise '\"script number overflow\"' if data.bytesize > max_num_size\n if require_minimal && data.bytesize > 0\n if data.bytes[-1] & 0x7f == 0 && (data.bytesize <= 1 || data.bytes[data.bytesize - 2] & 0x80 == 0)\n raise 'non-minimally encoded script number'\n end\n end\n Script.decode_number(s)\n end",
"def _match_class_convert_Integer(value)\n value = super\n value if value <= _match_class_max_Integer\n end",
"def typecast_value_integer(value)\n value.to_i\n end",
"def to_i(base=10) end",
"def int64()\n # Read an unsigned value, then convert it to signed\n val = _uint64(\"int64\")\n\n val >= 2**63 ? -(2**64 - val): val\n end",
"def post_process_integer( val )\n\t\t\treturn Integer( val.to_s )\n\t\tend",
"def set_integer!(value)\n @objects = nil\n @memory = nil\n\n if value < 0\n self[:type] = :negative_integer\n self[:values][:i64] = value\n else\n self[:type] = :positive_integer\n self[:values][:u64] = value\n end\n end",
"def require_integer(value)\n if value.is_a?(Integer) != true\n log_error_and_raise(\"expected integer value, got #{value.class}, #{value.inspect}\")\n end\n end",
"def convertRawEntry(num)\n @num = num\n if @num > 8192 || @num < -8192\n puts \"Please enter a number between -8192 and 8192\"\n else\n @num = @num + 8192\n end\n return @num\nend",
"def to_int() end",
"def to_int() end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n return super unless type.to_s == 'integer'\n \n if limit.nil? || limit == 4\n 'integer'\n elsif limit < 4\n 'smallint'\n else\n 'bigint'\n end\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def type_literal_generic_integer(column)\n column[:serial] ? :serial : super\n end",
"def type_to_sql(type, limit = nil, precision = nil, scale = nil)\n case type.to_s\n when 'integer'\n return 'integer' unless limit\n\n case limit\n when 1, 2; 'smallint'\n when nil, 3, 4; 'integer'\n when 5..8; 'bigint'\n else raise(ActiveRecordError, \"No integer type has byte size #{limit}. Use a numeric with precision 0 instead.\")\n end\n else\n super\n end\n end",
"def _match_class_max_Integer\n 9223372036854775807\n end",
"def to_int(*) end",
"def net_pack_int(int)\n net_error \"#{__method__}: '#{int}' is too low allowed range #{NET_MIN_INT}-#{NET_MAX_INT}\" if int < NET_MIN_INT\n net_error \"#{__method__}: '#{int}' is too high allowed range #{NET_MIN_INT}-#{NET_MAX_INT}\" if int > NET_MAX_INT\n int += NET_INT_OFFSET\n int.chr\nend",
"def numeric_literal\n hex_integer_literal || octal_integer_literal || decimal_literal\n end",
"def validate_non_negative_integer(source,value)\n if not value.kind_of?(Integer) or value < 0\n raise ArgumentError, \"#{source} must be a non-negative integer\"\n end\n value\n end",
"def typecast_to_integer(value)\n typecast_to_numeric(value, :to_i)\n end",
"def to_i\n in_native.to_i\n end",
"def coerce_integer(value, _options = {})\n value = value.to_s\n return unless value =~ /\\A0|[1-9]\\d*\\z/\n\n value.to_i\n end",
"def on_numeric(n)\n n\n end",
"def on_numeric(n)\n n\n end",
"def make_signed_int64(high, low); end",
"def bignumeric?\n type == \"BIGNUMERIC\"\n end",
"def overflow=(n)\n @overflow = BigDecimal(n.to_s)\n end",
"def DISABLED_test_negative_integers\n (-1000..-1).each do |digit|\n assert_tokenises_as digit.to_s, IntegerToken.new(digit)\n end\n assert_tokenises_as '-1 ', IntegerToken.new(-1)\n assert_tokenises_as ' -1', IntegerToken.new(-1)\n end",
"def integer(limit = nil)\n Validation.new { |d| d.is_a?(Integer) && in_range?(d, limit) }\n end",
"def _nonnegative_int(value)\n return 1 if value.nil? || !value.is_a?(Integer) || value < 0\n value\n end",
"def bigdecimal\n Util.from_bytes :bigdecimal, value\n end",
"def test_IntegerLiterals_sample02\n assert_equal(\"Fixnum\", 1_000_000_000.class.to_s)\n end",
"def typecast_to_bigdecimal(value)\n typecast_to_numeric(value, :to_d)\n end",
"def to_i; end",
"def to_i; end",
"def to_i; end",
"def to_i; end",
"def to_i; end",
"def to_i; end",
"def DISABLED_test_non_negative_integers\n (0..1000).each do |digit|\n assert_tokenises_as digit.to_s, IntegerToken.new(digit)\n end\n assert_tokenises_as '0 ', IntegerToken.new(0)\n assert_tokenises_as ' 0', IntegerToken.new(0)\n end",
"def net_pack_bigint(int, size)\n sum = ''\n div = size - 1\n (size - 1).times do\n buf = int / ((NET_MAX_INT + 1)**div)\n sum += net_pack_int(buf)\n int = int % ((NET_MAX_INT + 1)**div)\n div -= 1\n end\n sum += net_pack_int(int)\n # TODO: check reminder and so on\n # throw and error when int is too big for size\n int /= NET_MAX_INT\n sum\nend",
"def to_i\n Integer(value)\n end",
"def out_of_range(value, expected_range, msg = nil)\n raise ValueRangeError.exception(value, expected_range, msg)\n end",
"def cast_numeric(sql_type = nil)\n cast(sql_type || :integer).sql_number\n end",
"def convert_to_integer(value)\n if value == \"0\"\n value = 0\n elsif value.to_i == 0\n # Set to -1 as we are dealing with range 0-255 so -1 will raise error\n value = -1\n else\n value = value.to_i\n end\n return value\nend",
"def type_literal_generic_numeric(column)\n column[:size] ? \"numeric(#{Array(column[:size]).join(', ')})\" : :numeric\n end",
"def test_decode_small_big_integer()\n input = [131, 110, 5, 0, 5, 228, 183, 122, 4]\n expected = 19238740997\n\n stream = Erlang::StreamEmulator.new(input)\n actual = Erlang::decode(stream)\n\n assert_equal(expected, actual)\n end",
"def cmd_numeric(obj)\n return handle_return_object(obj[:val].to_i)\n end",
"def value_to_integer(value)\n if @slop.strict?\n begin\n Integer(value.to_s, 10)\n rescue ArgumentError\n raise InvalidArgumentError, \"#{value} could not be coerced into Integer\"\n end\n else\n value.to_s.to_i\n end\n end",
"def to_i() end",
"def to_i() end",
"def to_i() end",
"def to_i() end",
"def to_i() end",
"def coerce_integer(value, options = {})\n value = value.to_s\n if value.match(/\\A0|[1-9]\\d*\\z/)\n value.to_i\n else\n nil\n end\n end",
"def coerce_integer(value, options = {})\n value = value.to_s\n if value.match(/\\A0|[1-9]\\d*\\z/)\n value.to_i\n else\n nil\n end\n end",
"def to_integer(input); end",
"def to_int\n end",
"def format_int(n)\n return - 2**31 if n < -2**31\n return 2**31 - 1 if n > 2**31 - 1\n n\nend",
"def big_numbers(array_of_integers)\n # TODO\nend",
"def convert_value(value, column_options)\n if column_options\n case column_options[:type]\n when 'Number'\n return value.to_i\n end\n end\n\n value\n end",
"def isIntConstant(i)\n if i == \"0\" or i.to_i > 0\n return (i.to_i >= 0 and i.to_i < 2**15) # 2^15 is maximum integer in JACK\n end\n\n return false\nend",
"def read_bignum; end",
"def test_max_int\n assert_equal(2, max_int(1,2))\n assert_equal(2, max_int(2,1))\n\n assert_raise(RT) { max_int('str', true) }\n end",
"def match_integer( val )\n\t\treturn Integer( val ) rescue nil\n\tend",
"def set_value( value )\n if value == nil\n return\n end\n \n value = value.to_i\n \n satisfying_min = ((get_min == nil) or ((get_min != nil) and (get_min <= value)))\n satisfying_max = ((get_max == nil) or ((get_max != nil) and (value <= get_max)))\n \n if satisfying_min and satisfying_max\n @value = value\n else\n raise \"integer value out of range: #{ value.to_s }\"\n end\n end",
"def is_int(value)\n true if Integer(value) rescue false\n end",
"def bson_int64?\n (MIN_64BIT <= self) && (self <= MAX_64BIT)\n end",
"def int(max_int)\n NumGen.new max_int\n end",
"def to_i(*) end",
"def rb_integer(from: T.unsafe(nil), to: T.unsafe(nil)); end",
"def get_int_noerr(arg)\n b = @frame ? @frame.binding : nil\n val = Integer(eval(arg, b))\n rescue SyntaxError\n nil\n rescue \n nil\n end",
"def to_i\n end",
"def random_integer(limit)\n Rubinius.primitive :randomizer_rand_int\n raise PrimitiveFailure, \"Randomizer#rand_int primitive failed\"\n end",
"def random_integer(limit)\n Rubinius.primitive :randomizer_rand_int\n raise PrimitiveFailure, \"Randomizer#rand_int primitive failed\"\n end",
"def typecast_value_integer(value)\n Integer(value.is_a?(String) ? value.sub(LEADING_ZERO_RE, LEADING_ZERO_REP) : value)\n end",
"def to_i\n end"
] |
[
"0.70062757",
"0.67205375",
"0.6706813",
"0.63642526",
"0.63092715",
"0.6266042",
"0.62429863",
"0.61111265",
"0.59855026",
"0.5961269",
"0.5949774",
"0.5680745",
"0.567375",
"0.5669552",
"0.56378376",
"0.5611179",
"0.56030303",
"0.55616546",
"0.55498075",
"0.5537962",
"0.55265427",
"0.54903835",
"0.5429265",
"0.5423642",
"0.5423215",
"0.5412056",
"0.5397522",
"0.53937966",
"0.5386516",
"0.5377665",
"0.53325325",
"0.53104436",
"0.53104436",
"0.5292964",
"0.529209",
"0.529209",
"0.52789956",
"0.5269957",
"0.5268882",
"0.5268343",
"0.52543855",
"0.5253185",
"0.5245755",
"0.5229381",
"0.5225277",
"0.52153087",
"0.52153087",
"0.52018774",
"0.52000046",
"0.51925784",
"0.5190019",
"0.5185661",
"0.5144661",
"0.51386166",
"0.5136361",
"0.51361555",
"0.5132484",
"0.5132484",
"0.5132484",
"0.5132484",
"0.5132484",
"0.5132484",
"0.5118455",
"0.51169336",
"0.5111389",
"0.5107091",
"0.5103576",
"0.50986403",
"0.50973976",
"0.5064227",
"0.50494665",
"0.5048523",
"0.50471824",
"0.50471824",
"0.50471824",
"0.50471824",
"0.50471824",
"0.5046233",
"0.5046233",
"0.5044486",
"0.5016028",
"0.5016023",
"0.49950936",
"0.49905217",
"0.49811214",
"0.49783212",
"0.49754572",
"0.4972335",
"0.49661213",
"0.4958192",
"0.4950267",
"0.49476877",
"0.49472123",
"0.49459663",
"0.49416748",
"0.4939139",
"0.4933132",
"0.4933132",
"0.49302733",
"0.49290788"
] |
0.8686653
|
0
|
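(Annotation between records: the negatives in the row above all orbit one recurring pattern, mapping an integer column's byte limit to a SQL type name such as smallint/integer/bigint. A minimal standalone sketch of that mapping, with a hypothetical helper name that belongs to no real adapter:

def integer_sql_type(limit)
  # 1-2 bytes fit smallint, 3-4 integer, 5-8 bigint; nil falls back to the default.
  case limit
  when nil, 3, 4 then 'integer'
  when 1, 2      then 'smallint'
  when 5..8      then 'bigint'
  else raise ArgumentError, "no integer type has byte size #{limit}"
  end
end

integer_sql_type(8) # => "bigint"
)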
Assume that SQL standard quoting is on, per Sequel's defaults
|
def literal_string_append(sql, v)
sql << "'" << v.gsub("'", "''") << "'"
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def quote(s, column = nil)\n dummy_conn.quote(s)\n end",
"def sql_quoted(value)\n Arel::Nodes.build_quoted(value)\n end",
"def quoted(value)\n sql _scope.connection.quote(value)\n end",
"def quoted(value)\n sql _scope.connection.quote(value)\n end",
"def custom_sql(q)\n query = q + ';' unless q =~ /;$/\n query = @db_connection.query(\"#{query}\")\n query.each { |x| print_line(\"#{x.join(',')}\") } unless query.empty?\n end",
"def sql_string(value)\n \"'#{value.gsub(\"'\", \"''\")}'\" \nend",
"def escaped_sql\n sql % binds.reduce({}) { |a, (col, val)|\n a[col.to_sym] = if val.is_a? Array\n val.map { |x| @conn.quote x }.join(', ')\n else\n @conn.quote val\n end\n a\n }\n end",
"def q(str)\n str.class == String ? ActiveRecord::Base.connection.quote_string(str) : str\n end",
"def sql_valuify\n nil? ? 'NULL' : \"'#{to_s.gsub(/\\\\/, '\\&\\&').gsub(/'/, \"''\").gsub(/\\t/, \"\\\\t\").gsub(/\\r/, \"\\\\r\").gsub(/\\n/, \"\\\\n\")}'\"\n end",
"def escape_sql(args)\n return @text if args.empty?\n sql = @text.dup\n vars = args.dup\n\n replacements = 0\n mismatch = false\n\n sql.gsub!(/'[^']*'|\"[^\"]*\"|`[^`]*`|\\?/) do |x|\n next x unless x == '?'\n replacements += 1\n if vars.empty?\n mismatch = true\n else\n var = vars.shift\n connection.quote_value(var)\n end\n end\n\n if !vars.empty? || mismatch\n raise ArgumentError, \"Binding mismatch: #{args.size} for #{replacements}\"\n else\n sql\n end\n end",
"def quote(value, column = nil)\n connection.quote(value, column)\n end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quoted_string; end",
"def sql! sql=nil\n require 'niceql'\n puts Niceql::Prettifier.prettify_sql sql || $last_sql_command\n end",
"def test_sql_escaped\n exc_handler = HumanParseExceptionHandler.new\n new_str = exc_handler.get_human_result_for_string(\"fo'ob\\\"ar\",\"ParseException\")\n assert_equal(false, new_str)\n end",
"def _format_sql(sql)\n sql = sql.delete '\"'\n sql.downcase\n end",
"def force_quotes?() @force_quotes end",
"def escape_for_cql_double_quotes(str)\n str = str.gsub('\"', \" \")\n str = str.gsub(\"'\", \"''\")\n\n return str\n end",
"def quoted\n with_opts(:quoted=>true)\n end",
"def i(s)\n connection.quote_column_name(s)\n end",
"def quote_string(string)\n @logger.unknown(\"ODBCAdapter#quote_string>\") if @trace\n\n # MySQL requires backslashes to be escaped\t\t\t\t\n string.gsub(/\\\\/, '\\&\\&').gsub(/'/, \"''\")\n end",
"def double_quote(value)\n return if value.nil?\n\n case value.to_s\n # Ignore keys that contain double quotes or a Arel.star (*)[all columns]\n # or if a table has already been explicitly declared (ex: users.id)\n when \"*\", /((^\".+\"$)|(^[[:alpha:]]+\\.[[:alnum:]]+)|\\(.+\\))/\n value\n else\n PG::Connection.quote_ident(value.to_s)\n end\n end",
"def literal_string_append(sql, s)\n sql << \"'\" << db.synchronize(@opts[:server]){|c| c.escape(s)} << \"'\"\n end",
"def quote(value); end",
"def quote\n %q[(?:\"|')?]\n end",
"def escape(string)\n # This code is taken directly from the documentation so we dont have to rely on the SQLite3::Database class. This way it can also be used with JRuby and IronRuby...\n # http://sqlite-ruby.rubyforge.org/classes/SQLite/Database.html\n string.to_s.gsub(\"'\", \"''\")\n end",
"def quote(value, column = nil) #:nodoc:\n if value.kind_of?(String) && column && column.sql_type =~ /^xml$/\n \"xml '#{quote_string(value)}'\"\n elsif value.kind_of?(Numeric) && column && column.sql_type =~ /^money$/\n # Not truly string input, so doesn't require (or allow) escape string syntax.\n \"'#{value.to_s}'\"\n elsif value.kind_of?(String) && column && column.sql_type =~ /^bit/\n case value\n when /^[01]*$/\n \"B'#{value}'\" # Bit-string notation\n when /^[0-9A-F]*$/i\n \"X'#{value}'\" # Hexadecimal notation\n end\n elsif column && column.sql_type =~ /^datetime$/\n if (not value.nil?) && (value.acts_like?(:date) || value.acts_like?(:time))\n \"CONVERT(datetime,'#{quoted_date(value)}',120)\"\n else\n # Fixtures#insert_fixtures sets fields like +updated_now+ to a String instance (\"Time.now.to_s(:db)\")\n super\n end\n elsif column && column.sql_type =~ /^boolean$/\n \"'#{value ? 1 : 0}'\"\n elsif value.class.to_s == 'System::Byte[]' && column && column.sql_type =~ /^binary$/\n \"CONVERT(varbinary(max),'0x#{bytes_to_string(value)}',1)\" \n else\n super\n end\n end",
"def sql_string\n self\n end",
"def quote(val, column=nil)\n return val unless val.is_a?(String)\n \"'#{val.gsub(/\\'/, \"\\\\\\\\'\")}'\" # \" <= for Emacs font-lock\n end",
"def sql_quote(value)\n value = value.to_s.downcase if %i[nil null NULL].include?(value)\n # noinspection RubyMismatchedReturnType\n case value\n when nil, 'nil', 'null', 'NULL' then nil\n when /^-?\\d+$/ then value.to_i\n when SQL_NUMBER then value.to_f\n when String, Symbol then \"'#{value}'\"\n else value\n end\n end",
"def literal_string_append(sql, v)\n sql << \"'\" << ::Mysql.quote(v) << \"'\"\n end",
"def generate_sql_escape(token)\n escaped_token = token.gsub(/\\\\|'/, '\\0\\0\\0\\0').gsub(\"?\", \"\\\\\\\\77\")\n \"'\" + escaped_token + \"'\"\n end",
"def quotes; end",
"def sql sql\n @master.puts \"#{sql};\"\n end",
"def sql\n @sql ||= begin\n bind_params = []\n i = 1\n (selector_keys.length + setter_keys.length).times do\n bind_params << \"$#{i}\"\n i += 1\n end\n hstore_delete_handlers.length.times do\n bind_params << \"$#{i}::text[]\"\n i += 1\n end\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def sql\n @sql ||= begin\n bind_params = []\n i = 1\n (selector_keys.length + setter_keys.length).times do\n bind_params << \"$#{i}\"\n i += 1\n end\n hstore_delete_handlers.length.times do\n bind_params << \"$#{i}::text[]\"\n i += 1\n end\n %{SELECT #{name}(#{bind_params.join(', ')})}\n end\n end",
"def quote_value(value, column = nil) #:nodoc:\n self.class.connection.quote(value, column)\n end",
"def quote_value(value, column = nil) #:nodoc:\n self.class.connection.quote(value, column)\n end",
"def quote_value(value, column = nil)\n self.class.connection.quote(value, column)\n end",
"def quote_column_if_needed(column); end",
"def format_quote(q)\n if q == \"'\"\n '\"\\'\"'\n else\n \"'#{q}'\"\n end\n end",
"def sql_literal(*)\n @dataset.sql\n end"
] |
[
"0.73580086",
"0.67877775",
"0.67741084",
"0.67741084",
"0.6671209",
"0.661895",
"0.6582974",
"0.65012497",
"0.6495129",
"0.64906394",
"0.6488084",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.648066",
"0.6479785",
"0.63999426",
"0.63786316",
"0.63786185",
"0.63279605",
"0.63076437",
"0.62818867",
"0.6231091",
"0.6217997",
"0.6192454",
"0.61587137",
"0.6143933",
"0.6142934",
"0.60937417",
"0.6093636",
"0.60771704",
"0.60758156",
"0.6055727",
"0.6047809",
"0.602364",
"0.60234535",
"0.6017292",
"0.6016879",
"0.6016879",
"0.60114515",
"0.60114515",
"0.6002747",
"0.59999084",
"0.59924185",
"0.5991806"
] |
0.60761344
|
86
|
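(Annotation between records: the row above pairs the standard-quoting query with literal_string_append, which escapes a single quote by doubling it. A minimal sketch of the same SQL-standard escaping rule, under the stated assumption that backslash escaping is off; the helper name sql_string_literal is made up for illustration:

def sql_string_literal(value)
  # SQL-standard quoting: embedded single quotes are doubled, nothing else changes.
  "'" + value.gsub("'", "''") + "'"
end

sql_string_literal("O'Reilly") # => "'O''Reilly'"
)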
PostgreSQL uses true for true values
|
def literal_true
'true'
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def db_boolean(val)\n if adapter == :postgres\n val ? 'TRUE' : 'FALSE'\n else\n val ? '1' : '0'\n end\n end",
"def sql_boolean\n self\n end",
"def to_bool() true end",
"def column_value_boolean\n case Utilities.adapter\n when 'mysql2', 'postgresql'\n column_value ? \"true\" : \"false\"\n when 'sqlite3', 'sqlserver'\n column_value ? \"1\" : \"0\"\n end\n end",
"def type_literal_generic_boolean(column)\n :boolean\n end",
"def value_to_boolean(value)\n value = get_param(value) if value.is_a?(Symbol)\n ActiveRecord::ConnectionAdapters::Column.value_to_boolean(value)\n end",
"def sql_boolean\n BooleanExpression.new(:NOOP, self)\n end",
"def sql_boolean\n BooleanExpression.new(:NOOP, self)\n end",
"def typecast_value_boolean(opts={});true;end",
"def boolean cols\n decode_values :boolean, cols\n end",
"def boolify(val)\n\nend",
"def literal_true\n BOOL_TRUE\n end",
"def cast_to_boolean(input)\n ActiveRecord::Type::Boolean.new.type_cast_from_user(input)\n end",
"def to_bool() false end",
"def typecast_value_boolean(value)\n case value\n when false, 0, \"0\", /\\Af(alse)?\\z/i, /\\Ano?\\z/i\n false\n else\n blank_object?(value) ? nil : true\n end\n end",
"def to_boolean(value)\n case value\n when :true, 'true'\n true\n else\n false\n end\n end",
"def sql_boolean\n BooleanExpression.new(self.op, *self.args)\n end",
"def booleans cols\n decode_values :boolean, cols, true\n end",
"def to_boo string\n\t\tActiveRecord::Type::Boolean.new.cast(string)\n\tend",
"def to_bool str\r\n ActiveRecord::Type::Boolean.new.type_cast_from_user(str)\r\n end",
"def type_literal_generic_trueclass(column)\n :boolean\n end",
"def boolean\n Util.from_bytes :boolean, value\n end",
"def boolean(value)\n value ? true : false\n end",
"def boolean_expr safe_column_name\n safe_column_name\n end",
"def to_bool\n true\n end",
"def on_true(node)\n :true\n end",
"def boolean\n map ->(primitive) { primitive.to_bool }\n end",
"def munge_boolean(value)\n case value\n when true, 'true', :true\n 'true'\n when false, 'false', :false\n 'false'\n else\n raise \"Invalid value for munge_boolean #{value}\"\n end\n end",
"def quoted_true # :nodoc:\n quote(boolean_domain[:true])\n end",
"def true \n \"true\" \n end",
"def config_boolean_true?(boolean_field)\n data[boolean_field].to_i == 1\n end",
"def get_boolean_value\n\t\tend",
"def supports_is_true?\n false\n end",
"def sql_true\n Rails.env.production? ? \"true\" : \"'t'\"\n end",
"def boolean_type\n 'Boolean'\n end",
"def convert_smallint_to_bool\n opts.has_key?(:convert_smallint_to_bool) ? opts[:convert_smallint_to_bool] : db.convert_smallint_to_bool\n end",
"def query_yields_boolean?\n false\n end",
"def value_to_boolean(value)\n return value if value==true || value==false\n case value.to_s.downcase\n when \"true\", \"t\", \"1\" then true\n else false\n end\n end",
"def boolean_default_true(value)\n value.nil? || value\n end",
"def booleanish_to_boolean(arguments, ddl)\n arguments.keys.each do |key|\n if ddl[:input].keys.include?(key)\n if ddl[:input][key][:type] == :boolean\n arguments[key] = true if arguments[key] == \"true\"\n arguments[key] = true if arguments[key] == \"yes\"\n arguments[key] = true if arguments[key] == \"1\"\n arguments[key] = false if arguments[key] == \"false\"\n arguments[key] = false if arguments[key] == \"no\"\n arguments[key] = false if arguments[key] == \"0\"\n end\n end\n end\n rescue\n true\n end",
"def true?\n self.eql?(true)\n end",
"def false_true\n alternatives false, true\n end",
"def true?\n self.value == '1'\n end",
"def true?\n self.value == '1'\n end",
"def get_boolean value #:nodoc:\n # some exceptions\n value = false if value == :low or value == 0 or value == nil or value == :off or value == :ground or value == :gnd\n !! value # double invert value in to boolean form\n end",
"def to_bool\n if (self.to_bool == 1)\n puts \"TRUE\"\n elsif (self.to_bool == 0)\n puts \"FALSE\"\n elsif (self.to_bool == -1)\n puts \"NaN\"\n end\nend",
"def process_true(exp)\n \"Qtrue\"\n end",
"def to_bool; self; end",
"def to_bool; self; end",
"def to_bool(value)\n value.to_s.downcase == 'true' ? true : false\n end",
"def query_yields_boolean?\n false\n end",
"def literal_false\n 'false'\n end",
"def parse_bool() true end",
"def boolean?(column)\n column.type == :boolean\n end",
"def boolean(arg)\n case arg\n when 'true'\n 1\n when 'false'\n 0\n when nil\n 0\n end\n end",
"def to_boolean(val)\n val && (val.to_s.match(/(true|t|yes|y|1)$/i) != nil)\n end",
"def boolean(**props)\n transform(type: :boolean, **props) do |value|\n !!value\n end\n end",
"def true?\n !false?\n end",
"def bool(val)\n raise ArgumentError, \"bool() requires a boolean argument\" \\\n unless val == true || val == false\n\n self.int32(val ? 1 : 0)\n end",
"def value_if_true=(value)\n @value_if_true = value\n end",
"def post_process_boolean( val )\n\t\t\treturn TRUE_VALUES.include?( val.to_s.downcase )\n\t\tend",
"def bool_value(value)\n value = @filters[value] if value.is_a? Symbol\n ActiveRecord::Type::Boolean.new.cast(value)\n end",
"def format_boolean(val, istruct)\n return istruct.nil_text if val.nil?\n\n val ? istruct.true_text : istruct.false_text\n end",
"def format_boolean(val, istruct)\n return istruct.nil_text if val.nil?\n\n val ? istruct.true_text : istruct.false_text\n end",
"def to_bool\n true\n end",
"def to_boolean(value)\n [\"true\", \"1\", \"yes\"].include?(value.to_s) ? true : false\n end",
"def value_to_boolean(value)\n if value.is_a?(String) && value.blank?\n nil\n else\n TRUE_VALUES.include?(value)\n end\n end",
"def get_boolean_value(field_name)\n\t\tend",
"def to_boolean(value)\n [\"true\", \"1\", \"yes\"].include?(value.to_s) ? true : false\n end",
"def result\n map_value(converted_value: RDF::Literal.new(\n value.value,\n datatype: PermissiveSchema.valkyrie_bool\n ))\n end",
"def conditionally_true\n\t\t!!self\n\tend",
"def value\n true\n end",
"def value_to_boolean(value)\n if value.is_a?(String) && value.empty?\n nil\n else\n TRUE_VALUES.include?(value)\n end\n end",
"def prologify\n \"false\"\n end",
"def test_boolean_as_integer\n e = DbType.find(:first)\n\n # true\n e.sample_boolean = 1\n assert_equal true, e.sample_boolean\n assert_equal true, e.sample_boolean?\n e.save!\n\n e = DbType.find(:first)\n assert_equal true, e.sample_boolean\n assert_equal true, e.sample_boolean?\n\n # false\n e.sample_boolean = 0\n assert_equal false, e.sample_boolean\n assert_equal false, e.sample_boolean?\n e.save!\n\n e = DbType.find(:first)\n assert_equal false, e.sample_boolean\n assert_equal false, e.sample_boolean?\n end",
"def to_true_string(value)\n DS::Number.is_i?(value) && value.to_i == 1 ? 'true' : nil\n end",
"def boolean_value(object)\n\treturn object if object.is_a?(PRBool)\n\treturn PRBool.new(false) if object.is_a?(PRNil)\n\tif object.is_a?(PRNumber) then\n\t\treturn PRBool.new(object._value != 0)\n\tend\n\treturn PRBool.new(true)\nend",
"def quote_boolean(value)\n value.to_s.upcase\n end",
"def true_string?(field)\n field == 'true' || field == true\n end",
"def tpl_boolean; @original; end",
"def to_b\n self != \"false\"\n end",
"def set_boolean_value\n @values = [Value.new(@context, true)]\n end",
"def always_true\n true\n end",
"def draw_boolean(col)\n _draw_head(col){\n @f.select(col.name, [\n ['(both)', ''],\n ['', 'false'],\n ['√', 'true']])\n }\n end",
"def boolval \n\n\t$cst.add_branch(\"boolval\")\n\n\tmatch_token(\"T_BOOLEAN\", $tokens[$index])\n\t\n\t$cst.ascend\n\nend",
"def clean_up_boolean(_, q_val)\n %w(t true).include? q_val\n end",
"def coerce_bool(value)\n case value\n when nil, false then false\n when Numeric then !value.zero?\n else\n true\n end\n end",
"def fix_booleans\n Post.where.not(published: 't').update_all(\"published = 'f'\")\n end",
"def literal_false\n BOOL_FALSE\n end",
"def to_boolean(string)\n string == 'true'\nend",
"def exercise_1111 (bool_values)\n end",
"def boolean_to_string(b)\n if true\n \"true\"\n else\n \"false\"\n end\nend",
"def schema_column_type(db_type)\n Sequel::Mysql2.convert_tinyint_to_bool && db_type == 'tinyint(1)' ? :boolean : super\n end",
"def schema_column_type(db_type)\n if convert_smallint_to_bool && db_type =~ /smallint/i \n :boolean\n else\n super\n end\n end",
"def boolean_to_string(b)\n if b \n \"true\"\n else\n \"false\"\n end\nend",
"def schema_column_type(db_type)\n convert_tinyint_to_bool && db_type =~ /\\Atinyint\\(1\\)/ ? :boolean : super\n end",
"def single_value?\n return false\n end",
"def value_if_true\n return @value_if_true\n end",
"def gen_boolean\n [true, false].sample\n end",
"def _boolinze(val)\n return true if ['true', '1'].include?(val)\n return false if ['false', '0'].include?(val)\n val\n end"
] |
[
"0.76973087",
"0.7310421",
"0.7061751",
"0.6996198",
"0.69679326",
"0.69623554",
"0.6950713",
"0.6950713",
"0.6926004",
"0.6911913",
"0.68247306",
"0.6776206",
"0.6755321",
"0.67295533",
"0.66371375",
"0.661686",
"0.66165036",
"0.6613014",
"0.65885556",
"0.65867907",
"0.6555428",
"0.6547909",
"0.6530612",
"0.6523141",
"0.6513466",
"0.6513461",
"0.6487653",
"0.6479072",
"0.6468219",
"0.646591",
"0.6453566",
"0.6452535",
"0.64421463",
"0.64306533",
"0.6412496",
"0.6404741",
"0.6399458",
"0.6394628",
"0.6379121",
"0.636795",
"0.6345842",
"0.63399625",
"0.6337822",
"0.6329923",
"0.6327144",
"0.6295456",
"0.6293639",
"0.62930983",
"0.62930983",
"0.6276173",
"0.6275674",
"0.62609476",
"0.625823",
"0.6255577",
"0.62510604",
"0.6250859",
"0.62492657",
"0.6245176",
"0.6239738",
"0.62141085",
"0.62136626",
"0.62106925",
"0.62072563",
"0.62072563",
"0.61965424",
"0.61908376",
"0.61810017",
"0.617843",
"0.61774164",
"0.615678",
"0.6155004",
"0.61489695",
"0.61230505",
"0.6120877",
"0.61153847",
"0.6100167",
"0.60979766",
"0.6091593",
"0.6083254",
"0.6070824",
"0.60644907",
"0.6063489",
"0.60558057",
"0.6045112",
"0.6044453",
"0.6041978",
"0.60410935",
"0.6041084",
"0.6031243",
"0.60277796",
"0.60248566",
"0.6021292",
"0.6021095",
"0.60155344",
"0.6011679",
"0.600439",
"0.599733",
"0.59959495",
"0.5992526",
"0.59843034"
] |
0.6945706
|
8
|
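(Annotation between records: the literal_true document above reflects that PostgreSQL has a native boolean type, so true literalizes as the keyword true rather than 1. A hedged sketch of the adapter split visible in the negatives (db_boolean, column_value_boolean); bool_literal is a hypothetical helper, not any library's API:

def bool_literal(value, adapter)
  case adapter
  when :postgres then value ? 'true' : 'false' # native boolean type
  else value ? '1' : '0'                       # integer-backed booleans, e.g. SQLite
  end
end

bool_literal(true, :postgres) # => "true"
)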
PostgreSQL supports multiple rows in INSERT.
|
def multi_insert_sql_strategy
:values
end
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def insert!(*rows)\n Mao.sql(with_options(:insert => rows.flatten).sql) do |pg_result|\n if @options[:returning]\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n else\n pg_result.cmd_tuples\n end\n end\n end",
"def multi_insert(tuples)\n pks = relation.multi_insert(tuples, return: :primary_key)\n relation.where(relation.primary_key => pks).to_a\n end",
"def insert_many( sql, values, _options = {}, *args ) # :nodoc:\n number_of_inserts = 0\n\n base_sql, post_sql = case sql\n when String\n [sql, '']\n when Array\n [sql.shift, sql.join( ' ' )]\n end\n\n value_sets = ::ActiveRecord::Import::ValueSetsRecordsParser.parse(values,\n max_records: SQLITE_LIMIT_COMPOUND_SELECT)\n\n transaction(requires_new: true) do\n value_sets.each do |value_set|\n number_of_inserts += 1\n sql2insert = base_sql + value_set.join( ',' ) + post_sql\n insert( sql2insert, *args )\n end\n end\n\n ActiveRecord::Import::Result.new([], number_of_inserts, [], [])\n end",
"def supports_multi_insert?\n true\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def multi_insert_sql(columns, values)\n table = quote_identifier(@opts[:from].first)\n columns = literal(columns)\n values.map do |r|\n \"INSERT INTO #{table} #{columns} VALUES #{literal(r)}\"\n end\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def insert(tuples)\n pks = tuples.map { |tuple| relation.insert(tuple) }\n relation.where(relation.primary_key => pks).to_a\n end",
"def insert_many( sql, values, *args ) # :nodoc:\n # the number of inserts default\n number_of_inserts = 0\n \n base_sql,post_sql = if sql.is_a?( String )\n [ sql, '' ]\n elsif sql.is_a?( Array )\n [ sql.shift, sql.join( ' ' ) ]\n end\n \n sql_size = QUERY_OVERHEAD + base_sql.size + post_sql.size \n\n # the number of bytes the requested insert statement values will take up\n values_in_bytes = self.class.sum_sizes( *values )\n \n # the number of bytes (commas) it will take to comma separate our values\n comma_separated_bytes = values.size-1\n \n # the total number of bytes required if this statement is one statement\n total_bytes = sql_size + values_in_bytes + comma_separated_bytes\n \n max = max_allowed_packet\n \n # if we can insert it all as one statement\n if NO_MAX_PACKET == max or total_bytes < max\n number_of_inserts += 1\n sql2insert = base_sql + values.join( ',' ) + post_sql\n insert( sql2insert, *args )\n else\n value_sets = self.class.get_insert_value_sets( values, sql_size, max )\n value_sets.each do |values|\n number_of_inserts += 1\n sql2insert = base_sql + values.join( ',' ) + post_sql\n insert( sql2insert, *args )\n end\n end \n\n number_of_inserts\n end",
"def _construct_multiple_insert_sql(table, fields, rows)\n \n return nil if table.nil? || fields.nil? || rows.nil?\n \n insert_sql = \"insert into #{table} ( #{fields.join(\", \")} ) values\"\n x = 0\n \n rows.each do |row|\n insert_sql += \",\" if x > 0\n insert_sql += \" (\"\n y = 0\n \n row.each do |value|\n insert_sql += \",\" if y > 0\n insert_sql += _construct_sql_value(value)\n y += 1\n end\n \n insert_sql += \")\"\n \n x += 1\n end\n \n insert_sql\n \n end",
"def _import(columns, values, opts=OPTS)\n if @opts[:returning]\n # no transaction: our multi_insert_sql_strategy should guarantee\n # that there's only ever a single statement.\n sql = multi_insert_sql(columns, values)[0]\n returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}\n elsif opts[:return] == :primary_key\n returning(insert_pk)._import(columns, values, opts)\n else\n super\n end\n end",
"def insert_select(*values)\n with_sql_first(insert_select_sql(*values))\n end",
"def insert_multi(tablename, arr_hashes, args = {})\n sql = \"INSERT INTO `#{tablename}` (\"\n\n first = true\n if args && args[:keys]\n keys = args[:keys]\n elsif arr_hashes.first.is_a?(Hash)\n keys = arr_hashes.first.keys\n else\n raise \"Could not figure out keys.\"\n end\n\n keys.each do |col_name|\n sql << \",\" unless first\n first = false if first\n sql << quote_column(col_name)\n end\n\n sql << \") VALUES (\"\n\n first = true\n arr_hashes.each do |hash|\n if first\n first = false\n else\n sql << \"),(\"\n end\n\n first_key = true\n if hash.is_a?(Array)\n hash.each do |val|\n if first_key\n first_key = false\n else\n sql << \",\"\n end\n\n sql << @db.quote_value(val)\n end\n else\n hash.each do |_key, val|\n if first_key\n first_key = false\n else\n sql << \",\"\n end\n\n sql << @db.quote_value(val)\n end\n end\n end\n\n sql << \")\"\n\n return sql if args && args[:return_sql]\n\n query_no_result_set(sql)\n\n if args && args[:return_id]\n first_id = last_id\n raise \"Invalid ID: #{first_id}\" if first_id.to_i <= 0\n ids = [first_id]\n 1.upto(arr_hashes.length - 1) do |count|\n ids << first_id + count\n end\n\n ids_length = ids.length\n arr_hashes_length = arr_hashes.length\n raise \"Invalid length (#{ids_length}, #{arr_hashes_length}).\" if ids_length != arr_hashes_length\n\n return ids\n else\n return nil\n end\n end",
"def insert_multi(tablename, arr_hashes, args = {})\n sql = \"INSERT INTO `#{tablename}` (\"\n\n first = true\n if args && args[:keys]\n keys = args[:keys]\n elsif arr_hashes.first.is_a?(Hash)\n keys = arr_hashes.first.keys\n else\n raise \"Could not figure out keys.\"\n end\n\n keys.each do |col_name|\n sql << \",\" unless first\n first = false if first\n sql << quote_column(col_name)\n end\n\n sql << \") VALUES (\"\n\n first = true\n arr_hashes.each do |hash|\n if first\n first = false\n else\n sql << \"),(\"\n end\n\n first_key = true\n if hash.is_a?(Array)\n hash.each do |val|\n if first_key\n first_key = false\n else\n sql << \",\"\n end\n\n sql << @db.quote_value(val)\n end\n else\n hash.each do |_key, val|\n if first_key\n first_key = false\n else\n sql << \",\"\n end\n\n sql << @db.quote_value(val)\n end\n end\n end\n\n sql << \")\"\n\n return sql if args && args[:return_sql]\n\n query(sql)\n\n if args && args[:return_id]\n first_id = last_id\n raise \"Invalid ID: #{first_id}\" if first_id.to_i <= 0\n ids = [first_id]\n 1.upto(arr_hashes.length - 1) do |count|\n ids << first_id + count\n end\n\n ids_length = ids.length\n arr_hashes_length = arr_hashes.length\n raise \"Invalid length (#{ids_length}, #{arr_hashes_length}).\" unless ids_length == arr_hashes_length\n\n return ids\n else\n return nil\n end\n end",
"def insert_select(*values)\n unless @opts[:disable_insert_returning]\n ds = opts[:returning] ? self : returning\n ds.insert(*values){|r| return r}\n end\n end",
"def insert_sql(*values)\n if values.size == 1 && (v = values[0]).is_a?(Sequel::Model) && !v.respond_to?(:sql_literal_append)\n Sequel::Deprecation.deprecate(\"Passing Sequel::Model instance argument to Sequel::Dataset#insert\", \"Pass model_instance.values or model_instance.to_hash as the argument instead\")\n super(v.to_hash)\n else\n super\n end\n end",
"def insert_sequenced(row)\n sql = row.type.insert_sql_minus_key\n vals = row.field_values_minus_key\n#$stderr.puts sql\n#$stderr.puts vals.inspect\n\n db.do(sql, *vals)\n insert_id = db.select_one(row.type.get_insert_id_sql)[0]\n row.send(row.type.primary_key.setter_name, insert_id)\n row.reset_changed\n end",
"def insert_select(*values)\n return unless supports_insert_select?\n # Handle case where query does not return a row\n server?(:default).with_sql_first(insert_select_sql(*values)) || false\n end",
"def multi_insert_sql(columns, values)\n values = values.map {|r| \"SELECT #{expression_list(r)}\" }.join(\" UNION ALL \")\n [\"#{insert_sql_base}#{source_list(@opts[:from])} (#{identifier_list(columns)}) #{values}\"]\n end",
"def insert_sql_each\n return enum_for(__method__) unless block_given?\n each_row do |row|\n yield table_dataset.insert_sql( row )\n end\n end",
"def merge_insert(*values, &block)\n h = {:type=>:insert, :values=>values}\n if override = @opts[:override]\n h[:override] = insert_override_sql(String.new)\n end\n _merge_when(h, &block)\n end",
"def dump_insert_multi(io, table_obj, rows)\n print \"Inserting #{rows.length} into #{table_obj.name}.\\n\" if @debug\n sqls = @args[:db].insert_multi(table_obj.name, rows, :return_sql => true, :keys => @keys)\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n \n rows.clear\n \n #Ensure garbage collection or we might start using A LOT of memory.\n GC.start\n end",
"def insert(*objects)\n objects = objects.first if objects.size == 1 && objects.first.is_a?(Array)\n res = @db.insert_into_db(@name, objects)\n res.size > 1 ? res : res.first\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert_record(table, values)\n execute table_insert_query(table, values)\n end",
"def make_insert(table, columns, fields, row)\n statement = \"INSERT INTO #{table['name']} (#{fields.join(',')}) VALUES (\"\n values = []\n fields.each do |field|\n values << make_val(row[field], columns[field])\n end\n statement << \"#{values.join(',')});\\n\"\n statement\n end",
"def insert_into(table, data)\n\t\tkeys = \"(#{data.keys.join(', ')})\"\n\t\tvalues = \"(#{data.values.map{ |value| \"'#{value}'\" }.join(', ')})\"\n\t\texecute_with_retry \"INSERT INTO #{table} #{keys} VALUES #{values}; \"\n\tend",
"def insert(*values)\n execute_dui(insert_sql(*values)){|c| return c.last_id}\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert_sql(*values)\n if values.empty?\n insert_default_values_sql\n else\n values = values[0] if values.size == 1\n \n # if hash or array with keys we need to transform the values\n if @transform && (values.is_a?(Hash) || (values.is_a?(Array) && values.keys))\n values = transform_save(values)\n end\n from = source_list(@opts[:from])\n\n case values\n when Array\n if values.empty?\n insert_default_values_sql\n else\n \"INSERT INTO #{from} VALUES #{literal(values)}\"\n end\n when Hash\n if values.empty?\n insert_default_values_sql\n else\n fl, vl = [], []\n values.each {|k, v| fl << literal(k.is_a?(String) ? k.to_sym : k); vl << literal(v)}\n \"INSERT INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})\"\n end\n when Dataset\n \"INSERT INTO #{from} #{literal(values)}\"\n else\n if values.respond_to?(:values)\n insert_sql(values.values)\n else\n \"INSERT INTO #{from} VALUES (#{literal(values)})\"\n end\n end\n end\n end",
"def dump_insert_multi(io, table_obj, rows)\n debug \"Inserting #{rows.length} into #{table_obj.name}.\"\n sqls = @export_db.insert_multi(\n table_obj.name,\n rows,\n replace_line_breaks: true,\n return_sql: true,\n keys: @keys\n )\n sqls.each do |sql|\n io.write(\"#{sql};\\n\")\n end\n\n rows.clear\n\n # Ensure garbage collection or we might start using A LOT of memory.\n GC.start\n end",
"def insert_rows(rows, field, table_struct, dest_table_name = NEW_TABLE_NAME)\n fields = get_fields(table_struct)\n insert_tmplt = row_sql_insert(dest_table_name, table_struct)\n primary_keys = get_pkey_fields(table_struct) \n errs = []\n row_action_data = []\n del_keys = []\n \n if (rows) then\n rows.each_hash do | row |\n row_action_data << {\n :sql_insert => make_sql_insert_row(fields, insert_tmplt, row), \n :key => make_key_hash_for_row(primary_keys, row)\n }\n end\n end\n\n row_action_data.each { |row|\n begin\n dbres = do_sql_command(row[:sql_insert])\n if dbres.nil?\n del_keys << row[:key]\n end\n rescue Mysql::Error\n if !($! =~ /^Duplicate entry .* for key/).nil?\n # i'll consider a duplicate entry okay for a delete\n LOGGER.warn \"Database error! Duplicate key found on insert, marking for deletion anyway, moving on: #{$!}\"\n del_keys << row[:key]\n else\n #errs << \"Database error, moving on: #{$!}\"\n LOGGER.error \"Database error, not sure what, moving on: #{$!}\"\n end\n end\n }\n\n del_keys\nend",
"def insert_in_database\n Fetch.new(insertion_query).array\n end",
"def generate_pg_insert_query(table_name, keys, rows)\n \"INSERT INTO #{table_name}(#{keys.map { |i| \"\\\"#{i}\\\"\" }.join(',')}) VALUES(#{keys.map { |i| rows[i] == nil ? 'NULL' : \"'\" + pg_conn.escape_string(rows[i]) + \"'\" }.join(',')});\\n\"\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def batch_insert(data)\n hash = Hash[data.collect { |v| [v, '1'] }]\n @redis.write_multi(hash)\n end",
"def append_row!(*args)\r\n insert_row!(*args)\r\n end",
"def row_sql_insert(table_name, table_struct)\n fields = get_fields(table_struct)\n\n sql = <<-EOF\n INSERT INTO `#{DBNAME}`.`#{table_name}` (\n #{fields.collect { |f| \"`#{f}`\" }.join(\", \")}\n )\n VALUES (\n #{fields.collect { |f| \"'%s'\" }.join(\", \")}\n );\n EOF\n\n sql\nend",
"def visit_Arel_Nodes_InsertStatement o, *a\n [\n \"INSERT INTO #{visit(o.relation).gsub(/\"/, '')}\",\n \"(#{o.columns.map { |x| x.name }.join ', '})\",\n \" VALUES (#{o.values.left.map { |value| value }.join ', '})\"\n ].compact.join ' '\n end",
"def multiple(sql, values = [])\n r = $db.exec_params(sql, values)\n return [] if r.ntuples == 0\n r.map { |row| convert_to_ruby_types(row) }\nend",
"def insert(values)\n primary_key_value = nil\n\n if primary_key && Hash === values\n primary_key_value = values[values.keys.find { |k|\n k.name == primary_key\n }]\n\n if !primary_key_value && connection.prefetch_primary_key?(klass.table_name)\n primary_key_value = connection.next_sequence_value(klass.sequence_name)\n values[klass.arel_table[klass.primary_key]] = primary_key_value\n end\n end\n\n im = arel.create_insert\n\n # ****** BEGIN PARTITIONED PATCH ******\n actual_arel_table = @klass.dynamic_arel_table(Hash[*values.map{|k,v| [k.name,v]}.flatten]) if @klass.respond_to?(:dynamic_arel_table)\n actual_arel_table = @table unless actual_arel_table\n # Original line:\n # im.into @table\n im.into actual_arel_table\n # ****** END PARTITIONED PATCH ******\n\n conn = @klass.connection\n\n substitutes = values.sort_by { |arel_attr,_| arel_attr.name }\n binds = substitutes.map do |arel_attr, value|\n [@klass.columns_hash[arel_attr.name], value]\n end\n\n substitutes.each_with_index do |tuple, i|\n tuple[1] = conn.substitute_at(binds[i][0], i)\n end\n\n if values.empty? # empty insert\n im.values = Arel.sql(connection.empty_insert_statement_value)\n else\n im.insert substitutes\n end\n\n conn.insert(\n im,\n 'SQL',\n primary_key,\n primary_key_value,\n nil,\n binds)\n end",
"def _insert_raw(ds)\n if use_prepared_statements_for?(:insert)\n _set_prepared_statement_server(model.send(:prepared_insert, @values.keys)).call(@values)\n else\n super\n end\n end",
"def insert_by_data data, table\n sql = \"insert into #{table} \"\n case data\n when Array\n data.each do |d|\n insert_by_data(d, table)\n end\n when Hash\n columns = data.keys.to_s.gsub('[','(').gsub(']',')').gsub('\"','')\n values = data.values.to_s.gsub('[','(').gsub(']',')').gsub('nil','NULL')\n sql = sql + columns + \" values \" + values\n query(sql)\n end\n end",
"def batch_insert(docs)\n execute_batch_push(docs)\n end",
"def insert values\n if $VERBOSE\n warn <<-eowarn\ninsert (#{caller.first}) is deprecated and will be removed in ARel 3.0.0. Please\nswitch to `compile_insert`\n eowarn\n end\n @engine.connection.insert compile_insert(values).to_sql\n end",
"def insert_many(documents, options = nil)\n native.insert_many(documents, options || {}).inserted_ids\n end",
"def insert_facet_series(table, type, select = 'facet_id, series_id')\n $db.execute <<-SQL\n INSERT INTO destination.#{table}\n SELECT #{select}\n FROM facet_series\n WHERE type = '#{type}';\n SQL\nend",
"def insert(row, values)\n values = Array(values)\n unless values.empty?\n @data.expand(row, values.count)\n @data.values.insert row, Array.new([@data.columns_count, values.count].max) { |index| values[index] }\n end\n self\n end",
"def insert(template, *data) # :nodoc:\n chk_conn\n conn = @hibernate_session.connection\n stmt = conn.prepare_statement(template)\n data.each do |d|\n d.each_with_index do |item, index|\n if item.kind_of?(Array)\n set_prepared_statement(stmt, item[0], index+1, item[1])\n else\n set_prepared_statement(stmt, item, index+1, nil)\n end\n end\n stmt.execute_update\n end\n conn.commit\n ensure\n stmt.close rescue nil\n end",
"def execute_insert(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.execute_batch(sql)}; conn.last_insert_rowid}\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |column_name|\n values << \"'#{send(column_name)}'\" unless send(column_name).nil?\n end \n #The above code, however, will result in a values array. We need comma separated values for our SQL statement. Let's join this array into a string:\n values.join(\", \")\n end",
"def insert_multiple(array, &block)\n if block\n array.each {|i| insert(block[i])}\n else\n array.each {|i| insert(i)}\n end\n end",
"def sql_insert(record)\n flds, vals = parse_fldsvalues(record)\n ph = vals.map{|x| placeholder }\n\n sql = %Q|insert into #{quoted_table}\n ( #{flds.join ','} )\n output inserted.#{quote_field id_fld}\n values( #{ph.join ','} );|\n\n [sql, vals]\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n #We need comma separated values for our SQL statement. Let's join this array into a string:\n values.join(\", \")\n end",
"def multiple_value_sets_insert_sql(table_name, column_names, options) # :nodoc:\n \"INSERT #{options[:ignore] ? 'IGNORE ':''}INTO #{table_name} (#{column_names.join(',')}) VALUES \"\n end",
"def insert(*args)\n dataset.insert(*args)\n self\n end",
"def insert(*values)\n raise NotImplementedError, NOTIMPL_MSG\n end",
"def insert\n array = [[@name, @tagline, @github, @twitter, @blog_url, @image_url, @biography]]\n ins = DB[:conn].prepare(\"INSERT INTO students (name, tagline, github, twitter, blog_url, image_url, biography) VALUES (?, ?, ?, ?, ?, ?, ?);\")\n array.each { |s| ins.execute(s)}\n self.id = DB[:conn].execute(\"SELECT last_insert_rowid() FROM students;\")[0][0]\n #ask steven re. index figures\n #inserting data into an instance\n end",
"def insert values\n im = InsertManager.new @engine\n im.insert values\n @engine.connection.insert im.to_sql\n end",
"def _insert\n return super if model.cti_tables.length == 1\n model.cti_models.each do |m|\n v = {}\n m.cti_table_columns.each{|c| v[c] = @values[c] if @values.include?(c)}\n ds = use_server(m.cti_instance_dataset)\n if ds.supports_insert_select? && (h = ds.insert_select(v))\n @values.merge!(h)\n else\n nid = ds.insert(v)\n @values[primary_key] ||= nid\n end\n end\n db.dataset.supports_insert_select? ? nil : @values[primary_key]\n end",
"def fast_insert(rows, base_cmd, end_cmd = '')\n RawDB.fast_insert(db, rows, base_cmd, end_cmd)\n end",
"def insert(sql, name = nil, pk = nil, id_value = nil) end",
"def to_insert(output, table, row)\n columns = @columns[table].map {|i| i[0] }.join(',')\n values = map_values(row, @columns[table])\n output << \"INSERT INTO #{table} (#{columns}) VALUES (#{values});\\n\"\n end",
"def build_insert(data)\n fields = \"\"\n values = \"\"\n data.each do |k,v|\n fields += \"`#{escape_str_field(k)}`, \"\n values += escape_value(v)+\", \"\n end\n \"(\"+fields.chomp(', ')+\") VALUES (\"+values.chomp(', ')+\")\"\n end",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def _insert_raw(ds)\n ds.insert(_insert_values)\n end",
"def insert_product(product)\n db_connection do |conn|\n result = conn.exec(\"SELECT id FROM products WHERE product = $1\", [product[:product]])\n if result.to_a.empty?\n sql = \"INSERT INTO products (product) VALUES ($1) RETURNING id\"\n result = conn.exec(sql, [product[:product]])\n end\n result.first[\"id\"]\n end\nend",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def sql_for_insert(sql, pk, id_value, sequence_name, binds)\n unless pk\n table_ref = extract_table_ref_from_insert_sql(sql)\n pk = primary_key(table_ref) if table_ref\n end\n\n sql = \"#{sql} RETURNING #{quote_column_name(pk)}\" if pk\n\n [sql, binds]\n end",
"def execute(tuples)\n insert_tuples = with_input_tuples(tuples) do |tuple|\n attributes = input[tuple]\n attributes.to_h\n end\n\n if insert_tuples.length > 1\n multi_insert(insert_tuples)\n else\n insert(insert_tuples)\n end\n end",
"def create_insert(headers, values, model, upsert_fields, ret_vals, ig_cols)\r\n\t\t\tputs \"Creating insert query:\"\r\n\t\t\tputs \"There are #{values.length} rows to insert.\"\r\n\r\n\t\t\tp \"HEADERS:\"\r\n\t\t\tp headers\r\n\r\n\t\t\tp \"IGNORED COLUMNS BEFORE HACK:\"\r\n\t\t\tp ig_cols\r\n\r\n\t\t\tig_cols = [] if ig_cols == nil\t# Weird hack because of an error ruby was throwing\r\n\t\t\treturn_results = []\r\n\r\n\t\t\tp \"IGNORED COLUMNS:\"\r\n\t\t\tp ig_cols\r\n\r\n\t\t\t# Loop through the array of arrays of values to insert\r\n\t\t\tvalues.each do |values_array|\r\n\t\t\t\tupsert_attributes = {}\r\n\t\t\t\tinner_array = []\r\n\t\t\t\t# Now loop through the single array of values\r\n\t\t\t\tp \"VALUES ARRAY:\"\r\n\t\t\t\tp values_array\r\n\r\n\t\t\t\tvalues_array.each_with_index do |val, index|\r\n\t\t\t\t\t# puts \"INDEX: #{index}\"\r\n\r\n\t\t\t\t\tnext if ig_cols.include?(index) # IMPORTANT: Need to ignore the indices of the columns in the CSV that the user specifies\r\n\t\t\t\t\tassociated_column_name = headers.at(index).to_sym\t# Get the header name for the row - need it to match in return values\r\n\r\n\t\t\t\t\t# Store the attributes we want to do the upsert on to pass into find_or_create_by method\r\n\t\t\t\t\tupsert_attributes[associated_column_name] = val if upsert_fields.include?(associated_column_name)\r\n\r\n\t\t\t\t\t# puts \"Line 282: #{upsert_attributes}\"\r\n\t\t\t\tend\r\n\r\n\t\t\t\t# Use ActiveRecord's method to return the updated or inserted row\r\n\t\t\t\t# Workaround - do a select and then insert since I can't figure out how to dynamically add the values to the class\r\n\t\t\t\t# select_result = model.find_by(upsert_attributes)\r\n\t\t\t\tinsert_attributes = {}\r\n\t\t\t\tvalues_array.each_with_index do |val, i|\r\n\t\t\t\t\tif !(upsert_attributes.has_key?(headers[i]))\r\n\t\t\t\t\t\t# puts \"VALUE: #{val}\"\r\n\t\t\t\t\t\tinsert_attributes[headers[i].to_sym] = val\r\n\t\t\t\t\tend\r\n\t\t\t\tend\r\n\t\t\t\tinsert_attributes = insert_attributes.merge upsert_attributes\r\n\r\n\t\t\t\t# if select_result == nil\r\n\t\t\t\t# \tinsert_result = model.create(insert_attributes)\r\n\t\t\t\t# else\r\n\t\t\t\t# \tinsert_result = model.update(insert_attributes)\r\n\t\t\t\t# end\r\n\r\n\t\t\t\t# upsert_result = model.find_or_create_by(upsert_attributes) do |klass|\r\n\t\t\t\t# \t# Check to see that we haven't already included the column and value in the upsert_attributes\r\n\t\t\t\t# \t# and if we haven't, include it as a field we need to add to the database along with the value\r\n\t\t\t\t# \t# puts \"#{klass.instance_variables}\"\r\n\t\t\t\t# \tvalues_array.each_with_index do |val, i|\r\n\t\t\t\t# \t\tif !(upsert_attributes.has_key?(headers[i]))\r\n\t\t\t\t# \t\t\t# puts \"VALUE: #{val}\"\r\n\t\t\t\t# \t\t\tklass.send :write_attribute, headers[i].to_sym, val\r\n\t\t\t\t# \t\tend\r\n\t\t\t\t# \tend\r\n\t\t\t\t# end\r\n\r\n\t\t\t\tp upsert_attributes\r\n\r\n\t\t\t\tupsert_result = model.find_or_initialize_by(upsert_attributes)\r\n\t\t\t\tupsert_result.update_attributes(insert_attributes)\r\n\r\n\t\t\t\t# Return what the user asked for\r\n\t\t\t\t#ret_vals.each { |val| inner_array.push(insert_result[val]) }\r\n\t\t\t\tret_vals.each { |val| inner_array.push(upsert_result[val]) }\r\n\r\n\t\t\t\t# Concatenate the arrays of information the user wants back\r\n\t\t\t\treturn_results.push(inner_array)\r\n\r\n\t\t\tend\r\n\t\t\t# p return_results\r\n\t\t\treturn return_results\r\n\t\tend",
"def _insert_select_raw(ds)\n if use_prepared_statements_for?(:insert_select)\n if ps = model.send(:prepared_insert_select, @values.keys)\n _set_prepared_statement_server(ps).call(@values)\n end\n else\n super\n end\n end",
"def insert_invoices(transaction, foreign_keys)\n arguments = [\n transaction[:invoice_no],\n transaction[:sale_amount],\n transaction[:units_sold],\n transaction[:sale_date],\n transaction[:invoice_frequency],\n foreign_keys[:customer_id],\n foreign_keys[:employee_id],\n foreign_keys[:product_id],\n]\n\n db_connection do |conn|\n sql = <<-eos\n INSERT INTO invoices (invoice_no, sale_amount, units_sold, sale_date, invoice_frequency,\n customer_id, employee_id, product_id)\n VALUES ($1, $2, $3, $4, $5, $6, $7, $8)\n eos\n conn.exec_params(sql, arguments)\n end\nend",
"def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)\n exec_query(sql, name, binds)\n end",
"def dbinsert(table, variables, variable_names)\n i = 1\n marks = \"?\"\n while i < variables.length\n marks += \",?\"\n i += 1\n end\n\n v = \"\"\n i = 0\n while i < variables.length\n v += variables[i].to_s \n i += 1\n if i < variables.length\n v += \", \"\n end\n end\n\n return db.execute(\"INSERT INTO #{table}(#{v}) VALUES (#{marks})\", variable_names)\nend",
"def single_insert(table_name, hash)\n status = true\n begin\n columns = []\n values = []\n hash.keys.each do |item|\n columns.push(item)\n values.push(\"'#{hash[item]}'\")\n end\n columns = columns.join(\",\")\n values = values.join(\",\")\n @mysql_client.query(\"INSERT INTO #{table_name} (#{columns}) VALUES (#{values})\")\n rescue\n status = false\n end\n return status\n end",
"def batch_upload_rows(rows, per_page=100); batch_modify_rows('put', rows, per_page); end",
"def insert_multi_field(datastream_name, fields, opts={})\n\t \tds = self.datastreams[datastream_name]\n node, index = ds.insert_multi_field(fields, opts)\n if opts[:value]\n node.inner_text = opts[:value]\n end\n return node, index\n\tend",
"def insert(data) \n set data, 1\n end",
"def insert(data) \n set data, 1\n end",
"def _merge_insert_sql(sql, data)\n sql << \" THEN INSERT \"\n columns, values = _parse_insert_sql_args(data[:values])\n _insert_columns_sql(sql, columns)\n if override = data[:override]\n sql << override\n end\n _insert_values_sql(sql, values)\n end",
"def exec_insert(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def supports_insert_select?\n !@opts[:disable_insert_returning]\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n\n # Let's iterate over the column names stored in #column_names and use the #send method with each individual column name to invoke the method by that same name and capture the return value:\n # values = []\n # self.class.column_names.each do |col_name|\n # values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n # end\n # Here, we push the return value of invoking a method via the #send method, unless that value is #nil (as it would be for the id method before a record is saved, for instance).\n # Notice that we are wrapping the return value in a string. That is because we are trying to craft #a string of SQL. Also notice that each individual value will be enclosed in single quotes, ' ', #inside that string. That is because the final SQL string will need to look like this:\n\n # INSERT INTO songs (name, album)\n # VALUES 'Hello', '25';\n # SQL expects us to pass in each column value in single quotes.\n # The above code, however, will result in a values array\n # [\"'the name of the song'\", \"'the album of the song'\"]\n # We need comma separated values for our SQL statement. Let's join this array into a string:\n # values.join(\", \")\n end",
"def insert(objects)\n # Make sure the prepared statements are ready\n prepare unless $has_prepared\n\n objects.map do |o|\n params = {\n plant_id: o['id'],\n value: o['value'],\n }\n case o['type']\n when 'temp'\n DB.call(:insert_temp, params)\n when 'moisture'\n DB.call(:insert_moisture, params)\n when 'light'\n DB.call(:insert_light, params)\n else\n {\n error: \"Unknown metric type: #{o['type']}\",\n params: params,\n }\n end\n end\nend",
"def execute_insert(sql, opts={})\n synchronize(opts[:server]) do |conn|\n conn.execute(sql)\n insert_result(conn, opts[:table], opts[:values])\n end\n end",
"def to_inserts(args={})\n args[:table] ||= Pathname.new(@filename).basename.to_s.downcase.gsub(/\\W/, '_')\n args[:before] ||= @@defaults[:before]\n args[:after] ||= @@defaults[:after]\n insert_sql = args[:ignore] ? 'insert ignore' : 'insert'\n if args[:bulk]\n args[:before] += \"#{insert_sql} into #{args[:table]} values\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \" (%s)\"\n args[:row_glue] ||= \",\\n\"\n else\n args[:before] ||= \"\"\n args[:values_glue] ||= \", \"\n args[:row_format] ||= \"#{insert_sql} into #{args[:table]} values(%s)\"\n args[:row_glue] ||= \";\\n\"\n end\n to_any args\n end",
"def insert(table, id, attributes) # abstract\n end",
"def push(*rows)\n rows.each { |row| self << row }\n self\n end",
"def InsertPubmedRecords(publications)\n row_iterator(publications) { |publication|\n abstract = InsertPublication(publication)\n }\nend",
"def mass_import(rows)\n self.import([:id, :value], rows, validate: false, timestamps: false)\n end",
"def insert_returning_columns(ds)\n return unless ds.supports_returning?(:insert)\n return unless values = ds.opts[:select]\n\n values = values.map{|v| ds.unqualified_column_for(v)}\n if values.all?\n values\n end\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)\n return super if id_value\n pk = pk_from_insert_sql(sql) unless pk\n select_value(\"#{sql} RETURNING #{quote_column_name(pk)}\")\n end",
"def copy_rows( field, \n table_struct, \n src_table_name = TABLE_NAME, \n dest_table_name = NEW_TABLE_NAME, \n num_rows = ROWS_PER_TRANSACTION)\n rows = grab_rows(field, src_table_name, num_rows)\n keys_for_delete = insert_rows(rows, field, table_struct, dest_table_name)\n keys_for_delete\nend",
"def values_for_insert\r\n values = []\r\n self.class.column_names.each do |col_name|\r\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\r\n end\r\n values.join(\", \")\r\nend",
"def assert_replicate(sql)\n res = _test_replicate(sql)\n assert res.all? {|i| i['nextval'] == \"2\"}\n end",
"def insert_facet(table, type, second_type = nil)\n second_condition = second_type == nil ? nil : \"OR type = '#{second_type}'\"\n\n $db.execute <<-SQL\n INSERT INTO destination.#{table}\n SELECT id, name, created_at, updated_at\n FROM source.facets\n INNER JOIN (SELECT facet_id, type FROM source.facet_series)\n AS facet_series\n ON (facet_series.facet_id = source.facets.id)\n WHERE type = '#{type}' #{second_condition}\n GROUP BY name;\n SQL\nend"
] |
[
"0.7640881",
"0.74330044",
"0.69389695",
"0.69068533",
"0.66917443",
"0.66913056",
"0.6682041",
"0.6633522",
"0.65172905",
"0.6516788",
"0.65022826",
"0.6486905",
"0.6388606",
"0.63865644",
"0.6357242",
"0.6326598",
"0.62928814",
"0.62393683",
"0.62268454",
"0.6203298",
"0.61544645",
"0.61543167",
"0.6114331",
"0.6101116",
"0.6098586",
"0.6092337",
"0.6080487",
"0.6075963",
"0.6072705",
"0.6071856",
"0.60693216",
"0.60482293",
"0.60478216",
"0.60341954",
"0.59864235",
"0.59864235",
"0.59864235",
"0.59712887",
"0.5945667",
"0.5935916",
"0.5934161",
"0.59302086",
"0.59120196",
"0.58886707",
"0.58677936",
"0.5838108",
"0.5832956",
"0.5828825",
"0.58286357",
"0.58276933",
"0.57987",
"0.57644594",
"0.5754801",
"0.57406044",
"0.5726249",
"0.5707216",
"0.5691813",
"0.56688493",
"0.5650334",
"0.56485915",
"0.5648371",
"0.56445366",
"0.5642398",
"0.5641366",
"0.5619713",
"0.56165713",
"0.5603457",
"0.5603457",
"0.5583382",
"0.5577105",
"0.5572354",
"0.55629724",
"0.55529046",
"0.5533407",
"0.5524655",
"0.5522785",
"0.5521869",
"0.5519518",
"0.5516808",
"0.55123514",
"0.5511378",
"0.5511378",
"0.54949105",
"0.54791045",
"0.5469897",
"0.54622114",
"0.5457917",
"0.54538774",
"0.5450826",
"0.54425627",
"0.54381955",
"0.54293877",
"0.5427063",
"0.5419378",
"0.5401796",
"0.5401796",
"0.5379677",
"0.5378198",
"0.53716445",
"0.5358347"
] |
0.6972604
|
2
|
Dataset options that do not affect the generated SQL.
|
def non_sql_option?(key)
super || key == :cursor || key == :insert_conflict
end
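A short usage sketch (an addition, not part of the source corpus): options flagged by this predicate are ignored when deciding whether a cloned dataset needs new SQL, so a clone carrying only such options can reuse the cached SQL string. The :log_tag key below is hypothetical, showing how a subclass could register one more non-SQL option on top of :cursor and :insert_conflict.

def non_sql_option?(key)
  # Hypothetical extension: :log_tag is bookkeeping only and never
  # appears in the generated SQL, so it is treated like :cursor and
  # :insert_conflict above.
  super || key == :log_tag
end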
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def _metadata_dataset\n super.with_convert_smallint_to_bool(false)\n end",
"def datatable_exclude_fields\n # None\n []\n end",
"def options\n {\n keywords: ['Boolean', \"Quote all keywords, not just reserved words\"],\n restrict: ['String', \"Restrict generation to tables in the specified group (e.g. bdv, rdv)\"],\n joiner: ['String', \"Use 'str' instead of the default joiner between words in table and column names\"],\n unicode: ['Boolean', \"Use Unicode for all text fields by default\"],\n tables: [%w{cap title camel snake shout}, \"Case to use for table names\"],\n columns: [%w{cap title camel snake shout}, \"Case to use for table names\"],\n surrogates: [%w{counter guid hash}, \"Method to use for assigning surrogate keys\"],\n fks: [%w{no yes delay}, \"Emit foreign keys, delay them to the end, or omit them\"],\n }\n end",
"def default_data_options\n {}\n end",
"def apply_dataset_options(type, request, ds)\n ds = apply_filter(type, request, ds)\n if order = order_for(type, request)\n ds = ds.order(*order)\n end\n if eager = eager_for(type, request)\n ds = ds.eager(eager)\n end\n if eager_graph = eager_graph_for(type, request)\n ds = ds.eager_graph(eager_graph)\n end\n ds\n end",
"def orig_dataset\n @opts[:orig_dataset]\n end",
"def select_table_options_sql(sql)\n sql << \" WITH #{@opts[:table_options]}\" if @opts[:table_options]\n end",
"def compound_dataset_sql_append(sql, ds)\n sql << '('\n super\n sql << ')'\n end",
"def flag_option_sql(attrs, key, off=\"NO#{key}\".upcase, on=key.to_s.upcase, implicit=IMPLICIT_FLAG_ATTRIBUTES[key])\n\t case attrs[key]\n\t when NilClass, implicit\n\t when TrueClass then on\n\t when FalseClass then off\n\t else raise Error, \"Unsupported or invalid #{key} option\"\n\t end\n\t end",
"def sql(options={})\n get_location\n # TODO: validate options\n @params[:sql] = FEATURE_DEFAULTS[:sql].merge(options)\n @params[:sql][:generate] = true\n end",
"def dataset\n DB[SQL, from: from_truncated, to: to_truncated, tick: tick]\n end",
"def sql_modes; end",
"def table_options_sql(options)\n\t sql = []\n\t sql << flag_option_sql(options, :parallel)\n\t sql << flag_option_sql(options, :logging)\n\t sql << flag_option_sql(options, :monitoring)\n\t sql << \"TABLESPACE #{quote_identifier(options[:tablespace])}\" if options[:tablespace]\n\t sql << compress_option_sql(options)\n\t sql << options[:options] if String === options[:options]\n\t sql.compact.join ' '\n\t end",
"def legacy_options\n {\n :conditions => options[:where],\n :include => options[:includes],\n :limit => options[:limit],\n :order => options[:order],\n :offset => options[:offset],\n :select => options[:select],\n :group => options[:group],\n }.delete_blanks\n end",
"def delete_options_statement\n super\n end",
"def no_where\n @data.where_behavior = :exclude\n return self\n end",
"def sql_literal(*)\n @dataset.sql\n end",
"def connection_configuration_sqls\n sqls = super\n sqls << \"SET DateStyle = 'ISO'\" if @use_iso_date_format\n sqls\n end",
"def non_sql_option?(key)\n NON_SQL_OPTIONS.include?(key)\n end",
"def unfiltered\n cached_dataset(:_unfiltered_ds){clone(:where => nil, :having => nil)}\n end",
"def skip_schema_queries; end",
"def generate_upsert_options\n if options.empty?\n ''\n else\n ' USING ' <<\n options.map do |key, value|\n serialized_value =\n case key\n when :consistency then value.to_s.upcase\n when :timestamp then (value.to_f * 1_000_000).to_i\n else value\n end\n \"#{key.to_s.upcase} #{serialized_value}\"\n end.join(' AND ')\n end\n end",
"def set_unused_options_for_association(opts, unused)\n opts[:read_only] = true if unused.include?('read_only')\n opts[:no_dataset_method] = true if unused.include?('dataset_method')\n opts[:no_association_method] = true if unused.include?('association_method')\n opts[:adder] = nil if unused.include?('adder')\n opts[:remover] = nil if unused.include?('remover')\n opts[:clearer] = nil if unused.include?('clearer')\n opts\n end",
"def generate_upsert_options\n if options.empty?\n ''\n else\n ' USING ' <<\n options.map do |key, value|\n serialized_value =\n case key\n when :timestamp then (value.to_f * 1_000_000).to_i\n else value\n end\n \"#{key.to_s.upcase} #{serialized_value}\"\n end.join(' AND ')\n end\n end",
"def except(dataset, opts={})\n opts = {:all=>opts} unless opts.is_a?(Hash)\n raise(Sequel::Error, \"EXCEPT ALL not supported\") if opts[:all]\n compound_clone(:minus, dataset, opts)\n end",
"def skip_sql_param_names; end",
"def save_metas_options_skip\n false\n end",
"def table_options\n @options\n end",
"def dataset(opts=nil)\n Sequel::JDBC::MSSQL::Dataset.new(self, opts)\n end",
"def _dataset(opts)\n raise(Sequel::Error, \"model object #{inspect} does not have a primary key\") if opts.dataset_need_primary_key? && !pk\n ds = if opts[:dataset].arity == 1\n instance_exec(opts, &opts[:dataset])\n else\n instance_exec(&opts[:dataset])\n end\n _apply_association_options(opts, ds)\n end",
"def data(options = {})\n add_required_columns(options[:required_columns])\n @rows ||= aggregate\n end",
"def column_references_sql(options)\n sql = [super(options)]\n\t sql << flag_option_sql(options, :rely)\n\t sql << flag_option_sql(options, :enable, 'DISABLE')\n\t sql << flag_option_sql(options, :validate)\n\t sql.join ' '\n end",
"def dataset(opts=nil)\n Sequel::Swift::Postgres::Dataset.new(self, opts)\n end",
"def skip_schema_queries=(_arg0); end",
"def filter_options options\n other_tables = options[:other_tables].dup || []\n temp_options = options.dup\n temp_options.keep_if do |k,v|\n ['name','page','search','order'].include? k.to_s\n end\n other_tables << temp_options\n end",
"def dataset(opts = nil)\n Mysql2::Dataset.new(self, opts)\n end",
"def canonic_opts\n o = { :unique => false, :name => default_index_name }\n o.merge!( opts )\n o.delete_if{ |key, value| IGNORED_OPTS.include? key }\n end",
"def options; [] end",
"def invert\n cached_dataset(:_invert_ds) do\n having, where = @opts.values_at(:having, :where)\n if having.nil? && where.nil?\n where(false)\n else\n o = {}\n o[:having] = SQL::BooleanExpression.invert(having) if having\n o[:where] = SQL::BooleanExpression.invert(where) if where\n clone(o)\n end\n end\n end",
"def dataset(opts=nil)\n Sequel::JDBC::Pervasive::Dataset.new(self)\n end",
"def skip_sql_param_names=(_arg0); end",
"def chart_options\n {}\n end",
"def aggreate_dataset_use_from_self?\n super || @opts[:values]\n end",
"def sanitize_select_options(options)#:nodoc:\n o = options.dup\n select = o.delete :select\n o[:override_select] = select ? select_column_sql(select) : ' * '\n o\n end",
"def query_options; end",
"def dataset(opts=nil)\n Sequel::JDBC::Access::Dataset.new(self, opts)\n end",
"def skip_limit_check\n cached_dataset(:_skip_limit_check_ds) do\n clone(:skip_limit_check=>true)\n end\n end",
"def dataset(opts=nil)\n Sequel::JDBC::AS400::Dataset.new(self, opts)\n end",
"def dataset(opts=nil)\n Sequel::JDBC::AS400::Dataset.new(self, opts)\n end",
"def target_sql_mode=(_arg0); end",
"def options() end",
"def to_sql\n nil\n end",
"def dataset_extend(mod, opts=OPTS)\n @dataset = @dataset.with_extend(mod) if @dataset\n reset_instance_dataset\n dataset_method_modules << mod\n unless opts[:create_class_methods] == false\n mod.public_instance_methods.each{|meth| def_model_dataset_method(meth)}\n end\n end",
"def dataset_extend(mod, opts=OPTS)\n @dataset = @dataset.with_extend(mod) if @dataset\n reset_instance_dataset\n dataset_method_modules << mod\n unless opts[:create_class_methods] == false\n mod.public_instance_methods.each{|meth| def_model_dataset_method(meth)}\n end\n end",
"def setDataOptions(options)\n @fields['data_options'] = options\n self\n end",
"def setDataOptions(options)\n @fields['data_options'] = options\n self\n end",
"def initialize(_layout, _data=[], _queries=[])\n @layout = _layout\n @queries = _queries\n #puts \"DATASET NEW queries:#{@queries}\"\n super(_data)\n end",
"def schema_ds_select(table_name, opts)\n cols = [:column_name___column, :data_type___db_type, :character_maximum_length___max_chars, \\\n :numeric_precision, :column_default___default, :is_nullable___allow_null]\n cols << :c__table_name unless table_name\n cols\n end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end",
"def options; end"
] |
[
"0.6126969",
"0.6093773",
"0.60819745",
"0.6073446",
"0.5967321",
"0.5946746",
"0.59196484",
"0.58290535",
"0.5820168",
"0.5818987",
"0.581032",
"0.57358986",
"0.5735619",
"0.5730669",
"0.57108414",
"0.5682038",
"0.56603724",
"0.5653939",
"0.5626986",
"0.56221044",
"0.5605688",
"0.5596994",
"0.5577427",
"0.5556613",
"0.5539701",
"0.5527121",
"0.55219173",
"0.55181366",
"0.5488773",
"0.5488078",
"0.5429705",
"0.5428637",
"0.54273605",
"0.54227823",
"0.54139966",
"0.5387215",
"0.5367076",
"0.5359545",
"0.53563994",
"0.5334529",
"0.5328885",
"0.5310975",
"0.529356",
"0.52916074",
"0.5282183",
"0.5279325",
"0.52653587",
"0.5259204",
"0.5259204",
"0.52506506",
"0.5237224",
"0.52214146",
"0.5216333",
"0.5216333",
"0.5216212",
"0.5216212",
"0.5215117",
"0.52102774",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286",
"0.5208286"
] |
0.0
|
-1
|
PostgreSQL requires parentheses around compound datasets if they use CTEs, and using them in other places doesn't hurt.
|
def compound_dataset_sql_append(sql, ds)
sql << '('
super
sql << ')'
end
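An illustrative sketch (table names and the exact output are assumptions, not from the source): when one operand of a UNION carries its own WITH clause, the parentheses appended by this method keep that CTE scoped to its operand instead of letting it be parsed as part of the outer statement.

# Hypothetical datasets on a PostgreSQL connection; the WITH clause
# belongs to the second UNION operand, so it must stay inside (...).
recent = DB[:recent_rows].with(:recent_rows, DB[:audit].where{created_at > Date.today - 30})
DB[:items].union(recent).sql
# roughly: SELECT * FROM (SELECT * FROM items UNION (WITH recent_rows AS (...) SELECT * FROM recent_rows)) AS t1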
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def sql_literal(*)\n @dataset.sql\n end",
"def with(name, dataset, opts=OPTS)\n raise(Error, 'This dataset does not support common table expressions') unless supports_cte?\n if hoist_cte?(dataset)\n s, ds = hoist_cte(dataset)\n s.with(name, ds, opts)\n else\n clone(:with=>((@opts[:with]||EMPTY_ARRAY) + [Hash[opts].merge!(:name=>name, :dataset=>dataset)]).freeze)\n end\n end",
"def select(*args); dataset.select(*args); end",
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def pipe_cte_with!(subquery)\n return self unless subquery.try(:with_values?)\n\n # Add subquery CTE's to the parents query stack. (READ THE SPECIAL NOTE ABOVE!)\n if @scope.with_values?\n @scope.cte.pipe_cte_with!(subquery.cte)\n else\n # Top level has no with values\n @scope.with!(subquery.cte)\n end\n\n self\n end",
"def to_sql\n \"\n SELECT row_to_json(fc)\n FROM ( SELECT 'FeatureCollection' AS type, array_to_json(array_agg(f)) AS features\n FROM ( SELECT 'Feature' AS type\n , ST_AsGeoJSON(subquery.geom)::json AS geometry\n , row_to_json(\n (SELECT l FROM (SELECT id, geoid) AS l)\n ) AS properties\n\n FROM (\n SELECT\n ct.id,\n ct.geom,\n ct.geoid,\n ST_Area(ST_SetSRID(geom,4326)) as d,\n ST_Area(\n ST_Intersection(\n ST_SetSRID( ST_GeomFromGeoJSON('#{ @geojson }'), 4326),\n ST_SetSRID(geom,4326)\n )\n ) as n\n FROM census_tracts_2010 AS ct\n WHERE\n ST_Intersects(\n ST_SetSRID(geom,4326),\n ST_SetSRID( ST_GeomFromGeoJSON('#{ @geojson }'), 4326)\n )\n ) subquery\n WHERE (n/d*100) >= 15\n\n\n ) AS f\n ) AS fc;\n \"\n end",
"def data_complextest(db); end",
"def compound_clone(type, dataset, opts)\n if dataset.is_a?(Dataset) && dataset.opts[:with] && !supports_cte_in_compounds?\n s, ds = hoist_cte(dataset)\n return s.compound_clone(type, ds, opts)\n end\n ds = compound_from_self.clone(:compounds=>(Array(@opts[:compounds]).map(&:dup) + [[type, dataset.compound_from_self, opts[:all]].freeze]).freeze)\n opts[:from_self] == false ? ds : ds.from_self(opts)\n end",
"def sql_inventory_groups\n \"WITH ooc_groups AS\n (\n SELECT assg.asset_id, grp.ooc_group_id as group_id,grp.ooc_group_name as group_name,\n grp.ooc_group_type as group_type, grp.ooc_group_status as group_status\n FROM hip_ooc_asset_group_v AS assg\n JOIN hip_ooc_group_v AS grp ON grp.ooc_group_id = assg.ooc_group_id\n WHERE grp.ooc_group_status != 'deleted'\n AND grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n hc_groups as(\n SELECT assg.asset_id, grp.hc_group_id as group_id,grp.group_name,'hc cycle'as group_type ,\n grp.is_current\n FROM hip_asset_group_v AS assg\n JOIN hip_hc_group_v AS grp ON grp.hc_group_id = assg.hc_group_id\n WHERE grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n all_groups as (\n select * from ooc_groups\n union\n select * from hc_groups\n )\n SELECT assh.host_name,assh.ip_string_list, assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag,assh.hc_auto_interval_weeks,\n assh.hc_manual_interval_weeks,assh.hc_manual_flag,\n #{group_type_columns_frag}\n CASE\n WHEN assh.hc_auto_flag='y' and assh.hc_manual_flag='y' then 'Yes'\n WHEN assh.hc_auto_flag='n' and assh.hc_manual_flag='n' then 'No'\n ELSE NULL\n END AS hc_required \n FROM dim_comm_tool_asset_hist_v AS assh\n LEFT join all_groups AS g ON g.asset_id = assh.tool_asset_id\n JOIN dim_comm_os_v AS os ON os.os_id=assh.os_id\n WHERE\n assh.org_l1_id=#{org_l1_id} AND assh.org_id=#{org_id}\n AND CURRENT_TIMESTAMP BETWEEN assh.row_from_timestamp AND COALESCE(assh.row_to_timestamp, CURRENT_TIMESTAMP)\n group by assh.host_name,assh.ip_string_list,assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag, assh.hc_auto_interval_weeks, assh.hc_manual_interval_weeks,\n assh.hc_manual_flag\n ORDER BY assh.host_name\"\n end",
"def sql_of( expr, dataset)\n dataset.send :literal_expression_append, rv=String.new, expr\n rv\n end",
"def subquery\n subqueries.\n compact.\n inject(&:merge)\n end",
"def hoist_cte?(ds)\n ds.is_a?(Dataset) && ds.opts[:with] && !supports_cte_in_subqueries?\n end",
"def ct_subquery_sql(options)\n # the source query contains a variable number of \"extra\" columns\n # ones needed in the output but not involved in pivoting\n source_sql = \"SELECT ARRAY[#{sql_row_name_columns.join(', ')}],\n #{sql_crosstab_columns.join(', ')}, year, gross_quantity\n FROM (#{subquery_sql(options)}) subquery\n ORDER BY 1, #{sql_crosstab_columns.length + 2}\" # order by row_name and year\n source_sql = ActiveRecord::Base.send(:sanitize_sql_array, [source_sql, years])\n source_sql = ActiveRecord::Base.connection.quote_string(source_sql)\n # the categories query returns values by which to pivot (years)\n categories_sql = 'SELECT * FROM UNNEST(ARRAY[?])'\n categories_sql = ActiveRecord::Base.send(:sanitize_sql_array, [categories_sql, years.map(&:to_i)])\n ct_columns = [\n 'row_name TEXT[]',\n report_crosstab_columns.map.each_with_index { |c, i| \"#{sql_crosstab_columns[i]} #{crosstab_columns[c][:pg_type]}\" },\n years_columns.map { |y| \"#{y} numeric\" }\n ].flatten.join(', ')\n # a set returning query requires that output columns are specified\n <<-SQL\n SELECT * FROM CROSSTAB('#{source_sql}', '#{categories_sql}')\n AS ct(#{ct_columns})\n SQL\n end",
"def supports_cte_in_subqueries?\n supports_cte?\n end",
"def paren_nest; end",
"def paren_nest; end",
"def paren_nest; end",
"def union(dataset, opts=OPTS)\n compound_clone(:union, dataset, opts)\n end",
"def wrap_dataset(dataset)\n if relation.is_a?(Relation::Composite)\n relation.new(dataset).to_a\n else\n dataset\n end\n end",
"def grouping_parentheses(o, collector)\n if o.expr.is_a? Nodes::SelectStatement\n collector << \"(\"\n visit o.expr, collector\n collector << \")\"\n else\n visit o.expr, collector\n end\n end",
"def find_recursive_datasets(datasets)\n all_datasets = datasets['included'] + datasets['excluded']\n single = []\n recursive = []\n cleaned_recursive = []\n\n ### Find datasets that must be single, or are eligible for recursive\n datasets['included'].each do |dataset|\n excluded_child = false\n # Find all children_datasets\n children_datasets = all_datasets.select { |child_dataset| child_dataset.name.start_with? dataset.name }\n children_datasets.each do |child_dataset|\n if datasets['excluded'].include?(child_dataset)\n excluded_child = true\n single << dataset\n break\n end\n end\n unless excluded_child\n recursive << dataset\n end\n end\n\n ## Cleanup recursive\n recursive.each do |dataset|\n if dataset.name.include?('/')\n parts = dataset.name.rpartition('/')\n parent = all_datasets.find { |parent_dataset| parent_dataset.name == parts[0] }\n else\n parent = dataset\n end\n\n # Parent dataset\n if parent == dataset\n cleaned_recursive << dataset\n next\n end\n\n # Only add this if its parent is not in the recursive list\n cleaned_recursive << dataset unless recursive.include?(parent)\n end\n\n # If any children have a DB, need to set it in the recursive parent\n cleaned_recursive.each do |parent|\n all_datasets.each do |dataset|\n # Is this dataset a child of the parent?\n next if !dataset.name.include?(parent.name)\n # If this dataset has a DB, set the parent to contain it as well.\n if dataset.db\n parent.contains_db!(dataset.db)\n end\n end\n end\n\n\n {\n 'single' => single,\n 'recursive' => cleaned_recursive,\n 'included' => datasets['included'],\n 'excluded' => datasets['excluded'],\n }\nend",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def query(&block)\n dataset.query(&block)\n end",
"def split_multiple_result_sets\n raise(Error, \"Can't split multiple statements on a graphed dataset\") if opts[:graph]\n ds = clone(:split_multiple_result_sets=>true)\n ds = ds.with_row_proc(proc{|x| x.map{|h| row_proc.call(h)}}) if row_proc\n ds\n end",
"def hoist_cte(ds)\n [clone(:with => ((opts[:with] || EMPTY_ARRAY) + ds.opts[:with]).freeze), ds.clone(:with => nil)]\n end",
"def test_003\n\n target_sql = \"select d.id as id,\nd.taxnumber as taxnumber,\nd.social_security_type as social_security_type,\nd.taxnumber_exemption as taxnumber_exemption\nfrom distributors d\nleft join distributor_addons da on (d.id = da.distributor_id)\nwhere d.id = (11,12,13,14,15,16)\norder by d.id\"\n \n @sql.select do\n d :id, :taxnumber, :social_security_type, :taxnumber_exemption\n end\n\n @sql.from(distributors: 'd') do\n left_join distributor_addons: 'da', on: 'd.id = da.distributor_id'\n end\n\n dist_ids = [11,12,13,14,15,16]\n \n @sql.where do\n d id: dist_ids\n end\n\n @sql.order('d.id')\n\n assert_equal @sql.to_s, target_sql\n end",
"def subquery_sql(options)\n net_imports_query(options)\n end",
"def compile_query\n #puts \"DATASET COMPILE self #{self}\"\n #puts \"DATASET COMPILE queries #{queries}\"\n \n # Old way: works but doesn't handle fmp compound queries.\n #query.each_with_object([{},{}]){|x,o| o[0].merge!(x[0] || {}); o[1].merge!(x[1] || {})}\n \n # New way: handles compound queries. Reqires ginjo-rfm 3.0.11.\n return unless queries # This should help introspecting dataset that results from record deletion. TODO: test this.\n queries.inject {|new_query,scope| apply_scope(new_query, scope)} ##puts \"SCOPE INJECTION scope:#{scope} new_query:#{new_query}\"; \n end",
"def test_9bGroupAsExpression\n\n assert_nothing_thrown(\"Creating AST objects\") { \n\n # Set debug log level\n Common::Logger.setLogLevel(Common::VAR_DEBUG)\n\n var_AST = AST.new(\"sampledata/data.xml\")\n\n expression = GroupAsExpression.new(FloatTerminal.new(11.321), \"test_object\")\n\n expression.execute(var_AST)\n\n result = var_AST.VAR_QRES().pop()\n\n assert_equal(\"QRES::BinderResult\", result.class.to_s())\n assert_equal(11.321, result.VAR_OBJECT().VAR_OBJECT())\n assert_equal(\"test_object\", result.VAR_NAME())\n\n expression = GroupAsExpression.new(\n StructExpression.new(CommaExpression.new(IntegerTerminal.new(888), FloatTerminal.new(11.321))),\n \"test_complex_object\")\n\n expression.execute(var_AST)\n\n result = var_AST.VAR_QRES().pop()\n\n assert_equal(\"QRES::BinderResult\", result.class.to_s())\n \n result = result.pop()\n \n assert_equal(11.321, result.pop().VAR_OBJECT())\n assert_equal(888, result.pop().VAR_OBJECT()) \n\n expression = GroupAsExpression.new(\n BagExpression.new(CommaExpression.new(IntegerTerminal.new(888), FloatTerminal.new(11.321))),\n \"test_complex_object\")\n\n expression.execute(var_AST)\n\n result = var_AST.VAR_QRES().pop()\n\n assert_equal(\"QRES::BinderResult\", result.class.to_s())\n \n result = result.pop()\n \n assert_equal(\"QRES::BagResult\", result.class.to_s())\n \n assert_equal(11.321, result.pop().VAR_OBJECT())\n assert_equal(888, result.pop().VAR_OBJECT())\n }\n end",
"def test_cross_join_syntactic_sugar\n assert_sql 'SELECT * FROM `t1` CROSS JOIN `t2`', 'SELECT * FROM t1, t2'\n assert_sql 'SELECT * FROM `t1` CROSS JOIN `t2` CROSS JOIN `t3`', 'SELECT * FROM t1, t2, t3'\n end",
"def graph(dataset, join_conditions = nil, options = OPTS, &block)\n # Allow the use of a dataset or symbol as the first argument\n # Find the table name/dataset based on the argument\n table_alias = options[:table_alias]\n table = dataset\n create_dataset = true\n\n case dataset\n when Symbol\n # let alias be the same as the table name (sans any optional schema)\n # unless alias explicitly given in the symbol using ___ notation\n table_alias ||= split_symbol(table).compact.last\n when Dataset\n if dataset.simple_select_all?\n table = dataset.opts[:from].first\n table_alias ||= table\n else\n table_alias ||= dataset_alias((@opts[:num_dataset_sources] || 0)+1)\n end\n create_dataset = false\n when SQL::Identifier\n table_alias ||= table.value\n when SQL::QualifiedIdentifier\n table_alias ||= split_qualifiers(table).last\n when SQL::AliasedExpression\n return graph(table.expression, join_conditions, {:table_alias=>table.alias}.merge!(options), &block)\n else\n raise Error, \"The dataset argument should be a symbol or dataset\"\n end\n table_alias = table_alias.to_sym\n\n if create_dataset\n dataset = db.from(table)\n end\n\n # Raise Sequel::Error with explanation that the table alias has been used\n raise_alias_error = lambda do\n raise(Error, \"this #{options[:table_alias] ? 'alias' : 'table'} has already been been used, please specify \" \\\n \"#{options[:table_alias] ? 'a different alias' : 'an alias via the :table_alias option'}\") \n end\n\n # Only allow table aliases that haven't been used\n raise_alias_error.call if @opts[:graph] && @opts[:graph][:table_aliases] && @opts[:graph][:table_aliases].include?(table_alias)\n \n table_alias_qualifier = qualifier_from_alias_symbol(table_alias, table)\n implicit_qualifier = options[:implicit_qualifier]\n ds = self\n\n # Use a from_self if this is already a joined table (or from_self specifically disabled for graphs)\n if (@opts[:graph_from_self] != false && !@opts[:graph] && joined_dataset?)\n from_selfed = true\n implicit_qualifier = options[:from_self_alias] || first_source\n ds = ds.from_self(:alias=>implicit_qualifier)\n end\n \n # Join the table early in order to avoid cloning the dataset twice\n ds = ds.join_table(options[:join_type] || :left_outer, table, join_conditions, :table_alias=>table_alias_qualifier, :implicit_qualifier=>implicit_qualifier, :qualify=>options[:qualify], &block)\n\n return ds if options[:join_only]\n\n opts = ds.opts\n\n # Whether to include the table in the result set\n add_table = options[:select] == false ? 
false : true\n # Whether to add the columns to the list of column aliases\n add_columns = !ds.opts.include?(:graph_aliases) # SEQUEL5: Remove graph_aliases support\n\n if graph = opts[:graph]\n graph = graph.dup\n select = opts[:select].dup\n [:column_aliases, :table_aliases, :column_alias_num].each{|k| graph[k] = graph[k].dup}\n else\n # Setup the initial graph data structure if it doesn't exist\n qualifier = ds.first_source_alias\n master = alias_symbol(qualifier)\n raise_alias_error.call if master == table_alias\n\n # Master hash storing all .graph related information\n graph = {}\n\n # Associates column aliases back to tables and columns\n column_aliases = graph[:column_aliases] = {}\n\n # Associates table alias (the master is never aliased)\n table_aliases = graph[:table_aliases] = {master=>self}\n\n # Keep track of the alias numbers used\n ca_num = graph[:column_alias_num] = Hash.new(0)\n\n # All columns in the master table are never\n # aliased, but are not included if set_graph_aliases\n # has been used.\n if add_columns\n if (select = @opts[:select]) && !select.empty? && !(select.length == 1 && (select.first.is_a?(SQL::ColumnAll)))\n select = select.map do |sel|\n raise Error, \"can't figure out alias to use for graphing for #{sel.inspect}\" unless column = _hash_key_symbol(sel)\n column_aliases[column] = [master, column]\n if from_selfed\n # Initial dataset was wrapped in subselect, selected all\n # columns in the subselect, qualified by the subselect alias.\n Sequel.qualify(qualifier, Sequel.identifier(column))\n else\n # Initial dataset not wrapped in subslect, just make\n # sure columns are qualified in some way.\n qualified_expression(sel, qualifier)\n end\n end\n else\n select = columns.map do |column|\n column_aliases[column] = [master, column]\n SQL::QualifiedIdentifier.new(qualifier, column)\n end\n end\n end\n end\n\n # Add the table alias to the list of aliases\n # Even if it isn't been used in the result set,\n # we add a key for it with a nil value so we can check if it\n # is used more than once\n table_aliases = graph[:table_aliases]\n table_aliases[table_alias] = add_table ? dataset : nil\n\n # Add the columns to the selection unless we are ignoring them\n if add_table && add_columns\n column_aliases = graph[:column_aliases]\n ca_num = graph[:column_alias_num]\n # Which columns to add to the result set\n cols = options[:select] || dataset.columns\n # If the column hasn't been used yet, don't alias it.\n # If it has been used, try table_column.\n # If that has been used, try table_column_N \n # using the next value of N that we know hasn't been\n # used\n cols.each do |column|\n col_alias, identifier = if column_aliases[column]\n column_alias = :\"#{table_alias}_#{column}\"\n if column_aliases[column_alias]\n column_alias_num = ca_num[column_alias]\n column_alias = :\"#{column_alias}_#{column_alias_num}\" \n ca_num[column_alias] += 1\n end\n [column_alias, SQL::AliasedExpression.new(SQL::QualifiedIdentifier.new(table_alias_qualifier, column), column_alias)]\n else\n ident = SQL::QualifiedIdentifier.new(table_alias_qualifier, column)\n [column, ident]\n end\n column_aliases[col_alias] = [table_alias, column].freeze\n select.push(identifier)\n end\n end\n [:column_aliases, :table_aliases, :column_alias_num].each{|k| graph[k].freeze}\n ds = ds.clone(:graph=>graph.freeze)\n add_columns ? ds.select(*select) : ds\n end",
"def select_with_sql_cte(sql, cte)\n super\n select_with_sql_cte_search_cycle(sql, cte)\n end",
"def data(&block)\n if block_given?\n project_graph(nil) do |statement|\n block.call(statement) unless statement.variable? ||\n has_graph?(statement.subject) ||\n has_graph?(statement.object)\n end\n end\n enum_data\n end",
"def database_bloat\n data = select(<<-SQL, \"Database Bloat\")\n SELECT tablename AS table_name\n , ' ' AS index_name\n , reltuples::bigint AS rows\n , relpages::bigint AS pages\n , otta\n , ROUND(CASE WHEN otta = 0 OR sml.relpages = 0 OR sml.relpages = otta THEN 0.0 ELSE sml.relpages / otta::numeric END, 1) AS percent_bloat\n , CASE WHEN relpages < otta THEN 0 ELSE relpages::bigint - otta END AS wasted_pages\n , CASE WHEN relpages < otta THEN 0 ELSE (blocksize * (relpages - otta))::bigint END AS wasted_size\n , CASE WHEN relpages < otta THEN 0 ELSE blocksize * (sml.relpages - otta)::bigint END AS wasted_bytes\n FROM ( SELECT schemaname\n , tablename\n , cc.reltuples\n , cc.relpages\n , blocksize\n , CEIL((cc.reltuples * ((datahdr + pagesize - (CASE WHEN datahdr % pagesize = 0 THEN pagesize\n ELSE datahdr % pagesize END)) + nullhdr2 + 4)) / (blocksize - 20::float)\n ) AS otta\n FROM ( SELECT pagesize\n , blocksize\n , schemaname\n , tablename\n , (datawidth + (hdr + pagesize - (CASE WHEN hdr%pagesize = 0 THEN pagesize\n ELSE hdr%pagesize END)))::numeric AS datahdr\n , (maxfracsum * (nullhdr + pagesize - (CASE WHEN nullhdr % pagesize = 0 THEN pagesize\n ELSE nullhdr % pagesize END))) AS nullhdr2\n FROM ( SELECT schemaname\n , tablename\n , hdr\n , pagesize\n , blocksize\n , SUM((1 - null_frac) * avg_width) AS datawidth\n , MAX(null_frac) AS maxfracsum\n , hdr + ( SELECT 1 + count(*) / 8\n FROM pg_stats s2\n WHERE null_frac <> 0\n AND s2.schemaname = s.schemaname\n AND s2.tablename = s.tablename\n ) AS nullhdr\n FROM pg_stats s\n , ( SELECT\n ( SELECT current_setting('block_size')::numeric) AS blocksize\n , CASE WHEN SUBSTRING(SPLIT_PART(v, ' ', 2) FROM '#\"[0-9]+.[0-9]+#\"%' for '#')\n IN ('8.0','8.1','8.2') THEN 27 ELSE 23 END AS hdr\n , CASE WHEN v ~ 'mingw32' OR v ~ '64-bit' THEN 8 ELSE 4 END AS pagesize\n FROM ( SELECT version() AS v) AS foo\n ) AS constants\n GROUP BY 1, 2, 3, 4, 5\n ) AS foo\n ) AS rs\n JOIN pg_class cc\n ON cc.relname = rs.tablename\n JOIN pg_namespace nn\n ON cc.relnamespace = nn.oid\n AND nn.nspname = rs.schemaname\n AND nn.nspname <> 'information_schema'\n ) AS sml\n WHERE schemaname = 'public'\n\n UNION\n\n SELECT tablename AS table_name\n , iname AS index_name\n , ituples::bigint AS rows\n , ipages::bigint AS pages\n , iotta AS otta\n , ROUND(CASE WHEN iotta = 0 OR ipages = 0 OR ipages = iotta THEN 0.0 ELSE ipages / iotta::numeric END, 1) AS percent_bloat\n , CASE WHEN ipages < iotta THEN 0 ELSE ipages::bigint - iotta END AS wasted_pages\n , CASE WHEN ipages < iotta THEN 0 ELSE (blocksize * (ipages - iotta))::bigint END AS wasted_size\n , CASE WHEN ipages < iotta THEN 0 ELSE blocksize * (ipages - iotta) END AS wasted_bytes\n\n FROM ( SELECT schemaname\n , tablename\n , cc.reltuples\n , cc.relpages\n , blocksize\n , CEIL((cc.reltuples * ((datahdr + pagesize - (CASE WHEN datahdr % pagesize = 0 THEN pagesize\n ELSE datahdr % pagesize END)) + nullhdr2 + 4)) / (blocksize - 20::float)\n ) AS otta\n , COALESCE(c2.relname,'?') AS iname, COALESCE(c2.reltuples, 0) AS ituples, COALESCE(c2.relpages, 0) AS ipages\n , COALESCE(CEIL((c2.reltuples * (datahdr - 12)) / (blocksize - 20::float)), 0) AS iotta\n FROM ( SELECT pagesize\n , blocksize\n , schemaname\n , tablename\n , (datawidth + (hdr + pagesize - ( CASE WHEN hdr%pagesize = 0 THEN pagesize\n ELSE hdr%pagesize END)))::numeric AS datahdr\n , (maxfracsum * (nullhdr + pagesize - ( CASE WHEN nullhdr % pagesize = 0 THEN pagesize\n ELSE nullhdr % pagesize END))) AS nullhdr2\n FROM ( SELECT schemaname\n , tablename\n , hdr\n , 
pagesize\n , blocksize\n , SUM((1 - null_frac) * avg_width) AS datawidth\n , MAX(null_frac) AS maxfracsum\n , hdr + ( SELECT 1 + count(*) / 8\n FROM pg_stats s2\n WHERE null_frac <> 0\n AND s2.schemaname = s.schemaname\n AND s2.tablename = s.tablename\n ) AS nullhdr\n FROM pg_stats s\n , ( SELECT\n ( SELECT current_setting('block_size')::numeric) AS blocksize\n , CASE WHEN SUBSTRING(SPLIT_PART(v, ' ', 2) FROM '#\"[0-9]+.[0-9]+#\"%' for '#')\n IN ('8.0','8.1','8.2') THEN 27 ELSE 23 END AS hdr\n , CASE WHEN v ~ 'mingw32' OR v ~ '64-bit' THEN 8 ELSE 4 END AS pagesize\n FROM ( SELECT version() AS v) AS foo\n ) AS constants\n GROUP BY 1, 2, 3, 4, 5\n ) AS foo\n ) AS rs\n JOIN pg_class cc\n ON cc.relname = rs.tablename\n JOIN pg_namespace nn\n ON cc.relnamespace = nn.oid\n AND nn.nspname = rs.schemaname\n AND nn.nspname <> 'information_schema'\n LEFT JOIN pg_index i\n ON indrelid = cc.oid\n LEFT JOIN pg_class c2\n ON c2.oid = i.indexrelid\n ) AS sml\n WHERE schemaname = 'public'\n ORDER BY 1, 2\n SQL\n\n integer_columns = %w(\n otta\n pages\n pagesize\n rows\n wasted_bytes\n wasted_pages\n wasted_size\n )\n\n float_columns = %w(\n percent_bloat\n )\n\n data.each do |datum|\n integer_columns.each { |c| datum[c] = datum[c].to_i }\n float_columns.each { |c| datum[c] = datum[c].to_f }\n end\n\n data.to_a\n end",
"def subquery_sql(options)\n gross_exports_query(options)\n end",
"def each_dataset(&block)\n datasets.each(&block)\n end",
"def snapshots_redact_sql_queries; end",
"def each_dataset(&blk)\n if block_given?\n metadata[:datasets].each { |name| blk.call(dataset(name)) }\n else\n to_enum(:each_dataset)\n end\n end",
"def test_3BagExpression\n \n assert_nothing_thrown(\"Creating AST objects\") { \n \n # Set debug log level\n Common::Logger.setLogLevel(Common::VAR_DEBUG)\n \n var_AST = AST.new(\"sampledata/data.xml\")\n \n expression = BagExpression.new(CommaExpression.new(IntegerTerminal.new(600), FloatTerminal.new(123.321)))\n \n expression.execute(var_AST)\n \n result = var_AST.VAR_QRES().pop()\n \n assert_equal(\"QRES::BagResult\", result.class.to_s()) \n assert_equal(123.321, result.pop().VAR_OBJECT())\n assert_equal(600, result.pop().VAR_OBJECT())\n \n expression = BagExpression.new(CommaExpression.new(IntegerTerminal.new(1), CommaExpression.new(IntegerTerminal.new(2), IntegerTerminal.new(3))))\n \n expression.execute(var_AST)\n\n result = var_AST.VAR_QRES().pop()\n\n assert_equal(\"QRES::BagResult\", result.class.to_s())\n\n #improvement\n struct = QRES::StructResult.new()\n \n\n struct.push(result)\n }\n end",
"def multi_insert_sql_strategy\n :union\n end",
"def _schema_ds\n @_schema_ds ||= begin\n ds = metadata_dataset.select{[\n pg_attribute[:attname].as(:name),\n SQL::Cast.new(pg_attribute[:atttypid], :integer).as(:oid),\n SQL::Cast.new(basetype[:oid], :integer).as(:base_oid),\n SQL::Function.new(:format_type, basetype[:oid], pg_type[:typtypmod]).as(:db_base_type),\n SQL::Function.new(:format_type, pg_type[:oid], pg_attribute[:atttypmod]).as(:db_type),\n SQL::Function.new(:pg_get_expr, pg_attrdef[:adbin], pg_class[:oid]).as(:default),\n SQL::BooleanExpression.new(:NOT, pg_attribute[:attnotnull]).as(:allow_null),\n SQL::Function.new(:COALESCE, SQL::BooleanExpression.from_value_pairs(pg_attribute[:attnum] => SQL::Function.new(:ANY, pg_index[:indkey])), false).as(:primary_key),\n Sequel[:pg_type][:typtype],\n (~Sequel[Sequel[:elementtype][:oid]=>nil]).as(:is_array),\n ]}.\n from(:pg_class).\n join(:pg_attribute, :attrelid=>:oid).\n join(:pg_type, :oid=>:atttypid).\n left_outer_join(Sequel[:pg_type].as(:basetype), :oid=>:typbasetype).\n left_outer_join(Sequel[:pg_type].as(:elementtype), :typarray=>Sequel[:pg_type][:oid]).\n left_outer_join(:pg_attrdef, :adrelid=>Sequel[:pg_class][:oid], :adnum=>Sequel[:pg_attribute][:attnum]).\n left_outer_join(:pg_index, :indrelid=>Sequel[:pg_class][:oid], :indisprimary=>true).\n where{{pg_attribute[:attisdropped]=>false}}.\n where{pg_attribute[:attnum] > 0}.\n order{pg_attribute[:attnum]}\n\n # :nocov:\n if server_version > 100000\n # :nocov:\n ds = ds.select_append{pg_attribute[:attidentity]}\n\n # :nocov:\n if server_version > 120000\n # :nocov:\n ds = ds.select_append{Sequel.~(pg_attribute[:attgenerated]=>'').as(:generated)}\n end\n end\n\n ds\n end\n end",
"def snapshots_redact_sql_queries=(_arg0); end",
"def build_compound_select_string(data, table, *columns)\n qry = []\n placeholder_args = []\n qry_string = build_qrystring(table, columns) \n (1...MAX_SQLITE_STATEMENTS).each do |index|\n qry_string << \" UNION SELECT ?\" << \",?\" * (columns.size-1)\n end #index\n (0..data.size).step(MAX_SQLITE_STATEMENTS) do |index|\n if ((data.size - index) < MAX_SQLITE_STATEMENTS)\n qry_string = build_qrystring(table, columns)\n (1...data.size - index).each do |newstr|\n qry_string << \" UNION SELECT ?\" << \",?\" * (columns.size-1)\n end #end newstr\n qry.insert(-1, qry_string)\n holder_args = data.slice(index, data.size-index)\n placeholder_args.insert(-1, holder_args) if holder_args[0].class == String\n placeholder_args.insert(-1, holder_args.flatten) if holder_args[0].class == Array\n else \n qry.insert(-1, qry_string)\n holder_args = data.slice(index, MAX_SQLITE_STATEMENTS)\n placeholder_args.insert(-1, holder_args) if holder_args[0].class == String\n placeholder_args.insert(-1, holder_args.flatten) if holder_args[0].class == Array\n #placeholder_args.insert(-1, data.slice(index, MAX_SQLITE_STATEMENTS))\n end # endif\n end #end index\n \n return qry, placeholder_args\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def apply_with_context(sql,all_cols,keys_in_all_cols)\n hits = {}\n @pending_rcs.each do |rc|\n hits[rc.key] = rc\n end \n hist = []\n n = 2\n pending = 0\n skipped = false\n noted = false\n last_row = nil\n @db1.fetch(sql,all_cols + [\"__coopy_tag__\"]) do |row|\n tag = row.pop.to_i\n k = keyify(row.values_at(*keys_in_all_cols))\n if hits[k]\n emit_skip(row) if skipped\n hist.each do |row0|\n cells = row0.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n end\n hist.clear\n pending = n\n @patch.apply_row(hits[k])\n hits.delete(k)\n skipped = false\n noted = true\n elsif tag == 1\n # ignore redundant row\n elsif pending>0\n emit_skip(row) if skipped\n cells = row.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"\",cells)\n rc.columns = @rc_columns\n @patch.apply_row(rc)\n pending = pending-1\n skipped = false\n else\n hist << row\n if hist.length>n\n skipped = true\n last_row = row\n hist.shift\n end\n end\n end\n emit_skip(last_row) if skipped and noted\n end",
"def select(*args)\n dataset.select(*args)\n end",
"def build_subselect(key, o)\n stmt = Nodes::SelectStatement.new\n core = stmt.cores.first\n core.froms = o.relation\n core.wheres = o.wheres\n core.projections = [key]\n stmt.limit = o.limit\n stmt.offset = o.offset\n stmt.orders = []\n stmt\n end",
"def true_eager_limit_strategy\n if self[:eager_graph] || (offset && !associated_dataset.supports_offsets_in_correlated_subqueries?)\n # An SQL-based approach won't work if you are also eager graphing,\n # so use a ruby based approach in that case.\n :ruby\n else\n :union \n end\n end",
"def true_eager_graph_limit_strategy\n r = super\n ds = associated_dataset\n if r == :ruby && ds.supports_limits_in_correlated_subqueries? && (Array(associated_class.primary_key).length == 1 || ds.supports_multiple_column_in?) && (!offset || ds.supports_offsets_in_correlated_subqueries?)\n :correlated_subquery\n else\n r\n end\n end",
"def table_bloat\n data = select(<<-SQL, \"Table Bloat\")\n SELECT tablename AS table_name\n , reltuples::bigint AS rows\n , relpages::bigint AS pages\n , otta\n , ROUND(CASE WHEN otta = 0 OR sml.relpages = 0 OR sml.relpages = otta THEN 0.0\n ELSE sml.relpages / otta::numeric END, 1) AS percent_bloat\n , CASE WHEN relpages < otta THEN 0\n ELSE relpages::bigint - otta END AS wasted_pages\n , CASE WHEN relpages < otta THEN 0\n ELSE (blocksize * (relpages - otta))::bigint END AS wasted_size\n , CASE WHEN relpages < otta THEN 0\n ELSE blocksize * (sml.relpages - otta)::bigint END AS wasted_bytes\n FROM ( SELECT schemaname\n , tablename\n , cc.reltuples\n , cc.relpages\n , blocksize\n , CEIL((cc.reltuples * ((datahdr + pagesize - (CASE WHEN datahdr%pagesize = 0 THEN pagesize\n ELSE datahdr%pagesize END)) + nullhdr2 + 4)) / (blocksize - 20::float)\n ) AS otta\n FROM ( SELECT pagesize\n , blocksize\n , schemaname\n , tablename\n , (datawidth + (hdr + pagesize - (CASE WHEN hdr%pagesize = 0 THEN pagesize\n ELSE hdr%pagesize END)))::numeric\n AS datahdr\n , (maxfracsum * (nullhdr + pagesize - (CASE WHEN nullhdr%pagesize = 0 THEN pagesize\n ELSE nullhdr%pagesize END)))\n AS nullhdr2\n FROM ( SELECT schemaname\n , tablename\n , hdr\n , pagesize\n , blocksize\n , SUM((1 - null_frac) * avg_width) AS datawidth\n , MAX(null_frac) AS maxfracsum\n , hdr + ( SELECT 1 + count(*) / 8\n FROM pg_stats s2\n WHERE null_frac <> 0\n AND s2.schemaname = s.schemaname\n AND s2.tablename = s.tablename\n ) AS nullhdr\n FROM pg_stats s\n , ( SELECT\n ( SELECT current_setting('block_size')::numeric) AS blocksize\n , CASE WHEN SUBSTRING(SPLIT_PART(v, ' ', 2)\n FROM '#\"[0-9]+.[0-9]+#\"%' for '#')\n IN ('8.0','8.1','8.2') THEN 27 ELSE 23 END AS hdr\n , CASE WHEN v ~ 'mingw32' OR v ~ '64-bit' THEN 8\n ELSE 4 END AS pagesize\n FROM ( SELECT version() AS v) AS foo\n ) AS constants\n GROUP BY 1, 2, 3, 4, 5\n ) AS foo\n ) AS rs\n JOIN pg_class cc\n ON cc.relname = rs.tablename\n JOIN pg_namespace nn\n ON cc.relnamespace = nn.oid\n AND nn.nspname = rs.schemaname AND nn.nspname <> 'information_schema'\n ) AS sml\n WHERE schemaname = 'public'\n ORDER BY 1\n SQL\n\n integer_columns = %w(\n otta\n pages\n pagesize\n rows\n wasted_bytes\n wasted_pages\n wasted_size\n )\n\n float_columns = %w(\n percent_bloat\n )\n\n data.each do |datum|\n integer_columns.each { |c| datum[c] = datum[c].to_i }\n float_columns.each { |c| datum[c] = datum[c].to_f }\n end\n\n data.to_a\n end",
"def db_queries_operate__samples\n db_queries_operate__samples_non_recursive +\n db_queries_operate__samples_recursive\n end",
"def index_bloat\n data = select(<<-SQL, \"Index Bloat\")\n SELECT tablename AS table_name\n , iname AS index_name\n , ituples::bigint AS rows\n , ipages::bigint AS pages\n , iotta AS otta\n , ROUND(CASE WHEN iotta = 0 OR ipages = 0 OR ipages = iotta THEN 0.0 ELSE ipages / iotta::numeric END, 1) AS percent_bloat\n , CASE WHEN ipages < iotta THEN 0 ELSE ipages::bigint - iotta END AS wasted_pages\n , CASE WHEN ipages < iotta THEN 0 ELSE (blocksize * (ipages - iotta))::bigint END AS wasted_size\n , CASE WHEN ipages < iotta THEN 0 ELSE blocksize * (ipages - iotta) END AS wasted_bytes\n\n FROM ( SELECT schemaname\n , tablename\n , cc.reltuples\n , cc.relpages\n , blocksize\n , CEIL((cc.reltuples * ((datahdr + pagesize - (CASE WHEN datahdr%pagesize = 0 THEN pagesize\n ELSE datahdr%pagesize END)) + nullhdr2 + 4)) / (blocksize - 20::float)\n ) AS otta\n , COALESCE(c2.relname,'?') AS iname, COALESCE(c2.reltuples, 0) AS ituples, COALESCE(c2.relpages, 0) AS ipages\n , COALESCE(CEIL((c2.reltuples * (datahdr - 12)) / (blocksize - 20::float)), 0) AS iotta\n FROM ( SELECT pagesize\n , blocksize\n , schemaname\n , tablename\n , (datawidth + (hdr + pagesize - (case when hdr%pagesize = 0 THEN pagesize ELSE hdr%pagesize END)))::numeric AS datahdr\n , (maxfracsum * (nullhdr + pagesize - (case when nullhdr%pagesize = 0 THEN pagesize ELSE nullhdr%pagesize END))) AS nullhdr2\n FROM ( SELECT schemaname\n , tablename\n , hdr\n , pagesize\n , blocksize\n , SUM((1 - null_frac) * avg_width) AS datawidth\n , MAX(null_frac) AS maxfracsum\n , hdr + ( SELECT 1 + count(*) / 8\n FROM pg_stats s2\n WHERE null_frac <> 0\n AND s2.schemaname = s.schemaname\n AND s2.tablename = s.tablename\n ) AS nullhdr\n FROM pg_stats s\n , ( SELECT\n (SELECT current_setting('block_size')::numeric) AS blocksize\n , CASE WHEN SUBSTRING(SPLIT_PART(v, ' ', 2) FROM '#\"[0-9]+.[0-9]+#\"%' for '#')\n IN ('8.0','8.1','8.2') THEN 27 ELSE 23 END AS hdr\n , CASE WHEN v ~ 'mingw32' OR v ~ '64-bit' THEN 8 ELSE 4 END AS pagesize\n FROM (SELECT version() AS v) AS foo\n ) AS constants\n GROUP BY 1, 2, 3, 4, 5\n ) AS foo\n ) AS rs\n JOIN pg_class cc\n ON cc.relname = rs.tablename\n JOIN pg_namespace nn\n ON cc.relnamespace = nn.oid\n AND nn.nspname = rs.schemaname AND nn.nspname <> 'information_schema'\n LEFT JOIN pg_index i\n ON indrelid = cc.oid\n LEFT JOIN pg_class c2\n ON c2.oid = i.indexrelid\n ) AS sml\n WHERE schemaname = 'public'\n ORDER BY 1, 2\n SQL\n\n integer_columns = %w(\n otta\n pages\n pagesize\n rows\n wasted_bytes\n wasted_pages\n wasted_size\n )\n\n float_columns = %w(\n percent_bloat\n )\n\n data.each do |datum|\n integer_columns.each { |c| datum[c] = datum[c].to_i }\n float_columns.each { |c| datum[c] = datum[c].to_f }\n end\n\n data.to_a\n end",
"def calculate_graph_diffs(graphname1, graphname2, diffgraphname)\n update(\"INSERT { GRAPH <#{diffgraphname}> { ?s ?p ?o . }} WHERE { GRAPH <#{graphname1}> { ?s ?p ?o } FILTER NOT EXISTS { GRAPH <#{graphname2}> { ?s ?p ?o }}}\")\nend",
"def cselect(*indexes)\n if indexes.size == 1\n return cselect_explode(indexes[0])\n end \n new_data = \n @data.map do |row|\n new_row = []\n indexes.each do |index|\n if index.kind_of?(Range)\n new_row.push(*row[index])\n else\n new_row.push(row[index])\n end\n end\n new_row\n end\n DataZub.new(new_data)\n end",
"def test_complex_example\n text = 'Ruth 2,1-11.15; 3,7.9-12; Markus 4; 5,3.18-21'\n t1, t2, t3, t4, t5, t6, t7 = text.split(/; |\\./)\n ast = [\n pass(text: t1, b1: :Ruth, c1: 2, v1: 1, b2: :Ruth, c2: 2, v2: 11), dot,\n pass(text: t2, b1: :Ruth, c1: 2, v1: 15, b2: :Ruth, c2: 2, v2: 15), semi,\n pass(text: t3, b1: :Ruth, c1: 3, v1: 7, b2: :Ruth, c2: 3, v2: 7), dot,\n pass(text: t4, b1: :Ruth, c1: 3, v1: 9, b2: :Ruth, c2: 3, v2: 12), semi,\n pass(text: t5, b1: :Mark, c1: 4, b2: :Mark, c2: 4), semi,\n pass(text: t6, b1: :Mark, c1: 5, v1: 3, b2: :Mark, c2: 5, v2: 3), dot,\n pass(text: t7, b1: :Mark, c1: 5, v1: 18, b2: :Mark, c2: 5, v2: 21)\n ]\n assert_parsed_ast_for_text ast, text\n end",
"def select_with_sql_base\n opts[:with].any?{|w| w[:recursive]} ? \"WITH RECURSIVE \" : super\n end",
"def each_dataset(builder, &block)\n block.call(builder.ct.dataset)\n\n @datasets ||= metadata['datasets'].map do |name|\n OsCtl::Lib::Zfs::Dataset.new(\n File.join(builder.ct.dataset.name, name),\n base: builder.ct.dataset.name\n )\n end\n\n @datasets.each(&block)\n end",
"def filter_expr(*args, &block)\n schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))\n end",
"def test_6UnionExpression\n \n assert_nothing_thrown(\"Creating AST objects\") { \n \n # Set debug log level\n Common::Logger.setLogLevel(Common::VAR_DEBUG)\n \n var_AST = AST.new(\"sampledata/data.xml\")\n \n expression = UnionExpression.new(\n BagExpression.new(CommaExpression.new(IntegerTerminal.new(888), FloatTerminal.new(11.321))),\n BagExpression.new(CommaExpression.new(StringTerminal.new(\"Test\"), IntegerTerminal.new(600))))\n \n expression.execute(var_AST)\n\n result = var_AST.VAR_QRES().pop()\n \n assert_equal(\"QRES::BagResult\", result.class.to_s())\n assert_equal(600, result.pop().VAR_OBJECT())\n assert_equal(\"Test\", result.pop().VAR_OBJECT())\n assert_equal(11.321, result.pop().VAR_OBJECT())\n assert_equal(888, result.pop().VAR_OBJECT())\n \n expression = UnionExpression.new(\n StructExpression.new(CommaExpression.new(IntegerTerminal.new(888), FloatTerminal.new(11.321))),\n StructExpression.new(CommaExpression.new(StringTerminal.new(\"Test\"), IntegerTerminal.new(600))))\n \n expression.execute(var_AST)\n \n result = var_AST.VAR_QRES().pop()\n \n assert_equal(\"QRES::BagResult\", result.class.to_s())\n assert_equal(600, result.pop().VAR_OBJECT())\n assert_equal(\"Test\", result.pop().VAR_OBJECT())\n assert_equal(11.321, result.pop().VAR_OBJECT())\n assert_equal(888, result.pop().VAR_OBJECT())\n }\n end",
"def test_9cJoinExpression\n\n assert_nothing_thrown(\"Creating AST objects\") { \n\n # Set debug log level\n Common::Logger.setLogLevel(Common::VAR_DEBUG)\n\n var_AST = AST.new(\"sampledata/data.xml\")\n \n expression = JoinExpression.new(\n BagExpression.new(CommaExpression.new(IntegerTerminal.new(1), StringTerminal.new(\"test\"))),\n BagExpression.new(CommaExpression.new(FloatTerminal.new(22.11), IntegerTerminal.new(347))))\n\n expression.execute(var_AST)\n \n result = var_AST.VAR_QRES().pop()\n\n assert_equal(\"QRES::BagResult\", result.class.to_s())\n \n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n \n assert_equal(347, _result.pop().VAR_OBJECT())\n assert_equal(\"test\", _result.pop().VAR_OBJECT())\n \n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n \n assert_equal(22.11, _result.pop().VAR_OBJECT())\n assert_equal(\"test\", _result.pop().VAR_OBJECT())\n\n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n \n assert_equal(347, _result.pop().VAR_OBJECT())\n assert_equal(1, _result.pop().VAR_OBJECT())\n\n assert_equal(22.11, result.pop().VAR_OBJECT())\n assert_equal(1, result.pop().VAR_OBJECT())\n\n expression = JoinExpression.new(\n StructExpression.new(CommaExpression.new(IntegerTerminal.new(1), StringTerminal.new(\"test\"))),\n StructExpression.new(CommaExpression.new(FloatTerminal.new(22.11), IntegerTerminal.new(347))))\n\n expression.execute(var_AST)\n \n result = var_AST.VAR_QRES().pop()\n\n assert_equal(\"QRES::BagResult\", result.class.to_s()) \n\n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n \n assert_equal(347, _result.pop().VAR_OBJECT())\n assert_equal(\"test\", _result.pop().VAR_OBJECT())\n \n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n \n assert_equal(22.11, _result.pop().VAR_OBJECT())\n assert_equal(\"test\", _result.pop().VAR_OBJECT())\n \n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n \n assert_equal(347, _result.pop().VAR_OBJECT())\n assert_equal(1, _result.pop().VAR_OBJECT())\n \n assert_equal(22.11, result.pop().VAR_OBJECT())\n assert_equal(1, result.pop().VAR_OBJECT()) \n \n expression = JoinExpression.new(\n StructExpression.new(CommaExpression.new(IntegerTerminal.new(1), StringTerminal.new(\"test\"))),\n BagExpression.new(CommaExpression.new(FloatTerminal.new(22.11), IntegerTerminal.new(347))))\n\n expression.execute(var_AST)\n \n result = var_AST.VAR_QRES().pop()\n\n assert_equal(\"QRES::BagResult\", result.class.to_s())\n \n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n\n assert_equal(347, _result.pop().VAR_OBJECT())\n assert_equal(\"test\", _result.pop().VAR_OBJECT())\n\n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n\n assert_equal(22.11, _result.pop().VAR_OBJECT())\n assert_equal(\"test\", _result.pop().VAR_OBJECT())\n\n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n\n assert_equal(347, _result.pop().VAR_OBJECT())\n assert_equal(1, _result.pop().VAR_OBJECT())\n\n assert_equal(22.11, result.pop().VAR_OBJECT())\n assert_equal(1, result.pop().VAR_OBJECT())\n \n expression = JoinExpression.new(\n StructExpression.new(CommaExpression.new(IntegerTerminal.new(1), StringTerminal.new(\"test\"))),\n FloatTerminal.new(22.11))\n\n expression.execute(var_AST)\n \n result = var_AST.VAR_QRES().pop()\n\n 
assert_equal(\"QRES::BagResult\", result.class.to_s())\n \n _result = result.pop()\n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n \n assert_equal(22.11, _result.pop().VAR_OBJECT())\n assert_equal(\"test\", _result.pop().VAR_OBJECT())\n \n assert_equal(22.11, result.pop().VAR_OBJECT())\n assert_equal(1, result.pop().VAR_OBJECT())\n }\n end",
"def test_execute_sql_with_geom()\n\t pg_lyr = create_poly_layer(@pg_ds)\n\t create_features_from_file\n\t \n\t\tsql_lyr = @pg_ds.execute_sql( \"select * from tpoly where prfedea = '2'\" )\n\t\tassert_equal(1, sql_lyr.get_feature_count, \"Wrong number of features returned.\")\n\n\t\tassert(check_features_against_list( sql_lyr, 'prfedea', [ '2' ] ))\n\t\tsql_lyr.reset_reading\n\n\t\tsql_lyr.each do |feat|\n\t\t assert(check_feature_geometry( feat, 'MULTILINESTRING ((5.00121349 2.99853132,5.00121349 1.99853133),(5.00121349 1.99853133,5.00121349 0.99853133),(3.00121351 1.99853127,5.00121349 1.99853133),(5.00121349 1.99853133,6.00121348 1.99853135))' ))\n\t\tend\n\t\t\n ## Must do a gc to get rid of features created in check_features_against_list\n ## Otherwise, we'll get a segmentation fault when we call release_result_set.\n GC.start\n\t\t@pg_ds.release_result_set( sql_lyr )\n\tend",
"def pg_gem_batch__from psql_db, db_queries\n psql_db = array__from psql_db\n db_queries = array__from db_queries\n pg_gem_conn = pg_gem_conn__from psql_db\n pg_connection = pg_gem_conn[5]\n batch = [pg_connection].product db_queries\n end",
"def unionise *sub_queries\n sub_queries_with_parens = sub_queries.map do |i| \n \"{ #{i} }\" \n end\n\n sub_queries_with_parens.join(' UNION ')\n end",
"def build_subselect key, o\n stmt = Nodes::SelectStatement.new\n core = stmt.cores.first\n core.froms = o.relation\n core.wheres = o.wheres\n core.projections = [key]\n stmt.limit = o.limit\n stmt.orders = o.orders\n stmt\n end",
"def test_execute_sql_no_geom()\n\t pg_lyr = create_poly_layer(@pg_ds)\n\t\tpopulate_poly_layer(pg_lyr)\n\n \tdriver = Gdal::Ogr.get_driver_by_name( 'PostgreSQL' )\n\t\tpg_ds = driver.open( 'PG:dbname=autotest user=postgres', 1 )\n\n\t\texpect = [ 179, 173, 172, 171, 170, 169, 168, 166, 165, 158 ]\n \tsql_lyr = pg_ds.execute_sql( 'select distinct eas_id from tpoly order by eas_id desc' )\n\n \tassert(check_features_against_list( sql_lyr, 'eas_id', expect ))\n\n\t # Release the resultset \n pg_ds.release_result_set( sql_lyr )\n\tend",
"def except(dataset, opts={})\n opts = {:all=>opts} unless opts.is_a?(Hash)\n raise(Sequel::Error, \"EXCEPT ALL not supported\") if opts[:all]\n compound_clone(:minus, dataset, opts)\n end",
"def safe_column_exprs leaves, use_table_name = nil\n leaves.map{|leaf| safe_column_expr(leaf, table_prefix(leaf, use_table_name))}\n end",
"def test_nested_quote3\n t = build_tree_on('t022-3.txt')\n assert_equal 3, t.array.size\n assert_equal 1, t.array[2].array.size\n end",
"def select_sql\n return super unless o = @opts[:offset]\n l = @opts[:limit]\n order = @opts[:order]\n dsa1 = dataset_alias(1)\n dsa2 = dataset_alias(2)\n rn = row_number_column\n irn = Sequel::SQL::Identifier.new(rn).qualify(dsa2)\n subselect_sql(unlimited.\n from_self(:alias=>dsa1).\n select_more(Sequel::SQL::QualifiedIdentifier.new(dsa1, WILDCARD),\n Sequel::SQL::WindowFunction.new(SQL::Function.new(:ROW_NUMBER), Sequel::SQL::Window.new(:order=>order)).as(rn)).\n from_self(:alias=>dsa2).\n select(Sequel::SQL::QualifiedIdentifier.new(dsa2, WILDCARD)).\n where(l ? ((irn > o) & (irn <= l + o)) : (irn > o))) # Leave off limit in case of limit(nil, offset)\n end",
"def db_queries_operate__samples_non_recursive\n [\n [ [\"A\", \"B\"], nil ],\n [ [\"A\", \"B\"], \"<\" ],\n [ [\"A\", \"B\"], \"OR\" ],\n ]\n\n end",
"def subselect_sql_append(sql, ds)\n ds.clone(:append_sql=>sql, :prepared_args=>prepared_args, :bind_vars=>@opts[:bind_vars]).\n send(:to_prepared_statement, :select, nil, :extend=>prepared_statement_modules).\n prepared_sql\n end",
"def test_3CommaExpression\n \n assert_nothing_thrown(\"Creating AST objects\") { \n \n # Set debug log level\n Common::Logger.setLogLevel(Common::VAR_DEBUG)\n \n var_AST = AST.new(\"sampledata/data.xml\")\n \n expression = CommaExpression.new(CommaExpression.new(IntegerTerminal.new(600), StringTerminal.new(\"operator test\")), FloatTerminal.new(123.321))\n \n expression.execute(var_AST)\n \n result = var_AST.VAR_QRES().pop()\n \n assert_equal(\"QRES::BagResult\", result.class.to_s())\n \n _result = result.pop()\n \n assert_equal(\"QRES::StructResult\", _result.class.to_s())\n \n assert_equal(123.321, _result.pop().VAR_OBJECT())\n assert_equal(\"operator test\", _result.pop().VAR_OBJECT())\n \n assert_equal(123.321, result.pop().VAR_OBJECT())\n assert_equal(600, result.pop().VAR_OBJECT())\n }\n end",
"def expr(path_stack, nodeset, context = T.unsafe(nil)); end",
"def test_function_not_table\n # Should take params and add between brackets - retaining any existing params.\n skip \"SELECT storeopeninghours_tostring AS tmp from storeopeninghours_tostring('123');\"\n end",
"def test_2WheresExpression\n \n assert_nothing_thrown(\"Creating AST objects\") { \n \n # Set debug log level\n Common::Logger.setLogLevel(Common::VAR_DEBUG)\n \n var_AST = AST.new(\"sampledata/data.xml\")\n \n #((emp.address) where number==50).(street) \n expression = WhereExpression.new(\n DotExpression.new( \n NameExpression.new(\"emp\"), \n NameExpression.new(\"address\")), \n EqualExpression.new(\n NameExpression.new(\"number\"), \n IntegerTerminal.new(50)))\n \n expression.execute(var_AST)\n \n assert_equal(\"SBA35\", var_AST.VAR_QRES().pop().dereference(var_AST.VAR_STORE()).VAR_ID())\n \n expression = WhereExpression.new(\n DotExpression.new( \n NameExpression.new(\"emp\"), \n NameExpression.new(\"address\")), \n GreatherExpression.new(\n NameExpression.new(\"number\"), \n IntegerTerminal.new(48)))\n \n expression.execute(var_AST)\n \n result = var_AST.VAR_QRES().pop()\n \n assert_equal(\"SBA49\", result.pop().dereference(var_AST.VAR_STORE()).VAR_ID())\n assert_equal(\"SBA35\", result.pop().dereference(var_AST.VAR_STORE()).VAR_ID())\n }\n end",
"def test_assignment_is_parallel\n Fluid.let([:dawn, \"best beloved\"]) {\n assert_equal(\"best beloved\", Fluid.dawn)\n Fluid.let([:dawn, \"wife\"],\n [:paul, \"child of #{Fluid.dawn}\"]) {\n assert_equal(\"wife\", Fluid.dawn)\n assert_equal(\"child of best beloved\", Fluid.paul)\n }\n }\n end",
"def execute_sql(my_sql)\n pg_result = ActiveRecord::Base.connection.execute(my_sql)\n\n # In this example we are just calling #to_a to convert the PG::Result to an\n # Array. PG::Result has a nice API for slicing and dicing itself so you may\n # want to to something clever instead. See\n # https://www.rubydoc.info/gems/pg/PG/Result for details.\n #\n # The important bit here is that we are copying all the data we care about\n # out of the PG::Result in preparation for later clearing the PG::Result\n results = pg_result.to_a\n\n # Calling #clear on the PG::Result is the important bit of cleanup and the\n # whole reason this method exists. See\n # https://www.rubydoc.info/gems/pg/PG/Result#clear-instance_method\n pg_result.clear\n\n yield results if block_given?\n\n results\nend",
"def filter_expr(*args, &block)\n schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))\n end",
"def filter_expr(*args, &block)\n schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))\n end",
"def run_row(row)\n dataset = dataset_from_row(row)\n\n if row['name'].present? && dataset.name != row['name'].strip\n dataset.name = row['name'].strip\n dataset.save!\n end\n\n commits.map do |c|\n commit = c.build_commit(dataset, row)\n\n next if commit.dataset_edits.none?\n\n commit.save!\n commit\n end\n end",
"def graph(*)\n raise(Error, \"Can't graph a dataset that splits multiple result sets\") if opts[:split_multiple_result_sets]\n super\n end",
"def graph(*)\n raise(Error, \"Can't graph a dataset that splits multiple result sets\") if opts[:split_multiple_result_sets]\n super\n end",
"def union(dataset, all = false)\n clone(:union => dataset, :union_all => all)\n end",
"def scrooge_select_sql( set )\n set.map{|a| attribute_with_table( a ) }.join( ScroogeComma )\n end",
"def select(*) end",
"def benchmark_block(keywords_arrays)\n Benchmark.bmbm do |x|\n x.report(\"relation w/ 2 joins:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_tags(*kw_arr) } }\n x.report(\"relation w/ 3 queries:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_tags_v2(*kw_arr) } }\n x.report(\"relation w/ 2 subq:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_tags_v3(*kw_arr) } }\n x.report(\"relation w/ 1 subq, 2 joins:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_tags_with_subqueries(*kw_arr) } }\n x.report(\"jsonb w/ 1 subq:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_jsonb_v2(kw_arr) } }\n x.report(\"array w/ 1 subq:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_array_v2(kw_arr) } }\n x.report(\"hstore w/ 1 subq:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_hstore_v2(kw_arr) } }\n x.report(\"jsonb w/ 2 queries:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_jsonb(kw_arr) } }\n x.report(\"hstore w/ 2 queries:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_hstore(kw_arr) } }\n x.report(\"array w/ 2 queries:\") { keywords_arrays.each { |kw_arr| Product.select_products_by_array(kw_arr) } }\n end\nend",
"def create_likely_qda_data\n ActiveRecord::Base.connection.execute likely_qda_sql\n end",
"def filter_expr(arg=nil, &block)\n if arg.is_a?(Proc) && !block\n block = arg\n arg = nil\n elsif arg.is_a?(String)\n arg = Sequel.lit(arg)\n elsif arg.is_a?(Array)\n if arg.first.is_a?(String)\n arg = Sequel.lit(*arg)\n elsif arg.length > 1\n arg = Sequel.&(*arg)\n end\n end\n schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, arg, &block))\n end",
"def dataset\n DB[SQL, from: from_truncated, to: to_truncated, tick: tick]\n end",
"def initialize(dataset)\n opts = dataset.opts\n eager_graph = opts[:eager_graph]\n @master = eager_graph[:master]\n requirements = eager_graph[:requirements]\n reflection_map = @reflection_map = eager_graph[:reflections]\n reciprocal_map = @reciprocal_map = eager_graph[:reciprocals]\n limit_map = @limit_map = eager_graph[:limits]\n @unique = eager_graph[:cartesian_product_number] > 1\n \n alias_map = @alias_map = {}\n type_map = @type_map = {}\n after_load_map = @after_load_map = {}\n reflection_map.each do |k, v|\n alias_map[k] = v[:name]\n after_load_map[k] = v[:after_load] unless v[:after_load].empty?\n type_map[k] = if v.returns_array?\n true\n elsif (limit_and_offset = limit_map[k]) && !limit_and_offset.last.nil?\n :offset\n end\n end\n\n # Make dependency map hash out of requirements array for each association.\n # This builds a tree of dependencies that will be used for recursion\n # to ensure that all parts of the object graph are loaded into the\n # appropriate subordinate association.\n @dependency_map = {}\n # Sort the associations by requirements length, so that\n # requirements are added to the dependency hash before their\n # dependencies.\n requirements.sort_by{|a| a[1].length}.each do |ta, deps|\n if deps.empty?\n dependency_map[ta] = {}\n else\n deps = deps.dup\n hash = dependency_map[deps.shift]\n deps.each do |dep|\n hash = hash[dep]\n end\n hash[ta] = {}\n end\n end\n \n # This mapping is used to make sure that duplicate entries in the\n # result set are mapped to a single record. For example, using a\n # single one_to_many association with 10 associated records,\n # the main object column values appear in the object graph 10 times.\n # We map by primary key, if available, or by the object's entire values,\n # if not. The mapping must be per table, so create sub maps for each table\n # alias.\n records_map = {@master=>{}}\n alias_map.keys.each{|ta| records_map[ta] = {}}\n @records_map = records_map\n\n datasets = opts[:graph][:table_aliases].to_a.reject{|ta,ds| ds.nil?}\n column_aliases = opts[:graph_aliases] || opts[:graph][:column_aliases] # SEQUEL5: Remove :graph_aliases support\n primary_keys = {}\n column_maps = {}\n models = {}\n row_procs = {}\n datasets.each do |ta, ds|\n models[ta] = ds.model\n primary_keys[ta] = []\n column_maps[ta] = {}\n row_procs[ta] = ds.row_proc\n end\n column_aliases.each do |col_alias, tc|\n ta, column = tc\n column_maps[ta][col_alias] = column\n end\n column_maps.each do |ta, h|\n pk = models[ta].primary_key\n if pk.is_a?(Array)\n primary_keys[ta] = []\n h.select{|ca, c| primary_keys[ta] << ca if pk.include?(c)}\n else\n h.select{|ca, c| primary_keys[ta] = ca if pk == c}\n end\n end\n @column_maps = column_maps\n @primary_keys = primary_keys\n @row_procs = row_procs\n\n # For performance, create two special maps for the master table,\n # so you can skip a hash lookup.\n @master_column_map = column_maps[master]\n @master_primary_keys = primary_keys[master]\n\n # Add a special hash mapping table alias symbols to 5 element arrays that just\n # contain the data in other data structures for that table alias. This is\n # used for performance, to get all values in one hash lookup instead of\n # separate hash lookups for each data structure.\n ta_map = {}\n alias_map.keys.each do |ta|\n ta_map[ta] = [records_map[ta], row_procs[ta], alias_map[ta], type_map[ta], reciprocal_map[ta]]\n end\n @ta_map = ta_map\n end",
"def union_eager_loader\n cached_fetch(:union_eager_loader) do\n Sequel::Dataset::PlaceholderLiteralizer.loader(associated_dataset) do |pl, ds|\n ds = self[:eager_block].call(ds) if self[:eager_block]\n keys = predicate_keys\n ds = ds.where(keys.map{pl.arg}.zip(keys))\n if eager_loading_use_associated_key?\n ds = ds.select_append(*associated_key_array)\n end\n ds.from_self\n end\n end\n end",
"def test_7SetMinusExpression\n \n assert_nothing_thrown(\"Creating AST objects\") { \n \n # Set debug log level\n Common::Logger.setLogLevel(Common::VAR_DEBUG)\n \n var_AST = AST.new(\"sampledata/data.xml\")\n \n expression = SetMinusExpression.new(\n BagExpression.new(CommaExpression.new(IntegerTerminal.new(888), FloatTerminal.new(11.321))),\n BagExpression.new(CommaExpression.new(StringTerminal.new(\"Test\"), IntegerTerminal.new(888))))\n \n expression.execute(var_AST)\n\n result = var_AST.VAR_QRES().pop()\n \n assert_equal(\"QRES::BagResult\", result.class.to_s())\n assert_equal(11.321, result.pop().VAR_OBJECT())\n \n expression = SetMinusExpression.new(\n StructExpression.new(CommaExpression.new(IntegerTerminal.new(888), FloatTerminal.new(11.321))),\n StructExpression.new(CommaExpression.new(StringTerminal.new(\"Test\"), IntegerTerminal.new(600))))\n \n expression.execute(var_AST)\n \n result = var_AST.VAR_QRES().pop()\n \n assert_equal(\"QRES::BagResult\", result.class.to_s())\n assert_equal(11.321, result.pop().VAR_OBJECT())\n assert_equal(888, result.pop().VAR_OBJECT())\n }\n end",
"def db(*args); data(1, args); end",
"def query\n <<-SPARQL\n SELECT DISTINCT ?item WHERE {\n {\n ?item p:P31 ?statement0.\n ?statement0 (ps:P31/(wdt:P279*)) wd:Q7889.\n }\n UNION\n {\n ?item p:P31 ?statement1.\n ?statement1 (ps:P31) wd:Q16070115.\n }\n UNION\n {\n ?item p:P31 ?statement2.\n ?statement2 (ps:P31/(wdt:P279*)) wd:Q865493.\n }\n UNION\n {\n ?item p:P31 ?statement3.\n ?statement3 (ps:P31) wd:Q209163.\n }\n }\n SPARQL\nend",
"def insert_facet_series(table, type, select = 'facet_id, series_id')\n $db.execute <<-SQL\n INSERT INTO destination.#{table}\n SELECT #{select}\n FROM facet_series\n WHERE type = '#{type}';\n SQL\nend",
"def sql_literal(ds)\n ds.literal(sql_expr)\n end",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end"
] |
[
"0.5690019",
"0.5614801",
"0.525116",
"0.52488124",
"0.52488124",
"0.51948553",
"0.5151954",
"0.5031857",
"0.50287414",
"0.50183415",
"0.50035864",
"0.49730965",
"0.4959759",
"0.4919089",
"0.4887447",
"0.4861835",
"0.4861835",
"0.4861835",
"0.48376042",
"0.481184",
"0.48033413",
"0.47779474",
"0.47323295",
"0.47323295",
"0.47323295",
"0.47216856",
"0.46504062",
"0.4643175",
"0.46373656",
"0.46333924",
"0.46318796",
"0.4621074",
"0.45966485",
"0.45956725",
"0.45891398",
"0.4566676",
"0.45456195",
"0.45402378",
"0.4539468",
"0.45264795",
"0.45236892",
"0.4506699",
"0.44932947",
"0.44846097",
"0.4479378",
"0.4464184",
"0.4464184",
"0.4464184",
"0.44629368",
"0.44550288",
"0.44513613",
"0.4447475",
"0.4443654",
"0.4440542",
"0.44386685",
"0.44324556",
"0.44157264",
"0.4398782",
"0.4392475",
"0.43712896",
"0.43600133",
"0.43595114",
"0.435461",
"0.43531597",
"0.43437067",
"0.4338745",
"0.43370125",
"0.43275717",
"0.43210426",
"0.43181887",
"0.43152845",
"0.43121088",
"0.43077236",
"0.43072692",
"0.43015444",
"0.42969728",
"0.4296017",
"0.42948946",
"0.4292268",
"0.42823917",
"0.4273971",
"0.4273971",
"0.42729434",
"0.42693442",
"0.42693442",
"0.42675886",
"0.42675614",
"0.425913",
"0.4255737",
"0.42518237",
"0.42517677",
"0.42476094",
"0.4240191",
"0.4239941",
"0.42326093",
"0.42243356",
"0.42232004",
"0.4222192",
"0.4221763",
"0.42204934"
] |
0.5490127
|
2
|
Backslash is the default escape character for LIKE on PostgreSQL, and adding an explicit ESCAPE clause can break LIKE ANY() usage.
|
def requires_like_escape?
  # PostgreSQL already treats backslash as the LIKE escape character, so no
  # explicit ESCAPE clause is needed (and one would break LIKE ANY() usage).
  false
end
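
A minimal usage sketch (hypothetical table and column names; DB is assumed to be a Sequel::Database connected to PostgreSQL). Because backslash already escapes LIKE wildcards, no ESCAPE clause is appended, which keeps constructs such as name LIKE ANY(ARRAY[...]) valid:

# Match a literal '%' by escaping it with a backslash; on PostgreSQL the
# generated SQL carries no ESCAPE clause.
DB[:items].where(Sequel.like(:name, 'a\\%b')).sql
# => roughly: SELECT * FROM "items" WHERE ("name" LIKE 'a\%b')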
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def to_escaped_for_sql_like\n gsub(/[\\\\%_]/) { |x| '\\\\' + x }.gsub(\"*\", \"%\")\n end",
"def escape_wildcards(unescaped)\n case ActiveRecord::Base.connection.adapter_name\n when \"Mysql2\".freeze\n # Necessary for MySQL\n unescaped.to_s.gsub(/([\\\\%_])/, '\\\\\\\\\\\\1')\n when \"PostgreSQL\".freeze\n # Necessary for PostgreSQL\n unescaped.to_s.gsub(/([\\\\%_.])/, '\\\\\\\\\\\\1')\n else\n unescaped\n end\n end",
"def escape_wildcards(unescaped)\n case ActiveRecord::Base.connection.adapter_name\n when \"Mysql2\", \"PostgreSQL\"\n # Necessary for PostgreSQL and MySQL\n unescaped.to_s.gsub(/([\\\\|\\%|.])/, '\\\\\\\\\\\\1')\n else\n unescaped\n end\n end",
"def sanitize_sql_like(text, escape_character = '\\\\')\n text.to_s.gsub(/(^|.)([%_])/) do |s|\n ($1 == escape_character) ? s : [$1, escape_character, $2].compact.join\n end\n end",
"def double_quote(value)\n return if value.nil?\n\n case value.to_s\n # Ignore keys that contain double quotes or a Arel.star (*)[all columns]\n # or if a table has already been explicitly declared (ex: users.id)\n when \"*\", /((^\".+\"$)|(^[[:alpha:]]+\\.[[:alnum:]]+)|\\(.+\\))/\n value\n else\n PG::Connection.quote_ident(value.to_s)\n end\n end",
"def quote_and_escape(value)\n case value\n when \"NULL\"\n value\n else\n value = value.gsub(/\\\\/, ARRAY_ESCAPE)\n value.gsub!(/\"/,\"\\\\\\\"\")\n \"\\\"#{value}\\\"\"\n end\n end",
"def escape(string)\n # This code is taken directly from the documentation so we dont have to rely on the SQLite3::Database class. This way it can also be used with JRuby and IronRuby...\n # http://sqlite-ruby.rubyforge.org/classes/SQLite/Database.html\n string.to_s.gsub(\"'\", \"''\")\n end",
"def sanitize_sql_like(string, escape_character = '\\\\')\n pattern = Regexp.union(escape_character, '%', '_')\n string.gsub(pattern) { |x| [escape_character, x].join }\n end",
"def escape(text)\n return text.gsub(/[\\`*_{}\\[\\]()#+\\-.!]/, \"\\\\\\\\\\\\0\")\n end",
"def escape_wildcards\n gsub(/\\\\/, '\\\\\\\\').gsub(/\\*/, '\\*')\n end",
"def escape(*chars)\n gsub(/(?<!\\\\)(#{chars.join(\"|\")})/) do |char|\n \"\\\\\" + char\n end\n end",
"def test_sql_escaped\n exc_handler = HumanParseExceptionHandler.new\n new_str = exc_handler.get_human_result_for_string(\"fo'ob\\\"ar\",\"ParseException\")\n assert_equal(false, new_str)\n end",
"def escape(string)\n #This is a stub, used for indexing\n end",
"def escape(str)\n return ActiveRecord::Base.connection.quote_string(str)\n end",
"def escape(string); end",
"def escape_literal(text)\n text\n end",
"def extra_escape(string)\n string.gsub('\\\\', '\\\\\\\\\\\\').\n gsub(\"\\\\'\", '\\\\\\\\' + \"'\").\n gsub('\\\\\\\\\\\\', '\\\\\\\\\\\\')\n end",
"def escape(s)\n dummy_conn.escape(s)\n end",
"def shell_escape(string)\n return string.gsub(/\\\\/, \"\\\\\\\\\\\\\").gsub(/\\\"/, \"\\\\\\\"\").gsub(/\\$/, \"\\\\\\$\").gsub(/\\`/, \"\\\\\\\\\\`\")\n end",
"def likeEscape( str )\n str.gsub( \"\\\\\", \"\\\\\\\\\" ).gsub( \"%\", \"\\%\" ).gsub( \"_\", \"\\_\" )\n end",
"def shell_escape(s)\n s = s.to_s\n if s !~ /^[0-9A-Za-z+,.\\/:=@_-]+$/\n s = s.gsub(\"'\") { \"'\\\\''\" }\n s = \"'#{s}'\"\n end\n s\n end",
"def escape(string)\n @conn.escape(string.to_s)\n end",
"def generate_sql_escape(token)\n escaped_token = token.gsub(/\\\\|'/, '\\0\\0\\0\\0').gsub(\"?\", \"\\\\\\\\77\")\n \"'\" + escaped_token + \"'\"\n end",
"def escape(value)\n $db.escape_identifier(value)\nend",
"def escape(str); end",
"def escape_glob(s)\n s.gsub(/[\\\\\\{\\}\\[\\]\\*\\?]/) { |x| \"\\\\\"+x }\nend",
"def shell_escape\n inspect.gsub(/\\\\(\\d{3})/) { $1.to_i(8).chr }\n end",
"def escape(text)\n text.gsub('\"', '\\\"')\nend",
"def character_escape(string)\n string.gsub(/^'/, '').gsub(/'$/, '').gsub(/\\\\'/, \"'\")\n end",
"def escape_for_double_quotes(str)\n str.gsub(/[\\\\\"`$]/) { |c| \"\\\\#{c}\" }\nend",
"def escape_regex\n gsub(/\\$/, '\\\\$')\n end",
"def escape_find_operators(s)\n s.gsub(FM_FIND_OPERATORS_RE, \"\\\\\\\\\\\\1\")\n end",
"def escape(string)\n return '' if string.nil?\n return string unless string.to_s =~ /[\\\\\\$`\" \\(\\)\\{\\}\\[\\]]/\n '\"' + string.to_s.gsub(/[\\\\\\$`\"]/) { |match| '\\\\' + match } + '\"'\n end",
"def escape_text_delim\n return '\"' if text_delim == \"\\'\"\n \"\\'\"\n end",
"def latex_escape(source)\n source.chars.inject('') do |s, b|\n s << if b == '\\\\'\n '~'\n elsif SAFE_CHARS.include? b\n b\n else\n \"\\\\char%d\" % b[0].ord\n end\n end\n end",
"def escape(string)\n string.gsub('\\\\', '\\\\\\\\').\n gsub(\"\\b\", '\\\\b').\n gsub(\"\\f\", '\\\\f').\n gsub(\"\\t\", '\\\\t').\n gsub(\"\\n\", '\\\\n').\n gsub(\"\\r\", '\\\\r').\n gsub('\"', '\\\\\"')\n end",
"def escape_query(query)\n # backslash escape doesn't seem to work\n #query.gsub(/([\\\\\\(\\)\\:])/) do |match|\n # \"\\\\#{$1}\"\n #end\n query.gsub(/([\\\\\\(\\)\\:])/, ' ')\n end",
"def escape_shell_special_chars(string)\n string.gsub(/([ ()])/, '\\\\\\\\\\1')\n end",
"def ruby_path_escape( path )\n if (@os == :mac)\n '\"' + path + '\"'\n else\n # Adapt to other OSs if necessary\n '\"' + path + '\"'\n end\n end",
"def escape_text(text)\n text\n end",
"def escape_special_char(str)\n str.gsub('[', '\\[').gsub(']', '\\]').gsub('(', '\\(').gsub(')', '\\)') if str\n end",
"def consume_escaped; end",
"def quote_and_escape(text, quote = '\"')\n \"#{quote}#{text.gsub(/#{quote}/) { |m| \"#{m}\\\\#{m}#{m}\" }}#{quote}\"\n end",
"def escape_path_for_glob(path)\n result = path.to_s\n characters_to_escape = ['[', ']', '{', '}', '?', '*']\n characters_to_escape.each do |character|\n result.gsub!(character, \"\\\\#{character}\")\n end\n Pathname.new(result)\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def escape(string)\r\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\r\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] } if string\r\n end",
"def valid_escape?(text = nil)\n text = @s.current + @s.peek if text.nil?\n !!(text[0] == '\\\\' && text[1] != \"\\n\")\n end",
"def quote\n %q[(?:\"|')?]\n end",
"def quote(val, column=nil)\n return val unless val.is_a?(String)\n \"'#{val.gsub(/\\'/, \"\\\\\\\\'\")}'\" # \" <= for Emacs font-lock\n end",
"def escape(input); input.to_s.gsub('\"', '\\\\\"'); end",
"def escape_string (string)\n string.gsub(/([\\x00-\\x1f\\x21-\\x2f\\x3a-\\x40\\x5b-\\x5e\\x60\\x7b-\\x7f])/, '\\\\\\\\\\\\1')\n end",
"def escape(str)\n str.gsub(/\\t/, '\\t').gsub(/\\n/, '\\n').gsub(/\\\\/, \"\\\\\\\\\\\\\")\n end",
"def escape_custodian_for_search(custodian)\n\treturn custodian.encode(\"utf-8\")\n\t\t.gsub(\"\\\\\",\"\\\\\\\\\\\\\") #Escape \\\n\t\t.gsub(\"?\",\"\\\\?\") #Escape ?\n\t\t.gsub(\"*\",\"\\\\*\") #Escape *\n\t\t.gsub(\"\\\"\",\"\\\\\\\"\") #Escape \"\n\t\t.gsub(\"\\u201C\".encode(\"utf-8\"),\"\\\\\\u201C\".encode(\"utf-8\")) #Escape left smart quote\n\t\t.gsub(\"\\u201D\".encode(\"utf-8\"),\"\\\\\\u201D\".encode(\"utf-8\")) #Escape right smart quote\n\t\t.gsub(\"'\",\"\\\\\\\\'\") #Escape '\n\t\t.gsub(\"{\",\"\\\\{\")\n\t\t.gsub(\"}\",\"\\\\}\")\nend",
"def escape_string(str)\n raw_connection.escape(str)\n end",
"def to_sql_contains\n \"%#{to_sql_escaped_for_like}%\"\n end",
"def escape(string)\n # Globally replace characters based on the ESCAPE_CHARACTERS constant\n string.to_s.gsub(/[&\"><]/) { |special| ESCAPE_CHARACTERS[special] }\n end",
"def valid_escape?(text = nil)\n text = @s.peek(2) if text.nil?\n !!(text[0] == '\\\\' && text[1] != \"\\n\")\n end",
"def escape(x)\n x = x.to_s\n x.gsub! @delimiter, @edelim if @delimiter\n x.gsub! @internal_delimiter, @eidelim\n x\n end",
"def quote_string(string)\n @logger.unknown(\"ODBCAdapter#quote_string>\") if @trace\n\n # MySQL requires backslashes to be escaped\t\t\t\t\n string.gsub(/\\\\/, '\\&\\&').gsub(/'/, \"''\")\n end",
"def escape(value)\n end",
"def escape(s)\n s.gsub('\"', '\\\"')\nend",
"def escape_character?\n options[:escape_character] == true\n end"
] |
[
"0.69977903",
"0.68312544",
"0.68172544",
"0.6625503",
"0.6295788",
"0.6227684",
"0.6224164",
"0.6196449",
"0.6193427",
"0.6176893",
"0.6164101",
"0.6155538",
"0.6151317",
"0.6123133",
"0.6092577",
"0.6091664",
"0.60775405",
"0.60621685",
"0.6015789",
"0.59425694",
"0.59296834",
"0.59288734",
"0.59153557",
"0.59148306",
"0.5868293",
"0.5866378",
"0.5854007",
"0.58498085",
"0.5828454",
"0.5809905",
"0.5806821",
"0.57834613",
"0.5774457",
"0.5758882",
"0.5756658",
"0.5749214",
"0.57410496",
"0.5724923",
"0.5717287",
"0.57155275",
"0.57084006",
"0.570541",
"0.5695599",
"0.56863534",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.56820554",
"0.5672552",
"0.5672552",
"0.5672552",
"0.5672552",
"0.5672552",
"0.5672552",
"0.5672552",
"0.5672552",
"0.5672552",
"0.5672552",
"0.5672552",
"0.5670928",
"0.5668043",
"0.5663797",
"0.56501085",
"0.5645164",
"0.56435126",
"0.5638814",
"0.5629689",
"0.56154007",
"0.5608417",
"0.5603823",
"0.55966514",
"0.55956113",
"0.55937195",
"0.558367",
"0.5557467"
] |
0.58517337
|
27
|
Support FETCH FIRST WITH TIES on PostgreSQL 13+.
|
def select_limit_sql(sql)
  l = @opts[:limit]
  o = @opts[:offset]
  return unless l || o
  if @opts[:limit_with_ties]
    # SQL-standard form: OFFSET must precede FETCH FIRST, and WITH TIES also
    # returns any rows that tie with the last row inside the limit.
    if o
      sql << " OFFSET "
      literal_append(sql, o)
    end
    if l
      sql << " FETCH FIRST "
      literal_append(sql, l)
      sql << " ROWS WITH TIES"
    end
  else
    # Default PostgreSQL form: LIMIT before OFFSET.
    if l
      sql << " LIMIT "
      literal_append(sql, l)
    end
    if o
      sql << " OFFSET "
      literal_append(sql, o)
    end
  end
end
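
A usage sketch (hypothetical table and column names; in released Sequel the :limit_with_ties option is set via the public Dataset#with_ties method, supported on PostgreSQL 13+). WITH TIES needs an ORDER BY so the database can decide which rows tie with the last returned row:

# Top 3 scores, plus any additional rows tied with the third one.
DB[:scores].order(Sequel.desc(:points)).limit(3).with_ties.sql
# => roughly: SELECT * FROM "scores" ORDER BY "points" DESC
#             FETCH FIRST 3 ROWS WITH TIES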
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def apply_distinct_on_eager_limit_strategy(ds)\n keys = predicate_key\n ds.distinct(*keys).order_prepend(*keys)\n end",
"def first **args\n query( **( { order: \"@rid\" , limit: 1 }.merge args)).execute(reduce: true)\n\tend",
"def oldest(constraints = {})\n constraints.merge!(order: :created_at.asc)\n _q = query(constraints)\n _q.define_singleton_method(:method_missing) { |m, *args, &block| self.results.send(m, *args, &block) }\n _q\n end",
"def select_first!\n limit(1).select!.first\n end",
"def first(limit = 1)\n take(limit)\n end",
"def first\n\n wi(fetch_all({}).first)\n end",
"def fetch_first(&block)\n Volt.logger.warn('.fetch_first is deprecated in favor of .first')\n first\n end",
"def apply_window_function_eager_limit_strategy(ds, limit_and_offset=limit_and_offset())\n rn = ds.row_number_column \n limit, offset = limit_and_offset\n ds = ds.unordered.select_append{|o| o.row_number.function.over(:partition=>predicate_key, :order=>ds.opts[:order]).as(rn)}.from_self\n ds = if !returns_array?\n ds.where(rn => offset ? offset+1 : 1)\n elsif offset\n offset += 1\n if limit\n ds.where(rn => (offset...(offset+limit))) \n else\n ds.where{SQL::Identifier.new(rn) >= offset} \n end\n else\n ds.where{SQL::Identifier.new(rn) <= limit} \n end\n end",
"def first(n=1)\n query(@sql + ' LIMIT ' + n.to_s, cache: false)\n end",
"def first_object(query, kwargs = {})\n objs = objects(query, kwargs)\n return objs.length > 0 ? objs[0] : nil\n end",
"def select_books_titles_and_years_in_first_series_order_by_year\n \"SELECT title, year FROM books WHERE series_id = 1;\"\nend",
"def newest(constraints = {})\n constraints.merge!(order: :created_at.desc)\n _q = query(constraints)\n _q.define_singleton_method(:method_missing) { |m, *args, &block| self.results.send(m, *args, &block) }\n _q\n end",
"def find_first(conditions = nil, orderings = nil)\n sql = \"SELECT * FROM #{table_name} \"\n add_conditions!(sql, conditions)\n sql << \"ORDER BY #{orderings} \" unless orderings.nil?\n sql << \"LIMIT 1\"\n \n record = connection.select_one(sql, \"#{name} Load First\")\n instantiate(record) unless record.nil?\n end",
"def apply_filter_by_associations_distinct_on_limit_strategy(ds)\n k = filter_by_associations_limit_key \n ds.where(k=>apply_distinct_on_eager_limit_strategy(associated_eager_dataset.select(*k)))\n end",
"def sql_select_first_row(sql)\n result = sql_select_all(sql)\n return nil if result.empty?\n result[0].extend SelectHashHelper # Erweitern Hash um Methodenzugriff auf Elemente\n end",
"def afirst(selector={}, opts={}, &blk)\n opts[:limit] = 1\n afind(selector, opts) do |res|\n yield res.first\n end\n end",
"def find_by(arg, *args)\n where(arg, *args).take\n end",
"def first(*args)\n query = scoped_query(args.last.respond_to?(:merge) ? args.pop : {})\n\n if args.any?\n new_collection(query).first(*args)\n else\n query.repository.read(query.update(:limit => 1)).first\n end\n end",
"def apply_correlated_subquery_limit_strategy(ds)\n table = ds.first_source_table\n table_alias = ds.first_source_alias\n primary_key = associated_class.primary_key\n key = self[:key]\n cs_alias = :t1\n cs = associated_dataset.\n from(Sequel.as(table, :t1)).\n select(*qualify(cs_alias, primary_key)).\n where(Array(qualify(cs_alias, key)).zip(Array(qualify(table_alias, key)))).\n limit(*limit_and_offset)\n ds.where(qualify(table_alias, primary_key)=>cs)\n end",
"def apply_eager_graph_limit_strategy(strategy, ds)\n case strategy\n when :distinct_on\n apply_distinct_on_eager_limit_strategy(ds.order_prepend(*self[:order]))\n when :window_function\n apply_window_function_eager_limit_strategy(ds.order_prepend(*self[:order])).select(*ds.columns)\n else\n ds\n end\n end",
"def first(options = {})\n opts = options.dup\n opts.merge!(:limit => [0, 1])\n\n if opts[:by]\n sort_by(opts.delete(:by), opts).first\n else\n sort(opts).first\n end\n end",
"def find_first(selector={}, options={})\n h = options.dup\n h[:limit] = 1\n cursor = find(selector, h)\n cursor.next_object # don't need to explicitly close b/c of limit\n end",
"def find_by(*conditions)\n where(*conditions).take\n end",
"def first\n\t\trow = connection.get_first_row <<-SQL\n\t\t\tSELECT #{columns.join \",\"} FROM #{table}\n\t\t\tORDER BY id ASC LIMIT 1;\n\t\tSQL\n\t\tinit_object_from_row(row)\n\tend",
"def find_by!(*conditions)\n where(*conditions).take!\n end",
"def fetch_first_field(sql)\n fetch_first_row(sql)&.values&.first\n end",
"def first\n results.first\n end",
"def first() end",
"def query_return_first(sql, *binds)\n mysql.fetch(sql, *binds).first\n end",
"def select_first(&block)\n if block_given?\n result = nil\n begin\n each { |obj|\n result = yield obj\n return result unless result.nil?\n }\n rescue StopIteration => ex\n end\n result\n else\n to_enum(:select_first)\n end\n end",
"def first(n=1)\n return values[0] if self.class == BaseRelation && loaded && n == 1\n result = limit(n).load\n result.length == 1 ? result[0] : result\n end",
"def select_books_titles_and_years_in_first_series_order_by_year\n \"select books.title, books.year from books join series on books.series_id = series.id where series_id = 1 order by books.year\"\nend",
"def find_by!(arg, *args)\n where(arg, *args).take!\n end",
"def first(*args)\n last_arg = args.last\n\n limit = args.first if args.first.kind_of?(Integer)\n with_query = last_arg.respond_to?(:merge) && !last_arg.blank?\n\n query = with_query ? last_arg : {}\n query = scoped_query(query.merge(:limit => limit || 1))\n\n if !with_query && (loaded? || lazy_possible?(head, limit || 1))\n if limit\n new_collection(query, super(limit))\n else\n super()\n end\n else\n if limit\n all(query)\n else\n relate_resource(query.repository.read_one(query))\n end\n end\n end",
"def first\n find.limit(1).next_document\n end",
"def best_concert\n reviews.order(rating: :desc).limit(1).first && reviews.order(created_at: :asc).last \n end",
"def filtered_scan_minimum\n filtered_scan.tap do |scan|\n scan.cache_blocks = false\n scan.setMaxVersions 1\n\n # A filter that will only return the first KV from each row\n # A filter that will only return the key component of each KV\n filters = [FirstKeyOnlyFilter.new, KeyOnlyFilter.new]\n if flist = scan.getFilter\n filters.each do |filter|\n flist.addFilter filter\n end\n else\n scan.setFilter FilterList.new(filters)\n end\n end\n end",
"def first(&block)\n args = limit(1).include_docs.query\n\n end",
"def select_youngest_bear_and_returns_name_and_age\n 'SELECT min(bears.name),(bears.age) FROM bears ORDER BY age ASC LIMIT 1'\nend",
"def get_first_row( sql, *bind_vars )\n execute( sql, *bind_vars ) { |row| return row }\n nil\n end",
"def best_promoted_item(group)\n active_promoted_items(group)\n .where.not(price_cents: nil)\n .order(:price_cents)\n .first\n end",
"def best_rank_subquery(group_by)\n @source.respond_to?(:project) or raise ThroughHierarchySourceError, \"#{@source} cannot be converted into a subquery\"\n subq = source.\n project(foreign_type_column, foreign_key_column, group_by, best_rank).\n where(filters).\n group(source[group_by]).\n as(best_rank_table_name)\n\n spawn(subq)\n end",
"def findTopMovies(actor, top_number=100) \r\n movie_array = []\r\n\r\n actor.film_actor_hash.each_key {|key| movie_array.push(key)}\r\n\r\n return movie_array.take(top_number)\r\nend",
"def min_by_key(key, students)\n first_by_key(key, 'asc', students)\nend",
"def apply_filter_by_associations_window_function_limit_strategy(ds)\n ds.where(filter_by_associations_limit_key=>apply_window_function_eager_limit_strategy(associated_eager_dataset.select(*filter_by_associations_limit_alias_key)).select(*filter_by_associations_limit_aliases))\n end",
"def find_preexisting_row(row)\n q = \"SELECT * FROM #{dimension_table} WHERE #{natural_key_equality_for_row(row)}\"\n q << \" AND #{scd_latest_version_field}\" if scd_type == 2\n\n result = connection.select_one(q)\n end",
"def caar; first.first end",
"def select_best_rank(group_by:)\n sub = best_rank_subquery(group_by)\n @source.\n join(sub.source).\n on(\n hierarchy_rank.eq(sub.best_rank_column).\n and(@source[group_by].eq(sub.source[group_by]))\n ).\n order(@source[group_by])\n end",
"def search_top_references(expr, count = 10)\n papers = search_api(expr, 1000)\n\n sorted_referenced_papers = papers[\"entities\"].inject([]) { |result, arr|\n if arr[\"RId\"].instance_of?(Array)\n result + arr[\"RId\"]\n else\n result\n end\n }.group_by(&:itself)\n .map { |key, value| [key, value.count] }\n .sort { |(k1, v1), (k2, v2)| v2 <=> v1 }\n\n request = \"Or(#{sorted_referenced_papers[0, count].map { |key, value|\n \"Id=#{key}\"\n }.join(\",\")})\"\n\n return search_api(request, count)\n end",
"def first(options = {})\n search(options.merge(:limit => true))\n end",
"def first\n result ? all.first : limit(1).all.first\n end",
"def first\n limit(1).to_a.first\n end",
"def first\n graph.first_object(subject: first_subject, predicate: RDF.first)\n end",
"def select_books_titles_and_years_in_first_series_order_by_year\n \"SELECT books.title, books.year\n FROM books\n JOIN series\n ON books.series_id = series.id\n WHERE books.series_id = 1\n ORDER BY books.year;\"\nend",
"def first\n response = query(:per_page => 1, :page => 1).get!\n response[:results].first\n end",
"def index\n @posts = Post.joins(:post_detail).select('posts.*, post_details.*').where('post_details.price = (SELECT MIN(post_details.price) FROM post_details)').all\n end",
"def last **args\n query( **( { order: {\"@rid\" => 'desc'} , limit: 1 }.merge args)).execute(reduce: true)\n\tend",
"def selects_oldest_bear_and_returns_name_and_age\n \"SELECT name, age\n FROM bears\n ORDER BY age DESC \n LIMIT 1\"\nend",
"def cheapest(products)\n products.min_by do |product|\n product[:price]\n end\nend",
"def selects_oldest_bear_and_returns_name_and_age\n 'SELECT bears.name, bears.age FROM bears ORDER BY age DESC LIMIT 1'\nend",
"def year_with_most_guests\n sql = <<-SQL\n SELECT year FROM guests GROUP BY year\n ORDER BY count(*) DESC LIMIT 1;\n SQL\n DB[:conn].execute(sql)[0][0]\nend",
"def find_by(values)\n all.where(values).limit(1).query_as(:n).pluck(:n).first\n end",
"def fast_each(options)\n i=minimum(\"#{table_name}.#{primary_key}\", options)\n \n # not all the backends always sort primay_key columns so do it manuall\n options.update(:order => \"#{table_name}.#{primary_key} ASC\")\n \n i=minimum(\"#{table_name}.#{primary_key}\", options) or return\n # first the first object by id\n yield(o=find_one(i, {}))\n # as long as we keep finding objects, keep going\n while o\n with_scope(:find => {:conditions => [ \"#{table_name}.#{primary_key} > ?\", i]} ) do\n if o=find_initial(options)\n i=o.send primary_key\n yield(o) \n end\n end\n end\n end",
"def fetch_first_column(sql)\n fetch_rows(sql).map(&:values).map(&:first)\n end",
"def most_relevant_articles(articles, num: 3, max_days: 30)\n # retrieve only the newest articles\n article_subset = articles.where('articles.created_at > :start_date',\n { start_date: max_days.days.ago })\n\n # heuristically evaluate each retrieved article\n heuristic_values = []\n article_ids = []\n article_subset.each do |article|\n heuristic_values << heuristic_article_value(article)\n article_ids << article.id\n end\n \n # select only the top rated 'num' of articles\n selected_article_ids = []\n num.times do |_|\n break if heuristic_values.empty?\n heuristic_index = heuristic_values.index heuristic_values.max\n selected_article_ids << article_ids[heuristic_index]\n heuristic_values.delete_at heuristic_index\n article_ids.delete_at heuristic_index\n end\n !selected_article_ids.empty? ? article_subset.find(selected_article_ids) : Article.none\n end",
"def default_eager_limit_strategy\n super if self[:order] || offset\n end",
"def filter_articles(articles)\n include PopularitySearch\n #this isn't fair to more recent articles\n popularity_cutoff = 300\n articles.each do |article|\n article[:twitter_pop] = twitter_popularity(article[:url])\n end\n articles.select do |article|\n article[:twitter_pop] > popularity_cutoff\n end\n articles = articles.sort_by{|article| article[:twitter_pop]}.reverse\n return articles[0..2] #only pick the top 3 if there's more than 3\nend",
"def lazy_select\n lazify.call(S.select)\n end",
"def earliest_order\n # order(created_at: :asc)\n scan_index_forward(true)\n end",
"def first\n @head\n end",
"def best_items(bib_id, limit = 5)\n return [] unless holdings_exist_for? bib_id\n\n ITEM_TIERS.each do |criteria|\n matches = holdings_data(bib_id).copies.select(&criteria)\n return matches.first(limit) if matches.any?\n end\n end",
"def get_first_satisfiedPK\n # allColumns = @allColumnList.map do |field|\n # \t# \"#{field.colname} as #{field.relname}_#{field.colname} \"\n # \t\"#{field.relname}_#{field.colname} \"\n # end.join(',')\n query = \"select #{@allColumns_renamed} from golden_record where type = 'satisfied' and branch = '#{@branches[0].name}';\"\n res = DBConn.exec(query)\n abort(\"Cannot find satisfied tuple at #{@branches[0].name}!\") if res.ntuples == 0\n res\n end",
"def get_first_row(*args)\n @db.get_first_row(*args)\n end",
"def select_series_title_with_most_human_characters\n \"\n SELECT series.title\n FROM series\n JOIN characters\n ON characters.series_id = series.id\n GROUP BY series.title, characters.species\n HAVING characters.species = 'human'\n ORDER BY COUNT(characters.species) DESC\n LIMIT 1\n \"\nend",
"def first\n self.take(1)[0]\n end",
"def find_articles\n connection = PG.connect(dbname: 'slacker_news')\n results = connection.exec('SELECT * FROM articles ORDER BY created_at DESC')\n connection.close\n\n results\nend",
"def first\n enumerator(:limit => 1).first \n end",
"def pick(*column_names)\n if loaded? && all_attributes?(column_names)\n result = records.pick(*column_names)\n return @async ? Promise::Complete.new(result) : result\n end\n\n limit(1).pluck(*column_names).then(&:first)\n end",
"def select_youngest_bear_and_returns_name_and_age\n \"SELECT name, age FROM bears ORDER BY age ASC LIMIT 1\"\nend",
"def first(opts={})\n ::Kernel.Veneer(find_first(::Hashie::Mash.new(opts)))\n end",
"def apply_eager_limit_strategy(ds, strategy=eager_limit_strategy, limit_and_offset=limit_and_offset())\n case strategy\n when :distinct_on\n apply_distinct_on_eager_limit_strategy(ds)\n when :window_function\n apply_window_function_eager_limit_strategy(ds, limit_and_offset)\n else\n ds\n end\n end",
"def true_eager_graph_limit_strategy\n if associated_class.dataset.supports_ordered_distinct_on? && !offset\n :distinct_on\n else\n super\n end\n end",
"def get_top_news(start=0,count=TopNewsPerPage)\n numitems = $r.zcard(\"news.top\")\n news_ids = $r.zrevrange(\"news.top\",start,start+(count-1))\n result = get_news_by_id(news_ids,:update_rank => true)\n # Sort by rank before returning, since we adjusted ranks during iteration.\n return result.sort_by(&:rank), numitems\nend",
"def first(*args)\n find(:first, *args)\n end",
"def first(*args)\n find(:first, *args)\n end",
"def first\n @head.val\n end",
"def select_one(stmt, bindvars={})\n sanity_check(stmt)\n row = nil\n execute(stmt, bindvars) do |sth|\n row = sth.fetch\n end\n row\n end",
"def return_cards_from_top(count)\n if cards and count > 0\n cards.where(\"card_order > ?\", (get_top_order-count) )\n end\n end",
"def first?; end",
"def first options = {}\n enum(options.merge(:limit => 1)).first\n end",
"def next\n Expression.where('id > ? AND collection_id = ?', self.id, self.collection_id).order('id ASC').limit(1).first\n end",
"def find_first\n Period.order(\"begins_at ASC\").first\n end",
"def first_row_from(table, options = {})\n\t\tselect_from(table, { limit: 1 }.merge(options)).first\n\tend",
"def first; self.objects.first end",
"def find_one(options = {})\n @find_one ||=\n if primary_key_set?\n without_collection_params { super() }\n else\n klass.new_collection_from_result(limit(1).fetch(options)).first\n end\n rescue ::Spyke::ConnectionError => error\n fallback_or_reraise(error, default: nil)\n end",
"def true_eager_graph_limit_strategy\n r = super\n ds = associated_dataset\n if r == :ruby && ds.supports_limits_in_correlated_subqueries? && (Array(associated_class.primary_key).length == 1 || ds.supports_multiple_column_in?) && (!offset || ds.supports_offsets_in_correlated_subqueries?)\n :correlated_subquery\n else\n r\n end\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def _refresh_get(dataset)\n if (sql = model.fast_pk_lookup_sql) && !dataset.opts[:lock]\n sql = sql.dup\n ds = use_server(dataset)\n ds.literal_append(sql, pk)\n ds.with_sql_first(sql)\n else\n dataset.first\n end\n end",
"def first_one(conditions={}, &block)\n all.detect { |item| match_one(item, conditions, &block) }\n end",
"def sql_select_one(sql)\n result = sql_select_first_row(sql)\n return nil unless result\n result.first[1] # Value des Key/Value-Tupels des ersten Elememtes im Hash\n end",
"def eager_limit_strategy\n nil\n end"
] |
[
"0.5735091",
"0.5717865",
"0.5541578",
"0.54762655",
"0.53705156",
"0.51928",
"0.51910776",
"0.51767975",
"0.5150512",
"0.51314545",
"0.51211715",
"0.5107328",
"0.5053165",
"0.49903002",
"0.498065",
"0.49799594",
"0.49768218",
"0.49761242",
"0.49740243",
"0.49682325",
"0.4964784",
"0.4956478",
"0.495287",
"0.49474508",
"0.49398467",
"0.4885874",
"0.4881593",
"0.48749933",
"0.4873564",
"0.4812855",
"0.4800296",
"0.47808334",
"0.47800338",
"0.4741239",
"0.4740372",
"0.47349107",
"0.47294885",
"0.4722024",
"0.47157583",
"0.47045925",
"0.46989626",
"0.46948844",
"0.4693923",
"0.46896702",
"0.4671526",
"0.4662528",
"0.46625155",
"0.4650537",
"0.46461645",
"0.46375486",
"0.4618032",
"0.460635",
"0.45944813",
"0.4590332",
"0.45861378",
"0.45751643",
"0.4570531",
"0.45656642",
"0.45642447",
"0.4555418",
"0.45550874",
"0.45478478",
"0.45440623",
"0.45151314",
"0.45124003",
"0.45077524",
"0.4505438",
"0.45033988",
"0.44912186",
"0.44820994",
"0.4481436",
"0.44738317",
"0.4471712",
"0.44704315",
"0.4469685",
"0.44687107",
"0.4465063",
"0.44631737",
"0.44587043",
"0.44580027",
"0.44561565",
"0.44491932",
"0.44458133",
"0.44446263",
"0.44446263",
"0.44391066",
"0.4433462",
"0.44323567",
"0.44293338",
"0.4426085",
"0.4425644",
"0.4422988",
"0.44099602",
"0.44041234",
"0.44020507",
"0.43977064",
"0.43976292",
"0.43976292",
"0.43953592",
"0.43950683",
"0.43944928"
] |
0.0
|
-1
|
Support FOR SHARE locking when using the :share lock style, and emit SKIP LOCKED when skipping locked rows is requested.
|
def select_lock_sql(sql)
  lock = @opts[:lock]
  if lock == :share
    # PostgreSQL-specific shared row lock; other lock styles (e.g. :update)
    # are handled by the default implementation.
    sql << " FOR SHARE"
  else
    super
  end
  if lock
    # Both modifiers apply to any locking clause: SKIP LOCKED silently skips
    # already-locked rows, NOWAIT errors instead of waiting for them.
    if @opts[:skip_locked]
      sql << " SKIP LOCKED"
    elsif @opts[:nowait]
      sql << " NOWAIT"
    end
  end
end
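
A usage sketch (hypothetical table name; for_share, skip_locked, and nowait are existing Sequel dataset methods on PostgreSQL, as the candidates above suggest):

# Shared lock on the selected rows.
DB[:jobs].for_share.sql
# => roughly: SELECT * FROM "jobs" FOR SHARE

# Skip rows already locked by another transaction (PostgreSQL 9.5+).
DB[:jobs].for_share.skip_locked.sql
# => roughly: SELECT * FROM "jobs" FOR SHARE SKIP LOCKED

# Error immediately instead of waiting for a conflicting lock.
DB[:jobs].for_update.nowait.sql
# => roughly: SELECT * FROM "jobs" FOR UPDATE NOWAIT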
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def for_share\n lock_style(:share)\n end",
"def nolock\n clone(:table_options => \"(NOLOCK)\")\n end",
"def skip_locked\n cached_dataset(:_skip_locked_ds) do\n raise(Error, 'This dataset does not support skipping locked rows') unless supports_skip_locked?\n clone(:skip_locked=>true)\n end\n end",
"def locked; end",
"def assert_locked_for_share!(name=nil)\n raise ZK::Exceptions::MustBeShareLockedException unless locked_for_share?(name)\n end",
"def lock\n shaz_nolock_lock if !@nolock\n end",
"def lock(mode, &block)\n sql = LOCK % [source_list(@opts[:from]), mode]\n @db.synchronize do\n if block # perform locking inside a transaction and yield to block\n @db.transaction {@db.execute(sql); yield}\n else\n @db.execute(sql) # lock without a transaction\n self\n end\n end\n end",
"def locks\n sql = %q(\n select\n pg_stat_activity.procpid,\n pg_class.relname,\n pg_locks.transactionid,\n pg_locks.granted,\n substr(pg_stat_activity.current_query,1,30) as query_snippet,\n age(now(),pg_stat_activity.query_start) as \"age\"\n from pg_stat_activity,pg_locks left\n outer join pg_class on (pg_locks.relation = pg_class.oid)\n where pg_stat_activity.current_query <> '<insufficient privilege>' and\n pg_locks.pid=pg_stat_activity.procpid and pg_locks.mode = 'ExclusiveLock' order by query_start)\n\n exec_sql(sql, find_uri)\n end",
"def locked\n end",
"def visit_Arel_Nodes_Lock(o, a = nil)\n # SQL Layer does not support row locks\n end",
"def lock(mode, &block)\n sql = LOCK % [@opts[:from], mode]\n @db.synchronize do\n if block # perform locking inside a transaction and yield to block\n @db.transaction {@db.execute(sql); yield}\n else\n @db.execute(sql) # lock without a transaction\n self\n end\n end\n end",
"def lock(mode, opts=OPTS)\n if defined?(yield) # perform locking inside a transaction and yield to block\n @db.transaction(opts){lock(mode, opts); yield}\n else\n sql = 'LOCK TABLE '.dup\n source_list_append(sql, @opts[:from])\n mode = mode.to_s.upcase.strip\n unless LOCK_MODES.include?(mode)\n raise Error, \"Unsupported lock mode: #{mode}\"\n end\n sql << \" IN #{mode} MODE\"\n @db.execute(sql, opts)\n end\n nil\n end",
"def acquire_lock\n\t\t@@logger.info { \"Acquiring a lock in the database.\" } if have_logger?\n\t\tTournament.dataset.filter(:id => self.id, :locked => false).update(:locked => true) != 0\n\tend",
"def supports_locking?\n false #true\n end",
"def without_locking(&block)\n current = ActiveRecord::Base.lock_optimistically\n ActiveRecord::Base.lock_optimistically = false if current\n begin\n block.call\n ensure\n ActiveRecord::Base.lock_optimistically = true if current\n end\n end",
"def return_lock\n\t\t@@logger.info { \"Returning the lock to the database.\" } if have_logger?\n\t\tTournament.dataset.filter(:id => self.id, :locked => true).update(:locked => false) != 0\n\tend",
"def locked?\n end",
"def locked_transaction(&block)\n unless connection.open_transactions.zero?\n raise \"locked_transaction cannot be called from within another transaction!\"\n end\n unsafe_locked_transaction do\n yield if block_given?\n end\n end",
"def lock_is_exclusive?\n lockscope == 'exclusive'\n end",
"def nolock\n clone(:with => \"(NOLOCK)\")\n end",
"def access_locked?; end",
"def work\n stat :attempting_lock_on, item_id: object_id\n if @mutex.try_lock\n stat :has_lock_on, item_id: object_id\n chore\n stat :releasing_lock_on, item_id: object_id\n @mutex.unlock\n else\n stat :bailed_on, item_id: object_id\n end\n end",
"def lock(&block)\n # TODO: only use replace strategy when server is executing the lock\n return call_strategy unless (locked_token = locksmith.lock(&block))\n\n locked_token\n end",
"def sharing\n start_sharing\n begin\n yield\n ensure\n stop_sharing\n end\n end",
"def sharing\n start_sharing\n begin\n yield\n ensure\n stop_sharing\n end\n end",
"def test_shared_locknull\n # ensure that locknull file doesn't exist\n response = @request.delete('locknull')\n\n # create a shared write locked null resource\n lock1 = lock 'locknull', :scope => :shared, :depth => 0\n\n # get another shared lock\n lock2 = lock 'locknull', :scope => :shared, :depth => 0\n\n unlock('locknull', lock2.token)\n\n response = @request.propfind '', 1, :\"current-user-privilege-set\"\n assert_not_nil response[\"#{@uri.path}locknull\"]\n\n # assert that old locktoken doesn't work anymore\n response = @request.unlock('locknull', lock2.token)\n assert_equal '409', response.status # Sec 9.11.1 of draft 18\n\n unlock('locknull', lock1.token)\n\n response = @request.propfind '', 1, :\"current-user-privilege-set\"\n assert_nil response[\"#{@uri.path}locknull\"]\n end",
"def exclusive_schema_lock()\n check_return_code(PureHailDB.ib_schema_lock_exclusive(@trx_ptr))\n end",
"def supports_advisory_locks?\n false\n end",
"def supports_advisory_locks?\n false\n end",
"def set_locking_strategy\n if @config.enable_locking\n @lock = Aws::SessionStore::DynamoDB::Locking::Pessimistic.new(@config)\n else\n @lock = Aws::SessionStore::DynamoDB::Locking::Null.new(@config)\n end\n end",
"def without_locking\n old_value = Parts::WithLocking.locking?\n begin\n Parts::WithLocking.locking = false\n ret_value = yield\n ensure\n Parts::WithLocking.locking = old_value\n end\n ret_value\nend",
"def with_lock(name, opts={}, &b)\n mode = opts[:mode] || :exclusive\n\n raise ArgumentError, \":mode option must be either :shared or :exclusive, not #{mode.inspect}\" unless [:shared, :exclusive].include?(mode)\n\n if mode == :shared\n shared_locker(name).with_lock(&b)\n else\n locker(name).with_lock(&b)\n end\n end",
"def if_access_locked; end",
"def lock\n end",
"def lock\n self.is_locked = true\n self\n end",
"def lock!; end",
"def try_lock\n end",
"def do_LOCK(req, res)\n end",
"def try_await_lock(table, i); end",
"def with_database_exclusive_table_lock_sqlite(&block)\n block.call\n end",
"def lock(name, mode)\n yield\n end",
"def lock_list\n super\n end",
"def lock!\n freeze!\n @locked = true\n self\n end",
"def lock\n post :lock\n end",
"def with_lock\n lock!(true)\n yield\n ensure\n unlock!\n end",
"def yield_shares(purpose: nil, compatible: [], block_share: false)\n loose_shares = previous_wait = nil\n synchronize do\n if loose_shares = @sharing.delete(Thread.current)\n if previous_wait = @waiting[Thread.current]\n purpose = nil unless purpose == previous_wait[0]\n compatible &= previous_wait[1]\n end\n compatible |= [false] unless block_share\n @waiting[Thread.current] = [purpose, compatible]\n end\n\n @cv.broadcast\n end\n\n begin\n yield\n ensure\n synchronize do\n wait_for(:yield_shares) { @exclusive_thread && @exclusive_thread != Thread.current }\n\n if previous_wait\n @waiting[Thread.current] = previous_wait\n else\n @waiting.delete Thread.current\n end\n @sharing[Thread.current] = loose_shares if loose_shares\n end\n end\n end",
"def lock_type\n @lock_type ||= :mysql\n end",
"def lock_for_update(name=nil)\n if locked_for_update?(name)\n logger.debug { \"we are locked for update, yield to the block\" }\n yield\n else\n zk_with_lock(:mode => :exclusive, :name => name) { yield }\n end\n end",
"def lock!\n @locked = true\n end",
"def lock_style(style)\n clone(:lock => style)\n end",
"def lock; end",
"def lock; end",
"def lock; end",
"def is_locked?\n locked\n end",
"def synchronize_resultset(&proc)\n # make it reentrant\n return yield if defined?(@resultset_locked) && @resultset_locked == true\n\n return yield unless File.exist?(resultset_writelock)\n\n with_lock(&proc)\n end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def lock(opts={})\n super\n end",
"def with_database_exclusive_table_lock_mysql(&block)\n begin\n escaped = @low_card_model.connection.quote_table_name(@low_card_model.table_name)\n run_sql(\"LOCK TABLES #{escaped} WRITE\", { })\n block.call\n ensure\n begin\n run_sql(\"UNLOCK TABLES\", { })\n rescue ::ActiveRecord::StatementInvalid => si\n # we tried our best!\n end\n end\n end",
"def select_lock_sql(sql)\n @opts[:lock] == :update ? sql : super\n end",
"def locking_enabled?\n lock_optimistically && columns_hash[locking_column]\n end",
"def lock\n if block_given?\n raise 'Race condition' if locking?\n\n @locking = true\n yield\n return @locking = false\n end\n @locking = true\n end",
"def lock!\n @locked = true\n end",
"def data_lock(item, properties, &blk)\n locked = false\n lock_path = \"plan_store/%s\" % item\n config = {\"plan_store\" => properties}\n\n playbook.data_stores.from_hash(config)\n playbook.data_stores.prepare\n\n playbook.data_stores.lock(lock_path)\n locked = true\n\n yield\n ensure\n playbook.data_stores.release(lock_path) if locked\n end",
"def locks\n dataset.from(:pg_class).join(:pg_locks, :relation=>:relfilenode).select{[pg_class[:relname], Sequel::SQL::ColumnAll.new(:pg_locks)]}\n end",
"def locked?\n raise NotImplementedError\n end",
"def shared_locker(name)\n Locker.shared_locker(self, name)\n end",
"def enable_lock\n add option: \"-lock=true\"\n end",
"def with_app_lock( &block )\n # acquire lock_expiration\n ok = with_connection_lock do |locked_self|\n if locked_self.lock_expiration.nil? then\n row.update_all lock_expiration: Time.now + DELTA\n true\n end\n end\n # use and release lock_expiration outside of the connection_lock\n if ok then\n begin\n block.call\n ensure\n row.update_all lock_expiration: nil\n end\n end\n end",
"def add_lock!(sql, options)\n case lock = options[:lock]\n when true; sql << ' FOR UPDATE'\n when String; sql << \" #{lock}\"\n end\n end",
"def with_database_exclusive_table_lock_postgresql(&block)\n # If we just use the regular :sanitize_sql support, we get:\n # LOCK TABLE 'foo'\n # ...which, for whatever reason, PostgreSQL doesn't like. Escaping it this way works fine.\n escaped = @low_card_model.connection.quote_table_name(@low_card_model.table_name)\n run_sql(\"LOCK TABLE #{escaped}\", { })\n block.call\n end",
"def synchronize_resultset\n # make it reentrant\n return yield if defined?(@resultset_locked) && @resultset_locked\n\n begin\n @resultset_locked = true\n File.open(resultset_writelock, \"w+\") do |f|\n f.flock(File::LOCK_EX)\n yield\n end\n ensure\n @resultset_locked = false\n end\n end",
"def with_database_exclusive_table_lock(&block)\n case @low_card_model.connection.class.name\n when /postgresql/i then with_database_exclusive_table_lock_postgresql(&block)\n when /mysql/i then with_database_exclusive_table_lock_mysql(&block)\n when /sqlite/i then with_database_exclusive_table_lock_sqlite(&block)\n else\n raise LowCardTables::Errors::LowCardUnsupportedDatabaseError, %{You asked for low-card IDs for one or more hashes specifying rows that didn't exist,\nbut, when we went to create them, we discovered that we don't know how to exclusively\nlock tables in your database. (This is very important so that we don't accidentally\ncreate duplicate rows.)\n\nYour database adapter's class name is '#{@low_card_model.connection.class.name}'; please submit at least\na bug report, or, even better, a patch. :) Adding support is quite easy, as long as you know the\nequivalent of 'LOCK TABLE'(s) in your database.}\n end\n end",
"def msg_MLOCK(source, args)\n return nil\n end",
"def locking?\n @locking\n end",
"def synchronize(scope, &block)\n Rails.cache.dalli.with do |dalli|\n RemoteLock.new(RemoteLock::Adapters::Dalli.new(dalli))\\\n .synchronize(scope, initial_wait: 0.1, retries: 100, &block)\n end\n end",
"def non_blocking_lock!( *args, &block )\n unless non_blocking_lock( *args, &block )\n raise ::LockFailed.new( \"Failed to obtain a lock.\" )\n end\n end",
"def without_locking(&block)\n self.class.without_locking(&block)\n end",
"def lock\n doc['lock']\n end",
"def lockable?\n @lockable\n end",
"def lock\n hostlockname = @item['name'].downcase.tr(\" \", \"_\")\n if @locking\n @locking_file = File.open(\"/tmp/vcenter-#{hostlockname}-lock\",\"w\")\n @locking_file.flock(File::LOCK_EX)\n end\n end",
"def for_update\n cached_dataset(:_for_update_ds){lock_style(:update)}\n end",
"def lock?\n return true if @_lock_type\n false\n end",
"def with_instance_locked_for(operation, &block)\n return yield unless operation.version\n\n key = \"#{app}:#{operation.key}\"\n lock = Redis::Lock.new(key, LOCK_OPTIONS.merge(:redis => Promiscuous::Redis.connection))\n\n unless lock.lock\n raise Promiscuous::Error::LockUnavailable.new(lock.key)\n end\n\n begin\n yield\n ensure\n unless lock.unlock\n # TODO Be safe in case we have a duplicate message and lost the lock on it\n raise \"The subscriber lost the lock during its operation. It means that someone else\\n\"+\n \"received a duplicate message, and we got screwed.\\n\"\n end\n end\n end",
"def transaction\n File.open(lock_file_path, File::RDWR | File::CREAT, 0644) do |file|\n # Get lock\n file.sync = true\n file.flock(File::LOCK_EX)\n\n # Execute block\n yield\n\n # Push all changes\n apply\n\n # Release lock\n file.flock(File::LOCK_UN)\n end\n end",
"def unlocked?\n not locked?\n end",
"def select_and_lock(relation, limit:)\n relation = upcoming(relation)\n\n # FOR UPDATE SKIP LOCKED selects and locks entries, but skips those that\n # are already locked - preventing this transaction from being locked.\n sql = relation.to_sql + \" FOR UPDATE SKIP LOCKED\"\n sql += \" LIMIT #{limit}\" if limit\n\n item_class.find_by_sql(sql)\n end",
"def lock\n\t\t\tself.instance_eval do \n\t\t\t\tundef :same_piece_count=\n\t\t\t\tundef :difference_ids=\n\t\t\tend\n\t\tend",
"def with_flock(mode)\n return yield if @locked\n begin\n loop do\n # HACK: JRuby returns false if the process is already hold by the same process\n # see https://github.com/jruby/jruby/issues/496\n Thread.pass until @fd.flock(mode)\n # Check if database was replaced (cleared or compactified) in the meantime\n # break if not\n stat = @fd.stat\n break if stat.nlink > 0 && stat.ino == @inode\n open\n end\n @locked = true\n yield\n ensure\n @fd.flock(File::LOCK_UN)\n @locked = false\n end\n end",
"def ensure_exclusive\n acquire_locks\n write_pid\n end",
"def share\n calculate_share if static_shares?\n end",
"def lock(worker)\n return true if locked? && locked_by == worker.name\n #all this to make sure the check and the lock are simultanious:\n cnt = repository.update({properties[:lock_name]=>worker.name},self.class.all(:id=>self.id,:lock_name=>nil))\n if 0 != cnt\n @lock_name = worker.name\n true\n else\n worker.say( \"Worker #{worker.name} Failed to aquire lock on job #{id}\" )\n false\n end\n end",
"def lock\n hostlockname = @item['name'].downcase.tr(\" \", \"_\")\n if @locking\n @locking_file = File.open(\"/tmp/vcenter-dc-#{hostlockname}-lock\",\"w\")\n @locking_file.flock(File::LOCK_EX)\n end\n end",
"def with_lock\n lock\n begin\n yield if block_given?\n ensure\n unlock\n end\n end",
"def exclusive(timeout: 10.minutes)\n counter = 0\n log_wait = proc do |owner|\n if (counter += 1) % 10 == 1\n executor.output.write(\"Waiting for repository lock for #{owner}\\n\")\n end\n end\n MultiLock.lock(repo_cache_dir, outside_caller, timeout: timeout, failed_to_lock: log_wait) { return yield }\n end",
"def can_lock?(user, space)\n user.review_space_admin? && space.shared? && space.active?\n end",
"def lock _obj, _args\n \"_obj lock _args;\" \n end",
"def lock_strategy_enabled?(strategy); end",
"def with_lock(&block)\n @lock.synchronize { yield @delegate }\n end",
"def lock(key)\n raise NotImplementedError\n end"
] |
[
"0.70884025",
"0.6361162",
"0.62005144",
"0.6196953",
"0.6189135",
"0.6172573",
"0.6153694",
"0.60519814",
"0.6024574",
"0.6016043",
"0.60124713",
"0.5951911",
"0.5951208",
"0.5945349",
"0.59318465",
"0.59261966",
"0.592478",
"0.59171903",
"0.5903963",
"0.5874665",
"0.5862838",
"0.5859312",
"0.5849455",
"0.5839344",
"0.5839344",
"0.58363223",
"0.58350575",
"0.5825092",
"0.5825092",
"0.5818059",
"0.57952505",
"0.57837754",
"0.5763299",
"0.571287",
"0.5676876",
"0.5675284",
"0.5671825",
"0.5657221",
"0.56463015",
"0.56149524",
"0.56105894",
"0.55818933",
"0.5576322",
"0.55633116",
"0.5555955",
"0.55491984",
"0.5538587",
"0.55220485",
"0.5521626",
"0.5521281",
"0.55062044",
"0.55062044",
"0.55062044",
"0.5499171",
"0.5487151",
"0.54668397",
"0.54668397",
"0.54668397",
"0.54645747",
"0.5460114",
"0.5446592",
"0.54295015",
"0.54188865",
"0.5417468",
"0.5417229",
"0.54086",
"0.54079986",
"0.54043007",
"0.5403817",
"0.5396915",
"0.53953296",
"0.5391844",
"0.53871167",
"0.5380764",
"0.5378341",
"0.537799",
"0.53726226",
"0.5366654",
"0.534551",
"0.5327204",
"0.53199834",
"0.5311449",
"0.53069866",
"0.5302711",
"0.5293864",
"0.52796227",
"0.5269335",
"0.52684313",
"0.52653956",
"0.5262382",
"0.5261912",
"0.5258575",
"0.5248717",
"0.52469295",
"0.52454257",
"0.52363664",
"0.5228735",
"0.5228731",
"0.52251136",
"0.5223323"
] |
0.66496354
|
1
|
Support VALUES clause instead of the SELECT clause to return rows.
|
def select_values_sql(sql)
      # Emit a VALUES list (rows stored in opts[:values]) in place of
      # the usual SELECT clause.
      sql << "VALUES "
      expression_list_append(sql, opts[:values])
    end
|
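Editor's note: a brief sketch of how this surfaces through Sequel's public API, assuming the PostgreSQL adapter; the connection URL and row values are illustrative.

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical connection

# Database#values builds a dataset whose SELECT clause is replaced by a
# VALUES list, which select_values_sql above renders.
ds = DB.values([[1, 'a'], [2, 'b']])
ds.sql # => "VALUES (1, 'a'), (2, 'b')"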
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def test_values\n value = nil\n assert_nothing_raised do\n value = ActiveRecord::Base.connection.send(:select_rows, \"VALUES('ur', 'doin', 'it', 'right')\")\n end\n assert_equal [['ur', 'doin', 'it', 'right']], value\n end",
"def select_values(sql, name = nil)\n result = select_rows(sql, name)\n result.map { |v| v[0] }\n end",
"def select_values values\n return unless values\n values.each do |val|\n row = @list.index val\n add_row_selection_interval row, row unless row.nil?\n end\n end",
"def insert_select(*values)\n unless @opts[:disable_insert_returning]\n ds = opts[:returning] ? self : returning\n ds.insert(*values){|r| return r}\n end\n end",
"def values value_type = :formatted_value\n return @values unless @values.nil?\n\n @values = []\n while @rowset.next do\n @values << 1.upto(self.columns.size).map do |i|\n @rowset.getString i\n end\n end\n\n @values\n end",
"def selected_values &block\n ar = []\n selected_rows().each do |i|\n val = @list[i]\n if block_given?\n yield val\n else\n ar << val\n end\n end\n return ar unless block_given?\n end",
"def insert(*values)\n if @opts[:sql] || @opts[:returning]\n super\n else\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def returning(*values)\n if values.empty?\n cached_dataset(:_returning_ds) do\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>EMPTY_ARRAY)\n end\n else\n raise Error, \"RETURNING is not supported on #{db.database_type}\" unless supports_returning?(:insert)\n clone(:returning=>values.freeze)\n end\n end",
"def insert_returning_columns(ds)\n return unless ds.supports_returning?(:insert)\n return unless values = ds.opts[:select]\n\n values = values.map{|v| ds.unqualified_column_for(v)}\n if values.all?\n values\n end\n end",
"def values\n rows.map{|r| r.value}\n end",
"def dynamic_values\n if @dynamic_values.blank?\n\n if self.column.blank?\n raise \"dynamic_values column not defined for filter_param_id = #{self.id}\"\n end\n\n query_result = self.dynamic_values_query.try(:query_result, nil, nil,\n :sample_option => ::Query::Sample::REAL,\n :sql_string => \"SELECT `#{self.column}` FROM {table_name}\"\n )\n result_set = query_result.try(:result_set)\n if !result_set.nil?\n @dynamic_values = result_set.try(:results_array).collect do |row|\n result_set.value_at(row, self.column)\n end\n end\n end\n\n @dynamic_values ||= []\n end",
"def multiple(sql, values = [])\n r = $db.exec_params(sql, values)\n return [] if r.ntuples == 0\n r.map { |row| convert_to_ruby_types(row) }\nend",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(*insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def insert_select_sql(*values)\n ds = opts[:returning] ? self : returning\n ds.insert_sql(*values)\n end",
"def value_list(arg)\n raise Error, 'argument to Sequel.value_list must be an array' unless arg.is_a?(Array)\n SQL::ValueList.new(arg)\n end",
"def insert(*values)\n if @opts[:returning]\n # Already know which columns to return, let the standard code handle it\n super\n elsif @opts[:sql] || @opts[:disable_insert_returning]\n # Raw SQL used or RETURNING disabled, just use the default behavior\n # and return nil since sequence is not known.\n super\n nil\n else\n # Force the use of RETURNING with the primary key value,\n # unless it has been disabled.\n returning(insert_pk).insert(*values){|r| return r.values.first}\n end\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |column_name|\n values << \"'#{send(column_name)}'\" unless send(column_name).nil?\n end \n #The above code, however, will result in a values array. We need comma separated values for our SQL statement. Let's join this array into a string:\n values.join(\", \")\n end",
"def select_rows(sql, name = nil)\n array_query(sql, name, [])\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n #We need comma separated values for our SQL statement. Let's join this array into a string:\n values.join(\", \")\n end",
"def query_return_array(sql, *binds)\n mysql.fetch(sql, *binds).all\n end",
"def values(*) end",
"def rows\n @rows ||= if ActiveRecord::Base.connection.adapter_name == \"PostgreSQL\"\n result.entries\n else\n [].tap do |row_hashes|\n result.entries.map do |row|\n hash = {}\n result.fields.each do |field|\n hash[field] = row[result.fields.index(field)]\n end\n row_hashes << hash\n end\n end\n end\n end",
"def single_value(sql, opts=OPTS)\n _execute(sql, opts){|conn| log_connection_yield(sql, conn){conn.first_value_from(sql)}}\n end",
"def GetValueId(db, valueTable, idColumn, valueColumn, value)\n\tquery = db.execute(\"SELECT \" + idColumn + \" FROM \" + valueTable + \" WHERE \" + valueColumn + \" = ?;\",[value])\n\t\n\tif (query.length == 1)\n\t\treturn query\n\telse\n\t\tdb.execute(\"INSERT INTO \" + valueTable + \"(\" + valueColumn + \") VALUES (?);\", [value])\n\t\treturn db.execute(\"SELECT \" + idColumn + \" FROM \" + valueTable + \" WHERE \" + valueColumn + \" = ?;\",[value])\n\tend\nend",
"def _import(columns, values, opts=OPTS)\n if @opts[:returning]\n # no transaction: our multi_insert_sql_strategy should guarantee\n # that there's only ever a single statement.\n sql = multi_insert_sql(columns, values)[0]\n returning_fetch_rows(sql).map{|v| v.length == 1 ? v.values.first : v}\n elsif opts[:return] == :primary_key\n returning(insert_pk)._import(columns, values, opts)\n else\n super\n end\n end",
"def set_values_from(result)\n @arguments = Arguments.new(result[0]['arguments'])\n @return_type = result[0]['return_type']\n @query = \"SELECT * FROM %s.%s(%s)\" % [\n quoted_schema,\n quoted_name,\n @arguments.to_params\n ]\n end",
"def default_values_clause\n 'VALUES (DEFAULT)'\n end",
"def _insert_select_raw(ds)\n if use_prepared_statements_for?(:insert_select)\n if ps = model.send(:prepared_insert_select, @values.keys)\n _set_prepared_statement_server(ps).call(@values)\n end\n else\n super\n end\n end",
"def select_rows(sql, name = nil)\n log(sql, name) do\n @connection.query(:array, sql)\n end\n end",
"def values(ids, common_value)\n common_values = [common_value] * ids.length\n convert_to_sql(ids.zip(common_values).each(&:flatten!))\n end",
"def select_rows(sql, name = nil)\n raise NotImplementedError, \"select_rows is an abstract method\"\n end",
"def insert_select(*values)\n with_sql_first(insert_select_sql(*values))\n end",
"def sql_literal(*)\n @dataset.sql\n end",
"def select_rows(sql, name = nil)\r\n rs = ADS.instance.api.ads_execute_direct(@connection, sql)\r\n raise ActiveRecord::StatementInvalid.new(\"#{ADS.instance.api.ads_error(@connection)}:#{sql}\") if rs.nil?\r\n record = []\r\n while ADS.instance.api.ads_fetch_next(rs) == 1\r\n max_cols = ADS.instance.api.ads_num_cols(rs)\r\n result = Array.new(max_cols)\r\n max_cols.times do |cols|\r\n result[cols] = ADS.instance.api.ads_get_column(rs, cols)[1]\r\n end\r\n record << result\r\n end\r\n ADS.instance.api.ads_free_stmt(rs)\r\n return record\r\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \") # join values array into a string separated by comma's\n end",
"def query_return_first_value(sql, *binds)\n mysql.fetch(sql, *binds).single_value\n end",
"def insert_select(*values)\n return unless supports_insert_select?\n # Handle case where query does not return a row\n server?(:default).with_sql_first(insert_select_sql(*values)) || false\n end",
"def select_values_forcing_binds(arel, name, binds)\n # remove possible force of unprepared SQL during dictionary access\n unprepared_statement_forced = prepared_statements_disabled_cache.include?(object_id)\n prepared_statements_disabled_cache.delete(object_id) if unprepared_statement_forced\n\n select_values(arel, name, binds)\n ensure\n # Restore unprepared_statement setting for surrounding SQL\n prepared_statements_disabled_cache.add(object_id) if unprepared_statement_forced\n end",
"def values_for_insert\n return_values = []\n self.class.column_names.each do |column_name|\n return_values << \"'#{self.send(column_name)}'\" unless self.send(column_name).nil?\n end\n return_values.join(\", \")\n end",
"def fetch_rows(sql)\n return cursor_fetch_rows(sql){|h| yield h} if @opts[:cursor]\n execute(sql){|res| yield_hash_rows(res, fetch_rows_set_cols(res)){|h| yield h}}\n end",
"def execute( sql, *bind_vars )\n stmt = prepare( sql )\n stmt.bind_params( *bind_vars )\n stmt.execute do |result|\n if block_given?\n result.each { |row| yield row }\n else\n return result.inject( [] ) { |arr,row| arr << row; arr }\n end\n end\n end",
"def run\n if @prepared_type == :insert\n fetch_rows(prepared_sql){|r| return r.values.first}\n else\n super\n end\n end",
"def values\n @values ||= Values.new(schema_result.to_h)\n end",
"def prepare_explicit_statement(ds, type, vals=OPTS)\n f = ds.opts[:from]\n meth = type == :insert_select ? :returning : :select\n s = ds.opts[meth]\n if f && f.length == 1 && !ds.opts[:join] && (!s || s.empty?)\n ds = ds.send(meth, *columns.map{|c| Sequel.identifier(c)})\n end \n \n prepare_statement(ds, type, vals)\n end",
"def get_from_db(column, table, where, value)\n if where.nil? || value.nil?\n return db.execute(\"SELECT #{column} FROM #{table}\")\n else\n return db.execute(\"SELECT #{column} FROM #{table} WHERE #{where} = ?\",value)\n end\n end",
"def values(column)\n rows do |row|\n row[column]\n end\n end",
"def values_at(*) end",
"def result\n ActiveRecord::Base.connection.select_all(sql).entries\n end",
"def values_at(*rest) end",
"def values_at(*rest) end",
"def values_at(*rest) end",
"def getvalue\n selected_rows\n end",
"def getvalue\n selected_rows\n end",
"def argument_rows\n rows[1..rows.count] || []\n end",
"def get_data sql\n #$log.debug \"SQL: #{sql} \"\n columns, *rows = @db.execute2(sql)\n #$log.debug \"XXX COLUMNS #{sql}, #{rows.count} \"\n content = rows\n return content\n end",
"def values(v)\n raise Error, \"Cannot provide an empty array for values\" if v.empty?\n @default_dataset.clone(:values=>v)\n end",
"def select_rows(sql, name = nil)\n # last parameter indicates to return also column list\n result, columns = select(sql, name, true)\n result.map{ |v| columns.map{|c| v[c]} }\n end",
"def bind_values\n []\n end",
"def assign_row_values\n row.each do |column, value|\n field = column_fields[column]\n next if field.nil?\n\n item.public_send(\"#{field.attribute_name}=\", value)\n end\n end",
"def execute(*values)\n IBM_DB.execute(@stmt, values)\n end",
"def fetch_value(sql)\n # Get the row\n row = fetch_row(sql)\n\n # Check field count\n if row.count > 1\n check.critical(\"Expected to receive a single value, but result has more than one field\", \"SQL: #{sql}\\nResult: #{row.inspect}\")\n end\n\n return row.values.first\n end",
"def fetch_rows(sql, &block)\n raise NotImplementedError, NOTIMPL_MSG\n end",
"def add_data_values\n # remove any previous accepted values so that we can keep a track of what has been updated\n sqlclean = \"select clear_datacolumn_accepted_values(#{id})\"\n\n datatype = Datatypehelper.find_by_name(import_data_type)\n\n # I would like to change this so that the SQL is in one function but it wasn't working\n # TODO: I will look at this again - SR\n if datatype.name == 'text'\n sql = \"select accept_text_datacolumn_values(#{id})\"\n else\n dataset = Dataset.find(dataset_id)\n comment = ''\n comment = dataset.title unless dataset.nil?\n sql = \"select accept_datacolumn_values(#{datatype.id}, #{id}, #{datagroup_id}, '#{comment}')\"\n end\n\n begin\n connection = ActiveRecord::Base.connection()\n connection.begin_db_transaction\n connection.execute(sqlclean)\n connection.execute(sql)\n\n connection.commit_db_transaction\n rescue StandardError\n connection.rollback_db_transaction\n raise\n end\n end",
"def update_sql(values=OPTS)\n case values\n when LiteralString\n super\n when String\n super(LiteralString.new(values))\n else\n super\n end\n end",
"def fetch_rows(sql)\n execute(sql) do |stmt|\n columns = []\n convert = convert_smallint_to_bool\n cps = db.conversion_procs\n stmt.num_fields.times do |i|\n k = stmt.field_name i\n key = output_identifier(k)\n type = stmt.field_type(i).downcase.to_sym\n # decide if it is a smallint from precision\n type = :boolean if type == :int && convert && stmt.field_precision(i) < 8\n type = :blob if type == :clob && db.use_clob_as_blob\n columns << [key, cps[type]]\n end\n cols = columns.map{|c| c[0]}\n self.columns = cols\n\n while res = stmt.fetch_array\n row = {}\n res.zip(columns).each do |v, (k, pr)|\n row[k] = ((pr ? pr.call(v) : v) if v)\n end\n yield row\n end\n end\n self\n end",
"def values_at(*primary_key_values)\n unless next_unscoped_key_column_valid_for_in_query?\n raise IllegalQuery,\n \"Only the last partition key column and the last clustering column can match multiple values\"\n end\n\n primary_key_values = primary_key_values.map(&method(:cast_range_key))\n\n scoped { |attributes| attributes[:scoped_key_values] <<\n primary_key_values }.resolve_if_fully_specified\n end",
"def value\n @value ||= values_for columns\n end",
"def query(query, values)\n handle.exec(query, values)\n end",
"def select(sql, name = nil, binds = [])\n ret = exec_query(sql, name, binds)\n ArVer::GTEQ_4 ? ret : ret.to_a\n end",
"def values_for_insert\n values = []\n self.class.column_names.each {|column| values << \"'#{send(column)}'\" unless send(column).nil?}\n values.join(\", \")\n end",
"def return_value_list_from_query?\n @this_val_where[:mode] == 'return_value_list'\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n end",
"def values_at(*keys)\n @table_with_original_keys.values_at *keys\n end",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n end\n values.join(\", \")\n\n # Let's iterate over the column names stored in #column_names and use the #send method with each individual column name to invoke the method by that same name and capture the return value:\n # values = []\n # self.class.column_names.each do |col_name|\n # values << \"'#{send(col_name)}'\" unless send(col_name).nil?\n # end\n # Here, we push the return value of invoking a method via the #send method, unless that value is #nil (as it would be for the id method before a record is saved, for instance).\n # Notice that we are wrapping the return value in a string. That is because we are trying to craft #a string of SQL. Also notice that each individual value will be enclosed in single quotes, ' ', #inside that string. That is because the final SQL string will need to look like this:\n\n # INSERT INTO songs (name, album)\n # VALUES 'Hello', '25';\n # SQL expects us to pass in each column value in single quotes.\n # The above code, however, will result in a values array\n # [\"'the name of the song'\", \"'the album of the song'\"]\n # We need comma separated values for our SQL statement. Let's join this array into a string:\n # values.join(\", \")\n end",
"def select(sql, name = nil, binds = [])\n ret = exec_query(sql, name, binds)\n ActiveRecord::VERSION::MAJOR >= 4 ? ret : ret.to_a\n end",
"def ascii_query(sql,*values)\n sth = self.query(sql,*values)\n rows = sth.fetch_all\n col_names = sth.column_names\n sth.finish\n DBI::Utils::TableFormatter.ascii(col_names, rows)\n end",
"def multi_insert_sql_strategy\n :values\n end",
"def exec_raw(sql, options = {})\n cursor = $connection.exec(sql)\n if(options[:return_hash])\n recordset, = pack_cursor(cursor, :return => \"hash\")\n return recordset\n else\n return_data = []\n while current_row = cursor.fetch()\n return_data.push(current_row)\n end\n return return_data\n end\n end",
"def map_values(row, columns)\n values = columns.map do |v|\n # TODO - stw - which other cases do we need to handle?\n case v[1]\n when /int/: \n row[v[0]] || 'NULL'\n else \n (row[v[0]].nil? ? 'NULL' : \"'\" + @db1.escape_string(row[v[0]].to_s) + \"'\")\n end\n end\n values = values.join(',') \n end",
"def select_rows(sql, name = nil)\n select_raw(sql, name).last\n end",
"def select_rows(sql, name = nil)\n select_raw(sql, name).last\n end",
"def sql_literal_append(ds, sql)\n check_columns!\n sql << 'ROW'\n ds.literal_append(sql, values_at(*columns))\n if db_type\n sql << '::'\n ds.quote_schema_table_append(sql, db_type)\n end\n end",
"def values_for_insert\r\n values = []\r\n self.class.column_names.each do |col_name|\r\n values << \"'#{send(col_name)}'\" unless send(col_name).nil?\r\n end\r\n values.join(\", \")\r\nend",
"def values_for_insert\n values = []\n self.class.column_names.each do |col_name|\n values << \"'#{self.send(col_name)}'\" unless self.send(col_name).nil?\n end\n values.join(\", \")\n end",
"def set_values(array)\n @row = array\n end",
"def select_all(stmt, bindvars={}, &p)\n sanity_check(stmt)\n rows = nil\n execute(stmt, bindvars) do |sth|\n if block_given?\n sth.each(&p)\n else\n rows = sth.fetch_all\n end\n end\n return rows\n end",
"def data_values\n if bare_data_set?\n [data]\n else\n data.map{ |set| set.is_a?(Hash) ? set[:values] : set }\n end\n end"
] |
[
"0.7333736",
"0.6650351",
"0.6242213",
"0.62295794",
"0.6073188",
"0.60719794",
"0.59351194",
"0.59288603",
"0.58959985",
"0.5886909",
"0.58362275",
"0.58324695",
"0.57895106",
"0.5780254",
"0.5780254",
"0.5780254",
"0.57623494",
"0.5755752",
"0.57550055",
"0.5736297",
"0.5729119",
"0.57086897",
"0.5702008",
"0.5657217",
"0.56338173",
"0.55684173",
"0.5565314",
"0.5535772",
"0.5526311",
"0.5522086",
"0.5509489",
"0.5507015",
"0.54952604",
"0.54815507",
"0.5470573",
"0.5467997",
"0.5461003",
"0.54405016",
"0.5436906",
"0.54173374",
"0.54168063",
"0.5405379",
"0.5396216",
"0.53957677",
"0.53893423",
"0.5367328",
"0.5355792",
"0.5351732",
"0.534516",
"0.53377044",
"0.53136456",
"0.53120655",
"0.53120655",
"0.53021586",
"0.53021586",
"0.5298877",
"0.5292707",
"0.52766126",
"0.52723175",
"0.5270308",
"0.5263412",
"0.525702",
"0.5254286",
"0.52533656",
"0.5248881",
"0.52449125",
"0.5242733",
"0.52418816",
"0.5240819",
"0.522029",
"0.5219112",
"0.52168185",
"0.5214789",
"0.52134013",
"0.52134013",
"0.52134013",
"0.52134013",
"0.52134013",
"0.52134013",
"0.52134013",
"0.52134013",
"0.52134013",
"0.52134013",
"0.52134013",
"0.52134013",
"0.5195196",
"0.51940924",
"0.5191837",
"0.51887655",
"0.5176947",
"0.5167836",
"0.5161325",
"0.51602316",
"0.51563704",
"0.51545864",
"0.51535255",
"0.5151412",
"0.5147142",
"0.5135709",
"0.51336986"
] |
0.7835908
|
0
|
Use WITH RECURSIVE instead of WITH if any of the CTEs is recursive.
|
def select_with_sql_base
      # A single recursive CTE forces the RECURSIVE keyword for the
      # entire WITH clause.
      opts[:with].any?{|w| w[:recursive]} ? "WITH RECURSIVE " : super
    end
|
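Editor's note: a minimal sketch of triggering the recursive form via Dataset#with_recursive; the table and column names are hypothetical.

require 'sequel'

DB = Sequel.connect('postgres://localhost/mydb') # hypothetical connection

# with_recursive marks the CTE as recursive, so select_with_sql_base
# emits WITH RECURSIVE instead of WITH.
ds = DB[:t].with_recursive(
  :t,
  DB[:nodes].where(parent_id: nil),                      # base case
  DB[:nodes].join(:t, id: :parent_id).select_all(:nodes) # recursive step
)
ds.sql # begins with "WITH RECURSIVE t AS (...)"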
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def recursive => nil",
"def recurse_result_set(result, options = {}, &block)\n return result unless block_given? \n inner_recursion = options.delete(:inner_recursion)\n result_set = inner_recursion ? result : result.dup\n \n parent_id = (options.delete(:parent_id) || result_set.first[result_set.first.parent_col_name]) rescue nil\n options[:level] ||= 0\n options[:nested] = true unless options.key?(:nested)\n \n siblings = options[:nested] ? result_set.select { |s| s.parent_id == parent_id } : result_set \n siblings.sort! {|a,b| a.send(options[:sort_on]) <=> b.send(options[:sort_on])} if options[:sort_on]\n siblings.each do |sibling|\n result_set.delete(sibling) \n block.call(sibling, options[:level])\n opts = { :parent_id => sibling.id, :level => options[:level] + 1, :inner_recursion => true, :sort_on => options[:sort_on]} \n recurse_result_set(result_set, opts, &block) if options[:nested]\n end\n result_set.each { |orphan| block.call(orphan, options[:level]) } unless inner_recursion\n end",
"def each_recursive(&block)\n tree.each_recursive(&block)\n end",
"def pipe_cte_with!(subquery)\n return self unless subquery.try(:with_values?)\n\n # Add subquery CTE's to the parents query stack. (READ THE SPECIAL NOTE ABOVE!)\n if @scope.with_values?\n @scope.cte.pipe_cte_with!(subquery.cte)\n else\n # Top level has no with values\n @scope.with!(subquery.cte)\n end\n\n self\n end",
"def each(&block)\n squish = lambda do |tree, list|\n new_children = tree.children.reduce(list) { |acc, elem| squish.call(elem, acc) }\n [tree.root].lazy_append(new_children)\n end\n\n squish.call(self, [])\n\n # base = [root]\n # recursive = children.map(&:each)\n # res = base.lazy_append(recursive)\n\n # return res.each(&block) if block_given?\n\n # res\n\n # res = [[root], children.flat_map(&:each)].lazy.flat_map(&:lazy)\n # res = res.map(&block) if block_given?\n # res\n end",
"def nested_set_recurse(&block)\n self.each do |x| \n x.nested_set_recurse(self, &block)\n end\n end",
"def expand_children node=:current_index\n $multiplier = 999 if !$multiplier || $multiplier == 0\n node = row_to_node if node == :current_index\n return if node.children.empty? # or node.is_leaf?\n #node.children.each do |e| \n #expand_node e # this will keep expanding parents\n #expand_children e\n #end\n node.breadth_each($multiplier) do |e|\n expand_node e\n end\n $multiplier = 0\n _structure_changed true\n end",
"def recursive_dependencies(&block)\n Dependency.expand(self, &block)\n end",
"def nested_set_recurse(set, &block)\n block.call self, lambda{\n index = set.index(self) + 1\n while set[index].parent_id == self.id\n set[index].nested_set_recurse(set, &block)\n index += 1\n end\n }\n end",
"def run_recursive_loop &block\n results = yield\n return results\nend",
"def block_node_taken_by_with_method_with_no_normal_args\n each_backward_chained_node(node, :child_as_second_arg) do |chained_node, child_node|\n next unless chained_node.block_type?\n return nil unless child_node.children[1] == :with\n return nil if child_node.children[2]\n return chained_node\n end\n end",
"def deep_each(&block)\n self.each_sexp do |sexp|\n block[sexp]\n sexp.deep_each(&block)\n end\n end",
"def visit_all(&block)\n visit &block\n children.each {|c| c.visit_all &block}\n end",
"def expand_depth(node) #:nodoc:\n relations_for(node).each do |rel|\n yield_node(rel)\n\n expand_node(rel) unless @description.prune_node?(rel)\n end\n end",
"def solution(t)\n # write your code in Ruby 2.2\n depth = 0\n childs = []\n\n childs << t.l if t.l\n childs << t.r if t.r\n\n while not childs.empty? do\n depth += 1\n\n cc = []\n childs.each do |t|\n cc << t.l if t.l\n cc << t.r if t.r\n end\n\n childs = cc\n end\n\n depth\nend",
"def nesting() end",
"def build_current_children_and_evaluate! opts={}\n these_asserts\n @current.clear\n if (@start_offset > final_offset) # see :note3\n @done = true\n evaluate_ok!\n return nil\n end\n @ok = nil\n @done = false\n @children_productions = production.children\n @ctxt = parse_context\n (start_offset..@children.length-1).each do |idx|\n @current.push idx\n break unless build_this_child_and_keep_going?(idx, opts)\n end\n @children_productions = nil\n @ctxt = nil\n evaluate_ok!\n nil\n end",
"def rewrite(&block)\n ast = yield self\n\n if ast.children.any?\n node(ast.type, *ast.children.map { _1.rewrite(&block) } )\n else\n ast\n end\n end",
"def depth\n @options[:depth] += 1\n ret = yield\n @options[:depth] -= 1\n ret\n end",
"def recursive_solution\n\n end",
"def traverse\n nodes = [self]\n until nodes.empty?\n node = nodes.pop\n yield node\n nodes += node.children.reverse unless node.children.empty?\n end\n end",
"def deep_find\n ([self] + all_subtrees.to_a).each do |node|\n return node if yield(node)\n end\n nil\n end",
"def rebuild!\n\n scope = lambda{}\n # TODO: add scope stuff\n \n # Don't rebuild a valid tree.\n return true if valid?\n indices = {}\n \n move_to_child_of_lambda = lambda do |parent_node|\n # Set left\n parent_node[nested_set_options[:left_column]] = indices[scope.call(parent_node)] += 1\n # Gather child noodes of parend_node and iterate by children\n parent_node.children.order(:id).all.each do |child_node|\n move_to_child_of_lambda.call(child_node)\n end\n # Set right\n parent_node[nested_set_options[:right_column]] = indices[scope.call(parent_node)] += 1\n parent_node.save\n end\n\n # Gatcher root nodes and iterate by them\n self.roots.all.each do |root_node|\n # setup index for this scope\n indices[scope.call(root_node)] ||= 0\n move_to_child_of_lambda.call(root_node)\n end\n end",
"def all_children(special=nil)\n if special && special[:exclude]\n transaction do\n # exclude some items and all their children\n special[:exclude] = [special[:exclude]] if !special[:exclude].is_a?(Array)\n # get objects for ids\n special[:exclude].collect! {|s| s.is_a?(self.class) ? s : self.class.find(s)}\n # get all subtrees and flatten the list\n exclude_list = special[:exclude].map{|e| e.full_set.map{|ee| ee.id}}.flatten.uniq.join(',')\n if exclude_list.blank?\n self.class.find(:all, :conditions => \"#{acts_as_nested_set_options[:scope]} AND (#{acts_as_nested_set_options[:left_column]} > #{self[acts_as_nested_set_options[:left_column]]}) and (#{acts_as_nested_set_options[:right_column]} < #{self[acts_as_nested_set_options[:right_column]]})\", :order => acts_as_nested_set_options[:left_column])\n else\n self.class.find(:all, :conditions => \"#{acts_as_nested_set_options[:scope]} AND id NOT IN (#{exclude_list}) AND (#{acts_as_nested_set_options[:left_column]} > #{self[acts_as_nested_set_options[:left_column]]}) and (#{acts_as_nested_set_options[:right_column]} < #{self[acts_as_nested_set_options[:right_column]]})\", :order => acts_as_nested_set_options[:left_column])\n end\n end\n else\n self.class.find(:all, :conditions => \"#{acts_as_nested_set_options[:scope]} AND (#{acts_as_nested_set_options[:left_column]} > #{self[acts_as_nested_set_options[:left_column]]}) and (#{acts_as_nested_set_options[:right_column]} < #{self[acts_as_nested_set_options[:right_column]]})\", :order => acts_as_nested_set_options[:left_column])\n end\n end",
"def do_refresh(recurse=true)\n Thread.new do\n file_tree_mutex.synchronize do\n @tree.refresh\n @tree.model.refilter\n @tree.expand_first_row\n end\n end\n end",
"def traverse(&block); end",
"def traverse(&block); end",
"def traverse(include_self = false, &block)\n return enum_for(__method__, include_self) unless block_given?\n\n block.call(:enter, self, 0) if include_self\n\n each_with_index do |exp, index|\n if exp.terminal?\n block.call(:visit, exp, index)\n else\n block.call(:enter, exp, index)\n exp.traverse(&block)\n block.call(:exit, exp, index)\n end\n end\n\n block.call(:exit, self, 0) if include_self\n\n self\n end",
"def compute_children_of(current, relations) # :nodoc:\n\t old_size = current.size\n\t for rel in relations\n\t\tnext if (rel.parent && relations.include?(rel.parent))\n\n\t\tcomponents = rel.generated_subgraphs(current, false)\n\t\tfor c in components\n\t\t current.merge c\n\t\tend\n\t end\n\n\t if current.size == old_size\n\t\treturn current\n\t else\n\t\treturn compute_children_of(current, relations)\n\t end\n\tend",
"def process_tree_with_renew\n @process_tree = nil\n process_tree\n end",
"def traverse obj=self, &block\n case\n when obj.respond_to?(:parent?) && obj.respond_to?(:child?)\n block.call obj\n obj.children.each { |c| traverse(c, &block) }\n when obj.respond_to?(:parent?)\n obj.children.each { |c| traverse(c, &block) }\n when obj.respond_to?(:child?)\n block.call obj\n end\n end",
"def each_leaf!\n raise \"Method not yet written.\"\n\n self.each do |leaf|\n yield(leaf)\n end\n end",
"def test_recursive_methods\n assert_equal 0, find_node(1).ancestors_r.size\n assert_equal 8, find_node(1).descendants_r.size\n assert_equal 4, find_node('1_1_2_1_1').ancestors_r.size\n end",
"def recompute_recursive_subsets\n @recursive_subsets = subsets.inject(ValueSet.new) do |set, child|\n set.merge(child.recursive_subsets)\n end\n if parent\n parent.recompute_recursive_subsets\n end\n end",
"def to_recursive\n self\n end",
"def expand_breadth(node) #:nodoc:\n cached_relations = []\n\n # 1. yield direct relations first\n relations_for(node).each do |rel|\n yield_node(rel)\n\n # memoize relation for next iteration\n cached_relations << rel unless @description.prune_node?(rel)\n end\n\n # 2. dig deeper\n cached_relations.each do |rel|\n expand_breadth(rel)\n end\n end",
"def recurse_trade(node, route, items, visited={})\n \tif node == route[0]\n \t\troute << node\n \t\treturn route\n \telsif visited[node]\n \t\treturn\n \telse\n \t\tvisited[node] = true\n \t\troute << node\n \t\titems_children(node, items).each do |child|\n if child.new_owner\n next\n end\n \t\t\tresult = recurse_trade(child, route, items, visited)\n if result and result.count > 1\n # puts \"RESULT is #{result.count}\"\n # puts \"Result first is #{result.first}\"\n # puts \"Result last is #{result.last}\"\n return result\n else\n end\n \t\t\t# return result if result\n \t\tend\n \tend\t\t\n end",
"def result_to_array(result, options = {}, &block)\n array = []\n inner_recursion = options.delete(:inner_recursion)\n result_set = inner_recursion ? result : result.dup\n \n parent_id = (options.delete(:parent_id) || result_set.first[result_set.first.parent_col_name]) rescue nil\n level = options[:level] || 0\n options[:children] ||= 'children'\n options[:methods] ||= []\n options[:nested] = true unless options.key?(:nested)\n options[:symbolize_keys] = true unless options.key?(:symbolize_keys)\n \n if options[:only].blank? && options[:except].blank?\n options[:except] = [:left_column, :right_column, :parent_column].inject([]) do |ex, opt|\n column = acts_as_nested_set_options[opt].to_sym\n ex << column unless ex.include?(column)\n ex\n end\n end\n \n siblings = options[:nested] ? result_set.select { |s| s.parent_id == parent_id } : result_set\n siblings.each do |sibling|\n result_set.delete(sibling)\n node = block_given? ? block.call(sibling, level) : sibling.attributes(:only => options[:only], :except => options[:except]) \n options[:methods].inject(node) { |enum, m| enum[m.to_s] = sibling.send(m) if sibling.respond_to?(m); enum } \n if options[:nested] \n opts = options.merge(:parent_id => sibling.id, :level => level + 1, :inner_recursion => true)\n childnodes = result_to_array(result_set, opts, &block)\n node[ options[:children] ] = childnodes if !childnodes.empty? && node.respond_to?(:[]=)\n end\n array << (options[:symbolize_keys] && node.respond_to?(:symbolize_keys) ? node.symbolize_keys : node)\n end\n unless inner_recursion\n result_set.each do |orphan| \n node = (block_given? ? block.call(orphan, level) : orphan.attributes(:only => options[:only], :except => options[:except])) \n options[:methods].inject(node) { |enum, m| enum[m.to_s] = orphan.send(m) if orphan.respond_to?(m); enum }\n array << (options[:symbolize_keys] && node.respond_to?(:symbolize_keys) ? node.symbolize_keys : node)\n end\n end \n array\n end",
"def walk_concurrence(fexp, ftree, tree)\n\n if ftree[2].size != tree[2].size\n #\n # that's lazy, but why not?\n #\n # we could add/apply a new child...\n\n register(MutationPoint.new(fexp.fei, tree, :re_apply))\n\n else\n #\n # if there is a least one child that replied and whose\n # tree must be changes, then re-apply the whole concurrence\n #\n # else try to re-apply only the necessary branch (walk them)\n\n branches = ftree[2].zip(tree[2]).each_with_object([]) { |(ft, t), a|\n #\n # match child expressions (if not yet replied) with current tree (ft)\n # and desired tree (t)\n #\n cfei = fexp.children[a.size]\n cexp = cfei ? @ps.fexp(cfei) : nil\n a << [ cexp, ft, t ]\n #\n }.select { |cexp, ft, t|\n #\n # only keep diverging branches\n #\n ft != t\n }\n\n branches.each do |cexp, ft, t|\n\n next if cexp\n\n # there is at least one branch that replied,\n # this forces re-apply for the whole concurrence\n\n register(MutationPoint.new(fexp.fei, tree, :re_apply))\n return\n end\n\n branches.each do |cexp, ft, t|\n #\n # we're left with divering branches that haven't yet replied,\n # let's walk to register the mutation point deep into it\n\n walk(cexp, t)\n end\n end\n end",
"def rebuild!\n # Don't rebuild a valid tree.\n return true if valid?\n \n scope = lambda{ |node| {} }\n if acts_as_nested_set_options[:scope]\n scope = lambda { |node|\n scope_column_names.inject({}) { |hash, column_name|\n hash[column_name] = node.send(column_name.to_sym)\n hash\n }\n }\n end\n indices = {}\n \n set_left_and_rights = lambda do |node|\n # set left\n node.send(:\"#{left_column_name}=\", (indices[scope.call(node)] += 1))\n # find\n all(scope.call(node).merge(parent_column_name => node.id)).each { |n| set_left_and_rights.call(n) }\n # set right\n node.send(:\"#{right_column_name}=\", (indices[scope.call(node)] += 1))\n node.save! \n end\n \n # Find root node(s)\n root_nodes = all(parent_column_name => nil, :order => \"#{left_column_name}, #{right_column_name}, id\").each do |root_node|\n # setup index for this scope\n indices[scope.call(root_node)] ||= 0\n set_left_and_rights.call(root_node)\n end\n end",
"def recursive(*args)\n @scope.with_values += args\n @scope.recursive_value = true\n @scope\n end",
"def recursive(*args)\n @scope.with_values += args\n @scope.recursive_value = true\n @scope\n end",
"def each_subexp(include_root = true, &block)\n yield self if include_root\n each do |child|\n if child.is_a?(Sexp)\n child.each_subexp(&block)\n end\n end\n end",
"def dfs\n yield self\n self.children.each do |child|\n next unless is_sexp?(child)\n case child[0]\n when Array\n child.each { |x| x.dfs { |y| yield y}}\n when ::Symbol\n child.dfs { |y| yield y }\n end\n end\n end",
"def descendants\n tree.tap(&:shift)\n end",
"def each(&proc)\n @subtrees.each(&proc)\n end",
"def recursive_requirements(&block)\n Requirement.expand(self, &block)\n end",
"def rewrite_tree_block\n # -> uncomment the next line to manually enable rule tracing\n # trace_in(__method__, 34)\n return_value = RewriteTreeBlockReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n lp = nil\n char_literal154 = nil\n rewrite_tree_alternative153 = nil\n\n tree_for_lp = nil\n tree_for_char_literal154 = nil\n stream_T__81 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__81\")\n stream_T__83 = ANTLR3::AST::RewriteRuleTokenStream.new(@adaptor, \"token T__83\")\n stream_rewrite_tree_alternative = ANTLR3::AST::RewriteRuleSubtreeStream.new(@adaptor, \"rule rewrite_tree_alternative\")\n begin\n # at line 366:9: lp= '(' rewrite_tree_alternative ')'\n lp = match(T__81, TOKENS_FOLLOWING_T__81_IN_rewrite_tree_block_2596) \n if @state.backtracking == 0\n stream_T__81.add(lp)\n end\n @state.following.push(TOKENS_FOLLOWING_rewrite_tree_alternative_IN_rewrite_tree_block_2598)\n rewrite_tree_alternative153 = rewrite_tree_alternative\n @state.following.pop\n if @state.backtracking == 0\n stream_rewrite_tree_alternative.add(rewrite_tree_alternative153.tree)\n end\n char_literal154 = match(T__83, TOKENS_FOLLOWING_T__83_IN_rewrite_tree_block_2600) \n if @state.backtracking == 0\n stream_T__83.add(char_literal154)\n end\n # AST Rewrite\n # elements: rewrite_tree_alternative\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream(\"rule return_value\", return_value.tree) : subtree_stream(\"token return_value\")\n\n root_0 = @adaptor.create_flat_list!\n # 367:6: -> ^( BLOCK[$lp,\\\"BLOCK\\\"] rewrite_tree_alternative EOB[$lp,\\\"EOB\\\"] )\n # at line 367:9: ^( BLOCK[$lp,\\\"BLOCK\\\"] rewrite_tree_alternative EOB[$lp,\\\"EOB\\\"] )\n root_1 = @adaptor.create_flat_list!\n root_1 = @adaptor.become_root(@adaptor.create!(BLOCK, lp, \"BLOCK\"), root_1)\n\n @adaptor.add_child(root_1, stream_rewrite_tree_alternative.next_tree)\n @adaptor.add_child(root_1, @adaptor.create!(EOB, lp, \"EOB\"))\n\n @adaptor.add_child(root_0, root_1)\n\n\n\n return_value.tree = root_0\n\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look(-1)\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing(root_0)\n @adaptor.set_token_boundaries(return_value.tree, return_value.start, return_value.stop)\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node!(@input, return_value.start, @input.look(-1), re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out(__method__, 34)\n\n end\n \n return return_value\n end",
"def each_with_level(objects)\n path = [nil]\n objects.sort_by(&left_column_name.to_sym).each do |o|\n if o._parent_id != path.last\n # we are on a new level, did we decent or ascent?\n if path.include?(o._parent_id)\n # remove wrong wrong tailing paths elements\n path.pop while path.last != o._parent_id\n else\n path << o._parent_id\n end\n end\n yield(o, path.length - 1)\n end\n end",
"def get_children_recursion(node)\n\tif node.class == SOCIAL_NETWORK[0].class\n\t\tnode.children.each do |child|\n\t\t\tget_children_recursion(delegate_word(child))\n\t\tend\n\tend\nend",
"def deep_each\n \n end",
"def deep_cur\n self.cur.deep_cur\n end",
"def left_recursive?\n any { |r| r.left_recursive? } \n end",
"def descendants_r(*args)\n pending = [self]\n des = []\n while !pending.empty?\n e = pending.pop\n e.children(*args).each do |c|\n if !des.include?(c)\n des << c\n pending.push(c)\n end\n end\n end\n des\n end",
"def apply_children\n \n end",
"def to_recursive\n self\n end",
"def apply_nesting(*args, &block)\n if(args.include?(:shallow))\n apply_shallow_nesting(&block)\n else\n apply_deep_nesting(&block)\n end\n end",
"def arrange_serializable options={}, nodes=nil, &block\n nodes = arrange(options) if nodes.nil?\n nodes.map do |parent, children|\n if block_given?\n yield parent, arrange_serializable(options, children, &block)\n else\n parent.serializable_hash.merge 'children' => arrange_serializable(options, children)\n end\n end\n end",
"def arrange_serializable options={}, nodes=nil, &block\n nodes = arrange(options) if nodes.nil?\n nodes.map do |parent, children|\n if block_given?\n yield parent, arrange_serializable(options, children, &block)\n else\n parent.serializable_hash.merge 'children' => arrange_serializable(options, children)\n end\n end\n end",
"def async\n recursive.deep_dup\n end",
"def each &block\n return enum_for(:each) unless block_given?\n return if !@root\n @root.each &block\n end",
"def visit(node, &block)\n before = current.dup\n enter(node)\n yield node, current, before\n node.children.each do |child|\n if child.kind_of?(Parser::AST::Node)\n visit(child, &block)\n end\n end\n leave(node)\n end",
"def result_to_xml(result, options = {}, &block)\n inner_recursion = options.delete(:inner_recursion) \n result_set = inner_recursion ? result : result.dup\n \n parent_id = (options.delete(:parent_id) || result_set.first[result_set.first.parent_col_name]) rescue nil\n options[:nested] = true unless options.key?(:nested)\n \n options[:except] ||= []\n [:left_column, :right_column, :parent_column].each do |opt|\n column = acts_as_nested_set_options[opt].intern\n options[:except] << column unless options[:except].include?(column)\n end\n \n options[:indent] ||= 2\n options[:builder] ||= Builder::XmlMarkup.new(:indent => options[:indent])\n options[:builder].instruct! unless options.delete(:skip_instruct)\n \n record = options.delete(:record)\n root = options.delete(:root) || :nodes\n children = options.delete(:children) || :children\n \n attrs = {}\n attrs[:xmlns] = options[:namespace] if options[:namespace] \n \n siblings = options[:nested] ? result_set.select { |s| s.parent_id == parent_id } : result_set \n options[:builder].tag!(root, attrs) do\n siblings.each do |sibling|\n result_set.delete(sibling) if options[:nested] \n procs = options[:procs] ? options[:procs].dup : []\n procs << Proc.new { |opts| block.call(opts, sibling) } if block_given?\n if options[:nested] \n proc = Proc.new do |opts| \n proc_opts = opts.merge(:parent_id => sibling.id, :root => children, :record => record, :inner_recursion => true) \n proc_opts[:procs] ||= options[:procs] if options[:procs]\n proc_opts[:methods] ||= options[:methods] if options[:methods]\n sibling.class.result_to_xml(result_set, proc_opts, &block)\n end\n procs << proc\n end \n opts = options.merge(:procs => procs, :skip_instruct => true, :root => record) \n sibling.to_xml(opts)\n end\n end\n options[:builder].target!\n end",
"def recursive_traverse(node, current_state)\n Log.debug \"visiting #{node.name}\"\n current_state.timelines.first << node.name\n if node.downstream.empty?\n Log.debug \"#{node.name} has no downstream nodes. Reached end of timeline\"\n return current_state\n elsif node.downstream.size == 1\n Log.debug \"#{node.name} only one downstream node. Moving to it now.\"\n return recursive_traverse node.downstream.first, current_state\n else\n Log.debug \"#{node.name} has #{node.downstream.size} downstream nodes. Investigating merge options.\"\n\n timeline_forks = node.downstream.collect { |down|\n downstream_fork_states = recursive_traverse(down, TimelineState.new).timelines\n raise \"Downstream partial merge detected. This type of merge is not yet supported\" unless downstream_fork_states.size == 1\n downstream_fork_states.first\n }\n common_elems = timeline_forks.inject { |sum, nex| sum & nex }\n\n common_elem = common_elems.first\n base_forks = timeline_forks.select { |f| f.first.equal? common_elem }\n if common_elem.nil?\n current_state.state = 'No merge is possible'\n\n timelines = timeline_forks.collect { |f|\n (current_state.timelines + f).flatten\n }\n\n current_state.timelines = timelines\n\n elsif base_forks.empty?\n current_state.state = 'Partial merge is possible'\n\n common_elem_index = timeline_forks.first.find_index common_elem\n agreed_tail_arry = timeline_forks.first.slice(common_elem_index, timeline_forks.first.size - 1)\n\n disputed_events_array = timeline_forks.collect { |f|\n term = f.find_index common_elem\n f.slice(0, term)\n }\n\n # could iterate over permuations of parallel events here to list all possible timelines, but outside of problem description\n possible_timelines = disputed_events_array.collect { |p|\n (current_state.timelines + p + agreed_tail_arry).flatten\n }\n\n current_state.timelines = possible_timelines\n\n elsif base_forks.size == 1\n current_state.state = 'Merge is possible'\n Log.debug 'Full merge possible'\n\n longest_merge_array = timeline_forks.sort_by(&:size).last\n current_state.timelines = [(current_state.timelines + longest_merge_array).flatten]\n else\n raise 'Only one base fork should be found!'\n end\n\n end\n current_state\n end",
"def all_clades tree, metadata = nil\n return enum_for(:all_clades, tree, metadata) unless block_given?\n\n tree.clade_nodes.reverse.each do |node|\n yield Clade.new node, tree, metadata\n end\n end",
"def dfs_object(root_node, target)\n #two base cases\n return root_node if root_node.value == target\n # return nil if root_node.parent.nil? #when there are no parents, we know we're back at the actual root of the tree\n\n root_node.children.each do |child_node|\n result = dfs(child_node, target)\n\n #returning nil at this point would cut short\n if result #is not nil\n return result\n end\n end\n\n nil\nend",
"def preorder_iterative(&block)\n stack = []\n\n stack.push(@root)\n until stack.empty?\n node = stack.pop\n yield node\n\n stack.push(node.right) if node.right\n stack.push(node.left) if node.left\n end\n end",
"def invert_tree_dfs_iter(root)\n stack = [root]\n while !stack.empty?\n node = stack.pop()\n next if !node\n node.left, node.right = node.right, node.left\n stack += [node.left, node.right]\n end\n\n root\nend",
"def get_query_options(&block)\n ActsAsRecursiveTree::Options::QueryOptions.from(&block)\n end",
"def deep_transform! &block\n\n\t\tdo_deep_transform_on_self_(&block)\n\tend",
"def breadth_first_scan_old(root, &block)\n if root.file?\n yield root\n return\n end\n\n children = Pathname(root).children.sort\n begin\n children.each { |child| yield child } # breadth\n children.each { |child| breadth_first_scan(child, &block) if child.directory? }\n rescue Errno::EACCES, Errno::EPERM => e\n STDERR.puts(\"Error: #{e}\".red)\n end\nend",
"def recalculate!(tree,order,memo=nil,&block)\n if tree\n\n case order\n when :postorder\n \n arr = tree.children.map{ |t| recalculate!(t,order,memo,&block) }\n result = block.call(arr.push tree.data)\n tree.data = result\n\n\n when :preorder\n \n tree.data = yield tree, memo\n memo = tree.data\n\n tree.children.each do |t|\n recalculate!(t,order,memo,&block)\n end\n\n when :inorder\n raise ArgumentError unless self.tree.is_a? BinaryTree\n recalculate!(tree.left,order,memo,&block)\n\n tree.data = yield tree, memo\n memo = tree.data\n\n recalculate!(tree.right,order,memo,&block)\n\n end\n end\n end",
"def traverse(&block)\n\t\t\t\treturn to_enum(:traverse) unless block_given?\n\t\t\t\t\n\t\t\t\ttraverse_recurse(@order-1, 0, 0, self.origin, self.size, &block)\n\t\t\tend",
"def walk(fexp, tree)\n\n ftree = Ruote.compact_tree(@ps.current_tree(fexp))\n\n if ftree[0] != tree[0] || ftree[1] != tree[1]\n #\n # if there is anything different between the current tree and the\n # desired tree, let's force a re-apply\n\n register(MutationPoint.new(fexp.fei, tree, :re_apply))\n\n elsif ftree[2] == tree[2]\n #\n # else, if the tree children are the same, exit, there is nothing to do\n\n return\n\n else\n\n register(MutationPoint.new(fexp.fei, tree, :update))\n #\n # NOTE: maybe a switch for this mutation not to be added would\n # be necessary...\n\n if fexp.is_concurrent?\n #\n # concurrent expressions follow a different heuristic\n\n walk_concurrence(fexp, ftree, tree)\n\n else\n #\n # all other expressions are considered sequence-like\n\n walk_sequence(fexp, ftree, tree)\n end\n end\n end",
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def expand_parents node\n _path = node.tree_path\n _path.each do |e|\n # if already expanded parent then break we should break\n #set_expanded_state(e, true) \n expand_node(e)\n end\n end",
"def each_with_level &block\n # @todo A bit of a hack that I would like to fix one day...\n @root.children.each do |element|\n recursive_each_with_level element, 1, block\n end\n end",
"def rewrite_tree_block\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 34 )\n return_value = RewriteTreeBlockReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n lp = nil\n char_literal154 = nil\n rewrite_tree_alternative153 = nil\n\n tree_for_lp = nil\n tree_for_char_literal154 = nil\n stream_T__81 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__81\" )\n stream_T__83 = ANTLR3::AST::RewriteRuleTokenStream.new( @adaptor, \"token T__83\" )\n stream_rewrite_tree_alternative = ANTLR3::AST::RewriteRuleSubtreeStream.new( @adaptor, \"rule rewrite_tree_alternative\" )\n begin\n # at line 357:9: lp= '(' rewrite_tree_alternative ')'\n lp = match( T__81, TOKENS_FOLLOWING_T__81_IN_rewrite_tree_block_2591 )\n if @state.backtracking == 0\n stream_T__81.add( lp )\n end\n @state.following.push( TOKENS_FOLLOWING_rewrite_tree_alternative_IN_rewrite_tree_block_2593 )\n rewrite_tree_alternative153 = rewrite_tree_alternative\n @state.following.pop\n if @state.backtracking == 0\n stream_rewrite_tree_alternative.add( rewrite_tree_alternative153.tree )\n end\n char_literal154 = match( T__83, TOKENS_FOLLOWING_T__83_IN_rewrite_tree_block_2595 )\n if @state.backtracking == 0\n stream_T__83.add( char_literal154 )\n end\n # AST Rewrite\n # elements: rewrite_tree_alternative\n # token labels: \n # rule labels: return_value\n # token list labels: \n # rule list labels: \n # wildcard labels: \n if @state.backtracking == 0\n\n return_value.tree = root_0\n stream_return_value = return_value ? subtree_stream( \"rule return_value\", return_value.tree ) : subtree_stream( \"token return_value\" )\n\n root_0 = @adaptor.create_flat_list\n # 358:6: -> ^( BLOCK[$lp,\\\"BLOCK\\\"] rewrite_tree_alternative EOB[$lp,\\\"EOB\\\"] )\n # at line 358:9: ^( BLOCK[$lp,\\\"BLOCK\\\"] rewrite_tree_alternative EOB[$lp,\\\"EOB\\\"] )\n root_1 = @adaptor.create_flat_list\n root_1 = @adaptor.become_root( @adaptor.create( BLOCK, lp, \"BLOCK\" ), root_1 )\n\n @adaptor.add_child( root_1, stream_rewrite_tree_alternative.next_tree )\n @adaptor.add_child( root_1, @adaptor.create( EOB, lp, \"EOB\" ) )\n\n @adaptor.add_child( root_0, root_1 )\n\n\n\n return_value.tree = root_0\n\n end# - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n if @state.backtracking == 0\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 34 )\n\n end\n \n return return_value\n end",
"def dfs_rec(target, current_node=@root)\n return if current_node.nil?\n return current_node if current_node.value == target\n dfs_rec(target, current_node.left_child) ||\n dfs_rec(target, current_node.right_child)\n\tend",
"def deep_transform_values(depth = 0, &block)\n transform_values do |value|\n recursive_deep_transform_value(value, :deep_transform_values, depth + 1, &block)\n end.tap { reset_seen(depth) }\n end",
"def test_should_block_recursion_in_tree\n group = Group.find @greeks_group.id\n\n assert_raises(RecursionInTree) { group.parent = Group.find @cretes_group.id }\n end",
"def traverse(&block)\n\t\ttraverse(@current_node, &block)\n\tend",
"def each_leaf#:yields: leaf\n leafs.compact!\n \n leafs.each do |leaf|\n yield leaf\n end\n end",
"def grow_tree(root, nodes, adopt_fn)\n kids = nodes.select { |w| adopt_fn.call(root, w) }\n branches = kids.map { |k| grow_tree(k, nodes - [root], adopt_fn) }\n { root => branches.reduce(&:merge) }\nend",
"def subtrees\n yield self\n each do |child|\n if child.kind_of? Tree\n child.subtrees do |tree|\n yield tree\n end\n end\n end\n end",
"def traverse(parent, &block)\n\n end",
"def simplify\n if block_given?\n head = @node.find_head_noun\n head_index = @node.children.each.with_index{|e,i| break i if e.to_literal == head.to_literal }\n head_index = nil unless Numeric === head_index\n if head_index == @node.children.size - 1\n @node.children.size.times do |index|\n yield @node.children[index..-1].map{|c| c.to_literal }.join(\" \")\n end\n else\n yield @node.to_literal\n if head_index\n children = @node.children[0..head_index]\n children.size.times do |index|\n yield children[index..-1].map{|c| c.to_literal }.join(\" \")\n end\n end\n end\n else\n enum_for(:simplify)\n end\n end",
"def all_descendants_sql_by_node(node)\n column_names_initial_select = column_names.join(',')\n column_names_recursive_select = column_names.map { |c| 't.'+ c }.join(',')\n\n sql = \"\n WITH RECURSIVE walk_tree_to_deep(#{column_names_initial_select}) AS\n (\n SELECT #{column_names_initial_select} FROM nodes WHERE parent_id = #{node.id}\n UNION ALL\n SELECT #{column_names_recursive_select}\n FROM nodes t, walk_tree_to_deep ft WHERE t.parent_id = ft.id\n )\n SELECT * FROM walk_tree_to_deep ORDER BY id\n \"\n\n find_by_sql(sql)\n end",
"def apply_deep_nesting(*args, &block)\n outputs = collect_outputs\n nested_stacks(:with_resource).each do |stack, resource|\n unless stack.nested_stacks.empty?\n stack.apply_deep_nesting(*args)\n end\n unless stack.root?\n stack.compile.parameters.keys!.each do |parameter_name|\n next if stack.compile.parameters.set!(parameter_name).stack_unique == true\n if !stack.parent.compile.parameters.data![parameter_name].nil?\n resource.properties.parameters.set!(parameter_name, resource.ref!(parameter_name))\n elsif output_name = output_matched?(parameter_name, outputs.keys)\n next if outputs[output_name] == stack\n stack_output = stack.make_output_available(output_name, outputs)\n resource.properties.parameters.set!(parameter_name, stack_output)\n end\n end\n end\n end\n if block_given?\n extract_templates(&block)\n end\n compile\n end",
"def not_reset_scope?(node); end",
"def traverse_down(&block)\n block.call(self)\n if(!children.nil?)\n children.each{ |child| child.traverse_down(&block) }\n end\n end",
"def each(&block)\n tree.each(&block)\n end",
"def traverse_tree(idea=self, depth=1, &blk)\n if idea.has_citations?\n blk.call(idea, depth)\n depth += 1\n idea.citations.each { |citation| traverse_tree(citation, depth, &blk) }\n else\n blk.call(idea, depth)\n end\n end",
"def dfs_recursive(query, node=@tree) \n return nil if node.nil?\n return node if query == node.value\n\n# left search equals if the left child is not nil then call the method with target & left child node parameters, otherwise is nil\n left_search = node.left_child != nil ? dfs_recursive(query, node.left_child) : nil\n\n# return if left search does not result in nil\n return left_search if left_search != nil\n right_search = node.right_child != nil ? dfs_recursive(query, node.right_child) : nil\n return right_search if right_search != nil \n end",
"def preorder_traversal_rec(root)\n res = []\n preorder_dfs(root, res)\n res\nend",
"def dfs_rec(tree, value)\n return nil if tree.nil?\n\n p tree.value\n return tree if tree.value == value\n\n left = dfs_rec(tree.left, value)\n return left if left && left.value == value\n\n\n right = dfs_rec(tree.right, value)\n return right if right && right.value == value\n\nend",
"def dfs(&block)\n yield @data\n @nodes.each do |node|\n node.dfs(&block)\n end\n end",
"def union_all!\n @union_all = true if recursive?\n end",
"def additive_changeset(node)\n {\n 'children_attribs' => [{\n 'id' => node.c[0].id,\n 'option_attribs' => { 'id' => node.c[0].option_id, 'name_translations' => {'en' => 'Animal'} },\n 'children_attribs' => [\n {\n 'id' => node.c[0].c[0].id,\n 'option_attribs' => { 'id' => node.c[0].c[0].option_id, 'name_translations' => {'en' => 'Cat'} },\n 'children_attribs' => 'NONE'\n },\n {\n 'id' => node.c[0].c[1].id,\n 'option_attribs' => { 'id' => node.c[0].c[1].option_id, 'name_translations' => {'en' => 'Dog'} },\n 'children_attribs' => 'NONE'\n },\n {\n 'option_attribs' => { 'name_translations' => {'en' => 'Ocelot'} },\n 'children_attribs' => 'NONE'\n }\n ]\n }, {\n 'id' => node.c[1].id,\n 'option_attribs' => { 'id' => node.c[1].option_id, 'name_translations' => {'en' => 'Plant'} },\n 'children_attribs' => [\n {\n 'id' => node.c[1].c[0].id,\n 'option_attribs' => { 'id' => node.c[1].c[0].option_id, 'name_translations' => {'en' => 'Tulip'} },\n 'children_attribs' => 'NONE'\n },\n {\n 'id' => node.c[1].c[1].id,\n 'option_attribs' => { 'id' => node.c[1].c[1].option_id, 'name_translations' => {'en' => 'Oak'} },\n 'children_attribs' => 'NONE'\n }\n ]\n }]\n }\n end"
] |
[
"0.6101633",
"0.58727944",
"0.5860632",
"0.58225715",
"0.57552505",
"0.57173055",
"0.570878",
"0.5668911",
"0.5499742",
"0.5494343",
"0.5467305",
"0.54537404",
"0.5439365",
"0.5438485",
"0.53512436",
"0.5348822",
"0.53355944",
"0.5330341",
"0.5294763",
"0.5254271",
"0.52419925",
"0.5207044",
"0.51975304",
"0.5192644",
"0.5192512",
"0.5162568",
"0.5162568",
"0.5156337",
"0.51504266",
"0.5140295",
"0.5139779",
"0.51226324",
"0.51163393",
"0.5106952",
"0.51055944",
"0.51052773",
"0.50928456",
"0.5062925",
"0.505607",
"0.5049903",
"0.5043584",
"0.5043584",
"0.5037204",
"0.5035187",
"0.5026921",
"0.5019772",
"0.50038713",
"0.5000143",
"0.4999719",
"0.49979228",
"0.49950278",
"0.49702257",
"0.49658975",
"0.49610522",
"0.49569732",
"0.49568412",
"0.49518782",
"0.4950218",
"0.4950218",
"0.49415573",
"0.49383336",
"0.49314663",
"0.49312338",
"0.49293154",
"0.49214694",
"0.49154675",
"0.49145365",
"0.49116254",
"0.4910677",
"0.4910096",
"0.49077365",
"0.4900872",
"0.48822492",
"0.48811483",
"0.48808205",
"0.48808205",
"0.4880204",
"0.48799348",
"0.48779434",
"0.48743394",
"0.48670858",
"0.48653132",
"0.48631212",
"0.48614198",
"0.4856924",
"0.48557973",
"0.48493156",
"0.48465145",
"0.48414373",
"0.48275846",
"0.48272884",
"0.4826268",
"0.48248804",
"0.48236376",
"0.48193318",
"0.48181212",
"0.48104444",
"0.48020393",
"0.47994125",
"0.47818246"
] |
0.53558385
|
14
|
Support PostgreSQL 14+ CTE SEARCH/CYCLE clauses
|
def select_with_sql_cte(sql, cte)
super
select_with_sql_cte_search_cycle(sql, cte)
end
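For context on the row above: PostgreSQL 14 added SEARCH and CYCLE clauses that attach to a recursive CTE, which is the kind of SQL a hook like select_with_sql_cte_search_cycle would append. Below is a minimal sketch of that syntax, wrapped in a Ruby heredoc to match the other snippets in this dump; the nodes(id, parent_id) table and the order_path/is_cycle/cycle_path names are illustrative assumptions, not part of the dataset row.

# Sketch only (assumed schema: nodes(id, parent_id); not from the dataset).
# Shows the PostgreSQL 14+ SEARCH clause (depth-first ordering of CTE rows)
# and CYCLE clause (cycle detection so recursion terminates on cyclic data).
sql = <<~SQL
  WITH RECURSIVE tree(id, parent_id) AS (
    SELECT id, parent_id FROM nodes WHERE parent_id IS NULL
    UNION ALL
    SELECT n.id, n.parent_id FROM nodes n JOIN tree t ON n.parent_id = t.id
  )
  SEARCH DEPTH FIRST BY id SET order_path
  CYCLE id SET is_cycle USING cycle_path
  SELECT * FROM tree ORDER BY order_path
SQL

Ordering by order_path yields depth-first output, and rows flagged by is_cycle mark where the recursion was cut off.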
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def immediate_subqueries\n my_nodes_tagged(:subquery)\n end",
"def find_related_nodes_via_cypher(query_string, options = {})\n query_string = \"\n #{neo_self_match_clause}\n #{query_string}\n \"\n # t1 = Time.now\n result = CypherResult.new(Neo4jDatabase.execute(query_string))\n # t2 = Time.now\n result.to_active_record(options) || []\n # t3 = Time.now\n\n \n # p \"===\", (t2-t1)*1000.0, (t3-t2)*1000.0\n # result.to_active_record || []\n end",
"def nodes_within(src, chk)\n s = (src.is_a?(Array) ? src : [src]).map{|x|x.is_a?(ActiveRecord::Base) ? eval(\"x.#{configuration[:primary_key]}\") : x.to_s}\n c = (chk.is_a?(Array) ? chk : [chk]).map{|x|x.is_a?(ActiveRecord::Base) ? eval(\"x.#{configuration[:primary_key]}\") : x.to_s}\n if(s.empty? || c.empty?)\n nil\n else\n query = \n \"(WITH RECURSIVE crumbs AS (\n SELECT #{configuration[:class].table_name}.*, 0 AS depth FROM #{configuration[:class].table_name} WHERE #{configuration[:primary_key]} IN (#{s.join(', ')})\n UNION ALL\n SELECT alias1.*, crumbs.depth + 1 FROM crumbs JOIN #{configuration[:class].table_name} alias1 on alias1.#{configuration[:foreign_key]} = crumbs.#{configuration[:primary_key]}\n #{configuration[:max_depth] ? \"WHERE crumbs.depth + 1 < #{configuration[:max_depth].to_i}\" : ''}\n ) SELECT * FROM crumbs WHERE #{configuration[:primary_key]} IN (#{c.join(', ')})) as #{configuration[:class].table_name}\"\n if(rails_3?)\n configuration[:class].from(query)\n else\n configuration[:class].scoped(:from => query)\n end\n end\n end",
"def find_all_in_scope(node, predicate); end",
"def findCicle(nodes, idInicio, passed)\n if(!passed.any?{|id| id == nodes.operationId})\n passed.push(nodes.operationId)\n nodes.entradas.each { \n |node|\n if(node.operationId != idInicio)\n return findCicle(node, idInicio, passed)\n else\n return true\n end \n }\n else \n return true\n end\n return false\nend",
"def pipe_cte_with!(subquery)\n return self unless subquery.try(:with_values?)\n\n # Add subquery CTE's to the parents query stack. (READ THE SPECIAL NOTE ABOVE!)\n if @scope.with_values?\n @scope.cte.pipe_cte_with!(subquery.cte)\n else\n # Top level has no with values\n @scope.with!(subquery.cte)\n end\n\n self\n end",
"def scan_patterns\n @max_level = 0\n @root.bfs do |r|\n reached = false\n curr_instances = yield(r.parent_id)\n curr_instances.each do |c_instance|\n q = @query.new.extend(Pbuilder::Query)\n q = q.search_next_instances(c_instance.uri, r.node.edge, r.node.value)\n q.execute do |i|\n @action.call(i, r.id, r.level, r.node.edge, c_instance.id)\n reached = true\n end\n end if ! curr_instances.nil?\n @max_level = r.level if r.level > @max_level\n reached\n end if @pass\n @max_level\n end",
"def non_deterministic_search(goal_vertex)\n @explored = {}\n @stack = [] #\n non_deterministic_recursive(self, goal_vertex)\n @stack\n end",
"def find_candidates\n scout(exp: expression, depth: 0)\n end",
"def match(nodes, offset)\n if offset >= nodes.length\n return min_repeats.zero? ? offset : nil\n end\n\n count = 0\n nodes[offset...nodes.length].each_with_index do |n, i|\n next if n.ignorable?\n\n return returnable(nodes, i + offset + 1) if count == max_repeats\n\n if n.traversible? && n.name == seeking\n count += 1\n return returnable(nodes, i + offset + 1) if count == max_repeats\n\n next\n end\n\n return count >= min_repeats ? returnable(nodes, i + offset) : nil\n end\n count < min_repeats ? nil : returnable(nodes, nodes.length) # all nodes were consumed\n end",
"def detect_cycle_in_graph(edges)\nend",
"def subsequents(node)\n siblings[(position(node) + 1)..]\n end",
"def ret_matching_nodes(parent_idh)\n if parent_idh[:model_name] == :node\n return [parent_idh]\n end\n filter = [:eq, :assembly_id, parent_idh.get_id()]\n if node_filter = ret_filter(pattern, :node)\n filter = [:and, filter, node_filter]\n end\n sp_hash = {\n cols: [:id, :group_id, :display_name],\n filter: filter\n }\n Model.get_objs(parent_idh.createMH(:node), sp_hash)\n end",
"def run_recursive_loop &block\n results = yield\n return results\nend",
"def dfs_rec(data)\n @root.each { |node| return node if data == node.data }\n end",
"def grep(pattern, treeish=head) # :yields: path, blob\n options = pattern.respond_to?(:merge) ? pattern.dup : {:e => pattern}\n options.delete_if {|key, value| nil_or_empty?(value) }\n options = options.merge!(\n :cached => true,\n :name_only => true,\n :full_name => true\n )\n \n unless commit = grit.commit(treeish)\n raise \"unknown commit: #{treeish}\"\n end\n \n sandbox do |git, work_tree, index_file|\n git.read_tree({:index_output => index_file}, commit.id)\n git.grep(options).split(\"\\n\").each do |path|\n yield(path, (commit.tree / path))\n end\n end\n self\n end",
"def dfs_helper(start_node)\n\n ret_list = [start_node.value]\n # Your code here\n start_node.visited = true\n start_node.edges.each do |edge|\n unless edge.node_to.visited\n ret_list += dfs_helper(edge.node_to)\n end\n end\n return ret_list\n end",
"def tree_grep(pattern, treeish=head) # :yields: path, tree\n options = pattern.respond_to?(:merge) ? pattern.dup : {:e => pattern}\n options.delete_if {|key, value| nil_or_empty?(value) }\n \n unless commit = grit.commit(treeish)\n raise \"unknown commit: #{treeish}\"\n end\n \n sandbox do |git, work_tree, index_file|\n postfix = options.empty? ? '' : begin\n grep_options = git.transform_options(options)\n \" | grep #{grep_options.join(' ')}\"\n end\n \n stdout, stderr = git.sh(\"#{Grit::Git.git_binary} ls-tree -r --name-only #{git.e(commit.id)} #{postfix}\")\n stdout.split(\"\\n\").each do |path|\n yield(path, commit.tree / path)\n end\n end\n self\n end",
"def start_nodes(rows)\n rows.reject do |row|\n rows.any? do |other_row|\n other_row.to == row.from\n end\n end\nend",
"def solution(t)\n # write your code in Ruby 2.2\n depth = 0\n childs = []\n\n childs << t.l if t.l\n childs << t.r if t.r\n\n while not childs.empty? do\n depth += 1\n\n cc = []\n childs.each do |t|\n cc << t.l if t.l\n cc << t.r if t.r\n end\n\n childs = cc\n end\n\n depth\nend",
"def dfs\n visited = {}\n vertexes.keys.each {|v| visited[v] = false}\n unvisited = find_first_unvisited visited\n search_results = []\n until unvisited.is_nothing?\n #find a depth first search tree and hash containing the order that each node is visited\n dpst = explore(unvisited.from_just)\n if dpst.is_nothing?\n return Nothing.new\n else\n dpst = dpst.from_just\n end\n if search_results.empty?\n search_results.push dpst\n else\n search_results.each_with_index do |result, i|\n tree = dpst[:tree]\n found = false\n result.each do |v|\n if tree[v] and result.length < tree.keys.length\n results[i] = dpst\n found = true\n break\n end\n end\n break if found\n end\n end\n # Mark each point in the path as visited\n dpst[:visit_order].each do |k|\n visited[k] = true\n end\n unvisited = find_first_unvisited visited\n end\n search_results\n end",
"def dependency(start, vertex, nested = T.unsafe(nil)); end",
"def traverse_df(aStartVertex, &_visitAction)\n visited = Set.new\n stack = []\n visitee = aStartVertex\n curr_edge = nil\n\n begin\n # print_vertex( 'Traversing', visitee)\n\n first_time = !visited.include?(visitee)\n if first_time\n yield(visitee)\n visited << visitee\n end\n\n case visitee\n when Rley::GFG::StartVertex\n if first_time\n stack.push(Branching.new(visitee, curr_edge))\n curr_edge = stack.last.next_edge\n elsif curr_edge.nil?\n # Error probably caused by missing terminal symbol object\n msg = \"Undefined grammar symbol #{visitee.label.sub(/^\\./, '')}\"\n raise StandardError, msg\n else\n # Skip both start and end vertices\n # Retrieve the corresponding return edge\n curr_edge = get_matching_return(curr_edge)\n end\n\n when Rley::GFG::EndVertex\n if stack.last.done?\n popped = stack.pop\n break if stack.empty?\n\n # puts \"Popped!\"\n return_key = popped.in_edge.key.sub(/^CALL/, 'RET')\n curr_edge = visitee.edges.find { |e| e.key == return_key }\n else\n curr_edge = stack.last.next_edge\n end\n\n else\n # All other vertex types have only one successor\n curr_edge = visitee.edges[0]\n end\n visitee = curr_edge.successor unless curr_edge.nil?\n end until stack.empty?\n # Now process the end vertex matching the initial start vertex\n last_one = end_vertex_for[aStartVertex.non_terminal]\n yield(last_one) unless visited.include?(last_one)\n end",
"def test_hierarchical_loops\n want = <<~EDI.gsub(/\\n/, \"\")\n ST*856*0001~\n BSN*00*??*19700101*00000000*0001~\n DTM*011*19700101~\n HL*1**S~\n TD1*CTN*1****G*0.1773127753*LB~\n TD5*Z*2*??*ZZ*UPS3~\n REF*PK*?~\n DTM*011*19700101~\n N1*ST*Sweeney Todd~\n N3*2705 Fleet St~\n N4*Birmingham*AL*35226*US~\n HL*2*1*O~\n PRF*00000007397108***19700101~\n HL*3*2*P~\n MAN*SM*?~\n HL*4*3*I~\n LIN*1*UP*860001662184*VP*860001662184~\n SN1*1*1*EA**1*EA~\n SLN*1**O*1*EA*59.95*PE~\n CTT*2*159.85~\n SE*21*0001\n EDI\n store = Eddy::Data::Store.new(time: @epoch)\n ts = Eddy::TransactionSets::TS856::TS.new(store)\n ts.BSN do |bsn|\n bsn.BSN01 = \"00\"\n bsn.BSN02 = \"??\"\n bsn.BSN03 = @epoch\n bsn.BSN04 = @epoch\n bsn.BSN05 = \"0001\"\n end\n ts.DTM do |dtm|\n dtm.DateTimeQualifier = \"011\" # Shipped\n dtm.Date = @epoch\n end\n ts.HL_SHIPMENT do |hl_s|\n hl_s.HL.HL01 = \"1\"\n hl_s.HL.HL03 = \"S\"\n hl_s.TD1.TD101 = \"CTN\"\n hl_s.TD1.TD102 = 1\n hl_s.TD1.TD106 = \"G\"\n hl_s.TD1.TD107 = (80.5 / 454) # 0.1773127753\n hl_s.TD1.TD108 = \"LB\"\n hl_s.TD5.TD501 = \"Z\"\n hl_s.TD5.TD502 = \"2\"\n hl_s.TD5.TD503 = \"??\"\n hl_s.TD5.TD504 = \"ZZ\"\n hl_s.TD5.TD505 = \"UPS3\"\n hl_s.REF.REF01 = \"PK\"\n hl_s.REF.REF02 = \"?\"\n hl_s.DTM.DTM01 = \"011\" # Shipped\n hl_s.DTM.DTM02 = @epoch\n hl_s.L_N1 do |n1|\n # N1\n n1.N1.N101 = \"ST\"\n n1.N1.Name = \"Sweeney Todd\"\n # N3\n n1.N3.AddressInformation1 = \"2705 Fleet St\"\n # N4\n n1.N4.CityName = \"Birmingham\"\n n1.N4.StateOrProvinceCode = \"AL\"\n n1.N4.PostalCode = \"35226\"\n n1.N4.CountryCode = \"US\"\n end\n hl_s.HL_ORDER do |hl_o|\n hl_o.HL.HL01 = \"2\"\n hl_o.HL.HL02 = \"1\"\n hl_o.HL.HL03 = \"O\"\n hl_o.PRF.PRF01 = \"00000007397108\"\n hl_o.PRF.PRF04 = @epoch\n end\n hl_s.HL_TARE do |hl_t|\n hl_t.HL.HL01 = \"3\"\n hl_t.HL.HL02 = \"2\"\n hl_t.HL.HL03 = \"P\"\n hl_t.MAN.MAN01 = \"SM\"\n hl_t.MAN.MAN02 = \"?\"\n end\n hl_s.HL_ITEM do |hl_i|\n hl_i.HL.HL01 = \"4\"\n hl_i.HL.HL02 = \"3\"\n hl_i.HL.HL03 = \"I\"\n hl_i.LIN.LIN01 = \"1\"\n hl_i.LIN.LIN02 = \"UP\"\n hl_i.LIN.LIN03 = \"860001662184\"\n hl_i.LIN.LIN04 = \"VP\"\n hl_i.LIN.LIN05 = \"860001662184\"\n hl_i.SN1.SN101 = \"1\"\n hl_i.SN1.SN102 = 1\n hl_i.SN1.SN103 = \"EA\"\n hl_i.SN1.SN105 = 1\n hl_i.SN1.SN106 = \"EA\"\n hl_i.L_SLN do |rep|\n rep.SLN.SLN01 = \"1\"\n rep.SLN.SLN03 = \"O\"\n rep.SLN.SLN04 = 1\n rep.SLN.SLN05 = \"EA\"\n rep.SLN.SLN06 = 59.95\n rep.SLN.SLN07 = \"PE\"\n end\n end\n end\n ts.CTT do |ctt|\n ctt.CTT01 = 2\n ctt.CTT02 = 159.85\n end\n result = ts.render()\n assert_equal(want, result)\n end",
"def dfs_rec(cur_node, value)\n return nil if cur_node.nil?\n \n if cur_node.value == value\n\tcur_node\n else\n tgt_node = nil\n cur_node.children.each do |child| \n\t tgt_node = dfs_rec(child, value)\n\t break unless tgt_node.nil?\n\tend\n\ttgt_node\n end\nend",
"def nodes_within?(src, chk)\n s = (src.is_a?(Array) ? src : [src]).map{|x|x.is_a?(ActiveRecord::Base) ? eval(\"x.#{configuration[:primary_key]}\") : x.to_s}\n c = (chk.is_a?(Array) ? chk : [chk]).map{|x|x.is_a?(ActiveRecord::Base) ? eval(\"x.#{configuration[:primary_key]}\") : x.to_s}\n if(s.empty? || c.empty?)\n false\n else\n q = configuration[:class].connection.select_all(\n \"WITH RECURSIVE crumbs AS (\n SELECT #{configuration[:class].table_name}.*, 0 AS level FROM #{configuration[:class].table_name} WHERE #{configuration[:primary_key]} IN (#{s.join(', ')})\n UNION ALL\n SELECT alias1.*, crumbs.level + 1 FROM crumbs JOIN #{configuration[:class].table_name} alias1 on alias1.#{configuration[:foreign_key]} = crumbs.#{configuration[:primary_key]}\n ) SELECT count(*) as count FROM crumbs WHERE #{configuration[:primary_key]} IN (#{c.join(', ')})\"\n )\n q.first['count'].to_i > 0\n end\n end",
"def subgraphs\n subs = []\n nodes_to_hit = @vertices.dup\n until nodes_to_hit.empty?\n subgraph = subgraph_with(nodes_to_hit.to_a.first)\n subs << subgraph\n nodes_to_hit -= subgraph.vertices\n end\n subs\n end",
"def search(start_key, end_key, limit, offset, reverse, with_keys)\n offset ||= 0\n \n start_node = find_by_prefix(start_key, reverse)\n !start_node and return []\n \n start_node = skip_nodes(start_node, offset, reverse)\n !start_node and return []\n \n collect_values(start_node, end_key, limit, reverse, with_keys)\n end",
"def unoptimized_contains_cycle(head)\n histo = {}\n node = head\n while node\n if histo[node]\n return true\n else\n histo[node] = true\n end\n node = node.next\n end\n false\nend",
"def dfs(root, target)\n return root if root.value == target\n root.children.each do |child|\n search_result = dfs(child, target)\n return search_result unless search_result.nil?\n end\n\n nil\nend",
"def traverse(&block); end",
"def traverse(&block); end",
"def dfs_iterative(target)\n stack = [self]\n\n until stack.empty?\n node = stack.pop\n\n return node if node.value == target\n\n node.children.each do |child|\n stack << child\n end\n end\n\n nil\n end",
"def query_selector_all(expr)\n Traverser.new(expr).evaluate(Context.new(node: self))\n end",
"def binary_search(opts={})\n # I have a lot of stuff to do for scouts\n # but instead i'm doing this\n # hizzah!\n count = 0\n \n until opts[:find].empty?\n new_search = []\n count += 1\n \n zipped = opts[:find].zip opts[:repo].between(opts[:find])\n zipped.each do |(n, list)|\n list << n[1]\n p = n[0]\n f = 1 # ??? why are these vars so NAMELESS\n \n list.each do |item|\n UI::debug \"narrowing #{f}:#{list.size} #{short item}\"\n \n if opts[:node_map].include? item\n if f <= 2\n opts[:on_find].call(p, item)\n else\n UI::debug \"narrowed branch search to #{short p}:#{short item}\"\n new_search << [p, item]\n end\n break\n end\n \n p, f = item, f*2\n end\n end\n \n opts[:find] = new_search\n end\n \n [opts[:find], count]\n end",
"def iterative_dfs\n (1..@map.cities.count).each do |i| # Path can't be longer than # cities\n result = dls(i)\n return result if result\n end\n return nil\n end",
"def match_all(data, &block)\n return nil if @root == nil\n i=0\n while (i<data.length)\n node = @root.find_forward(data, i, data.length-i)\n if (node!=nil && node.value!=nil)\n yield Item.new(i, true, node)\n i += node.length\n else\n i += 1\n end\n end\n end",
"def traverse; end",
"def tsort_each_child(node, &block)\n query(:object => node) do |statement|\n block.call(statement.subject)\n end\n end",
"def tsort_each_child(node, &block)\n query(:object => node) do |statement|\n block.call(statement.subject)\n end\n end",
"def tsort_each_child(node, &block)\n query(:object => node) do |statement|\n block.call(statement.subject)\n end\n end",
"def explore(pos)\n end",
"def visit_node(n); end",
"def solve\n node = @list.first_node\n traversed_nodes = {}\n\n loop do\n return node if traversed_nodes[node]\n traversed_nodes[node] = true\n node = node.next\n break unless node\n end\n end",
"def infinite_loops\n reachable_sets = @nodes.group_by(&:forward)\n reachable_sets.each do |reachable,nodes|\n yield reachable if reachable == nodes.to_set\n end\n end",
"def collect_all_active_occurences(entity)\n results = []\n queue = []\n entity_transformation = (entity.respond_to?(:transformation)) ? # Sketchup::ComponentInstance, Sketchup::Group, Sketchup::Image\n entity.transformation :\n IDENTITY\n queue.push([[entity], entity_transformation])\n until queue.empty?\n path, transformation = *queue.shift\n outer = path.first\n # If the outermost container is already the model, end the search.\n if outer.parent.is_a?(Sketchup::Model) || outer.parent.nil?\n # Check if this occurence of entity is below the active path,\n # that means whether the entity's path contains the active path.\n # Note: Sketchup::Model#active_path returns nil instead of empty array when in global context.\n if entity.model.active_path.nil? || (entity.model.active_path - path).empty?\n # Active path: entity's path is equal or deeper than active path\n results << transformation\n end\n # Otherwise look if it has siblings, ie. the parent has instances with the same entity.\n else\n instances = (outer.is_a?(Sketchup::ComponentDefinition)) ?\n outer.instances :\n (outer.respond_to?(:parent) && outer.parent.respond_to?(:instances)) ? # Sketchup::Drawingelement\n outer.parent.instances :\n [] # Sketchup::Model\n instances.each{ |instance|\n queue.push([[instance].concat(path), instance.transformation * transformation])\n }\n end\n end\n return results\n end",
"def plexus_search_iteration(options, waiting, color_map, visited_edges, result, recursive = false)\n # Fetch the next waiting vertex in the list.\n #sleep\n u = waiting.next\n options.handle_vertex(:enter_vertex, u)\n result << u\n\n # Examine all adjacent outgoing edges, but only those not previously traversed.\n adj_proc = options[:adjacent] || self.method(:adjacent).to_proc\n adj_proc.call(u, :type => :edges, :direction => :out).reject { |w| visited_edges[w] }.each do |e|\n e = e.reverse unless directed? or e.source == u # Preserves directionality where required.\n v = e.target\n options.handle_edge(:examine_edge, e)\n visited_edges[e] = true\n\n case color_map[v]\n # If it's unvisited, it goes into the waiting list.\n when :unvisited\n options.handle_edge(:tree_edge, e)\n color_map[v] = :waiting\n waiting.push(v)\n # If it's recursive (i.e. dfs), then call self.\n plexus_search_iteration(options, waiting, color_map, visited_edges, result, true) if recursive\n when :waiting\n options.handle_edge(:back_edge, e)\n else\n options.handle_edge(:forward_edge, e)\n end\n end\n\n # Done with this vertex!\n options.handle_vertex(:exit_vertex, u)\n color_map[u] = :visited\n end",
"def each\n cycles_found = []\n\n @graph.keys.each do |start_node|\n @ticker.tick\n next if cycles_found.include?(start_node)\n\n find_cycles(start_node).each do |cycle|\n unless cycles_found.include?(cycle.node)\n cycles_found << cycle.node\n yield cycle.node\n end\n end\n end\n end",
"def node_search\n while (current_node = queue.shift).present? && final.nil?\n @processed += 1\n yield(current_node)\n log \"\\rProcessing #{start.name}.... %d / %d / %d / %d @ %ds - depth: %d\", @unmarked, @requeued, @processed, @steps, (Time.now - @started), current_node.depth\n end\n\n format_results\n save_results if final.present?\n self.final_path\n end",
"def build_current_children_and_evaluate! opts={}\n these_asserts\n @current.clear\n if (@start_offset > final_offset) # see :note3\n @done = true\n evaluate_ok!\n return nil\n end\n @ok = nil\n @done = false\n @children_productions = production.children\n @ctxt = parse_context\n (start_offset..@children.length-1).each do |idx|\n @current.push idx\n break unless build_this_child_and_keep_going?(idx, opts)\n end\n @children_productions = nil\n @ctxt = nil\n evaluate_ok!\n nil\n end",
"def yield_statement\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 25 )\n return_value = YieldStatementReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n root_0 = nil\n\n _last = _first_0 = nil\n string_literal99 = nil\n expression100 = nil\n\n tree_for_string_literal99 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 134:5: ^( 'yield' ( expression )? )\n _save_last_1 = _last = @input.look\n _first_1 = nil\n root_1 = @adaptor.create_flat_list\n _last = @input.look\n string_literal99 = match( YIELD, TOKENS_FOLLOWING_YIELD_IN_yield_statement_745 )\n\n tree_for_string_literal99 = @adaptor.copy_node( string_literal99 )\n\n root_1 = @adaptor.become_root( tree_for_string_literal99, root_1 )\n\n\n\n if @input.peek == DOWN\n match( DOWN, nil )\n # at line 134:16: ( expression )?\n alt_28 = 2\n look_28_0 = @input.peek( 1 )\n\n if ( look_28_0.between?( AMP, AMP_ASGN ) || look_28_0 == POST_DECR || look_28_0.between?( GEQ, AREF ) || look_28_0.between?( GREATER, HAT ) || look_28_0.between?( ARROW, HAT_ASGN ) || look_28_0 == ASGN || look_28_0 == REGEX || look_28_0 == IN || look_28_0 == INCR || look_28_0.between?( INSTANCEOF, RSHIFT3 ) || look_28_0 == RSHIFT3_ASGN || look_28_0 == RSHIFT_ASGN || look_28_0 == LEQ || look_28_0.between?( LESS, SLASH ) || look_28_0 == SLASH_ASGN || look_28_0.between?( STAR, DECR ) || look_28_0 == STAR_ASGN || look_28_0 == LSHIFT || look_28_0.between?( DELETE, THIS ) || look_28_0.between?( MINUS, TILDE ) || look_28_0.between?( MINUS_ASGN, MOD ) || look_28_0.between?( MOD_ASGN, TYPEOF ) || look_28_0.between?( NEQ, UMINUS ) || look_28_0.between?( NEQQ, UNDEFINED ) || look_28_0 == NEW || look_28_0 == NOT || look_28_0.between?( NULL, UPLUS ) || look_28_0 == OBJECT || look_28_0.between?( EQ, OR_ASGN ) || look_28_0 == FALSE || look_28_0 == PIPE || look_28_0 == PIPE_ASGN || look_28_0 == PLUS || look_28_0.between?( ID, DOC ) )\n alt_28 = 1\n end\n case alt_28\n when 1\n # at line 134:16: expression\n _last = @input.look\n @state.following.push( TOKENS_FOLLOWING_expression_IN_yield_statement_747 )\n expression100 = expression\n @state.following.pop\n\n @adaptor.add_child( root_1, expression100.tree )\n\n\n end\n\n match( UP, nil )\n end\n @adaptor.add_child( root_0, root_1 )\n _last = _save_last_1\n\n\n\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 25 )\n\n end\n \n return return_value\n end",
"def select_with_sql_base\n opts[:with].any?{|w| w[:recursive]} ? \"WITH RECURSIVE \" : super\n end",
"def find(filters, &block)\n ds = db[:tokens.as(:t0)]\n f = filters[0]\n ds = ds.where(:t0__word_id=>f.word) if f.word\n ds = ds.where(:t0__tag_id=>f.tag) if f.tag\n i = 0\n filters[1..-1].each do |f|\n as = \"t#{i+=1}\"\n h = {}\n h[:\"#{as}__word_id\"] = f.word if f.word\n h[:\"#{as}__tag_id\"] = f.tag if f.tag\n g = {}\n g[:\"#{as}__word_id\"] = f.ex_word if f.ex_word\n g[:\"#{as}__tag_id\"] = f.ex_tag if f.ex_tag\n ds = ds.join(:tokens.as(as)){ |j, lj, js| {:text_id.qualify(j) => :text_id.qualify(lj), :position.qualify(j) => :position.qualify(lj) + 1} }.where(h).exclude(g)\n end\n select = ds.select(:t0__text_id.as(:text_id), :t0__src_begin.as(:src_begin), :\"t#{i}__src_end\".as(:src_end),\n :t0__tagged_begin.as(:tagged_begin), :\"t#{i}__tagged_end\".as(:tagged_end), :t0__position.as(:pos_begin), :\"t#{i}__position\".as(:pos_end))\n\n puts select.sql\n puts select.explain\n\n select.to_enum.each do |e|\n block.call [e[:text_id], e[:src_begin], e[:src_end], e[:tagged_begin], e[:tagged_end], e[:pos_begin], e[:pos_end]]\n end\n end",
"def dfs\n visited = Hash.new(false)\n @v.each do |vertex| \n visited.merge(explore(vertex)) if !visited[vertex]\n end\n return visited\n end",
"def solve\n graph = Graph.create(words, WordNode)\n graph.connect_nodes(:one_char_diff)\n\n end_node = graph.nodes_hash[end_word]\n start_node = graph.nodes_hash[start_word]\n start_node.cost = 0\n\n heap = Containers::Heap.new { |a, b| (a.cost <=> b.cost) == -1 }\n graph.nodes_hash.each do |k, v|\n heap.push v\n end\n\n puts heap.size\n # puts \"is empty?#{heap.empty?}\"\n until heap.empty? do\n current_node = heap.pop\n puts current_node.value, current_node.cost\n # puts current_node == end_node\n # puts current_node.value, end_node.value\n return graph.path(end_node) if current_node == end_node\n\n current_node.connected_nodes.each do |node|\n\n # puts node.visited\n unless node.visited\n cost = current_node.cost + 1\n if cost < node.cost\n node.cost = cost\n node.parent = current_node\n # puts \"changed parent\"\n # puts node.parent\n end\n end\n end\n\n current_node.visited = true\n end\n end",
"def depth_first_search_recursive(source)\n visited.add(source)\n\n source.neighbors.each do |neighbor|\n unless visited.include?(neighbor)\n depth_first_search_recursive(neighbor)\n meta[neighbor] = source\n end\n end\n end",
"def plexus_search_helper(op, options = {}, &block)\n return nil if size == 0\n result = []\n\n # Create the options hash handling callbacks.\n options = {:enter_vertex => block, :start => to_a[0]}.merge(options)\n options.instance_eval \"def handle_vertex(sym,u) self[sym].call(u) if self[sym]; end\"\n options.instance_eval \"def handle_edge(sym,e) self[sym].call(e) if self[sym]; end\"\n\n # Create a waiting list, which is a queue or a stack, depending on the op specified.\n # The first entry is the start vertex.\n waiting = [options[:start]]\n waiting.instance_eval \"def next; #{op.to_s}; end\"\n\n # Create a color map, all elements set to \"unvisited\" except for start vertex,\n # which will be set to waiting.\n color_map = vertices.inject({}) { |a,v| a[v] = :unvisited; a }\n color_map.merge!(waiting[0] => :waiting)\n options.handle_vertex(:start_vertex, waiting[0])\n options.handle_vertex(:root_vertex, waiting[0])\n\n # Perform the actual search until nothing is \"waiting\".\n until waiting.empty?\n # Loop till the search iterator exhausts the waiting list.\n visited_edges = {} # This prevents retraversing edges in undirected graphs.\n until waiting.empty?\n plexus_search_iteration(options, waiting, color_map, visited_edges, result, op == :pop)\n end\n # Waiting for the list to be exhausted, check if a new root vertex is available.\n u = color_map.detect { |key,value| value == :unvisited }\n waiting.push(u[0]) if u\n options.handle_vertex(:root_vertex, u[0]) if u\n end\n\n result\n end",
"def busqueda(g, d, h, nro_nodos, nodos_visitados, nro_nodos_en_caminos)\r\n #Luego, en esta variable dependiendo de si estamos en un objeto DFS o BFS, se definira el orden\r\n #en el que se seleccionaran los nodos para la busqueda. Ademas, esta variable tendra los elementos\r\n #adyacentes del nodo d ya sea en una pila o en una cola. \r\n pila_o_cola = self.orden_nodos(g,d)\r\n\r\n #pila_o_cola.estructura.each {|elem| puts elem}\r\n\r\n #Verificamos si la pila o cola, de adyacentes, incluye a \"h\"\r\n if pila_o_cola.estructura.include? h \r\n\r\n #De ser asi, se incluye el numero de nodos del recorrido que llevo a \"h\"\r\n nro_nodos_en_caminos << nro_nodos\r\n\r\n #Si la pila o cola no incluye a \"h\"\r\n else\r\n\r\n #Entonces mientras no este vacia, se recorre la estructura\r\n while not(pila_o_cola.vacio)\r\n\r\n #Luego se remueve un nodo de la estructura, que sera el siguiente que se usara\r\n #como \"inicio\" de la busqueda\r\n siguiente_nodo = pila_o_cola.remover\r\n\r\n #Si el nodo en cuestion no ha sido visitado\r\n if not(nodos_visitados.include? siguiente_nodo)\r\n #Lo agregamos al arreglo de visitados\r\n nodos_visitados << siguiente_nodo\r\n #Y hacemos una llamada recursiva al metodo busqueda pero esta vez partiendo\r\n #del nodo antes tomado. En esta llamada aumentamos el numero de nodos (nro_nodos)\r\n #en 1\r\n busqueda(g, siguiente_nodo, h , nro_nodos + 1 , nodos_visitados, nro_nodos_en_caminos)\r\n end\r\n end\r\n end\r\n\r\n #Luego de que acaba todo el procesamiento verificamos si en el arreglo \"nro_nodos_en_caminos\"\r\n #esta vacio, si es asi entonces significa que el nodo \"h\" no fue alcanzado entonces\r\n if nro_nodos_en_caminos.empty?\r\n #retornamos -1\r\n return -1\r\n end\r\n\r\n #Finalmente, si sobrevivimos todo lo anterior, llegados a este punto retornamos el numero de nodos que\r\n #fueron recorridos de \"d\" a \"h\". Recordamos que el arreglo \"nro_nodos_en_caminos\" contiene para todos los\r\n #caminos de \"d\" a \"h\", el numero de nodos recorridos.\r\n\r\n ## OJO: En este caso se retornara el nro de nodos recorridos del camino mas corto. ##\r\n\r\n #Sin embargo, si se quisiera el nro de nodos de solo el primer camino por el que se metio y encontro a \"h\"\r\n #bastaria con solo retornar \"nro_nodos_en_caminos[0]\".\r\n #Por otra parte, si se quisiera para cada camino el nro de nodos recorridos entonces basta con retornar\r\n #el arreglo completo, etc.\r\n \r\n return \"#{nro_nodos_en_caminos.min} nodos recorridos.\"\r\n end",
"def alias_node_query\n Category.left_outer_joins(:articles)\n .join_recursive do |query|\n query\n .connect_by(id: :parent_id)\n end\n end",
"def self_and_descendants\n # using _left_ for both sides here lets us benefit from an index on that column if one exists\n nested_set_scope.where(\n arel_table[primary_column_name].eq(self.primary_id).or(arel_table[total_order_column_name].gteq(total_order)).\n and(arel_table[total_order_column_name].lt(snumv/denv)))\n end",
"def traverse\n @result.clear\n @queue.clear\n\n @queue.enqueue(@node)\n @result.push @node\n\n\n while not @queue.empty?\n node = @queue.dequeue\n return @result unless node\n # puts \"Visiting node: #{node}\"\n return node if (@search and node==@search)\n node && node.children.each do |node|\n unless @result.include?(node)\n @result.push(node)\n @queue.enqueue(node)\n end\n end\n end\n return result\n end",
"def subgraph_with(vertex)\n vertices = Set.new\n edges = Set.new\n pending = [vertex]\n until pending.empty?\n this_node = pending.shift\n conn_edges, conn_nodes = neighbors(this_node)\n conn_nodes.each do |node|\n pending << node unless vertices.include?(node)\n end\n vertices << this_node\n edges << conn_edges\n end\n PipeGraph.new(edges, vertices)\n end",
"def dfs(start_vertex, &block)\n start_vertex.gray!\n @time += 1\n start_vertex.discovery = @time\n start_vertex.get_connections.each do |next_vertex|\n if next_vertex.white?\n next_vertex.predecessor = start_vertex\n dfs(next_vertex, &block)\n end\n end\n start_vertex.black!\n @time += 1\n start_vertex.finish = @time\n yield start_vertex if block_given?\n end",
"def expand_children node=:current_index\n $multiplier = 999 if !$multiplier || $multiplier == 0\n node = row_to_node if node == :current_index\n return if node.children.empty? # or node.is_leaf?\n #node.children.each do |e| \n #expand_node e # this will keep expanding parents\n #expand_children e\n #end\n node.breadth_each($multiplier) do |e|\n expand_node e\n end\n $multiplier = 0\n _structure_changed true\n end",
"def query(search_term)\n <<-SQL\n with recursive friends_graph (member_id, friend_id, path) as (\n select f.member_id, f.friend_id, array[f.member_id, f.friend_id] as path\n from friendships as f\n where member_id = #{member.id}\n union all\n select graph.member_id, f.friend_id, graph.path || ARRAY[f.friend_id]\n from friendships as f\n join friends_graph as graph\n on f.member_id = graph.friend_id\n and f.friend_id != ALL(graph.path)\n )\n\n select distinct path from friends_graph\n inner join headings\n on headings.member_id = friends_graph.friend_id\n where headings.name ilike '%#{search_term}%'\n SQL\n end",
"def build_subselect(key, o)\n stmt = Nodes::SelectStatement.new\n core = stmt.cores.first\n core.froms = o.relation\n core.wheres = o.wheres\n core.projections = [key]\n stmt.limit = o.limit\n stmt.offset = o.offset\n stmt.orders = []\n stmt\n end",
"def depth_first_search\n visited = {}\n timestamp = {}\n tree_edges = {}\n back_edges = {}\n cross_edges = {}\n forward_edges = {}\n count = 0\n\n # begin workaround removing depencency to order of Hash#each\n if @index.empty? then\n preference_of_nodes = nil\n else\n preference_of_nodes = {}.merge(@index)\n i = preference_of_nodes.values.max\n @graph.each_key do |node0|\n preference_of_nodes[node0] ||= (i += 1)\n end\n end\n # end workaround removing depencency to order of Hash#each\n\n dfs_visit = Proc.new { |from|\n visited[from] = true\n timestamp[from] = [count += 1]\n ary = @graph[from].keys\n # begin workaround removing depencency to order of Hash#each\n if preference_of_nodes then\n ary = ary.sort_by { |node0| preference_of_nodes[node0] }\n end\n # end workaround removing depencency to order of Hash#each\n ary.each do |to|\n if visited[to]\n if timestamp[to].size > 1\n if timestamp[from].first < timestamp[to].first\n \t# forward edge (black)\n \tp \"#{from} -> #{to} : forward edge\" if $DEBUG\n \tforward_edges[from] = to\n else\n \t# cross edge (black)\n \tp \"#{from} -> #{to} : cross edge\" if $DEBUG\n \tcross_edges[from] = to\n end\n else\n # back edge (gray)\n p \"#{from} -> #{to} : back edge\" if $DEBUG\n back_edges[from] = to\n end\n else\n # tree edge (white)\n p \"#{from} -> #{to} : tree edge\" if $DEBUG\n tree_edges[to] = from\n dfs_visit.call(to)\n end\n end\n timestamp[from].push(count += 1)\n }\n\n ary = @graph.keys\n # begin workaround removing depencency to order of Hash#each\n if preference_of_nodes then\n ary = ary.sort_by { |node0| preference_of_nodes[node0] }\n end\n # end workaround removing depencency to order of Hash#each\n ary.each do |node|\n unless visited[node]\n dfs_visit.call(node)\n end\n end\n return timestamp, tree_edges, back_edges, cross_edges, forward_edges\n end",
"def cyclic_nodes(root)\n copts = @options.reject { |k, v| k == :prune_cycle }\n cyclic = Set.new\n cycler = Visitor.new(copts) do |parent|\n children = @navigator.call(parent)\n # Look for a cycle back to the child.\n children.each do |child|\n index = cycler.lineage.index(child)\n if index then\n # The child is also a parent: add the nodes between\n # the two occurrences of the child in the lineage.\n cyclic.merge!(cycler.lineage[(index + 1)..-1])\n end\n end\n children\n end\n cycler.visit(root)\n cyclic\n end",
"def all_descendants_sql_by_node(node)\n column_names_initial_select = column_names.join(',')\n column_names_recursive_select = column_names.map { |c| 't.'+ c }.join(',')\n\n sql = \"\n WITH RECURSIVE walk_tree_to_deep(#{column_names_initial_select}) AS\n (\n SELECT #{column_names_initial_select} FROM nodes WHERE parent_id = #{node.id}\n UNION ALL\n SELECT #{column_names_recursive_select}\n FROM nodes t, walk_tree_to_deep ft WHERE t.parent_id = ft.id\n )\n SELECT * FROM walk_tree_to_deep ORDER BY id\n \"\n\n find_by_sql(sql)\n end",
"def find_all(value)\n nodes = []\n self.each {|node| nodes << node if node.value == value}\n nodes\n end",
"def true_eager_graph_limit_strategy\n r = super\n ds = associated_dataset\n if r == :ruby && ds.supports_limits_in_correlated_subqueries? && (Array(associated_class.primary_key).length == 1 || ds.supports_multiple_column_in?) && (!offset || ds.supports_offsets_in_correlated_subqueries?)\n :correlated_subquery\n else\n r\n end\n end",
"def find_all_nodes(*args)\n nodes = @nodes.find_all_nodes(*args)\n nodes.find_all { |n| context?(n) }\n end",
"def query_all_years(attribute, options)\n query = super(attribute, options)\n query = query.unscope(where: :context_id)\n where_conds = []\n where_vars = []\n @node_indices_by_context.each do |context_id, node_idx|\n where_conds << \"flows.context_id = ? AND ? = path[?]\"\n where_vars += [context_id, @node.id, node_idx]\n end\n query.where(\n where_conds.join(\" OR \"), *where_vars\n )\n end",
"def my_select(coll)\n # your code here!\n mod_coll = []\n i=0\n while i < coll.length\n if (yield(coll[i]))\n mod_coll.push(coll[i])\n end\n i = i+1\n end\n mod_coll\nend",
"def DFS(root, target)\n ## base case: \n return nil if root.nil?\n return root if root.value == target\n ##indecutive step: \n ## DFS on the left side then DFS on the right side \n root.children.each do |child|\n search_result = DFS(child, target) ## better to save the actual value then check the value then return nil\n return search_result unless search_result.nil?\n end \n return nil\nend",
"def depth_first_search(node)\n return if node.nil?\n node.visit\n node.connections.each do |conn|\n depth_first_search(conn) unless conn.visited\n end\nend",
"def ciclo_each\n # -> uncomment the next line to manually enable rule tracing\n # trace_in( __method__, 21 )\n\n\n return_value = CicloEachReturnValue.new\n\n # $rule.start = the first token seen before matching\n return_value.start = @input.look\n\n\n root_0 = nil\n\n __Identificador100__ = nil\n __K_EACH102__ = nil\n __K_DO103__ = nil\n __DELIM104__ = nil\n __DELIM106__ = nil\n __LLAIZQ107__ = nil\n __LLADER109__ = nil\n var_local101 = nil\n var_local105 = nil\n bodyexp108 = nil\n\n\n tree_for_Identificador100 = nil\n tree_for_K_EACH102 = nil\n tree_for_K_DO103 = nil\n tree_for_DELIM104 = nil\n tree_for_DELIM106 = nil\n tree_for_LLAIZQ107 = nil\n tree_for_LLADER109 = nil\n\n begin\n root_0 = @adaptor.create_flat_list\n\n\n # at line 115:4: ( Identificador | var_local ) K_EACH K_DO DELIM var_local DELIM LLAIZQ bodyexp LLADER\n # at line 115:4: ( Identificador | var_local )\n alt_13 = 2\n look_13_0 = @input.peek( 1 )\n\n if ( look_13_0 == Identificador )\n alt_13 = 1\n elsif ( look_13_0 == DOUBLEDOT )\n alt_13 = 2\n else\n @state.backtracking > 0 and raise( ANTLR3::Error::BacktrackingFailed )\n\n\n\n raise NoViableAlternative( \"\", 13, 0 )\n\n end\n case alt_13\n when 1\n # at line 115:5: Identificador\n __Identificador100__ = match( Identificador, TOKENS_FOLLOWING_Identificador_IN_ciclo_each_497 )\n if @state.backtracking == 0\n tree_for_Identificador100 = @adaptor.create_with_payload( __Identificador100__ )\n @adaptor.add_child( root_0, tree_for_Identificador100 )\n\n end\n\n\n when 2\n # at line 115:19: var_local\n @state.following.push( TOKENS_FOLLOWING_var_local_IN_ciclo_each_499 )\n var_local101 = var_local\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, var_local101.tree )\n end\n\n\n end\n __K_EACH102__ = match( K_EACH, TOKENS_FOLLOWING_K_EACH_IN_ciclo_each_502 )\n if @state.backtracking == 0\n tree_for_K_EACH102 = @adaptor.create_with_payload( __K_EACH102__ )\n @adaptor.add_child( root_0, tree_for_K_EACH102 )\n\n end\n\n __K_DO103__ = match( K_DO, TOKENS_FOLLOWING_K_DO_IN_ciclo_each_504 )\n if @state.backtracking == 0\n tree_for_K_DO103 = @adaptor.create_with_payload( __K_DO103__ )\n @adaptor.add_child( root_0, tree_for_K_DO103 )\n\n end\n\n __DELIM104__ = match( DELIM, TOKENS_FOLLOWING_DELIM_IN_ciclo_each_506 )\n if @state.backtracking == 0\n tree_for_DELIM104 = @adaptor.create_with_payload( __DELIM104__ )\n @adaptor.add_child( root_0, tree_for_DELIM104 )\n\n end\n\n @state.following.push( TOKENS_FOLLOWING_var_local_IN_ciclo_each_508 )\n var_local105 = var_local\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, var_local105.tree )\n end\n\n __DELIM106__ = match( DELIM, TOKENS_FOLLOWING_DELIM_IN_ciclo_each_510 )\n if @state.backtracking == 0\n tree_for_DELIM106 = @adaptor.create_with_payload( __DELIM106__ )\n @adaptor.add_child( root_0, tree_for_DELIM106 )\n\n end\n\n __LLAIZQ107__ = match( LLAIZQ, TOKENS_FOLLOWING_LLAIZQ_IN_ciclo_each_512 )\n if @state.backtracking == 0\n tree_for_LLAIZQ107 = @adaptor.create_with_payload( __LLAIZQ107__ )\n @adaptor.add_child( root_0, tree_for_LLAIZQ107 )\n\n end\n\n @state.following.push( TOKENS_FOLLOWING_bodyexp_IN_ciclo_each_514 )\n bodyexp108 = bodyexp\n @state.following.pop\n if @state.backtracking == 0\n @adaptor.add_child( root_0, bodyexp108.tree )\n end\n\n __LLADER109__ = match( LLADER, TOKENS_FOLLOWING_LLADER_IN_ciclo_each_516 )\n if @state.backtracking == 0\n tree_for_LLADER109 = @adaptor.create_with_payload( __LLADER109__ )\n @adaptor.add_child( root_0, 
tree_for_LLADER109 )\n\n end\n\n\n # - - - - - - - rule clean up - - - - - - - -\n return_value.stop = @input.look( -1 )\n\n\n if @state.backtracking == 0\n return_value.tree = @adaptor.rule_post_processing( root_0 )\n @adaptor.set_token_boundaries( return_value.tree, return_value.start, return_value.stop )\n\n end\n\n rescue ANTLR3::Error::RecognitionError => re\n report_error(re)\n recover(re)\n return_value.tree = @adaptor.create_error_node( @input, return_value.start, @input.look(-1), re )\n\n\n ensure\n # -> uncomment the next line to manually enable rule tracing\n # trace_out( __method__, 21 )\n\n\n end\n\n return return_value\n end",
"def test_row_first(r)\n flag = false\n $pages[r-1].select{|p| (p.p >> PAGE_SHIFT).even? }.each {|p|\n $pages[r+1].select {|q| (q.p >> PAGE_SHIFT).even? and conflict?(p.v, q.v)\n }.each {|q|\n flag |= hammer_row(r, p, q, $ntime_max)\n } \n }\n return flag\nend",
"def direct_children_by_id(*args)\n scope = args.last.is_a?(Hash) ? args.pop : {}\n ids = args.flatten.compact.uniq\n self.class.find_in_nested_set(:all, { \n :conditions => [\"#{scope_condition} AND #{prefixed_parent_col_name} = #{self.id} AND #{self.class.table_name}.#{self.class.primary_key} IN (?)\", ids]\n }, scope) \n end",
"def dfs(&block)\n yield @data\n @nodes.each do |node|\n node.dfs(&block)\n end\n end",
"def dfs_recursive(query, node=@tree) \n return nil if node.nil?\n return node if query == node.value\n\n# left search equals if the left child is not nil then call the method with target & left child node parameters, otherwise is nil\n left_search = node.left_child != nil ? dfs_recursive(query, node.left_child) : nil\n\n# return if left search does not result in nil\n return left_search if left_search != nil\n right_search = node.right_child != nil ? dfs_recursive(query, node.right_child) : nil\n return right_search if right_search != nil \n end",
"def get_repos(neo)\n\n\t# return repo's that have more than 1 connection\n\t# TODO: try this; n<-r[follows*2..]-m\n\tcypher_query = ' START n = node:nodes_index(type = \"repo\")'\n\tcypher_query << ' MATCH n<-[r:follows]-m'\n\tcypher_query << ' WITH n, count(m) AS follow_count'\n\tcypher_query << ' WHERE follow_count > 2'\n\tcypher_query << ' RETURN ID(n), n.name, follow_count'\n\tcypher_query << ' ORDER BY ID(n)'\n\tneo.execute_query(cypher_query)['data']\n\nend",
"def run( current, positions, iterations, num_values )\n iterations.times do \n slice_start = current.next\n current.next = current.next.next.next.next\n destination = nil\n backoff = 1\n \n while ! destination \n search_for_label = current.value - backoff\n if search_for_label < 1\n search_for_label += num_values\n end \n destination = positions[search_for_label]\n if destination.value == slice_start.value ||\n destination.value == slice_start.next.value ||\n destination.value == slice_start.next.next.value \n destination = nil\n backoff += 1\n end\n end\n \n after_slice = destination.next\n destination.next = slice_start\n slice_start.next.next.next = after_slice\n current = current.next\n end\nend",
"def findByWord(word)\n Neo4j::Transaction.run do\n puts \"Find all sentence with #{word}\"\n result = Sentence.find(:text => word)\n\n puts \"Found #{result.size} sentences\"\n \n result.each {|x| puts \"#{x}\"}\n end\nend",
"def traverse_df(node,level,&block)\n @children[node].sort_by {|a| length(a) }.reverse.each { |child| traverse_df(child, level+1, &block) }\n yield node, level if block_given?\n end",
"def queries(resolve=true)\n return @queries unless resolve\n \n query = 1\n query += 1 while peptide_hits(query)\n @queries\n end",
"def update( search_nodes )\n if has_target_predicate? then\n search_nodes.each do |node|\n if node.slot_filled?(@slot_name) then\n unless @target_predicate.apply(node[@slot_name].to_a).empty?\n yield( node )\n end \n end\n end\n else\n search_nodes.each do |node|\n yield( node )\n end\n end\n end",
"def find_paths(&block)\n follow,kill,find,continue = SearchParams.process(&block)\n\n paths,path = [],[]\n search = lambda do |node|\n if find[node]\n paths << path.dup\n next if not continue[node]\n end\n next if kill[node]\n [*follow[node]].each do |n|\n next if path.include? n\n path.push(n)\n search[n]\n path.pop\n end\n end\n\n [*follow[self]].each do |n| \n path.push(n)\n search[n] \n path.pop\n end\n\n paths\n end",
"def dfs(graph)\n visited = Hash.new\n\n graph.each do |vertex, _|\n visited[vertex] = Hash.new\n visited[vertex][:state] = \"not visited\"\n end\n\n # This is to enable passing by reference in the dfs_visit method, otherwise\n # simply having a variable for integer will get passed by value and does not\n # behave/increment as expected.\n order = Hash.new\n order[:time] = 0\n\n graph.each do |vertex, _|\n dfs_visit(graph, vertex, visited, order) unless visited[vertex][:state] == \"done\"\n end\n\n visited\nend",
"def depth_first_search\n visited = {}\n timestamp = {}\n tree_edges = {}\n back_edges = {}\n cross_edges = {}\n forward_edges = {}\n count = 0\n\n dfs_visit = Proc.new { |from|\n\tvisited[from] = true\n\ttimestamp[from] = [count += 1]\n\t@graph[from].each_key do |to|\n\t if visited[to]\n\t if timestamp[to].size > 1\n\t if timestamp[from].first < timestamp[to].first\n\t\t# forward edge (black)\n\t\tp \"#{from} -> #{to} : forward edge\" if $DEBUG\n\t\tforward_edges[from] = to\n\t else\n\t\t# cross edge (black)\n\t\tp \"#{from} -> #{to} : cross edge\" if $DEBUG\n\t\tcross_edges[from] = to\n\t end\n\t else\n\t # back edge (gray)\n\t p \"#{from} -> #{to} : back edge\" if $DEBUG\n\t back_edges[from] = to\n\t end\n\t else\n\t # tree edge (white)\n\t p \"#{from} -> #{to} : tree edge\" if $DEBUG\n\t tree_edges[to] = from\n\t dfs_visit.call(to)\n\t end\n\tend\n\ttimestamp[from].push(count += 1)\n }\n\n @graph.each_key do |node|\n\tunless visited[node]\n\t dfs_visit.call(node)\n\tend\n end\n return timestamp, tree_edges, back_edges, cross_edges, forward_edges\n end",
"def descendants_scope(node)\n node.scope.join_recursive do |query|\n query.connect_by(join_columns(columns.id => columns.parent))\n .start_with(node.to_relation)\n\n yield query if block_given?\n\n query.order_siblings(position)\n end\n end",
"def query_pattern(pattern, **options, &block)\n return super unless pattern.graph_name == DEFAULT_GRAPH\n\n if block_given?\n pattern = pattern.dup\n pattern.graph_name = nil\n\n each_statement do |statement|\n yield statement if (statement.graph_name == DEFAULT_GRAPH ||\n statement.graph_name.nil?) && pattern === statement\n end\n else\n enum_for(:query_pattern, pattern, options)\n end\n end",
"def sql_inventory_groups\n \"WITH ooc_groups AS\n (\n SELECT assg.asset_id, grp.ooc_group_id as group_id,grp.ooc_group_name as group_name,\n grp.ooc_group_type as group_type, grp.ooc_group_status as group_status\n FROM hip_ooc_asset_group_v AS assg\n JOIN hip_ooc_group_v AS grp ON grp.ooc_group_id = assg.ooc_group_id\n WHERE grp.ooc_group_status != 'deleted'\n AND grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n hc_groups as(\n SELECT assg.asset_id, grp.hc_group_id as group_id,grp.group_name,'hc cycle'as group_type ,\n grp.is_current\n FROM hip_asset_group_v AS assg\n JOIN hip_hc_group_v AS grp ON grp.hc_group_id = assg.hc_group_id\n WHERE grp.org_l1_id = #{org_l1_id}\n AND grp.org_id = #{org_id}\n ),\n all_groups as (\n select * from ooc_groups\n union\n select * from hc_groups\n )\n SELECT assh.host_name,assh.ip_string_list, assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag,assh.hc_auto_interval_weeks,\n assh.hc_manual_interval_weeks,assh.hc_manual_flag,\n #{group_type_columns_frag}\n CASE\n WHEN assh.hc_auto_flag='y' and assh.hc_manual_flag='y' then 'Yes'\n WHEN assh.hc_auto_flag='n' and assh.hc_manual_flag='n' then 'No'\n ELSE NULL\n END AS hc_required \n FROM dim_comm_tool_asset_hist_v AS assh\n LEFT join all_groups AS g ON g.asset_id = assh.tool_asset_id\n JOIN dim_comm_os_v AS os ON os.os_id=assh.os_id\n WHERE\n assh.org_l1_id=#{org_l1_id} AND assh.org_id=#{org_id}\n AND CURRENT_TIMESTAMP BETWEEN assh.row_from_timestamp AND COALESCE(assh.row_to_timestamp, CURRENT_TIMESTAMP)\n group by assh.host_name,assh.ip_string_list,assh.hc_start_date,assh.security_policy_name,\n assh.tool_asset_id,os.os_product,assh.system_status,assh.hc_auto_flag, assh.hc_auto_interval_weeks, assh.hc_manual_interval_weeks,\n assh.hc_manual_flag\n ORDER BY assh.host_name\"\n end",
"def find_node(node, type, first, last, depth: 0)\n child_nodes = node.children.select { |c| c.is_a?(RubyVM::AbstractSyntaxTree::Node) }\n # @logger.debug(\"D: #{depth} #{node.type} has #{child_nodes.size} children and spans #{node.first_lineno}:#{node.first_column} to #{node.last_lineno}:#{node.last_column}\")\n\n if node.type == type && first >= node.first_lineno && last <= node.last_lineno\n return node\n end\n\n child_nodes.map { |n| find_node(n, type, first, last, depth: depth + 1) }.flatten\n end",
"def cycle(n)\n RGL::ImplicitGraph.new { |g|\n g.vertex_iterator { |b| 0.upto(n - 1, &b) }\n g.adjacent_iterator { |x, b| b.call((x + 1) % n) }\n g.directed = true\n }\nend",
"def where(expr)\n expr = expr.to_s\n result = empty_dup\n ev = Evaluator.new(ivars: { row: 0, group: 0 })\n rows.each_with_index do |row, k|\n grp = row_index_to_group_index(k)\n ev.update_ivars(row: k + 1, group: grp)\n ev.eval_before_hook(locals: row)\n result << row if ev.evaluate(expr, locals: row)\n ev.eval_after_hook(locals: row)\n end\n result.normalize_boundaries\n result\n end",
"def hard(input)\n graph = Graph.from(input)\n components = []\n vertices = Set.new(graph.vertices)\n until vertices.empty?\n start = vertices.first\n component = graph_walk(graph, start)\n components << component\n vertices = vertices.difference(component)\n end\n components.length\nend",
"def bfs_search(root)\n queue = []\n\n root.marked = true\n queue.push(root)\n\n while queue.length != 0\n current = queue.shift\n visit(current)\n\n current.adjacent.each do |node|\n if !node.marked\n node.marked = true\n queue.push(node)\n end\n end\n end\nend",
"def chapters_matching(query)\n results = []\n\n return results if !query || query.empty?\n\n each_chapter(query) do |number, name, contents, paragraph|\n results << {number: number, name: name, paragraph: paragraph} if contents.include?(query)\n end\n\n results\nend"
] |
[
"0.52050734",
"0.52050734",
"0.5193499",
"0.51634336",
"0.49859515",
"0.49299592",
"0.48931837",
"0.48773682",
"0.48163575",
"0.48068506",
"0.4787729",
"0.477681",
"0.47724617",
"0.4771895",
"0.47612202",
"0.4757136",
"0.4750637",
"0.47494748",
"0.47455943",
"0.47416502",
"0.47393036",
"0.4724004",
"0.47180694",
"0.47133023",
"0.47104567",
"0.47018412",
"0.47002506",
"0.4695485",
"0.46831712",
"0.4678406",
"0.4660781",
"0.46594992",
"0.46594992",
"0.464788",
"0.46463847",
"0.46456683",
"0.46442336",
"0.46311262",
"0.46306673",
"0.46296465",
"0.46296465",
"0.46296465",
"0.4622859",
"0.46192783",
"0.4617289",
"0.4613573",
"0.46085656",
"0.4606385",
"0.4602935",
"0.45997214",
"0.45959508",
"0.4592767",
"0.45910653",
"0.45833614",
"0.45821637",
"0.4571853",
"0.45628497",
"0.4562832",
"0.45599774",
"0.45521548",
"0.45439944",
"0.4542293",
"0.45416647",
"0.45380715",
"0.45294842",
"0.45184055",
"0.45178372",
"0.45150495",
"0.45137727",
"0.4507969",
"0.45064184",
"0.45058015",
"0.4505569",
"0.45041388",
"0.45022887",
"0.4502061",
"0.44971535",
"0.4485763",
"0.4482513",
"0.44684353",
"0.44677624",
"0.44677162",
"0.4463654",
"0.44611242",
"0.44607267",
"0.4460631",
"0.44585982",
"0.44581628",
"0.4456429",
"0.4452958",
"0.44458148",
"0.4442341",
"0.44387186",
"0.44367018",
"0.4434977",
"0.4433388",
"0.44327158",
"0.4428681",
"0.4428186",
"0.442332"
] |
0.58591795
|
0
|
The version of the database server
|
def server_version
db.server_version(@opts[:server])
end
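A minimal usage sketch of the method above, assuming a Sequel-style `DB` handle and Sequel's integer version encoding (major * 10000 + minor * 100 + patch); both are assumptions, not shown in the snippet:

# Hypothetical caller: under the assumed encoding, 90400 decodes to 9.4.0.
if DB[:items].server_version >= 90400
  puts "server is PostgreSQL 9.4 or newer"
end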
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def version\n @version ||= exec('SHOW server_version')[0]['server_version'].split[0]\n end",
"def get_server_version\n server_info[:server_version]\n end",
"def db_version\n migration_meta_node[:_db_version]\n end",
"def server_version; end",
"def server_version\n check_connection\n @protocol.server_version\n end",
"def server_version(server=nil)\n return @server_version if @server_version\n ds = dataset\n ds = ds.server(server) if server\n @server_version = swallow_database_error{ds.with_sql(\"SELECT CAST(current_setting('server_version_num') AS integer) AS v\").single_value} || 0\n end",
"def schema_version\n exec_one_result( \"SELECT schema_version FROM info\" )\n end",
"def engine_version\n @dbi.engine_version\n end",
"def engine_version\n @dbi.engine_version\n end",
"def server_version\n ServerVersion.new(server_info[\"version\"])\n end",
"def version\n VERSION\n end",
"def oracle_server_vernum()\n #This is a stub, used for indexing\n end",
"def database_version\n raise NoMethodError, \"Not implemented for this raw driver\"\n end",
"def version\n @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n return @version\n end",
"def version\n VERSION\n end",
"def server_version\n call! :server_version\n end",
"def version\n read_property 'Version'\n end",
"def engine_version\n stats.version\n end",
"def server_version\n request(auth: false, expects: 200)['version']\n rescue => ex\n error { \"Server version exception\" }\n error { ex }\n nil\n end",
"def server_version(server=nil)\n @server_version ||= (synchronize(server){|conn| conn.info[:id]})\n end",
"def version\n @db.send(:get_int_pragma, 'user_version')\n end",
"def version_number\n @version\n end",
"def version\n @version ||= __determine_version\n end",
"def server_version\n server_info.present? ? @server_info[:parseServerVersion] : nil\n end",
"def version\n self[:version]\n end",
"def server_version(_server=nil)\n @server_version ||= super()\n end",
"def version\n @__version\n end",
"def version\n VERSION\n end",
"def version\n VERSION\n end",
"def version\n VERSION\n end",
"def client_version\n ClientVersion\n end",
"def version\n return last_version if versionable?\n version_number\n end",
"def version\n @version || 0\n end",
"def version\n driver.getVersion\n end",
"def version; schema.version; end",
"def getVersion\r\n\t\t\t\t\treturn @version\r\n\t\t\t\tend",
"def version\n self.class.version\n end",
"def server_version\n status['value']['build']['version']\n end",
"def version\n 1\n end",
"def version_number\n return @version_number\n end",
"def version\n @version ||= data.version\n end",
"def engine_version\n data[:engine_version]\n end",
"def engine_version\n data[:engine_version]\n end",
"def get_version()\n\t\tend",
"def version\n @context[:version]\n end",
"def version\n 0\n end",
"def version\n data.version\n end",
"def current_version\n @version\n end",
"def version\n fetch('vehicle.version')\n end",
"def get_version\n\t\tend",
"def version\n self.class.get(\"/get/version\")\n end",
"def version\n @version ||= create_version\n end",
"def version\n @version ||= version_hex.to_s(16).chars.entries.join('.')\n end",
"def ddl_version(agent)\n ddl = agent_ddl(agent)\n ddl.meta[:version]\n end",
"def version\n echo_rosh_command\n\n @version ||= adapter.current_version\n end",
"def version\n\t\treturn @version ||= self.find_version\n\tend",
"def version\n @version_helper.to_s\n end",
"def version\n @version_helper.to_s\n end",
"def version\n \"rs_connect #{right_link_version} - RightLink's server importer (c) 2014 RightScale\"\n end",
"def schema_version\n migration_context.current_version\n end",
"def version\n options['version']\n end",
"def server\n @database.server\n end",
"def server\n @database.server\n end",
"def version\n ret = @client.call('Bugzilla.version')\n handle_faults(ret)\n ret['version']\n end",
"def get_schema_version\n File.read(File.join(install_directory,'db','schema_version')).to_i rescue 0\n end",
"def version\n cmd(COMMANDS[:version], 2)\n end",
"def engine_version\n endpoint.engine_version\n end",
"def version\n @table[:version] ||= Version::Number.new('0.0.0')\n end",
"def version\n nil\n end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end",
"def version; end"
] |
[
"0.85410744",
"0.81251967",
"0.8072402",
"0.8030856",
"0.7889446",
"0.78843343",
"0.7822001",
"0.7734123",
"0.7734123",
"0.76928204",
"0.7673027",
"0.76689583",
"0.76477915",
"0.7637135",
"0.7603097",
"0.7603097",
"0.7603097",
"0.7603097",
"0.7603097",
"0.7603097",
"0.7603097",
"0.7603097",
"0.7603097",
"0.75740725",
"0.75056326",
"0.7448432",
"0.74055547",
"0.7392609",
"0.73741275",
"0.73693454",
"0.7360145",
"0.73382455",
"0.73187155",
"0.73040754",
"0.7296893",
"0.7288689",
"0.7273319",
"0.7273319",
"0.7273319",
"0.7230756",
"0.72260255",
"0.7220021",
"0.7214078",
"0.72113734",
"0.72001636",
"0.71946764",
"0.71911263",
"0.718977",
"0.7171017",
"0.71638405",
"0.7145815",
"0.7145815",
"0.7138011",
"0.71151745",
"0.70893484",
"0.7084296",
"0.70841676",
"0.70826364",
"0.7071322",
"0.7068714",
"0.7056682",
"0.70520484",
"0.70260584",
"0.6987991",
"0.6983553",
"0.6978409",
"0.6978409",
"0.6974229",
"0.6970883",
"0.6959595",
"0.69479847",
"0.69479847",
"0.6947632",
"0.69421905",
"0.6941472",
"0.69409525",
"0.69330454",
"0.6931251",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383",
"0.69243383"
] |
0.8844359
|
0
|
PostgreSQL 9.4+ supports the FILTER clause for aggregate functions.
|
def supports_filtered_aggregates?
server_version >= 90400
end
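For context, a sketch of the SQL this capability check gates on 9.4+; the `users` table, `active` column, and the `DB.fetch` call are illustrative, not taken from the snippet:

# Raw SQL for illustration: FILTER restricts which rows the aggregate sees.
DB.fetch(<<~SQL).all
  SELECT count(*) FILTER (WHERE active) AS active_count,
         count(*) AS total_count
  FROM users
SQL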
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def filter_agg(field:, value:, &block)\r\n agg = { agg: { filter: { term: { field => value } } } }\r\n base_agg(agg, block)\r\n end",
"def filter_expr(*args, &block)\n schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))\n end",
"def filter_expr(*args, &block)\n schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))\n end",
"def filter_expr(*args, &block)\n schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))\n end",
"def filter(expr); end",
"def filter(expr); end",
"def aggregate_f(*args)\n aggregate_function.f(*args)\n end",
"def summarize_per_subset\n @having = ANY_ROWS\n end",
"def apply_filter\n end",
"def filter(*args); Typelib.filter_function_args(args, self) end",
"def filter; end",
"def filter; end",
"def filter; end",
"def agg_where\n agg_filters = params.fetch(:agg_filters, {})\n if agg_filters.is_a?(String)\n agg_filters = JSON.parse(agg_filters)\n end\n where = Hash[agg_filters.map{|key, val|\n [key, val]\n }.reject{|x| x[1].empty? }.map{|x| [x[0], x[1].keys]}]\n\n [\"start_date\", \"completion_date\"].each do |key|\n if where.has_key?(key)\n unix_milliseconds_string = where[key].first # only doing one!\n year = Time.at(unix_milliseconds_string.to_i / 1000).year\n where[key] = {\n gte: \"#{year}||/y\",\n lte: \"#{year}||/y\",\n }\n end\n end\n where\n end",
"def filtered_dataset\n filter_args_from_query.inject(@dataset) do |filter, cond|\n filter.filter(cond)\n end\n end",
"def filter\n end",
"def apply_filter(query)\n query\n end",
"def apply_filter(query)\n query\n end",
"def filter(name, function)\n design_doc.create_filter(name, function)\n end",
"def parse_filter(filter_argument = T.unsafe(nil), &filter_proc); end",
"def filtered(collection, filter)\n collection.grep_v(FILTERS.fetch(filter, FILTERS[:default]))\n end",
"def visit_axiom_aggregate_sum(sum)\n aggregate_function_sql(SUM, sum)\n end",
"def filter!; end",
"def apply_filters(query)\n query\n end",
"def filter(filter_name, *filter_args)\n @client.execute_udf(@key, @PACKAGE_NAME, 'filter', [@bin_name, @user_module, filter_name, filter_args], @policy)\n end",
"def filter(name, function)\n filters = (self.model.design_doc['filters'] ||= {})\n filters[name.to_s] = function\n end",
"def filter(values); end",
"def filter(values); end",
"def filter(values); end",
"def apply_filter(query, filter, value)\n return [query] if query.blank? || filter.blank? || value.blank?\n return [query] if filter.noop?\n\n klass = filter.klass || query.klass\n column = filter.column\n value = filter.values.call(value) if filter.values.present?\n\n queries = []\n\n if column.present?\n target_column = \"#{klass.table_name}.#{column}\"\n\n if filter.data_type == DateTime\n target_column = \"date_trunc('second', #{target_column})\"\n end\n\n vals = Array(value)\n value = []\n q = []\n\n vals.each do |val|\n if klass.columns_hash[column.to_s] && klass.columns_hash[column.to_s].type == :integer && !(val.to_s.strip =~ /\\A\\d+\\Z/)\n q << '0 = 1'\n else\n q << \"LOWER(#{target_column}::varchar) LIKE LOWER(?)\"\n value << \"%#{val}%\"\n end\n end\n\n queries << \"(#{q.join(' OR ')})\" if q.any?\n elsif filter.query.present?\n query, query_list, value_list = filter.query.call(query, value)\n queries = Array(query_list)\n value = Array(value_list)\n elsif filter.having.present?\n query = query.having(\"LOWER((#{filter.having})::varchar) LIKE ?\", \"%#{Array(value).first}%\")\n end\n\n [query, queries, value]\n end",
"def filter(filt = nil, *args, **opts)\n @filter = if filt.nil? && !defined?(@filter)\n Filters.default\n elsif filt\n Filters.resolve(filt, *args, **opts)\n else\n @filter\n end\n\n aggregator.filter = @filter\n\n @filter\n end",
"def filter_expr(expr)\n case expr\n when Hash\n SQL::BooleanExpression.from_value_pairs(expr)\n when Array\n if String === expr[0]\n filter_expr(expr.shift.gsub(QUESTION_MARK){literal(expr.shift)}.lit)\n else\n SQL::BooleanExpression.from_value_pairs(expr)\n end\n when Proc\n expr.to_sql(self).lit\n when SQL::NumericExpression, SQL::StringExpression\n raise(Error, \"Invalid SQL Expression type: #{expr.inspect}\") \n when Symbol, SQL::Expression\n expr\n when String\n \"(#{expr})\".lit\n else\n raise(Error, 'Invalid filter argument')\n end\n end",
"def convert_filter_to_mongo_query(filter) \n filter\n end",
"def filter(*cond, &block)\n clause = (@opts[:having] ? :having : :where)\n cond = cond.first if cond.size == 1\n raise(Error::InvalidFilter, \"Invalid filter specified. Did you mean to supply a block?\") if cond === true || cond === false\n cond = transform_save(cond) if @transform if cond.is_a?(Hash)\n cond = filter_expr(block || cond)\n cond = SQL::BooleanExpression.new(:AND, @opts[clause], cond) if @opts[clause] && !@opts[clause].blank?\n clone(clause => cond)\n end",
"def filter_clause\n @filters[filter % @filters.size] unless @filters.size.zero?\n end",
"def filter(param, type:, operation:, column: nil, required: false, format_value: nil)\n query_param = param.to_s.underscore\n column ||= param.to_s.underscore\n op = operations.fetch(operation.to_sym) { raise ArgumentError, \"unknown operation #{operation}\" }\n sql = op.call(column, query_param, type)\n raw_filter(param, sql:, required:, format_value:, query_param:, array: op.array)\n end",
"def filter_expr(arg=nil, &block)\n if arg.is_a?(Proc) && !block\n block = arg\n arg = nil\n elsif arg.is_a?(String)\n arg = Sequel.lit(arg)\n elsif arg.is_a?(Array)\n if arg.first.is_a?(String)\n arg = Sequel.lit(*arg)\n elsif arg.length > 1\n arg = Sequel.&(*arg)\n end\n end\n schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, arg, &block))\n end",
"def aggregate(nodes, relation, aggregate_function, restriction = [])\n if nodes.empty?\n return []\n end\n if relation.nil?\n raise MissingRelationException\n end\n items = nodes.map{|node| node.item}\n values_stmt = \"VALUES ?s {#{items.map{|item| \"<\" + Xplain::Namespace.expand_uri(item.id) + \">\"}.join(\" \")}}\"\n query_stmt = \"SELECT ?s (#{aggregate_function}(?o) as ?o) where{#{values_stmt} #{path_clause(relation)} #{values_clause(\"?o\", restriction)} #{path_clause_as_subselect(relation, values_stmt, \"?s\", limit, offset)}. }\"\n query_stmt << \" GROUP BY ?s\"\n get_results(query_stmt, relation)\n end",
"def crypt_keeper_filter_postgres_log(payload)\n payload.gsub(FILTER) do |_|\n \"#{$~[:operation]}([FILTERED])\"\n end\n end",
"def add_filter(filter_argument = T.unsafe(nil), &filter_proc); end",
"def apply_filter(relation)\n relation.where(@q.filter.to_sql, *@q.values)\n end",
"def filter(record)\n true\n end",
"def filter(sparql)\n raise \"Must be overridden\"\n end",
"def filter_argument; end",
"def aggregate\n klass.collection.group(\n :key => field_list,\n :cond => selector,\n :initial => { :count => 0 },\n :reduce => Javascript.aggregate\n )\n end",
"def having(*cond, &block)\n raise(Error::InvalidOperation, \"Can only specify a HAVING clause on a grouped dataset\") unless @opts[:group]\n clone(:having=>{}).filter(*cond, &block)\n end",
"def filters=(_arg0); end",
"def filters=(_arg0); end",
"def having(*cond, &block)\n add_filter(:having, cond, &block)\n end",
"def filter(collection)\n collection\n end",
"def filter(collection)\n collection\n end",
"def filters; end",
"def filters; end",
"def apply_filter_expression(scope, expression)\n ast = calculator.ast(expression)\n if ast.is_a?(Keisan::AST::LogicalOperator) || boolean_function?(ast)\n apply_ast(scope, ast)\n else\n raise Kaprella::Errors::InvalidFilterExpression.new\n end\n end",
"def aggregated_over_time_query\n quantity_field = @country_ids.present? ? \"#{entity_quantity}_reported_quantity\" : \"#{@reported_by}_reported_quantity\"\n\n <<-SQL\n SELECT ROW_TO_JSON(row)\n FROM (\n SELECT JSON_AGG(JSON_BUILD_OBJECT('x', year, 'y', value) ORDER BY year) AS datapoints\n FROM (\n SELECT year, ROUND(SUM(#{quantity_field}::FLOAT)) AS value\n FROM #{shipments_table}\n #{child_taxa_join}\n WHERE #{@condition} AND #{quantity_field} IS NOT NULL AND #{country_condition}\n AND #{child_taxa_condition}\n GROUP BY year\n #{quantity_condition(quantity_field)}\n ORDER BY value DESC\n #{limit}\n ) t\n ) row\n SQL\n end",
"def named_filter; end",
"def raw_filter(param, sql:, query_param: nil, required: false, format_value: nil, array: false)\n param = param.to_sym\n raise ArgumentError, \"filter with param #{param} already defined\" if filters.key?(param)\n\n query_param ||= param.to_s.underscore\n format_value ||= ->(value, _opts) { value }\n filters[param] = Filter.new(param:, sql:, query_param:, required:, format_value:, array:)\n end",
"def unfiltered\n cached_dataset(:_unfiltered_ds){clone(:where => nil, :having => nil)}\n end",
"def filter_publications(collection=Publication)\n query = publication_query(collection)\n query.apply_where\n end",
"def filters\n end",
"def filter(filter)\n @query[:q] << \"filter:#{filter}\"\n self\n end",
"def filter\n @filter\n end",
"def event_filter query\n filter = { _t: {} }.tap do |filter|\n if query.has_key? :after\n filter[:_t][:gt] = to_timestamp query.delete(:after)\n query.delete(:from)\n elsif query.has_key? :from\n filter[:_t][:gte] = to_timestamp query.delete(:from)\n end\n\n if query.has_key? :before\n filter[:_t][:lt] = to_timestamp query.delete(:before)\n query.delete(:upto)\n elsif query.has_key? :upto\n filter[:_t][:lte] = to_timestamp query.delete(:upto)\n end\n # sel['_id'] = Regexp.new(query.delete(:id)) if query[:id]\n query.each_pair{ |key, val| filter[:_d] ||= {} ; filter[:_d][key] = val }\n end\n end",
"def filter!(&block)\n @rows.select!(&block)\n end",
"def filter(filter)\n current_widget.filter filter\n end",
"def filter(expression = {}, &block)\n case expression\n when SPARQL::Algebra::Expression\n filter_without_expression do |solution|\n expression.evaluate(solution).true?\n end\n filter_without_expression(&block) if block_given?\n self\n else filter_without_expression(expression, &block)\n end\n end",
"def apply(aggregate)\n raise NotImplementedError\n end",
"def filter\n RuleAspect.from_hash(description['Filter'])\n end",
"def filter_complex\n @filtergraph ||= FilterGraph\n end",
"def filter_results(event)\n self.each_with_object([]) do |filter, arr|\n arr << [filter, filter.call(event)]\n end\n end",
"def filter_index\n filter\n end",
"def filter( filter_name, value )\n defined? @filters \\\n or return nil\n @filters.respond_to? \"has_key?\" \\\n or return nil\n @filters.has_key? filter_name \\\n or return nil\n f = @filters[filter_name]\n f.call value\n end",
"def filter(field, operator, value, value2=nil)\n operator = operator.to_s.upcase\n raise ArgumentError, \"invalid operator\" unless Operators.include?(operator)\n \n chain { |x|\n filter = { 'field' => field, 'operator' => operator, 'value1' => value.to_s }\n filter['value2'] = value2.to_s if operator == 'BETWEEN'\n x.filters << filter\n }\n end",
"def filter(*args)\n raise NotImplementedError, 'Subclass should implement.'\n end",
"def filter!(*cond, &block)\n filter(nil, *cond, &block)\n end",
"def buil_must_query_filter\n query_filter = [{ 'range' => date_range_filter }]\n query_filter | @raw_query\n end",
"def to_filter_proc(filter)\n case ff = filter\n when NilClass then ->(f){ true }\n when Proc then ff\n when Regexp then ->(f){ ff =~ f.to_s }\n else\n ->(f){ ff === f }\n end\n end",
"def filter\n params['filter_field'] || '*'\n end",
"def aggregate\n []\n end",
"def apply(_aggregate)\n raise NotImplementedError\n end",
"def filter(options={})\n raise NotImplementedError\n end",
"def filter\n super\n end",
"def global_filter; end",
"def filter(editable_payload, level, event_name)\n raise NotImplementedError,\n 'You must implement the method filter(editable_payload, level, event_name)'\n end",
"def filtrando(ventas)\n ventas.group_by {|x| x.to_i > 70000}\nend",
"def strict_filters=(_arg0); end",
"def filter_parameters; end",
"def filter_parameters; end",
"def calculate_function(function)\n raise \"invalid function '#{function}'\" unless [:sum, :avg, :min, :max, :count].include?(function.to_sym)\n Sequel::SQL::Function.new(function.to_sym, :value)\n end",
"def aggregation?\n false\n end",
"def aggregate op, type = :fixnum\n check_closed\n\n aggregation_impl op, type\n end",
"def Filter=(arg0)",
"def filter(&callable)\n filters << callable\n end",
"def filter(value)\n true\n end",
"def filtered_dataset\n dataset\n end",
"def filter_event(event)\n event\n end",
"def filterable?; @filterable; end",
"def filter_events(collection=Conference)\n query = event_query(collection)\n query.apply_where\n end",
"def apply_filter(type, request, ds)\n if filter = filter_for\n ds = filter.call(ds, type, request)\n end\n ds\n end",
"def filter(objects) objects end"
] |
[
"0.68197376",
"0.63455915",
"0.6330108",
"0.6330108",
"0.6127705",
"0.6127705",
"0.5911166",
"0.583755",
"0.5799499",
"0.57693523",
"0.57682407",
"0.57682407",
"0.57682407",
"0.56768763",
"0.56580806",
"0.5655221",
"0.563342",
"0.563342",
"0.5628486",
"0.5573211",
"0.552571",
"0.5525133",
"0.5518872",
"0.5498928",
"0.5492058",
"0.5491977",
"0.5470937",
"0.5470937",
"0.5470937",
"0.5448609",
"0.5419357",
"0.5393831",
"0.53816944",
"0.5378011",
"0.5370574",
"0.5369413",
"0.5318143",
"0.5309728",
"0.5306039",
"0.53021234",
"0.52844375",
"0.5273581",
"0.5268926",
"0.5265354",
"0.5231408",
"0.52143294",
"0.5205714",
"0.5205714",
"0.5190697",
"0.5184897",
"0.5184897",
"0.5174438",
"0.5174438",
"0.5161894",
"0.51480794",
"0.514378",
"0.51067984",
"0.510386",
"0.5102251",
"0.50809926",
"0.5078994",
"0.50745153",
"0.5061601",
"0.5053027",
"0.5043364",
"0.50384164",
"0.50278765",
"0.50055486",
"0.49875534",
"0.49813837",
"0.4956008",
"0.49394965",
"0.49389765",
"0.49320683",
"0.49229664",
"0.4913031",
"0.4899967",
"0.48991975",
"0.48907286",
"0.48798037",
"0.4877023",
"0.48671603",
"0.48616934",
"0.48568836",
"0.48555702",
"0.48528823",
"0.48465493",
"0.48465493",
"0.48421133",
"0.48407277",
"0.4837234",
"0.48311782",
"0.48310354",
"0.48195764",
"0.48150137",
"0.4810707",
"0.48042315",
"0.47947946",
"0.47945264",
"0.4794299"
] |
0.5316833
|
37
|
PostgreSQL supports quoted function names.
|
def supports_quoted_function_names?
true
end
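A hedged sketch of what this flag permits; `myMixedCaseFunc` is hypothetical, and the `.quoted` modifier is assumed to follow Sequel's SQL::Function API:

# Quoting the function name preserves its case, so a function created as
# "myMixedCaseFunc" stays callable (exact quoting depends on the adapter).
DB.literal(Sequel.function(:myMixedCaseFunc, 1).quoted)
# => "\"myMixedCaseFunc\"(1)"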
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def irregular_function_sql(f)\n \"#{f.f}(#{literal(f.arg1)} #{f.joiner} #{literal(f.arg2)})\"\n end",
"def function_sql(f)\n args = f.args\n \"#{f.f}#{args.empty? ? '()' : literal(args)}\"\n end",
"def function(name, *args)\n SQL::Function.new(name, *args)\n end",
"def quoted_string; end",
"def function(name, *args)\n SQL::Function.new(function_name(name), *args)\n end",
"def create_custom_function\n # puts \"Creating a custom function to make queries with timestamp as a string...\".cyan\n @session.execute('CREATE OR REPLACE FUNCTION timefstring(somearg text)\n RETURNS NULL ON NULL INPUT\n RETURNS timestamp\n LANGUAGE java\n AS $$\n java.text.SimpleDateFormat formatter = new java.text.SimpleDateFormat(\"yyyy-MM-dd HH:mm:ss.SSS\");\n try {\n Date date = formatter.parse(somearg);\n return date;\n } catch(java.text.ParseException e) {\n return new Date();\n }\n $$')\n end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote; end",
"def quote_column_name_or_expression(name) # :nodoc:\n name = name.to_s\n case name\n # if only valid lowercase column characters in name\n when /^[a-z][a-z_0-9$#]*$/\n \"\\\"#{name.upcase}\\\"\"\n when /^[a-z][a-z_0-9$#-]*$/i\n \"\\\"#{name}\\\"\"\n # if other characters present then assume that it is expression\n # which should not be quoted\n else\n name\n end\n end",
"def quote_column_name(name) #:nodoc:\r\n %Q(\"#{name}\")\r\n end",
"def quote(name)\n raise NotImplementedError\n end",
"def name\n\t\t\"db_fun\"\n\tend",
"def full_function_name(function_name, options)\n schema = options[:schema]\n function_name = %Q{\"#{schema}\".#{function_name}} if schema\n function_name\n end",
"def functions\n pg_major = ::PgSaurus::Engine.pg_server_version[0]\n res = select_all <<-SQL\n SELECT n.nspname AS \"Schema\",\n p.proname AS \"Name\",\n pg_catalog.pg_get_function_result(p.oid) AS \"Returning\",\n CASE\n WHEN #{pg_major >= 11 ? \"p.prokind = 'w'\" : \"p.proiswindow\"} THEN 'window'\n WHEN p.prorettype = 'pg_catalog.trigger'::pg_catalog.regtype THEN 'trigger'\n ELSE 'normal'\n END AS \"Type\",\n p.oid AS \"Oid\"\n FROM pg_catalog.pg_proc p\n LEFT JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace\n WHERE pg_catalog.pg_function_is_visible(p.oid)\n AND n.nspname <> 'pg_catalog'\n AND n.nspname <> 'information_schema'\n AND #{pg_major >= 11 ? \"p.prokind <> 'a'\" : \"p.proisagg <> TRUE\"}\n ORDER BY 1, 2, 3, 4;\n SQL\n res.inject([]) do |buffer, row|\n returning = row['Returning']\n function_type = row['Type']\n oid = row['Oid']\n\n function_str = select_value(\"SELECT pg_get_functiondef(#{oid});\")\n\n name = parse_function_name(function_str)\n language = parse_function_language(function_str)\n definition = parse_function_definition(function_str)\n volatility = parse_function_volatility(function_str)\n\n if definition\n buffer << ::PgSaurus::ConnectionAdapters::FunctionDefinition.new(name,\n returning,\n definition.strip,\n function_type,\n language,\n oid,\n volatility)\n end\n buffer\n end\n end",
"def function\n function = '\\w+\\(\\)'\n end",
"def single_quote\n # single quote string here\n 'Hello World and others!'\nend",
"def quote(value); end",
"def quoted_name\n Squirm.quote_ident name\n end",
"def create_function_sql(name, definition, opts=OPTS)\n args = opts[:args]\n if !opts[:args].is_a?(Array) || !opts[:args].any?{|a| Array(a).length == 3 and %w'OUT INOUT'.include?(a[2].to_s)}\n returns = opts[:returns] || 'void'\n end\n language = opts[:language] || 'SQL'\n <<-END\n CREATE#{' OR REPLACE' if opts[:replace]} FUNCTION #{name}#{sql_function_args(args)}\n #{\"RETURNS #{returns}\" if returns}\n LANGUAGE #{language}\n #{opts[:behavior].to_s.upcase if opts[:behavior]}\n #{'STRICT' if opts[:strict]}\n #{'SECURITY DEFINER' if opts[:security_definer]}\n #{\"PARALLEL #{opts[:parallel].to_s.upcase}\" if opts[:parallel]}\n #{\"COST #{opts[:cost]}\" if opts[:cost]}\n #{\"ROWS #{opts[:rows]}\" if opts[:rows]}\n #{opts[:set].map{|k,v| \" SET #{k} = #{v}\"}.join(\"\\n\") if opts[:set]}\n AS #{literal(definition.to_s)}#{\", #{literal(opts[:link_symbol].to_s)}\" if opts[:link_symbol]}\n END\n end",
"def quote_column_name(name) #:nodoc:\n \"\\\"#{name}\\\"\"\n #query(\"SELECT QUOTE_IDENT('#{name}')\")[0][0]\n end",
"def function_name(name)\n \"JSON_#{name.to_s.upcase}\"\n end",
"def funcname\n NAMES[@name][1]\n end",
"def double_quote(value)\n return if value.nil?\n\n case value.to_s\n # Ignore keys that contain double quotes or a Arel.star (*)[all columns]\n # or if a table has already been explicitly declared (ex: users.id)\n when \"*\", /((^\".+\"$)|(^[[:alpha:]]+\\.[[:alnum:]]+)|\\(.+\\))/\n value\n else\n PG::Connection.quote_ident(value.to_s)\n end\n end",
"def sql_for_remove\n \"DROP FUNCTION IF EXISTS #{proname}(#{get_function_args})\"\n end",
"def quoted_identifier(name)\n \"\\\"#{name.to_s.upcase}\\\"\"\n end",
"def quote_column_name(name)\n %[\"#{name.to_s.gsub('\"', '\"\"')}\"]\n end",
"def as_you_like_it_quote; end",
"def quote_table_name(name)\n quote_column_name(name)\n end",
"def my_quote\n\tend",
"def quotes; end",
"def drop_function_sql(name, opts=OPTS)\n \"DROP FUNCTION#{' IF EXISTS' if opts[:if_exists]} #{name}#{sql_function_args(opts[:args])}#{' CASCADE' if opts[:cascade]}\"\n end",
"def sql_quoted(value)\n Arel::Nodes.build_quoted(value)\n end",
"def quote_table_name(name)\n '[' + name.to_s + ']'\n end",
"def quote_column_name(column_name)\n column_name.to_s\n end",
"def quotes\n end",
"def i(s)\n connection.quote_column_name(s)\n end",
"def function_name\n full_name.to_underscore\n end",
"def make_sql_friendly( name )\n return name.strip.gsub( \"'\", \"''\" )\n end",
"def visit_RGeo_ActiveRecord_SpatialNamedFunction(node_)\n name_ = st_func(node_.name)\n exprs_ = []\n node_.expressions.each_with_index do |expr_, index_|\n exprs_ << (node_.spatial_argument?(index_) ? visit_in_spatial_context(expr_) : visit(expr_))\n end\n \"#{name_}(#{node_.distinct ? 'DISTINCT ' : ''}#{exprs_.join(', ')})#{node_.alias ? \" AS #{visit node_.alias}\" : ''}\"\n end",
"def named_function(name, expression, function_alias = nil)\n Arel::Nodes::NamedFunction.new(name, expression, function_alias)\n end",
"def info_sql\n \"#{INFO_SQL} AND pg_catalog.pg_get_function_arguments(p.oid) = '#{to_s}'\"\n end",
"def quote_column_name(name)\n name.to_s\n end"
] |
[
"0.6741665",
"0.6719341",
"0.64273614",
"0.64095914",
"0.62986046",
"0.6254364",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61536664",
"0.61491776",
"0.6129138",
"0.6042367",
"0.60377145",
"0.6034418",
"0.59660244",
"0.5952023",
"0.5933959",
"0.59136176",
"0.5910356",
"0.5884077",
"0.5880228",
"0.5842586",
"0.5831905",
"0.58172596",
"0.5806732",
"0.58026475",
"0.57992244",
"0.57679886",
"0.5745185",
"0.5718383",
"0.5703738",
"0.5678453",
"0.5673309",
"0.56449807",
"0.5643959",
"0.56207114",
"0.56099755",
"0.5599267",
"0.5591045",
"0.5588557",
"0.5587179",
"0.5579466",
"0.5573755"
] |
0.7917335
|
1
|
Concatenate the expressions with a space in between
|
def full_text_string_join(cols)
cols = Array(cols).map{|x| SQL::Function.new(:COALESCE, x, '')}
cols = cols.zip([' '] * cols.length).flatten
cols.pop
SQL::StringExpression.new(:'||', *cols)
end
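A short trace of the method above for two columns; the column names are illustrative:

# cols = [:title, :body]
# after the COALESCE wrap: [COALESCE(title, ''), COALESCE(body, '')]
# after zip/flatten:       [COALESCE(title, ''), ' ', COALESCE(body, ''), ' ']
# after pop (drops the trailing space), the :'||' expression literalizes to:
#   COALESCE("title", '') || ' ' || COALESCE("body", '')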
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def to_s\n \"(#{@x.map(&:to_s).join(\" + \")})\"\n end",
"def expr_to_s\n buf = ''\n each_term do |term|\n if term[:operator]\n buf << term[:operator]\n end\n buf << term[:token]\n end\n buf\n end",
"def to_s\n # Wrap the expression on the left/right in parentheses if the operation of\n # the expression on the left/right has a lower precedence than this\n # Expression's, e.g. 2 * (3 + 5) because without parentheses it becomes 6 +\n # 5\n [@left, @right].each do |operand|\n if operand.class == Expression\n operand.do_parens = (has_lower_precedence?(operand) or ! operand.is_associative?)\n end\n end\n string = \"#{@left} #{@operator} #{@right}\"\n # Wrap in parentheses if necessary\n string = \"(#{string})\" if @do_parens\n return string\n end",
"def cmd_concat_operator\n\t\t\" & \"\n\tend",
"def to_output(expr)\n\t\tif expr.kind_of? Array\n\t\t\texpr.map { |x| to_output(x) + \" \"}.join[0..-2]\n\t\telse\n\t\t\texpr.to_s\n\t\tend\n\tend",
"def to_s\n template = indirect? ? \"[%s]\" : \"%s\"\n return template % expr_to_s\n end",
"def to_s\n \"(#{@op.to_s} #{@first.to_s} #{@second.to_s})\"\n end",
"def string_conc(dog, cat)\n p \"#{dog} #{cat} around the house.\"\nend",
"def join_terms(op, *terms)\n [terms.map { |t| t.first.presence }.compact.join(\" #{op} \"),\n terms.map(&:second).reduce(:+) || []]\n end",
"def orginize_equation\n gsub!(' ','')\n ##TODO:: orginaize \n end",
"def to_s\n values = compact.map do |scope|\n value = scope.to_s\n value = if scope.is_a?(NestedScope)\n scope.compact.count > 1 ? \"(#{value})\" : value\n else value\n end\n [scope.operator, value]\n end\n\n values = values.flatten.compact\n values.shift\n values.join(\" \")\n end",
"def to_s\n \"(#{@x.map(&:to_s).join(\" * \")})\"\n end",
"def to_s\n operator_string + \" \" + @expression.to_s\n end",
"def compile_expression\n write_tag '<expression>'\n compile_term\n compile_term while match('+', '-', '<', '/', '=', '>', '&', '|', '*')\n write_tag '</expression>'\n end",
"def adjacent *a; CssString.new a.flatten.unshift(self).join ' + ' end",
"def concatenation(word1, word2)\n puts \"Artist #{word1} just released their newest album, titled #{word2}.\"\nend",
"def join_up(words)\n words.flatten.join(' ').tap { |str| str << ')' if @in_brackets }\n end",
"def opcode(op,cond,args) \"#{op}#{cond} #{args.join ','}\" end",
"def space_out\n gsub(/(.)/, ' \\1')\n end",
"def operator_c_string\n operator_string + \" \"\n end",
"def concat(*elements)\n concat_with(elements, ' ')\n end",
"def operator_ruby_string\n operator_string + \" \"\n end",
"def concatenator(string1, string2)\n \"#{string1} #{string2}\"\nend",
"def complete_operator(node)\n definition.operator_by_field_name(node.value).map { |o| o.end_with?(' ') ? o : \"#{o} \" }\n end",
"def evaluate_expression_inner(expression)\n expression_string = evaluate_expression(expression.left)\n expression_string << ' ' + expression.operator.to_s + ' '\n expression_string << evaluate_expression(expression.right)\n expression_string\n end",
"def opcode(op,args) \"#{op} #{args.join ','}\" end",
"def opcode(op,args) \"#{op} #{args.join ','}\" end",
"def opcode(op,args) \"#{op} #{args.join ','}\" end",
"def opcode(op,args) \"#{op} #{args.join ','}\" end",
"def to_css\n css = ''\n css << resolved_modifier\n css << ' ' unless resolved_modifier.empty?\n css << resolved_type\n css << ' and ' unless resolved_type.empty? || expressions.empty?\n css << expressions.map {|e| e.to_css}.join(' and ')\n css\n end",
"def string_concat(name_1, name_2)\n puts \"I went to the concert with \" +name_1 + \" and \" + name_2 + \".\"\nend",
"def to_s\n \"(#{[' ', '+'][value]})\"\n end",
"def join(symbols = {}, scope = \"\")\n self.inject(\"\") do |ss, s|\n (ss.empty? ? \"\" : ss + self.class.format) + s.to_s(symbols, scope)\n end\n end",
"def complex_expression_sql_append(sql, op, args)\n case op\n when :^\n j = ' # '\n c = false\n args.each do |a|\n sql << j if c\n literal_append(sql, a)\n c ||= true\n end\n when :ILIKE, :'NOT ILIKE'\n sql << '('\n literal_append(sql, args[0])\n sql << ' ' << op.to_s << ' '\n literal_append(sql, args[1])\n sql << ')'\n else\n super\n end\n end",
"def expression\n comma\n end",
"def concat(string); end",
"def string_output(string)\n return string.gsub /\\s/, \"+\"\nend",
"def collapse(expression)\n expression.gsub!(/^\\(|\\)$/,'') if expression.match(/^\\(.*\\)$/)\n while expression.index('(')\n inner_range = expression.rindex('(')..expression.index(')')\n expression[inner_range] = collapse(expression[inner_range])\n end\n\n integer, fraction = expression.split(/\\+/)\n num, denom = Math.simplify_fraction(fraction).split('/').map(&:to_i)\n Math.reduce_fraction \"#{num + integer.to_i * denom}/#{denom}\"\nend",
"def create_equation\n equation =''\n for i in 0..(@operands.size-1)\n if i==0\n equation << @operands[i].to_s\n else\n #binding.pry\n equation << @operators[@operator_value[i-1]].to_s\n equation << @operands[i].to_s\n end\n end\n equation\n end",
"def to_code\n \"(#{@x.map(&:to_code).join(\" + \")})\"\n end",
"def get_display_expression(sequence)\n sequence.inject('') do |string, summand|\n if summand.to_s.start_with?('1')\n summand.to_s\n elsif summand > 0\n string + ' + ' + summand.to_s\n else\n string + ' - ' + summand.abs.to_s\n end\n end\nend",
"def process_lasgn(exp)\n s = \"#{exp.shift}\"\n s += \"=#{process exp.shift}\" unless exp.empty?\n s\n end",
"def to_s\n\n s = \"\\n\\n==== #{self.class} ====\"\n\n find_expressions.each do |fexp|\n\n s << \"\\n\"\n if fexp.kind_of?(RawExpression)\n s << \"*raw\"\n else\n s << \" \"\n end\n s << fexp.fei.to_s\n end\n s << \"\\n==== . ====\\n\"\n\n s\n end",
"def _eval_join(*args)\n args = args.compact\n args.delete_if &:empty?\n args.slice(1, args.size).to_a.inject(args.first) do |memo, item|\n if item.start_with?(\"[\")\n memo += item\n else\n memo += \".#{item}\"\n end\n end\n end",
"def to_s\n \"(#{ x }, #{ y })\"\n end",
"def concat(other_expression)\n @expression = send(:+, other_expression)\n end",
"def concatenation (blow_dryer_setting, male_first_name)\n puts \"but my friends call me #{blow_dryer_setting} #{male_first_name}\"\nend",
"def join_strings(text_1, text_2)\n text_1 + \" \" + text_2\nend",
"def add_expressions(*expressions); end",
"def print_expression(expression)\n string = ''\n if expression.is_a? Yarpler::Models::Forall\n range = expression.range.to_s\n range=range.tr(\",[]\", '')\n string << \"[#{expression} #{expression.variable} for #{range}]->\" + print_expression(expression.expression)\n elsif expression.is_a? Yarpler::Models::Cardinality\n string << \"[#{expression}]\\n\"\n string << \"[#{expression}]->\" + print_expression(expression.element)\n elsif expression.is_a? Yarpler::Models::Field\n string << \"[#{expression} #{expression.variable}.#{expression.attribute}]\\n\"\n elsif expression.is_a? Yarpler::Models::Literal\n string << \"[#{expression} #{expression.value}]\\n\"\n elsif expression.is_a? Yarpler::Models::Instance\n string << \"[#{expression} #{expression.variable}]\\n\"\n elsif expression.is_a? Yarpler::Models::CountFunction\n string << \"[#{expression}} for #{expression.range.variable}.#{expression.range.attribute} ]\\n\"\n string << \"[#{expression}} for #{expression.range.variable}.#{expression.range.attribute} ]--\" + print_expression(expression.where)\n elsif expression.is_a? Yarpler::Models::SumValueFunction\n string << \"[#{expression}} #{expression.attribute} of #{expression.set.variable}.#{expression.set.attribute} ]\\n\"\n elsif expression.is_a? Yarpler::Models::Expression\n string << \"[#{expression} #{expression.operator}]\\n\"\n string << \"[#{expression} #{expression.operator}]->\" + print_expression(expression.left)\n string << \"[#{expression} #{expression.operator}]->\" + print_expression(expression.right)\n else\n puts expression.class.to_s + ' missing.'\n end\n string\n end",
"def concatenate(strings)\n\nend",
"def together(string, string2, string3)\n return \"#{string} #{string2} #{string3}\" \nend",
"def to_s(exp)\n if exp.is_a?(Array)\n \"( #{exp.map{ |t| to_s(t) }.join(' ')} )\"\n else\n exp.to_s\n end\n end",
"def to_s\n identifier = \"#{fieldx}_#{value}\".downcase.gsub(/\\W/,'_')\n \"#{operator}#{identifier}\"\n end",
"def exponify a, b\n return \"a ^ b = #{display_exponify a, b}#{a**b}\"\nend",
"def join_conditions op, *conditions\n conditions = conditions.compact\n if conditions.empty?\n conditions\n elsif conditions.count == 1\n conditions.first\n else\n strings = conditions.map(&:first)\n args = conditions.flat_map{|c| c[1..-1]}\n [\n strings.map{|s| \"(#{s})\"}.join(\" #{op} \"),\n *args\n ]\n end\n end",
"def apply_function_concat(scope, ast)\n args = ast.children.map { |arg|\n scope, sql = apply_ast(scope, arg)\n \"(#{sql}::text)\"\n }\n\n return scope, \"(concat(#{args.join(',')}))\"\n end",
"def to_string(expression)\n\t\t\tif expression.is_a? Array\n\t\t\t\texpression.map { |exp| to_string(exp) }\n\t\t\t\treturn \"(\" + expression.join(\" \") + \")\"\n\t\t\telse\n\t\t\t\treturn expression.to_s\n\t\t\tend\n\t\tend",
"def add_expr_escaped(src, code)\n src << \" #{@bufvar} << _erubis_xml_helper.escape_xml((\" << code << '));'\n end",
"def format_sexp(args)\n args.map{|x| format_object(x, \" \", \"'\")}.join(' ')\n end",
"def join_strings(word_1, word_2)\n join_strings=\"#{word_1} #{word_2}\"\nend",
"def compile(exp)\n \"\\\"#{compile!(exp)}\\\"\"\n end",
"def expressions; end",
"def to_s\n rule.embed + operator\n end",
"def to_code\n \"(#{@x.map(&:to_code).join(\" * \")})\"\n end",
"def oxford_comma(words)\n if words.length == 1\n words.join\n elsif words.length == 2\n return \"#{words[0]} and #{words[1]}\"\n else words.length > 2\n words[-1].insert(0 , \"and \")\n end\n words.join(\", \") \n end",
"def to_s; \"<Operand: #{to_source}>\"; end",
"def union_expand_str(expander, columns)\n foo=[]\n 1.upto(columns.to_i) { |num| foo << expander }\n u = \"UNION ALL SELECT \" + \"#{foo.join(',')}\"\n return u\nend",
"def concatentation_method(lover1, lover2)\n puts \"#{lover1} loves #{lover2}\"\nend",
"def add_expression_result_escaped(code)\n with_buffer{@src << ' << ' << @escapefunc << '((' << code << '))'}\n end",
"def to_s\n @expression.to_s\n end",
"def oxford_comma(array)\nif array.count == 1 then array.join\nelsif array.count == 2 then array.join(\" and \")\nelsif array.count >= 3\n array[-1].insert(0, \"and \")\n array.join(', ')\nend\nend",
"def join_with_indent_or_spaces(args)\n if args.count >= 5\n joined = args.join(\",\\n \")\n \"\\n #{joined}\"\n else\n args.join(', ')\n end\n end",
"def join_strings(parameter1, parameter2)\r\n\treturn parameter1 + \" \" + parameter2\r\nend",
"def echo(*expressions)\n command \"echo #{expressions.join(' ')}\"\n end",
"def format_comma_and(array)\n return array.join if array.length <= 1\n array[0..-2].join(', ') + \" and #{array[-1]}\"\nend",
"def compute(*values)\n if values.size == 0\n return ''\n end\n if values.size == 1\n return values[0].to_s\n end\n return [values[0], values[1..].join('')].join('.')\nend",
"def join_atoms(ary)\n out = \"\"\n ary.each do |a|\n out += \" \" if (out.length != 0 && !a.match(/^[,:;.?!]+$/))\n out += a\n end\n return out\n end",
"def imitate_code(txt)\n \"%#{txt};;\"\n end",
"def space; txt ' ','' end",
"def terminate_expression\n @src << '; ' if @chain_appends\n end",
"def expr_eval_to_s\n if @last_eval_state.nil?\n return \"<unevaluated>\"\n end\n \n str = ''\n if @offset\n str << \"#{@offset < 0 ? '-' : ''}0x#{@offset.abs.to_s(16)}\"\n end\n \n if @last_eval_state[:special] || @last_eval_state[:register]\n if @offset\n str << '+'\n end\n if @last_eval_state[:special]\n str << @last_eval_state[:special_tok]\n elsif @last_eval_state[:register]\n str << @last_eval_state[:reg_tok]\n end \n end\n \n str\n end",
"def to_phrase(items)\n *items, last_item = items\n return last_item if items.empty?\n\n items.join(', ') + ', and ' + last_item\n end",
"def combine_name(first_name, last_name)\n # Q3 CODE HERE\n return \"#{first_name} #{last_name}\"\nend",
"def to_s\n [left, right].uniq.join(\" #{messages[:or]} \")\n end",
"def special_ending(text)\n (text.present?) ? '(%s)' % text : ''\n end",
"def assemble\n @code.join\n end",
"def handle_concat(concat)\n if concat.class.to_s == 'Puppet::Parser::AST::Concat'\n ret_val = String.new(\"\")\n concat.value.each do |value|\n if value.to_s[0].eql?(\"$\")\n ret_val.concat('#{' + value.to_s + '}')\n else\n ret_val.concat(value.to_s)\n end\n end\n ret_val.gsub!('\"', '')\n else\n return concat\n end\n end",
"def oxford_comma(array)\n if array.length == 2\n return \"#{array[0]} and #{array[1]}\"\n elsif 2 < array.length\n array[-1].insert(0, \"and \")\n end\n array.join(\", \") \nend",
"def comstr\n\t\tjoin(', ')\n\tend",
"def display_string\n op_match = /#{SORTEDOPS.map { |o| Regexp.quote(o) }.join(\"|\")}/\n popen = Regexp.quote(OPEN_PAREN)\n pclose = Regexp.quote(CLOSE_PAREN)\n text = self.to_s\n text.gsub(/#{popen}(.*?)#{pclose}/) { |m|\n payload = $1.dup\n count = 0\n payload.gsub(op_match) do |pm|\n count += 1\n pm\n end\n if count == 1\n payload.strip\n else\n OPEN_PAREN + payload.strip + CLOSE_PAREN\n end\n }.strip\n end",
"def +(suffix)\n suffix = suffix.to_s\n\n call + (suffix.start_with?(' ') ? '' : ' ') + suffix\n end",
"def do_expression(expr, vars)\n parse_expression(expr).each do |k|\n expr = expr.gsub(\"$(#{k})\", vars[k])\n end\n expr\nend",
"def sentence_maker(x)\n\tx.join(\" \")\nend",
"def to_s\n \"(#{@x})^(#{@y})\"\n end",
"def join_strings(a, b)\n a + \" \" + b\nend",
"def my_join(spacer = '')\n str = ''\n\n self.each_with_index do |char, idx|\n str += char\n str += spacer if idx != self.length - 1\n end\n\n str\n end",
"def concat(ga1, ga2)\n p \"#{ga1} makes a better ginger ale than #{ga2}\"\nend",
"def to_s\n inject(\"\") { |a, b| a + b.to_s }\n end",
"def to_s\n inject(\"\") { |a, b| a + b.to_s }\n end"
] |
[
"0.63969976",
"0.63577175",
"0.6309748",
"0.6276193",
"0.6244219",
"0.62273544",
"0.62001157",
"0.619735",
"0.6179301",
"0.61741954",
"0.61233836",
"0.6113048",
"0.6025421",
"0.59900457",
"0.59634244",
"0.59588397",
"0.5937076",
"0.59319395",
"0.5896875",
"0.58860207",
"0.5870589",
"0.58581764",
"0.5838935",
"0.5831009",
"0.5823272",
"0.5822728",
"0.5822728",
"0.5822728",
"0.5822728",
"0.5821786",
"0.57999617",
"0.57900447",
"0.5788581",
"0.57781667",
"0.57629895",
"0.57528377",
"0.57509106",
"0.5724902",
"0.5717303",
"0.5715948",
"0.5713879",
"0.57016677",
"0.5697026",
"0.5677595",
"0.5676525",
"0.5672033",
"0.56602365",
"0.5655782",
"0.56500417",
"0.56494933",
"0.5647471",
"0.5646063",
"0.5636648",
"0.5632391",
"0.56233937",
"0.5615561",
"0.56088907",
"0.56013316",
"0.55994827",
"0.5592702",
"0.55924016",
"0.5589922",
"0.55769557",
"0.55525786",
"0.55512977",
"0.55424595",
"0.5532376",
"0.55220985",
"0.55150896",
"0.5496846",
"0.54947186",
"0.5489865",
"0.54869604",
"0.54853815",
"0.5481807",
"0.5481435",
"0.54720885",
"0.54704154",
"0.54702806",
"0.54628325",
"0.546058",
"0.5456792",
"0.5447686",
"0.54446846",
"0.54300165",
"0.54204875",
"0.5420089",
"0.54177886",
"0.541674",
"0.5412136",
"0.54116863",
"0.54078484",
"0.54069936",
"0.5396231",
"0.5391693",
"0.53901035",
"0.53890514",
"0.5387373",
"0.53864586",
"0.53864586"
] |
0.5863618
|
21
|
Use FROM to specify additional tables in an update query
|
def update_from_sql(sql)
join_from_sql(:FROM, sql)
end
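A sketch of the UPDATE ... FROM shape this helper contributes to on PostgreSQL; the table and column names are invented:

# With a multi-table dataset, Sequel updates the first table and moves the
# remaining tables into the UPDATE statement's FROM clause:
DB.from(:accounts, :logins)
  .where(Sequel[:logins][:account_id] => Sequel[:accounts][:id])
  .update(active: false)
# UPDATE "accounts" SET "active" = false FROM "logins"
# WHERE ("logins"."account_id" = "accounts"."id")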
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def update_table_sql(sql)\n sql << ' '\n source_list_append(sql, @opts[:from][0..0])\n end",
"def update_from(from_table, *args, &block)\n\n configure_transfer(from_table, *args) { yield }\n\n @current_operation = StatusReport.instance.add_operation :source => @from_table,\n :destination => @to_table,\n :source_type => args.extract_options![:source_type] || :active_record,\n :type => 'update'\n source_iterator(@limit, @type).each do |from_record|\n matching_records = @conditions.call(from_record)\n\n unless matching_records.empty?\n matching_records.each do |to_record|\n @columns.each do |to, from|\n to_record[to]= from.call(from_record)\n end\n\n if @options[:validate]\n report_validation_errors(to_record, from_record, 'update')\n else\n to_record.save(false)\n @current_operation.record_update(to_record)\n end\n store_as(to_record, from_record)\n end\n else\n new_destination_record(from_record)\n end\n end\n @status_report\n end",
"def join_from_sql(type, sql)\n if(from = @opts[:from][1..-1]).empty?\n raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join]\n else\n sql << SPACE << type.to_s << SPACE\n source_list_append(sql, from)\n select_join_sql(sql)\n end\n end",
"def join_from_sql(type, sql)\n if(from = @opts[:from][1..-1]).empty?\n raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join]\n else\n sql << ' ' << type.to_s << ' '\n source_list_append(sql, from)\n select_join_sql(sql)\n end\n end",
"def update!(**args)\n @base_table = args[:base_table] if args.key?(:base_table)\n @sql_query = args[:sql_query] if args.key?(:sql_query)\n @view_type = args[:view_type] if args.key?(:view_type)\n end",
"def update!(**args)\n @oracle_tables = args[:oracle_tables] if args.key?(:oracle_tables)\n @schema = args[:schema] if args.key?(:schema)\n end",
"def add_select_into_table(new_table_name, sql_query)\r\n sql_query.sub(/FROM/i, \"INTO #{new_table_name} FROM\")\r\n end",
"def add_select_into_table(new_table_name, sql_query)\n sql_query.sub(/FROM/i, \"INTO #{new_table_name} FROM\")\n end",
"def update!(**args)\n @postgresql_tables = args[:postgresql_tables] if args.key?(:postgresql_tables)\n @schema = args[:schema] if args.key?(:schema)\n end",
"def copy_table(old_table_name, new_table_name)\r\n execute add_select_into_table(new_table_name, \"SELECT * FROM #{old_table_name}\")\r\n end",
"def update_sql(values = {}, opts = nil, &block)\n opts = opts ? @opts.merge(opts) : @opts\n\n if opts[:group]\n raise Error::InvalidOperation, \"A grouped dataset cannot be updated\"\n elsif (opts[:from].size > 1) or opts[:join]\n raise Error::InvalidOperation, \"A joined dataset cannot be updated\"\n end\n \n sql = \"UPDATE #{source_list(@opts[:from])} SET \"\n if block\n sql << block.to_sql(self, :comma_separated => true)\n else\n set = if values.is_a?(Hash)\n # get values from hash\n values = transform_save(values) if @transform\n values.map do |k, v|\n # convert string key into symbol\n k = k.to_sym if String === k\n \"#{literal(k)} = #{literal(v)}\"\n end.join(COMMA_SEPARATOR)\n else\n # copy values verbatim\n values\n end\n sql << set\n end\n if where = opts[:where]\n sql << \" WHERE #{literal(where)}\"\n end\n\n sql\n end",
"def update_where(table, what, where, *bindvars)\n sql = \"update #{table.name}\\nset #{what} where #{where}\"\n#$stderr.puts sql\n db.do(sql, *bindvars)\n end",
"def update_by_sql(target, set, options = nil)\n set = set.gsub(/@/, '')\n\n if target.is_a? Class\n sql = \"UPDATE #{target.table} SET #{set} \"\n sql << \" WHERE #{options[:condition]}\" if options and options[:condition]\n sql_update(sql)\n else\n sql = \"UPDATE #{target.class.table} SET #{set} WHERE #{pk_field target.class} = #{quote(target.pk)}\"\n sql << \" AND #{options[:condition]}\" if options and options[:condition]\n sql_update(sql)\n end\n end",
"def update!(**args)\n @oracle_columns = args[:oracle_columns] if args.key?(:oracle_columns)\n @table = args[:table] if args.key?(:table)\n end",
"def select!\n # Ensure we can never be destructive by nilifying :update.\n Mao.sql(with_options(:update => nil).sql) do |pg_result|\n if @options[:join]\n other = Mao.query(@options[:join][0])\n pg_result.map {|result|\n Mao.normalize_join_result(result, self, other)\n }\n else\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n end\n end\n end",
"def reflection_merge(reflection, from, to)\n foreign_key = reflection.options[:foreign_key] || table_name.classify.foreign_key\n sql = case reflection.macro\n when :has_one, :has_many\n \"UPDATE #{reflection.klass.table_name} SET #{foreign_key} = #{to} WHERE #{foreign_key} = #{from}\\n\" \n when :has_and_belongs_to_many\n join_table = reflection.options[:join_table] || ( table_name < reflection.klass.table_name ? '#{table_name}_#{reflection.klass.table_name}' : '#{reflection.klass.table_name}_#{table_name}')\n \"UPDATE #{join_table} SET #{foreign_key} = #{to} WHERE #{foreign_key} = #{from}\\n\" \n else return\n end\n connection.update(sql)\n end",
"def table_update_query(table, values, org_key = nil)\n org_key ||= values\n query = \"update #{quote_table_name(table)} set \"\n query << values.map do |column_name, value|\n \"#{quote_column_name(column_name)} = #{quote_value(table, column_name, value)}\"\n end.join(', ')\n query << \" where (\" << quote_key_list(table) << \") = (\"\n query << primary_key_names(table).map do |key|\n quote_value(table, key, org_key[key])\n end.join(', ') << \")\"\n end",
"def copy_table(old_table_name, new_table_name)\n execute add_select_into_table(new_table_name, \"SELECT * FROM #{old_table_name}\")\n end",
"def update_sql\n update_manager = Arel::UpdateManager.new(ActiveRecord::Base)\n update_manager.table(arel_table)\n update_manager.where(arel_table[:id].eq(id))\n update_manager.set([[arel_table[:retoure_reason], retoure_reason]])\n update_manager.to_sql\n end",
"def update!(**args)\n @results_table = args[:results_table] if args.key?(:results_table)\n end",
"def update!(**args)\n @results_table = args[:results_table] if args.key?(:results_table)\n end",
"def extract_table_ref_from_update_sql(sql)\n sql[/update\\s(\"[A-Za-z0-9_.\"\\[\\]\\s]+\"|[A-Za-z0-9_.\"\\[\\]]+)\\s*set/im]\n Regexp.last_match(1)&.strip\n end",
"def update!(**args)\n @table_source_type = args[:table_source_type] if args.key?(:table_source_type)\n @table_spec = args[:table_spec] if args.key?(:table_spec)\n @view_spec = args[:view_spec] if args.key?(:view_spec)\n end",
"def update!(**args)\n @table_source_type = args[:table_source_type] if args.key?(:table_source_type)\n @table_spec = args[:table_spec] if args.key?(:table_spec)\n @view_spec = args[:view_spec] if args.key?(:view_spec)\n end",
"def copy_table_data(from, to, remaps = [])\n old = columns(from).collect(&:name)\n current = columns(to).collect(&:name)\n remapped_columns = remaps.collect {|c| c.first.to_s}.compact\n common = (current & old).sort - remapped_columns\n from_columns = common.collect {|c| \"`#{c}`\"}\n to_columns = common.collect {|c| \"`#{c}`\"}\n remaps.each do |remap|\n remap = [remap].flatten\n next if remap.length != 2\n from_columns << remap.first\n to_columns << remap.last\n end\n from_columns_to_s = from_columns.join(', ')\n to_columns_to_s = to_columns.join(', ')\n execute \"INSERT INTO #{to}(#{to_columns_to_s}) SELECT #{from_columns_to_s} FROM #{from}\"\n end",
"def update!(**args)\n @postgresql_columns = args[:postgresql_columns] if args.key?(:postgresql_columns)\n @table = args[:table] if args.key?(:table)\n end",
"def merge(*tables)\n from(default_table).merge(*tables)\n end",
"def _construct_multiple_update_sql(table, where_update_fields, where_update_values)\n id_collector = Array.new\n update_sql = \"UPDATE #{table} SET\"\n \n (where_update_fields.length - 1).times do |field_i|\n update_sql += \", \" if field_i > 0\n update_sql += \" #{where_update_fields[field_i + 1]} = CASE #{where_update_fields[0]}\"\n \n where_update_values.each_index do |i|\n update_sql += \" WHEN \" + _construct_sql_value(where_update_values[i][0]) + \" THEN \" + _construct_sql_value(where_update_values[i][field_i + 1])\n id_collector << _construct_sql_value(where_update_values[i][0]) if field_i == 0\n end\n \n update_sql += \" END\"\n end\n \n update_sql += \" WHERE #{where_update_fields[0]} in (\" + id_collector.join(\",\") + \")\"\n \n update_sql\n end",
"def extend_sql_avoiding_table_naming_clashes!(sql, addition)\r\n used_table_aliases = table_aliases_from_join_fragment(addition)\r\n old_table_aliases = table_aliases_from_join_fragment(sql)\r\n (used_table_aliases & old_table_aliases).each do |join_table_alias|\r\n i = 0\r\n begin\r\n i += 1\r\n new_alias = \"renamed_join_table_#{i}\"\r\n end until !used_table_aliases.include?(new_alias)\r\n convert_table_name_to_new_alias!(sql, join_table_alias, new_alias)\r\n end\r\n sql << \" #{addition} \"\r\n end",
"def update!(**args)\n @schema = args[:schema] if args.key?(:schema)\n @table = args[:table] if args.key?(:table)\n end",
"def update!(**args)\n @schema = args[:schema] if args.key?(:schema)\n @table = args[:table] if args.key?(:table)\n end",
"def update_graph\n connection.execute <<-EOS\n UPDATE #{oqgraph_table_name} \n SET origid = #{self.send(self.class.from_key)}, \n destid = #{self.send(self.class.to_key)}, \n weight = #{self.send(self.class.weight_column)} \n WHERE origid = #{self.send(self.class.from_key + '_was')} AND destid = #{self.send(self.class.to_key + '_was')};\n EOS\n end",
"def scaffold_reflection_merge(reflection, from, to)\n if reflection.class == DataMapper::Associations::OneToMany::Relationship\n foreign_key = get_key_array_safe(reflection.target_key).name\n table = reflection.target_model\n elsif reflection.class == DataMapper::Associations::ManyToMany::Relationship\n foreign_key = get_key_array_safe(reflection.through.child_key).name\n table = reflection.send(:through_model)\n else\n return\n end\n table.all(foreign_key => from).update(foreign_key => to)\n end",
"def delete_from_sql(sql)\n sql << ' FROM '\n source_list_append(sql, @opts[:from][0..0])\n end",
"def update!(**args)\n @mysql_columns = args[:mysql_columns] if args.key?(:mysql_columns)\n @table = args[:table] if args.key?(:table)\n end",
"def update!(**args)\n @destination_bigquery_uri = args[:destination_bigquery_uri] if args.key?(:destination_bigquery_uri)\n @override_existing_table = args[:override_existing_table] if args.key?(:override_existing_table)\n end",
"def select_from_sql(sql)\n sql << \" FROM #{source_list(@opts[:from] || ['DUAL'])}\"\n end",
"def update_audit_tables\n tables.each(&:update_audit_table)\n end",
"def join(*tables)\n from(default_table).join(*tables)\n end",
"def update!(**args)\n @derived_from = args[:derived_from] if args.key?(:derived_from)\n @replaced_by = args[:replaced_by] if args.key?(:replaced_by)\n end",
"def merge_into(target)\n target_id = target.id\n # Find all the Entries attached to this name, that will need to be\n # reindexed after the merge\n entry_ids = entry_ids_to_index_on_update\n\n ids = EntryArtist.where(artist_id: self.id).pluck(:id)\n EntryArtist.where(artist_id: self.id).update_all({ artist_id: target_id })\n EntryArtist.where( id: ids ).each(&:update_bunny)\n\n ids = EntryAuthor.where(author_id: self.id).pluck(:id)\n EntryAuthor.where(author_id: self.id).update_all({ author_id: target_id })\n EntryAuthor.where( id: ids ).each(&:update_bunny)\n\n ids = EntryScribe.where(scribe_id: self.id).pluck(:id)\n EntryScribe.where(scribe_id: self.id).update_all({ scribe_id: target_id })\n EntryScribe.where( id: ids ).each(&:update_bunny)\n\n ids = SaleAgent.where(agent_id: self.id).pluck(:id)\n SaleAgent.where(agent_id: self.id).update_all({ agent_id: target_id })\n SaleAgent.where( id: ids ).each(&:update_bunny)\n\n ids = SourceAgent.where(agent_id: self.id).pluck(:id)\n SourceAgent.where(agent_id: self.id).update_all({ agent_id: target_id })\n SourceAgent.where( id: ids ).each(&:update_bunny)\n\n ids = Provenance.where(provenance_agent_id: self.id).pluck(:id)\n Provenance.where(provenance_agent_id: self.id).update_all({ provenance_agent_id: target_id })\n Provenance.where( id: ids ).each(&:update_bunny)\n\n ids = DericciLink.where(name_id: self.id).pluck(:id)\n DericciLink.where(name_id: self.id).update_all({ name_id: target_id })\n DericciLink.where( id: ids ).each(&:update_bunny)\n\n ids = DericciRecord.where(verified_id: self.id).pluck(:id)\n DericciRecord.where(verified_id: self.id).update_all({verified_id: target_id})\n DericciRecord.where( id: ids ).each(&:update_bunny)\n\n # update flags on the target\n target.is_artist ||= self.is_artist\n target.is_author ||= self.is_author\n target.is_scribe ||= self.is_scribe\n target.is_provenance_agent ||= self.is_provenance_agent\n\n target.save\n\n # but ... CAN't SAVE when name is BLANK (nil)\n # self.name = nil\n self.viaf_id = nil\n self.deleted = true\n self.save!\n\n # slice into managable chunks to avoid running out of space in mysql\n entry_ids.each_slice(200) do |slice|\n SDBMSS::IndexJob.perform_later(Entry.to_s, slice)\n end\n\n Name.update_counters(target.id,\n :authors_count => target.author_entries.where(deprecated: false, draft: false).count - target.authors_count,\n :artists_count => target.artist_entries.where(deprecated: false, draft: false).count - target.artists_count,\n :scribes_count => target.scribe_entries.where(deprecated: false, draft: false).count - target.scribes_count,\n :sale_agents_count => target.sale_entries.where(deprecated: false, draft: false).count - target.sale_agents_count,\n :provenance_count => target.provenance_entries.where(deprecated: false, draft: false).count - target.provenance_count,\n :source_agents_count => target.agent_sources.count - target.source_agents_count\n )\n end",
"def update!(**args)\n @database = args[:database] if args.key?(:database)\n @mysql_tables = args[:mysql_tables] if args.key?(:mysql_tables)\n end",
"def build_table_aliases(from)\n # for the targets\n returning({}) do |aliases|\n from.map(&:to_s).sort.map(&:to_sym).each_with_index do |plural, t_index|\n table = plural._as_class.table_name\n plural._as_class.columns.map(&:name).each_with_index do |field, f_index|\n aliases[\"#{table}.#{field}\"] = \"t#{t_index}_r#{f_index}\"\n end\n end\n end\n end",
"def from(table_name, options = {})\n options[:alias] ||= table_name\n\n # add to roots\n @roots[table_name] = @criteria.from(eval(\"#{FROM}.#{table_name}.java_class\"))\n\n # set from\n @from = @roots[table_name]\n\n # set alias for both\n @alias[options[:alias]] = @roots[table_name]\n @alias[table_name] = @roots[table_name]\n\n # set table name for from\n @from_table = table_name\n\n if options[:count]\n self.count = true\n @select = @criteria.select(@builder.count(@roots[table_name]))\n else\n self.count = false\n @select = @criteria.select(@roots[table_name])\n end\n\n self\n end",
"def update!(**args)\n @database = args[:database] if args.key?(:database)\n @table = args[:table] if args.key?(:table)\n end",
"def delete_using_sql(sql)\n join_from_sql(:USING, sql)\n end",
"def delete_using_sql(sql)\n join_from_sql(:USING, sql)\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def replace_related_type(old, new)\n $db.execute \"UPDATE destination.series_relationships SET type = ? WHERE type = ?\", new, old;\nend",
"def update_sql(from_id, to_id)\n <<~SQL\n WITH updated_records AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} (\n UPDATE services SET active = TRUE\n WHERE services.project_id BETWEEN #{Integer(from_id)} AND #{Integer(to_id)} AND services.properties = '{}' AND services.type = '#{Migratable::PrometheusService.type}'\n AND #{group_cluster_condition(from_id, to_id)} AND services.active = FALSE\n RETURNING *\n )\n SELECT COUNT(*) as number_of_updated_records\n FROM updated_records\n SQL\n end",
"def tables_from(db=current_database)\n end",
"def apply_changes\n changes.each do |method, args|\n connection.send(method, new_table, *args)\n end\n end",
"def copy_table_indexes(from, to, rename = {})\n indexes(from).each do |index|\n name = index.name.downcase\n if to == \"a#{from}\"\n name = \"t#{name}\"\n elsif from == \"a#{to}\"\n name = name[1..-1]\n end\n\n to_column_names = columns(to).map(&:name)\n columns = index.columns.map { |column| rename[column] || column }\n columns = columns.select { |column| to_column_names.include?(column) }\n\n unless columns.empty?\n # index name can't be the same\n opts = { :name => name.gsub(/(^|_)(#{from})_/, \"\\\\1#{to}_\"), :internal => true }\n opts[:unique] = true if index.unique\n add_index(to, columns, opts)\n end\n end\n end",
"def apply_join(query, join)\n validate_query(query)\n query.joins(join)\n end",
"def update!(**args)\n @force = args[:force] if args.key?(:force)\n @table_uri = args[:table_uri] if args.key?(:table_uri)\n end",
"def update!(**args)\n @existing_schema = args[:existing_schema] if args.key?(:existing_schema)\n @new_schema = args[:new_schema] if args.key?(:new_schema)\n @sampled_data_locations = args[:sampled_data_locations] if args.key?(:sampled_data_locations)\n @schema_change = args[:schema_change] if args.key?(:schema_change)\n @table = args[:table] if args.key?(:table)\n end",
"def rebuild(table); end",
"def update_all(updates, conditions = nil)\n # TODO\n raise \"not implemented\"\n# sql = \"UPDATE #{table_name} SET #{sanitize_sql(updates)} \"\n# add_conditions!(sql, conditions, scope(:find))\n# connection.update(sql, \"#{name} Update\")\n end",
"def table(table)\n self.query.table = table\n self\n end",
"def update(*updates, &select_cond)\r\n raise ArgumentError, \"Must specify select condition code \" + \\\r\n \"block. To update all records, use #update_all instead.\" if \\\r\n select_cond.nil?\r\n\r\n # Update the header variables.\r\n update_header_vars\r\n\r\n # Get all records that match the selection criteria and\r\n # return them in an array.\r\n result_set = get_matches(:update, @field_names, select_cond)\r\n\r\n # If updates is empty, this means that the user must have specified\r\n # the updates in KBResultSet#set, i.e.\r\n # tbl.update {|r| r.recno == 1}.set(:name => 'Bob')\r\n return result_set if updates.empty?\r\n\r\n # Call KBTable#set and pass it the records to be updated and the\r\n # updated criteria.\r\n set(result_set, updates)\r\n end",
"def to_sql(options = {})\n sql = \"UPDATE #{table.to_sql(options)} SET \"\n sql << updates.map { |u| u.to_sql(options) }.join(', ')\n sql << \" WHERE \" << where.to_sql(options) if self.where\n sql\n end",
"def convert_table_name_to_new_alias!(sql, old_table_name, new_alias)\r\n regex = Regexp.new(\"(?:(?:JOIN|AS)?\\\\s+|\\\\()[`\\\"\\\\[]?#{old_table_name}[`\\\"\\\\]]?(?:\\\\s+(?:AS\\\\s+)?(?:`[^`]+`|\\\"[^\\\"]+\\\"|\\\\[[^\\\\]]+\\\\]|\\\\S+)|\\\\.|\\\\s)\", Regexp::IGNORECASE)\r\n sql.gsub!(regex) do |match|\r\n prefix = (match =~ /^\\(/) ? '(' : ''\r\n suffix = match.gsub(/^.*?(\\s+ON|.)$/i, '\\1')\r\n if test = match.match(/^JOIN\\s+(?:`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+)(\\s+(?:AS\\s+)?(?:`[^`]+`|\"[^\"]+\"|\\[[^\\]]+\\]|\\S+))/i) and !(test.captures.first =~ /^ ON$/i)\r\n # If the table name is already aliased within this match, don't replace it\r\n result = match\r\n else\r\n replacement = \"JOIN #{old_table_name} AS #{new_alias}\" if match =~ /^JOIN\\s/i\r\n replacement = \"AS #{new_alias}\" if match =~ /^AS\\s/i\r\n replacement = \" #{new_alias}\" unless match =~ /^(JOIN|AS)\\s/i\r\n result = \"#{prefix}#{replacement}#{suffix}\"\r\n end\r\n result\r\n end\r\n end",
"def updateRelname(fromPT)\n\n relNames = JsonPath.on(fromPT.to_json, '$..RANGEVAR')\n # pp relNames\n \t# column has no relalias or relname \n \trelNames.each do |rel|\n \t\ttblName = rel['relname']\n tblAlias = rel['alias'].nil? ? nil : rel['alias']['ALIAS']['aliasname']\n # pp tblName\n # pp tblAlias\n # if @relaias already exists, we only query on matching relialia\n if not @relalias.to_s.empty? and tblName != @relalias and tblAlias != @relalias\n # pp 'searching for matching @relalias'\n # binding.pry\n next\n end\n query = QueryBuilder.find_cols_by_data_typcategory(tblName,'',@colname)\n res = DBConn.exec(query)\n # pp res\n if res.count()>0\n # pp res[0]\n r = res[0]\n @relname = tblName\n @datatype = r['data_type']\n @typcategory = r['typcategory']\n # puts 'rel'\n # pp rel\n if rel.has_key?('alias') \n unless rel['alias'].nil?\n @relalias = rel['alias']['ALIAS']['aliasname']\n else\n @relalias = nil\n end\n end\n return\n end\n \tend\n end",
"def update(sql, name = nil) end",
"def join_tables(db)\n db.execute(\"SELECT users.user_name, platform.platform_name, music.artist, music.song, music.explicit_lyrics FROM music JOIN users ON music.user_id = users.id JOIN platform ON music.platform_id = platform.id\")\nend",
"def update_all(updates, conditions = nil)\n sql = \"UPDATE #{table_name} SET #{updates} \"\n add_conditions!(sql, conditions)\n connection.update(sql, \"#{name} Update\")\n end",
"def inner_query\n self.class.\n select(\"#{SUBQUERY_TABLE_ALIAS}.*\").\n from(\"#{table_name} AS #{SUBQUERY_TABLE_ALIAS}\")\n end",
"def build_partial_update_query(all_attribute_keys, hashes)\n # Cache the connection for the batch\n connection = get_connection\n\n all_attribute_keys = (all_attribute_keys + unique_index_columns).uniq\n\n update_query = update_query_beginning(all_attribute_keys)\n update_query += update_query_from_values(hashes, all_attribute_keys, connection, unique_index_columns)\n update_query\n end",
"def update_all(updates)\n sum do |relation|\n relation.update_all(updates)\n end\n end",
"def update!(changes)\n Mao.sql(with_options(:update => changes).sql) do |pg_result|\n pg_result.cmd_tuples\n end\n end",
"def set_publication_tables(name, tables)\n typed_exec(\"ALTER PUBLICATION #{connection.quote_ident(name)} SET TABLE #{safe_list(tables)}\")\n end",
"def db_query_transform__subquery query, tmp_table=\"resultset_table\"\n \"(#{query}) as #{tmp_table}\"\n end",
"def insert_into_sql(sql)\n sql << \" INTO \"\n if (f = @opts[:from]) && f.length == 1\n identifier_append(sql, server_version >= 90500 ? f.first : unaliased_identifier(f.first))\n else\n source_list_append(sql, f)\n end\n end",
"def migrate_from(src_rs, condition = nil, opts = {})\r\n Rigrate.logger.info(\"start migration : source rs [#{src_rs.size}] ->target rs [#{rows.size}]\")\r\n mode = opts[:mode]\r\n condition = eval \"{#{condition.to_s.upcase}}\" unless condition.nil?\r\n\r\n if condition\r\n src_cols_idx = src_rs.column_idx(*condition.keys)\r\n tg_cols_idx = column_idx(*condition.values)\r\n else\r\n # delete line -> src_cols_idx = src_rs.column_idx(*src_rs.default_migration_condition)\r\n # suppose all primary key of target resultset can be found in src result, and in same column idx\r\n tg_cols_idx = column_idx(*default_migration_condition)\r\n src_cols_idx = tg_cols_idx\r\n end\r\n @rows = handle_rows(src_rs.rows, mode, src_cols_idx, tg_cols_idx)\r\n save!(condition)\r\n end",
"def update data, table, condition = {}\n sql = \"update #{table} set\"\n data.each do |k,v|\n v = v.to_json if v.is_a?(Hash)\n if !!v == v\n sql = \"#{sql} #{k}=#{v},\"\n else\n sql = \"#{sql} #{k}='#{v}',\"\n end\n end\n sql = sql[0..-2] + \" where\"\n condition.each do |k,v|\n sql = \"#{sql} #{k} = '#{v}' and\"\n end\n query(sql[0..-4])\n end",
"def join_query\n prefixes = Survey.all.map &:prefix\n query = [\"select * from export_journal_infos ji, \"] << prefixes.map {|prefix| \"export_variables_#{prefix}_answers\" }\n prefixes.each do |prefix|\n query << \"where export_variables_#{prefix}_answers. #{prefix} on #{prefix}.journal_id = ji.journal_id \"\n # query << \"inner join export_variables_#{prefix}_answers #{prefix} on #{prefix}.journal_id = ji.journal_id \"\n end\n query\n end",
"def update_sql(sql, name = nil)\n super\n end",
"def copy_table(old_table_name, new_table_name)\n transaction do\n execute \"CREATE TABLE #{new_table_name} LIKE #{old_table_name}\"\n execute \"INSERT INTO #{new_table_name} SELECT * FROM #{old_table_name}\"\n end\n end",
"def _join_table_dataset(opts)\n ds = model.db.from(opts.join_table_source)\n opts[:join_table_block] ? opts[:join_table_block].call(ds) : ds\n end",
"def update_from(other)\n @name = other.name\n @position = other.position\n @topic = other.topic\n @recipients = other.recipients\n @bitrate = other.bitrate\n @user_limit = other.user_limit\n @permission_overwrites = other.permission_overwrites\n @nsfw = other.nsfw\n @parent_id = other.parent_id\n @rate_limit_per_user = other.rate_limit_per_user\n end",
"def update_all(table, table_hash, values = {}, conditions = {})\n\t\t\t ctx = 0\n\t\t\t\tlen = table.length - 1\n\t\t\t\t\n\t\t\t\tfor i in 0..len\n\t\t\t\t\tupdate = true\n\t\t\t\t\t\n\t\t\t\t\tconditions.each do |key, val|\n\t\t\t\t\t\tif (table[i][table_hash[key]] != val)\n\t\t\t\t\t\t\tupdate = false\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\t\n\t\t\t\t\tif (update)\n\t\t\t\t\t\tvalues.each do |field, val|\n\t\t\t\t\t\t\ttable[i][table_hash[field]] = val\n\t\t\t\t\t\tend\n\t\t\t\t\t\tctx = ctx + 1\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\treturn ctx\n\t\t\tend",
"def copy_table(old_table_name, new_table_name)\n transaction do\n execute \"CREATE TABLE #{new_table_name} LIKE #{old_table_name}\"\n execute \"INSERT INTO #{new_table_name} SELECT * FROM #{old_table_name}\"\n end\n end",
"def update!(**args)\n @delete_all_data_from_table = args[:delete_all_data_from_table] if args.key?(:delete_all_data_from_table)\n @row_key_prefix = args[:row_key_prefix] if args.key?(:row_key_prefix)\n end",
"def copy_table_contents(from, to, columns, rename = {})\n column_mappings = Hash[ columns.map { |name| [name, name] } ]\n rename.each { |a| column_mappings[a.last] = a.first }\n from_columns = columns(from).collect {|col| col.name}\n columns = columns.find_all{ |col| from_columns.include?(column_mappings[col]) }\n quoted_columns = columns.map { |col| quote_column_name(col) } * ','\n\n quoted_to = quote_table_name(to)\n\n raw_column_mappings = Hash[ columns(from).map { |c| [c.name, c] } ]\n\n execute(\"SELECT * FROM #{quote_table_name(from)}\", 'Copy Table').each do |row|\n sql = \"INSERT INTO #{quoted_to} (#{quoted_columns}) VALUES (\"\n\n column_values = columns.map do |col|\n quote(row[column_mappings[col]], raw_column_mappings[col])\n end\n\n sql << column_values * ', '\n sql << ')'\n exec_insert sql, 'Copy Table', []\n end\n end",
"def copy_new_rows\n tables.each(&:copy_new_rows)\n end",
"def update(table, id, attributes) # abstract\n end",
"def update!(**args)\n @default_database = args[:default_database] if args.key?(:default_database)\n @schema_suffix = args[:schema_suffix] if args.key?(:schema_suffix)\n @table_prefix = args[:table_prefix] if args.key?(:table_prefix)\n end",
"def table_alias\r\n @table_alias || from_table_name\r\n end",
"def update_all(by: {}, with: {})\n raise NotImplementedError.new\n end",
"def copy_polymorphic_tables\n self.polymorphic_tables.each do |t|\n polymorphic_id_col, polymorphic_type_col = \"#{t.polymorphic_as}_id\", \"#{t.polymorphic_as}_type\"\n sql_connection.select_rows(t.sql_name) do |rows, page, total_pages|\n Mongify::Status.publish('copy_polymorphic', :size => rows.count, :name => \"Polymorphicizing #{t.name}\", :action => 'add')\n rows.each do |row|\n\n #If no data is in the column, skip importing\n if (row[polymorphic_type_col])\n table_name = row[polymorphic_type_col].tableize\n new_id = no_sql_connection.get_id_using_pre_mongified_id(table_name, get_type_casted_value(t, polymorphic_id_col, row))\n end\n\n row = t.translate(row)\n row[polymorphic_id_col] = new_id if new_id\n row.merge!(fetch_reference_ids(t, row))\n row.delete('pre_mongified_id')\n\n if t.embedded? && table_name\n row.delete(polymorphic_id_col)\n row.delete(polymorphic_type_col)\n save_function_call = t.embedded_as_object? ? '$set' : '$addToSet'\n no_sql_connection.update(table_name, new_id, {save_function_call => {t.name => row}})\n else\n no_sql_connection.insert_into(t.name, row)\n end\n\n Mongify::Status.publish('copy_polymorphic')\n end\n Mongify::Status.publish('copy_polymorphic', :action => 'finish')\n end\n end\n end",
"def setup_target_table\n self.target_table = source_table if target_table.blank?\n end",
"def update(table,cond,op)\n connection.update(path(table),cond,op)\n end",
"def sql opts = EMPTY_HASH\n opts = options.merge(opts)\n\n join_tables = BELONGS_TO.map{|x| x.to_s.pluralize} + [ ContentStatus.table_name ]\n join_tables.uniq!\n\n tables = join_tables.dup\n clauses = [ ]\n\n @model_class = Content::Version if self.latest || self.versions\n\n connection = model_class.connection\n\n t = :updater_users\n tables << \"#{User.table_name} AS #{t}\"\n join_tables << t\n\n t = :creator_users\n tables << \"#{User.table_name} AS #{t}\"\n join_tables << t\n\n unless (version_list_name = params[:version_list_name]).blank?\n @model_class = Content::Version\n tables << \n (t1 = VersionListName.table_name) << \n (t2 = VersionList.table_name) <<\n (t3 = VersionListContent.table_name)\n clauses << \n \"#{t1}.name = #{connection.quote(version_list_name)}\" <<\n \"#{t3}.version_list_id = #{t1}.version_list_id\"\n else\n version_list_name = nil\n end\n\n unless (version_list_id = params[:version_list_id]).blank?\n version_list_id = version_list_id.to_i\n @model_class = Content::Version\n tables <<\n (t3 = VersionListContent.table_name)\n clauses << \n \"#{t3}.version_list_id = #{connection.quote(version_list_id)}\"\n else\n version_list_id = nil\n end\n\n\n table_name = \n opts[:table_name] || \n model_class.table_name\n\n if version_list_name || version_list_id \n tables << \n (t3 = VersionListContent.table_name)\n clauses << \n \"#{t3}.content_version_id = contents.id\"\n end\n\n select_table = \"contents\"\n if self.latest || self.versions\n select_table = 'cv'\n end\n select_values = \"#{select_table}.*\"\n\n order_by = Content.order_by\n order_by = order_by.split('), (').join(\"),\\n (\") \n order_by.gsub!(/\\.id = /, \".id = #{select_table}.\")\n order_by << \",\\n version DESC\" if self.versions\n \n if opts[:count]\n select_values = \"COUNT(#{select_values})\"\n order_by = nil\n end\n\n\n ##################################################################\n # Generate SQL\n #\n\n sql = ''\n\n if self.latest || self.versions\n sql << \"SELECT #{select_values} FROM #{model_class.table_name} AS cv\"\n sql << \"\\nWHERE cv.id IN (\\n\"\n case \n when self.latest\n select_values = 'MAX(contents.id)'\n when self.versions\n select_values = 'contents.id'\n end\n end\n\n \n sql << <<\"END\"\nSELECT #{select_values}\nFROM #{table_name} AS contents, #{tables.uniq * ', '}, content_types\nWHERE\n #{join_tables.map{|x| \"(#{x}.id = contents.#{x.to_s.singularize}_id)\"} * \"\\nAND \"}\nAND (content_types.id = content_keys.content_type_id)\nEND\n\n # Join clauses:\n# pp opts\n clauses << opts[:conditions] unless opts[:conditions].blank?\n unless clauses.empty?\n sql << \"\\nAND \" << (clauses.map{| x | \"(#{x})\"} * \"\\nAND \")\n end\n\n # Search clauses:\n unless (where = sql_where_clauses(opts)).empty?\n sql << \"\\nAND \" << where\n end\n\n if self.latest || self.versions\n sql << \"\\nAND contents.content_id = cv.content_id\" \n sql << \"\\n)\"\n end\n\n # Ordering:\n if order_by\n sql << \"\\nORDER BY\\n \" << order_by\n end\n\n # Limit:\n if x = (opts[:limit])\n sql << \"\\nLIMIT #{x}\"\n end\n\n if opts[:dump_sql] # || true\n $stderr.puts \" params = #{params.inspect}\"\n $stderr.puts \" sql =\\n #{sql}\"\n # raise \"LKSDJFLKSJDF\"\n end\n\n sql\n end",
"def tables(query)\n SqlAssess::Parsers::Tables.new(query).tables.map do |table|\n if table.key?(:join_type)\n table[:table][:table].remove('`')\n else\n table[:table].remove('`')\n end\n end\n end",
"def joins(tables,options={})\n # now check for dot notiation\n dot_notation = tables.split \".\"\n parent_table = nil\n options = {}\n dot_notation.each do |j_table|\n options[:from] = parent_table unless parent_table.nil?\n options[:alias] = j_table unless parent_table.nil?\n join(j_table,options)\n parent_table = j_table\n end\n\n self\n end",
"def to_sql\n @join.association_join.gsub(/::ts_join_alias::/,\n \"#{@reflection.klass.connection.quote_table_name(@join.parent.aliased_table_name)}\"\n )\n end",
"def join(*args)\n\t\tif args.count > 1\n\t\t\tjoins = args.map { |arg| \"INNER JOIN #{arg} ON #{arg}.#{table}_id = #{table}.id\"}.join(\" \")\n\t\t\trows = connection.execute <<-SQL \n\t\t\t\tSELECT * FROM #{table} #{joins};\n\t\t\tSQL\n\t\telse\n\t\t\tcase args.first\n\t\t\twhen String\n\t\t\t\trows = connection.execute <<-SQL\n\t\t\t\t\tSELECT * FROM #{table} #{BlocRecord::Utility.sql_strings(args.first)};\n\t\t\t\tSQL\n\t\t\twhen Symbol\n\t\t\t\trows = connection.execute <<-SQL \n\t\t\t\t\tSELECT * FROM #{table}\n\t\t\t\t\tINNER JOIN #{args.first} ON #{arg.first}.#{table}_id = #{table}.id;\n\t\t\t\tSQL\n\t\t\twhen Hash \n\t\t\t\t#extract the options from the hash\n\t\t\t\tsecond_table = args[0].keys.first \n\t\t\t\tthird_table = args[0].keys.first\n\t\t\t\trows = connection.execute <<-SQL \n\t\t\t\t\tSELECT * FROM #{table}\n\t\t\t\t\tINNER JOIN #{second_table} ON #{second_table}.#{table}_id = #{table}.id\n\t\t\t\t\tINNER JOIN #{third_table} ON #{third_table}.#{second_table}_id = #{second_table}.id;\n\t\t\t\tSQL\n\n\t\t\tend \n\t\tend\n\t\trows_to_array(rows)\n\tend"
] |
[
"0.630731",
"0.6088034",
"0.5901598",
"0.5883422",
"0.5592899",
"0.55487484",
"0.5529329",
"0.54990935",
"0.5488189",
"0.5416212",
"0.5406827",
"0.5382143",
"0.5381436",
"0.53802943",
"0.5377935",
"0.5377765",
"0.53237104",
"0.5322136",
"0.5301938",
"0.5299105",
"0.5299105",
"0.52914435",
"0.5270175",
"0.52697444",
"0.5241133",
"0.52266055",
"0.5219186",
"0.5197197",
"0.5194822",
"0.5169074",
"0.5169074",
"0.51656103",
"0.5089485",
"0.5042396",
"0.5038147",
"0.5030198",
"0.5012581",
"0.49940193",
"0.49910778",
"0.49827996",
"0.4977309",
"0.49458638",
"0.49428356",
"0.49288416",
"0.4922784",
"0.4920355",
"0.49195138",
"0.49093533",
"0.49093533",
"0.49093533",
"0.49077255",
"0.49072236",
"0.49066785",
"0.48866996",
"0.4885186",
"0.48819748",
"0.4864937",
"0.48593947",
"0.4847824",
"0.48474386",
"0.4837491",
"0.48302233",
"0.48130926",
"0.48083034",
"0.47857305",
"0.47819805",
"0.47791058",
"0.4775521",
"0.47631875",
"0.47536278",
"0.47526562",
"0.47440436",
"0.47412542",
"0.47370085",
"0.4731488",
"0.47222456",
"0.4721638",
"0.47089854",
"0.4707995",
"0.470708",
"0.46903795",
"0.46875682",
"0.46839797",
"0.46810842",
"0.46810737",
"0.4677517",
"0.46742788",
"0.46724206",
"0.46704674",
"0.466858",
"0.46642697",
"0.4664051",
"0.46603686",
"0.46470106",
"0.46435115",
"0.46371278",
"0.46365717",
"0.46268407",
"0.4623099"
] |
0.7206524
|
1
|
Only include the primary table in the main update clause
|
def update_table_sql(sql)
sql << ' '
source_list_append(sql, @opts[:from][0..0])
end
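
To make the division of labor concrete, here is a minimal self-contained sketch (my own illustration, not Sequel's actual internals) of how slicing @opts[:from][0..0] keeps only the primary table in the UPDATE clause while a companion FROM handler picks up the rest:

# Hypothetical builder; method names mirror the dataset hooks above.
class UpdateBuilder
  def initialize(from)
    @opts = { from: from }
  end

  # Only the first source table is named after UPDATE.
  def update_table_sql(sql)
    sql << ' ' << @opts[:from][0..0].join(', ')
  end

  # The remaining source tables go into a FROM clause.
  def update_from_sql(sql)
    rest = @opts[:from][1..-1]
    sql << ' FROM ' << rest.join(', ') unless rest.empty?
  end

  def update_sql(set_clause)
    sql = +'UPDATE'
    update_table_sql(sql)
    sql << ' SET ' << set_clause
    update_from_sql(sql)
    sql
  end
end

puts UpdateBuilder.new(%i[items prices]).update_sql('price = prices.amount')
# UPDATE items SET price = prices.amount FROM prices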
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def update_without_callbacks\r\n where_clause_terms = [self.class.primary_key, quoted_id].transpose.map do |pair| \r\n \"(#{connection.quote_column_name(pair[0])} = #{pair[1]})\"\r\n end\r\n where_clause = where_clause_terms.join(\" AND \")\r\n connection.update(\r\n \"UPDATE #{self.class.quoted_table_name} \" +\r\n \"SET #{quoted_comma_pair_list(connection, attributes_with_quotes(false))} \" +\r\n \"WHERE #{where_clause}\",\r\n \"#{self.class.name} Update\"\r\n )\r\n return true\r\n end",
"def _update_without_checking(columns)\n if use_prepared_statements_for?(:update)\n _set_prepared_statement_server(model.send(:prepared_update, columns.keys)).call(Hash[columns].merge!(pk_hash))\n else\n super\n end\n end",
"def prepared_update(cols)\n cached_prepared_statement(:update, prepared_columns(cols)){prepare_statement(filter(prepared_statement_key_array(primary_key)), :update, prepared_statement_key_hash(cols))}\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( table_name, primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target or :constraint_name to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def update!(**args)\n @delete_all_data_from_table = args[:delete_all_data_from_table] if args.key?(:delete_all_data_from_table)\n @row_key_prefix = args[:row_key_prefix] if args.key?(:row_key_prefix)\n end",
"def sql_for_on_duplicate_key_update( table_name, *args ) # :nodoc:\n arg, model, primary_key, locking_column = args\n arg = { columns: arg } if arg.is_a?( Array ) || arg.is_a?( String )\n return unless arg.is_a?( Hash )\n\n sql = ' ON CONFLICT '.dup\n conflict_target = sql_for_conflict_target( arg )\n\n columns = arg.fetch( :columns, [] )\n condition = arg[:condition]\n if columns.respond_to?( :empty? ) && columns.empty?\n return sql << \"#{conflict_target}DO NOTHING\"\n end\n\n conflict_target ||= sql_for_default_conflict_target( primary_key )\n unless conflict_target\n raise ArgumentError, 'Expected :conflict_target to be specified'\n end\n\n sql << \"#{conflict_target}DO UPDATE SET \"\n case columns\n when Array\n sql << sql_for_on_duplicate_key_update_as_array( table_name, model, locking_column, columns )\n when Hash\n sql << sql_for_on_duplicate_key_update_as_hash( table_name, model, locking_column, columns )\n when String\n sql << columns\n else\n raise ArgumentError, 'Expected :columns to be an Array or Hash'\n end\n\n sql << \" WHERE #{condition}\" if condition.present?\n\n sql\n end",
"def update_by_primary_key(primary_key, data)\n query = \"UPDATE `#{@table_name}` SET \"+build_update(data)+\" WHERE \"+build_where({@primary_key => primary_key})\n\n begin\n queryresult = @mysql.query(query)\n rescue Exception => e\n @log.error(\"#{e}\")\n return false\n end\n\n expire_table_cache(get_all_related_tables)\n\n get_one({@primary_key => primary_key})\n end",
"def sneaky_update\n return true if changes.empty?\n\n pk = self.class.primary_key\n original_id = changed_attributes.key?(pk) ? changes[pk].first : send(pk)\n\n changed_attributes = sneaky_update_fields\n\n !self.class.where(pk => original_id).\n update_all(changed_attributes).zero?\n end",
"def prepare_update_statement(o)\n if o.offset || has_group_by_and_having?(o) ||\n has_join_sources?(o) && has_limit_or_offset_or_orders?(o)\n super\n else\n o\n end\n end",
"def update!(**args)\n @primary = args[:primary] if args.key?(:primary)\n end",
"def update(table, id, attributes) # abstract\n end",
"def update!(**args)\n @postgresql_columns = args[:postgresql_columns] if args.key?(:postgresql_columns)\n @table = args[:table] if args.key?(:table)\n end",
"def update_from_sql(sql)\n join_from_sql(:FROM, sql)\n end",
"def update_from_sql(sql)\n join_from_sql(:FROM, sql)\n end",
"def update!(**args)\n @oracle_columns = args[:oracle_columns] if args.key?(:oracle_columns)\n @table = args[:table] if args.key?(:table)\n end",
"def squash_pk_changes(record)\n pk_def = record.class.primary_key\n if pk_def.is_a?(Array) # composite\n # Since we didn't actually fetch the record from the DB, CPK \"mishandles\" our update;\n # it builds a 'where' clause with null values, so it fails to locate the persisted record.\n # Non-CPK records don't have this \"problem\".\n # Workaround: fool dirty-checking into thinking the PK cols haven't changed.\n\n col_val_map = Hash[ pk_def.zip(record.id) ]\n # Caution: the workaround is all or nothing. It could be dangerous to partially set the PK.\n if col_val_map.values.all?\n col_val_map.each do |col, val|\n dangerously_reset_attribute(record, col, val)\n end\n end\n end\n end",
"def update_where(table, what, where, *bindvars)\n sql = \"update #{table.name}\\nset #{what} where #{where}\"\n#$stderr.puts sql\n db.do(sql, *bindvars)\n end",
"def _update_record(values, id, id_was) # :nodoc:\n substitutes, binds = substitute_values values\n\n scope = @klass.unscoped\n\n if @klass.finder_needs_type_condition?\n scope.unscope!(where: @klass.inheritance_column)\n end\n\n # ****** BEGIN PARTITIONED PATCH ******\n if @klass.respond_to?(:dynamic_arel_table)\n using_arel_table = @klass.dynamic_arel_table(Hash[*values.inject([]) { |result, (k, v)| result += [k.name, v] }])\n relation = scope.where(using_arel_table[@klass.primary_key].eq(id_was || id))\n bvs = binds + relation.bind_values\n um = relation\n .arel\n .compile_update(substitutes, @klass.primary_key)\n\n # NOTE(hofer): The um variable got set up using\n # klass.arel_table as its arel value. So arel_table.name is\n # what gets used to construct the update statement. Here we\n # set it to the specific partition name for this record so\n # that the update gets run just on that partition, not on the\n # parent one (which can cause performance issues).\n begin\n @klass.arel_table.name = using_arel_table.name\n @klass.connection.update(\n um,\n 'SQL',\n bvs)\n ensure\n @klass.arel_table.name = @klass.table_name\n end\n else\n # Original lines:\n relation = scope.where(@klass.primary_key => (id_was || id))\n bvs = binds + relation.bind_values\n um = relation\n .arel\n .compile_update(substitutes, @klass.primary_key)\n\n @klass.connection.update(\n um,\n 'SQL',\n bvs)\n end\n # ****** END PARTITIONED PATCH ******\n end",
"def update(table,cond,op)\n connection.update(path(table),cond,op)\n end",
"def _save_update_all_columns_hash\n v = @values.dup\n Array(primary_key).each{|x| v.delete(x) unless changed_columns.include?(x)}\n v.delete(model.lock_column)\n v\n end",
"def update_sql\n update_manager = Arel::UpdateManager.new(ActiveRecord::Base)\n update_manager.table(arel_table)\n update_manager.where(arel_table[:id].eq(id))\n update_manager.set([[arel_table[:retoure_reason], retoure_reason]])\n update_manager.to_sql\n end",
"def _update_without_checking(columns)\n super(identifier_hash(columns))\n end",
"def _update_without_checking(columns)\n ds = _update_dataset\n lc = model.lock_column\n rows = ds.clone(ds.send(:default_server_opts, :sql=>ds.output(nil, [Sequel[:inserted][lc]]).update_sql(columns))).all\n values[lc] = rows.first[lc] unless rows.empty?\n rows.length\n end",
"def sql_update(sql)\n exec(sql)\n # return affected rows.\n end",
"def select!\n # Ensure we can never be destructive by nilifying :update.\n Mao.sql(with_options(:update => nil).sql) do |pg_result|\n if @options[:join]\n other = Mao.query(@options[:join][0])\n pg_result.map {|result|\n Mao.normalize_join_result(result, self, other)\n }\n else\n pg_result.map {|result| Mao.normalize_result(result, @col_types)}\n end\n end\n end",
"def update #:nodoc:\n connection.update(\n \"UPDATE #{self.class.table_name} \" +\n \"SET #{quoted_comma_pair_list(connection, special_attributes_with_quotes(false))} \" +\n \"WHERE #{self.class.primary_key} = #{quote_value(id)}\",\n \"#{self.class.name} Update\"\n )\n end",
"def update!(**args)\n @sql_expression = args[:sql_expression] if args.key?(:sql_expression)\n end",
"def update!(**args)\n @sql_expression = args[:sql_expression] if args.key?(:sql_expression)\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def apply_single\n validate_schema\n\n # Prepare some lists of columns.\n key_cols = @db1.primary_key(@table1)\n data_cols = @db1.except_primary_key(@table1)\n all_cols = @db1.column_names(@table1)\n\n # Let our public know we are beginning.\n @patch.begin_diff\n\n # Advertise column names.\n @rc_columns = DiffColumns.new\n @rc_columns.title_row = all_cols\n @rc_columns.update(0)\n cells = all_cols.map{|v| { :txt => v, :value => v, :cell_mode => \"\" }}\n rc = RowChange.new(\"@@\",cells)\n @patch.apply_row(rc)\n\n # If requested, we will be providing context rows around changed rows.\n # This is not a natural thing to do with SQL, so we do it only on request.\n # When requested, we need to buffer row changes.\n @pending_rcs = []\n\n # Prepare some useful SQL fragments to assemble later.\n sql_table1 = @db1.quote_table(@table1)\n sql_table2 = @db1.quote_table(@table2)\n sql_key_cols = key_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_all_cols = all_cols.map{|c| @db1.quote_column(c)}.join(\",\")\n sql_key_match = key_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS #{sql_table2}.#{c}\"}.join(\" AND \")\n sql_data_mismatch = data_cols.map{|c| @db1.quote_column(c)}.map{|c| \"#{sql_table1}.#{c} IS NOT #{sql_table2}.#{c}\"}.join(\" OR \")\n\n # For one query we will need to interleave columns from two tables. For\n # portability we need to give these columns distinct names.\n weave = all_cols.map{|c| [[sql_table1,@db1.quote_column(c)],\n [sql_table2,@db2.quote_column(c)]]}.flatten(1)\n dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]}\"}\n sql_dbl_cols = weave.map{|c| \"#{c[0]}.#{c[1]} AS #{c[0].gsub(/[^a-zA-Z0-9]/,'_')}_#{c[1].gsub(/[^a-zA-Z0-9]/,'_')}\"}.join(\",\")\n\n # Prepare a map of primary key offsets.\n keys_in_all_cols = key_cols.each.map{|c| all_cols.index(c)}\n keys_in_dbl_cols = keys_in_all_cols.map{|x| 2*x}\n\n # Find rows in table2 that are not in table1.\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table2} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table1} WHERE #{sql_key_match})\"\n apply_inserts(sql,all_cols,keys_in_all_cols)\n\n # Find rows in table1 and table2 that differ while having the same primary\n # key.\n sql = \"SELECT #{sql_dbl_cols} FROM #{sql_table1} INNER JOIN #{sql_table2} ON #{sql_key_match} WHERE #{sql_data_mismatch}\"\n apply_updates(sql,dbl_cols,keys_in_dbl_cols)\n\n # Find rows that are in table1 but not table2\n sql = \"SELECT #{sql_all_cols} FROM #{sql_table1} WHERE NOT EXISTS (SELECT 1 FROM #{sql_table2} WHERE #{sql_key_match})\"\n apply_deletes(sql,all_cols,keys_in_all_cols)\n\n # If we are supposed to provide context, we need to deal with row order.\n if @patch.want_context\n sql = \"SELECT #{sql_all_cols}, 0 AS __coopy_tag__ FROM #{sql_table1} UNION SELECT #{sql_all_cols}, 1 AS __coopy_tag__ FROM #{sql_table2} ORDER BY #{sql_key_cols}, __coopy_tag__\"\n apply_with_context(sql,all_cols,keys_in_all_cols)\n end\n\n # Done!\n @patch.end_diff\n end",
"def exec_update(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def table_update_query(table, values, org_key = nil)\n org_key ||= values\n query = \"update #{quote_table_name(table)} set \"\n query << values.map do |column_name, value|\n \"#{quote_column_name(column_name)} = #{quote_value(table, column_name, value)}\"\n end.join(', ')\n query << \" where (\" << quote_key_list(table) << \") = (\"\n query << primary_key_names(table).map do |key|\n quote_value(table, key, org_key[key])\n end.join(', ') << \")\"\n end",
"def select_lock_sql(sql)\n @opts[:lock] == :update ? sql : super\n end",
"def update_by_sql(target, set, options = nil)\n set = set.gsub(/@/, '')\n\n if target.is_a? Class\n sql = \"UPDATE #{target.table} SET #{set} \"\n sql << \" WHERE #{options[:condition]}\" if options and options[:condition]\n sql_update(sql)\n else\n sql = \"UPDATE #{target.class.table} SET #{set} WHERE #{pk_field target.class} = #{quote(target.pk)}\"\n sql << \" AND #{options[:condition]}\" if options and options[:condition]\n sql_update(sql)\n end\n end",
"def update!(**args)\n @incremental_post_operations = args[:incremental_post_operations] if args.key?(:incremental_post_operations)\n @incremental_pre_operations = args[:incremental_pre_operations] if args.key?(:incremental_pre_operations)\n @incremental_select_query = args[:incremental_select_query] if args.key?(:incremental_select_query)\n @refresh_disabled = args[:refresh_disabled] if args.key?(:refresh_disabled)\n @unique_key_parts = args[:unique_key_parts] if args.key?(:unique_key_parts)\n @update_partition_filter = args[:update_partition_filter] if args.key?(:update_partition_filter)\n end",
"def flush\n conn.transaction do\n buffer.flatten.each do |row|\n # check to see if this row's compound key constraint already exists\n # note that the compound key constraint may not utilize virtual fields\n next unless row_allowed?(row)\n\n # add any virtual fields\n add_virtuals!(row)\n \n key_names = []\n key_values = []\n @key_columns.each do |name|\n key_names << \"#{name}\"\n key_values << conn.quote(row[name]) # TODO: this is probably not database agnostic\n end\n\n names = []\n values = []\n (order - @key_columns).each do |name|\n names << \"#{name}\"\n values << conn.quote(row[name]) # TODO: this is probably not database agnostic\n end\n\n all_name_values = (key_names+names).zip(key_values+values)\n\n q = <<EOF\nMERGE INTO #{table_name} d \nUSING (SELECT #{all_name_values.collect {|c,v| \"#{v} #{c}\"}.join(',')} FROM DUAL) s\nON (#{map_src_to_dest(key_names,'s','d').join(' AND ')})\nWHEN MATCHED THEN \nUPDATE SET #{[map_src_to_dest(names,'s','d'), \"d.#{@update_ts_column}=CURRENT_TIMESTAMP\"].flatten.join(',')}\nWHEN NOT MATCHED THEN\nINSERT (#{all_name_values.collect {|c,v| 'd.'+c}.join(',')},d.#{@insert_ts_column})\nVALUES (#{all_name_values.collect {|c,v| 's.'+c}.join(',')},CURRENT_TIMESTAMP)\nEOF\n #q = \"INSERT INTO `#{table_name}` (#{names.join(',')}) VALUES (#{values.join(',')})\"\n ETL::Engine.logger.debug(\"Executing upsert: #{q}\")\n conn.insert(q, \"Upsert row #{current_row}\")\n @current_row += 1\n end\n buffer.clear\n end\n end",
"def update!(**args)\n @base_table = args[:base_table] if args.key?(:base_table)\n @sql_query = args[:sql_query] if args.key?(:sql_query)\n @view_type = args[:view_type] if args.key?(:view_type)\n end",
"def update\n column_sets = self.class.columns.map { |attr_name| \"#{attr_name} = ?\" }.join(\", \")\n DBConnection.execute(<<-SQL, *attribute_values, self.id)\n UPDATE\n #{self.class.table_name}\n SET\n #{column_sets}\n WHERE\n #{self.class.table_name}.id = ?\n SQL\n # ...\n end",
"def update(sql, name = nil) end",
"def exec_update(sql, name = nil, binds = []) #:nodoc:\r\n log(sql, \"update\", binds) { query(sql, binds) }\r\n end",
"def update!(**args)\n @columns = args[:columns] if args.key?(:columns)\n @primary_geometry = args[:primary_geometry] if args.key?(:primary_geometry)\n @primary_key = args[:primary_key] if args.key?(:primary_key)\n end",
"def exec_update(sql, name, binds)\n exec_query(sql, name, binds)\n end",
"def update\n eqs = self.class.columns.map { |column| \"#{column} = ?\"}.join(\", \")\n DBConnection.execute(<<-SQL, *attribute_values, id)\n UPDATE\n #{self.class.table_name}\n SET\n #{eqs}\n WHERE\n id = ?\n SQL\n end",
"def update\n set_line = self.class.columns.map { |attr| \"#{attr} = ?\" }.join(\", \")\n\n DBConnection.execute(<<-SQL, *attribute_values, self.id)\n UPDATE\n #{self.class.table_name} \n SET\n #{set_line}\n WHERE\n id = ?\n SQL\n end",
"def update_all(table, table_hash, values = {}, conditions = {})\n\t\t\t ctx = 0\n\t\t\t\tlen = table.length - 1\n\t\t\t\t\n\t\t\t\tfor i in 0..len\n\t\t\t\t\tupdate = true\n\t\t\t\t\t\n\t\t\t\t\tconditions.each do |key, val|\n\t\t\t\t\t\tif (table[i][table_hash[key]] != val)\n\t\t\t\t\t\t\tupdate = false\n\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\tend\n\t\t\t\t\tend\n\t\t\t\t\t\n\t\t\t\t\tif (update)\n\t\t\t\t\t\tvalues.each do |field, val|\n\t\t\t\t\t\t\ttable[i][table_hash[field]] = val\n\t\t\t\t\t\tend\n\t\t\t\t\t\tctx = ctx + 1\n\t\t\t\t\tend\n\t\t\t\tend\n\t\t\t\t\n\t\t\t\treturn ctx\n\t\t\tend",
"def update!(**args)\n @gx_ids = args[:gx_ids] if args.key?(:gx_ids)\n @primary_keys = args[:primary_keys] if args.key?(:primary_keys)\n end",
"def requires_table_name_for_update\n true\n end",
"def update!(**args)\n @force = args[:force] if args.key?(:force)\n @table_uri = args[:table_uri] if args.key?(:table_uri)\n end",
"def build_sql(field)\n statement_sql = ''\n keys = primary_keys(field)\n puts \" Altering #{keys.length} records for: #{field.name} => #{field.output_type}\".blue\n keys.each do |primary_key|\n record_sql = \"UPDATE #{field.table} \"\n record_sql += \"SET #{field.column} = '#{out_val(field)}' \"\n record_sql += \"#{where_and(record_sql)} #{field.primary_key_col} = #{primary_key};\\n\"\n statement_sql += record_sql\n end \n statement_sql\nend",
"def _save_update_all_columns_hash\n v = Hash[@values]\n Array(primary_key).each{|x| v.delete(x) unless changed_columns.include?(x)}\n v\n end",
"def _save_update_all_columns_hash\n v = Hash[@values]\n Array(primary_key).each{|x| v.delete(x) unless changed_columns.include?(x)}\n v\n end",
"def update!(**args)\n @primary = args[:primary] if args.key?(:primary)\n @secondary = args[:secondary] if args.key?(:secondary)\n end",
"def update_record(table, values, org_key = nil)\n update table_update_query(table, values, org_key)\n end",
"def update!(**args)\n @table_source_type = args[:table_source_type] if args.key?(:table_source_type)\n @table_spec = args[:table_spec] if args.key?(:table_spec)\n @view_spec = args[:view_spec] if args.key?(:view_spec)\n end",
"def update!(**args)\n @table_source_type = args[:table_source_type] if args.key?(:table_source_type)\n @table_spec = args[:table_spec] if args.key?(:table_spec)\n @view_spec = args[:view_spec] if args.key?(:view_spec)\n end",
"def update_record\n record = Query.new(table)\n .index(primary_index[:name])\n .index_segments(primary_index_segments)\n .eq\n\n record.with_write_lock! do\n @dirty_attributes.each do |key, _|\n record.set_field(key, @attributes[key])\n end\n record.write!\n end\n\n @new_record, @dirty_attributes = false, {}\n return true\n end",
"def update(object, table, where_field, ignore_fields = [])\n vals = object.to_hash\n vals.delete_if {|k, v| ignore_fields.include?(k) }\n set_fields = vals.collect {|k,v| \"#{k} = #{Formatter.format(v)}\"}.join(', ')\n where = {where_field => vals[where_field]}\n sql = \"update #{table} set #{set_fields} #{where_clause(where)};\"\n execute(sql)\n end",
"def update!(**args)\n @postgresql_tables = args[:postgresql_tables] if args.key?(:postgresql_tables)\n @schema = args[:schema] if args.key?(:schema)\n end",
"def execute_update!( activerecord_class )\n collector = @row_collectors[ activerecord_class.table_name ]\n process_text_log << \"Updating #{ activerecord_class.name }...\\r\\n\"\n sql_diff_text_log << \"\\r\\n-- Updates for #{ activerecord_class.name }:\\r\\n\"\n is_ok = true\n begin\n collector.non_duplicates_rows.each do |row|\n sql_attributes = {}\n if row.respond_to?(:swimmer_id) # Correct the swimmer_id link:\n row.swimmer_id = @master_swimmer.id\n sql_attributes['swimmer_id'] = @master_swimmer.id\n end\n # Correct the badge link, when included in the matrix:\n if row.respond_to?(:badge_id) && @dup_badge_matrix_ids.has_key?( row.badge_id )\n row.badge_id = @dup_badge_matrix_ids[ row.badge_id ]\n sql_attributes['badge_id'] = row.badge_id\n end\n\n if row.invalid? # Check validation:\n msg = \"\\r\\n\\r\\n-- *** Swimmer Merge: validation ERROR during #{ row.class.name } update!\\r\\n\" <<\n \"- row..............: #{ row.inspect }\\r\\n\" <<\n \"- sql_attributes...: #{ sql_attributes.inspect }\\r\\n\" <<\n \"\\r\\n- ERROR............: #{ ValidationErrorTools.recursive_error_for( row ) }\\r\\n\"\n# DEBUG\n puts msg\n process_text_log << msg\n end\n row.save!\n # Build-up SQL-diff:\n if sql_attributes.size > 0 # (false = no additional comment)\n sql_diff_text_log << to_sql_update( row, false, sql_attributes, \"\\r\\n\" )\n end\n end\n rescue\n process_text_log << \"\\r\\n\\r\\n*** Swimmer Merge: exception caught!\\r\\n\"\n process_text_log << \"*** Phase '#{ activerecord_class.name } UPDATE': #{ $!.to_s }\\r\\n\" if $!\n process_text_log << sql_diff_text_log\n is_ok = false\n end\n is_ok\n end",
"def update_sql(values = {}, opts = nil, &block)\n opts = opts ? @opts.merge(opts) : @opts\n\n if opts[:group]\n raise Error::InvalidOperation, \"A grouped dataset cannot be updated\"\n elsif (opts[:from].size > 1) or opts[:join]\n raise Error::InvalidOperation, \"A joined dataset cannot be updated\"\n end\n \n sql = \"UPDATE #{source_list(@opts[:from])} SET \"\n if block\n sql << block.to_sql(self, :comma_separated => true)\n else\n set = if values.is_a?(Hash)\n # get values from hash\n values = transform_save(values) if @transform\n values.map do |k, v|\n # convert string key into symbol\n k = k.to_sym if String === k\n \"#{literal(k)} = #{literal(v)}\"\n end.join(COMMA_SEPARATOR)\n else\n # copy values verbatim\n values\n end\n sql << set\n end\n if where = opts[:where]\n sql << \" WHERE #{literal(where)}\"\n end\n\n sql\n end",
"def sneaky_update\n\n # Handle no changes.\n return true if changes.empty?\n\n # Here we have changes --> save them.\n pk = self.class.primary_key\n original_id = changed_attributes.has_key?(pk) ? changes[pk].first : send(pk)\n !self.class.where(pk => original_id).update_all(attributes).zero?\n end",
"def requires_table_name_for_update\n false\n end",
"def update_sql(sql, name = nil) #:nodoc:\n super\n self.affected_rows\n end",
"def update!(**args)\n @oracle_tables = args[:oracle_tables] if args.key?(:oracle_tables)\n @schema = args[:schema] if args.key?(:schema)\n end",
"def update_from_parent(update)\n raise %{\n This method must be implemented in your data object\n class if you plan to pass updates from a\n parent object to the members of its\n collections.\n }\n end",
"def update!(**args)\n @additional_options = args[:additional_options] if args.key?(:additional_options)\n @cluster_expressions = args[:cluster_expressions] if args.key?(:cluster_expressions)\n @dependency_targets = args[:dependency_targets] if args.key?(:dependency_targets)\n @disabled = args[:disabled] if args.key?(:disabled)\n @incremental_table_config = args[:incremental_table_config] if args.key?(:incremental_table_config)\n @partition_expiration_days = args[:partition_expiration_days] if args.key?(:partition_expiration_days)\n @partition_expression = args[:partition_expression] if args.key?(:partition_expression)\n @post_operations = args[:post_operations] if args.key?(:post_operations)\n @pre_operations = args[:pre_operations] if args.key?(:pre_operations)\n @relation_descriptor = args[:relation_descriptor] if args.key?(:relation_descriptor)\n @relation_type = args[:relation_type] if args.key?(:relation_type)\n @require_partition_filter = args[:require_partition_filter] if args.key?(:require_partition_filter)\n @select_query = args[:select_query] if args.key?(:select_query)\n @tags = args[:tags] if args.key?(:tags)\n end",
"def update!(**args)\n @mysql_excluded_objects = args[:mysql_excluded_objects] if args.key?(:mysql_excluded_objects)\n @oracle_excluded_objects = args[:oracle_excluded_objects] if args.key?(:oracle_excluded_objects)\n @postgresql_excluded_objects = args[:postgresql_excluded_objects] if args.key?(:postgresql_excluded_objects)\n end",
"def update!(**args)\n @initial_splits = args[:initial_splits] if args.key?(:initial_splits)\n @table = args[:table] if args.key?(:table)\n @table_id = args[:table_id] if args.key?(:table_id)\n end",
"def build_partial_update_query(all_attribute_keys, hashes)\n # Cache the connection for the batch\n connection = get_connection\n\n all_attribute_keys = (all_attribute_keys + unique_index_columns).uniq\n\n update_query = update_query_beginning(all_attribute_keys)\n update_query += update_query_from_values(hashes, all_attribute_keys, connection, unique_index_columns)\n update_query\n end",
"def update!(**args)\n @conjunctions = args[:conjunctions] if args.key?(:conjunctions)\n @disjunctions = args[:disjunctions] if args.key?(:disjunctions)\n @subs_key = args[:subs_key] if args.key?(:subs_key)\n end",
"def update!(**args)\n @existing_schema = args[:existing_schema] if args.key?(:existing_schema)\n @new_schema = args[:new_schema] if args.key?(:new_schema)\n @sampled_data_locations = args[:sampled_data_locations] if args.key?(:sampled_data_locations)\n @schema_change = args[:schema_change] if args.key?(:schema_change)\n @table = args[:table] if args.key?(:table)\n end",
"def update(values={})\n key_vals = self.class.primary_key.inject({}) { |h, k| h[k] = @cql_properties[k]; h }\n CQLModel::Query::UpdateStatement.new(self.class).update(values.merge(key_vals))\n end",
"def update!(**args)\n @results_table = args[:results_table] if args.key?(:results_table)\n end",
"def update!(**args)\n @results_table = args[:results_table] if args.key?(:results_table)\n end",
"def update!(**args)\n @schema = args[:schema] if args.key?(:schema)\n @table = args[:table] if args.key?(:table)\n end",
"def update!(**args)\n @schema = args[:schema] if args.key?(:schema)\n @table = args[:table] if args.key?(:table)\n end",
"def _update_record(attribute_names = self.attribute_names)\n # ****** BEGIN PARTITIONED PATCH ******\n # NOTE(hofer): This patch ensures the columns the table is\n # partitioned on are passed along to the update code so that the\n # update statement runs against a child partition, not the\n # parent table, to help with performance.\n if self.class.respond_to?(:partition_keys)\n attribute_names.concat self.class.partition_keys.map(&:to_s)\n attribute_names.uniq!\n end\n # ****** END PARTITIONED PATCH ******\n attributes_values = arel_attributes_with_values_for_update(attribute_names)\n if attributes_values.empty?\n 0\n else\n self.class.unscoped._update_record attributes_values, id, id_was\n end\n end",
"def update_all(updates, conditions = nil)\n # TODO\n raise \"not implemented\"\n# sql = \"UPDATE #{table_name} SET #{sanitize_sql(updates)} \"\n# add_conditions!(sql, conditions, scope(:find))\n# connection.update(sql, \"#{name} Update\")\n end",
"def updating_all_timestamp_sql\n update_col = timestamp_attributes_for_update_in_model.first\n\n \", #{update_col} = ?\" if update_col\n end",
"def update_sql(sql, name = nil)\n super\n end",
"def update!(**args)\n @direct_write_record_ids = args[:direct_write_record_ids] if args.key?(:direct_write_record_ids)\n @lg_internal_writer_id = args[:lg_internal_writer_id] if args.key?(:lg_internal_writer_id)\n @provenance_only_addition = args[:provenance_only_addition] if args.key?(:provenance_only_addition)\n @triangulation_key = args[:triangulation_key] if args.key?(:triangulation_key)\n @weak_data = args[:weak_data] if args.key?(:weak_data)\n end",
"def _update_without_checking(columns)\n _update_dataset.update(columns)\n end",
"def _update_without_checking(columns)\n _update_dataset.update(columns)\n end",
"def update!(**args)\n @dataset = args[:dataset] if args.key?(:dataset)\n @force = args[:force] if args.key?(:force)\n @partition_spec = args[:partition_spec] if args.key?(:partition_spec)\n @separate_tables_per_asset_type = args[:separate_tables_per_asset_type] if args.key?(:separate_tables_per_asset_type)\n @table = args[:table] if args.key?(:table)\n end",
"def update\n\n DBConnection.execute2(<<-SQL, attribute_values)\n UPDATE\n #{class_obj.table_name}\n SET\n #{sql_update_set}\n WHERE\n id = #{self.id}\n SQL\n\n end",
"def update!(**args)\n @include_conditions = args[:include_conditions] if args.key?(:include_conditions)\n end",
"def merge_or_replace(mode:)\n target = replication.target_table\n stage = temporary_table_name\n sql = send(\"#{mode}_statement\", target, stage)\n ::RailsRedshiftReplicator.connection.exec sql\n end",
"def update_or_delete_all(removed)\n if _association.destructive?\n removed.delete_all\n else\n removed.update_all(foreign_key => nil)\n end\n end",
"def update!(**args)\n @data_set = args[:data_set] if args.key?(:data_set)\n @dirty = args[:dirty] if args.key?(:dirty)\n @source_id_exist = args[:source_id_exist] if args.key?(:source_id_exist)\n @sync_info = args[:sync_info] if args.key?(:sync_info)\n end",
"def update!(**args)\n @condition = args[:condition] if args.key?(:condition)\n @pin_unexpanded_results = args[:pin_unexpanded_results] if args.key?(:pin_unexpanded_results)\n end",
"def prepared_refresh\n # SEQUEL5: Remove\n cached_prepared_statement(:fixed, :refresh){prepare_explicit_statement(naked.clone(:server=>dataset.opts.fetch(:server, :default)).where(prepared_statement_key_array(primary_key)), :first)}\n end",
"def update!(**args)\n @columns = args[:columns] if args.key?(:columns)\n end",
"def update!(**args)\n @columns = args[:columns] if args.key?(:columns)\n end",
"def update!(**args)\n @ancestor_field = args[:ancestor_field] if args.key?(:ancestor_field)\n @indexes = args[:indexes] if args.key?(:indexes)\n @reverting = args[:reverting] if args.key?(:reverting)\n @uses_ancestor_config = args[:uses_ancestor_config] if args.key?(:uses_ancestor_config)\n end",
"def update_all_by(key, values={})\n CQLModel::Query::UpdateStatement.new(self.class).update(values.merge({ key => @cql_properties[key.to_s] }))\n end",
"def update!(**args)\n @ending_record_id = args[:ending_record_id] if args.key?(:ending_record_id)\n @starting_record_id = args[:starting_record_id] if args.key?(:starting_record_id)\n @write_timestamp_us = args[:write_timestamp_us] if args.key?(:write_timestamp_us)\n end",
"def update where: , set: {}, **arg\n\t\t# In OrientDB V.3 the database only returns the affected rid's \n\t\t# We have to update the contents manually, this is done in the execute-block\n\t\tquery( kind: :update, set: set.merge(arg), where: where).execute{|y| y[:$current].reload!}\n\tend",
"def update!(changes)\n Mao.sql(with_options(:update => changes).sql) do |pg_result|\n pg_result.cmd_tuples\n end\n end",
"def update_all(updates)\n sum do |relation|\n relation.update_all(updates)\n end\n end"
] |
[
"0.6562034",
"0.641759",
"0.62743306",
"0.6030083",
"0.60216254",
"0.60174334",
"0.59798825",
"0.5968073",
"0.5957783",
"0.593133",
"0.5909523",
"0.58660275",
"0.58623",
"0.58623",
"0.5852555",
"0.5752741",
"0.5744804",
"0.57255346",
"0.5718237",
"0.56968045",
"0.56924266",
"0.5677933",
"0.5662626",
"0.56492436",
"0.56318283",
"0.5629759",
"0.5600831",
"0.5600831",
"0.5596881",
"0.5596881",
"0.5596881",
"0.5588612",
"0.5580852",
"0.55711645",
"0.5562239",
"0.55610836",
"0.55521727",
"0.555211",
"0.5539363",
"0.55232644",
"0.5505242",
"0.54939073",
"0.54698694",
"0.5459682",
"0.5445604",
"0.542235",
"0.541137",
"0.5408906",
"0.54001206",
"0.5371091",
"0.5370171",
"0.5370171",
"0.5362542",
"0.5358134",
"0.53558606",
"0.5355462",
"0.53549826",
"0.5354177",
"0.53435254",
"0.53389454",
"0.53381395",
"0.5316344",
"0.53119344",
"0.528068",
"0.5270063",
"0.52656287",
"0.5257037",
"0.5255739",
"0.52499986",
"0.5249217",
"0.5235592",
"0.52332073",
"0.5232775",
"0.5217887",
"0.5217887",
"0.5217523",
"0.5217523",
"0.521617",
"0.5204261",
"0.51892805",
"0.51868033",
"0.5186545",
"0.517817",
"0.517817",
"0.5170143",
"0.51685125",
"0.5151664",
"0.5149982",
"0.51419115",
"0.51362205",
"0.5129434",
"0.5125084",
"0.5108694",
"0.5108694",
"0.5101275",
"0.51004237",
"0.50888103",
"0.5086989",
"0.5082721",
"0.5070126"
] |
0.56624854
|
23
|
added to test that whitespace before/after code system names and codes is stripped
|
def test_is_in_code_set_strip_whitespace
entry = Entry.new
entry.add_code(" 854935 ", " RxNorm ")
entry.add_code(" 44556699 ", "RxNorm ")
entry.add_code(" 1245 ", " Junk ")
assert entry.is_in_code_set?([{'set' => 'RxNorm', 'values' => ['854935', '5440']},
{'set' => 'SNOMED-CT', 'values' => ['24601']}])
assert !entry.is_in_code_set?([{'set' => ' RxNorm ', 'values' => ['854935', '5440']},
{'set' => 'SNOMED-CT', 'values' => ['24601']}])
assert !entry.is_in_code_set?([{'set' => 'RxNorm', 'values' => [' 854935 ', '5440']},
{'set' => 'SNOMED-CT', 'values' => ['24601']}])
end
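A minimal sketch of the Entry behavior exercised above — the internal Hash storage and initialize are assumptions; only the names Entry, add_code, and is_in_code_set? come from the test. Whitespace is stripped once at add time, while set names and values are matched verbatim, which is why the last two assertions expect false.
class Entry
  def initialize
    # assumed storage: code-system name => array of codes
    @codes = Hash.new { |h, k| h[k] = [] }
  end

  # strip surrounding whitespace from both the code and the code-system name
  def add_code(code, code_system)
    @codes[code_system.to_s.strip] << code.to_s.strip
  end

  # true if any stored code appears verbatim in one of the given value sets
  def is_in_code_set?(code_sets)
    code_sets.any? { |set| (@codes[set['set']] & set['values']).any? }
  end
end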
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def trimming_for_diff_text(code)\n # gsub(/^\\s*$/, '') means remove empty lines\n code.strip.gsub(/^\\s*$/, '')\n end",
"def strip_listing(code)\n code = code.dup\n code.gsub!(/\\t/, \" \")\n lines = code.split(\"\\n\")\n first_code_line = lines.index { |l| l =~ /\\S/ }\n last_code_line = lines.rindex { |l| l =~ /\\S/ }\n code_lines = lines[first_code_line..last_code_line]\n line_indents = code_lines.map { |l| l.index(/\\S/) || 0 }\n min_indent = line_indents.min\n unindented_code = code_lines.map { |l| l[min_indent..-1] }.join(\"\\n\")\n unindented_code.strip\n end",
"def strip_blanks\n self.title = self.title.strip\n self.code = self.code.strip\n end",
"def validateCode(code)\n \n code = code.split(\"\\n\")\n code.delete_if {|w| w.include? \"puts\"}\n code.delete_if {|w| w.include? \"print\" }\n code.join(\"\\n\")\n end",
"def lstrip() end",
"def remove_whitespace(dirty_name)\n \n return dirty_name.split(' ').join(\" \") \n \n end",
"def whitespace_fixup(text)\n text.andand.gsub(/\\r/, \"\")\n end",
"def clean(code)\n code = code.slice(1) if code[0] == BOM\n code.gsub!(/\\r/, '')\n code.gsub!(TRAILING_SPACES, '')\n if code =~ WHITESPACE\n code = \"\\n#{code}\"\n @chunk_line -= 1\n end\n code\n end",
"def preprocess_code(code_string)\n code_string.lines.collect { |line| l = line.strip; l unless l.empty?}.compact\n end",
"def whitespace_fixup(text)\n text.andand.gsub(/\\r/, \"\")\n end",
"def code_splitter(code)\n student_code = []\n code = code.split('startTraceNow();')\n new_code = code[1].split('endTraceNow();')\n executed_code = new_code[0]\n executed_code_list = executed_code.split('\\\\n')\n flag = false\n counter = 0\n until flag\n if executed_code_list[counter] == '' || executed_code_list[counter] == ' '\n flag = false\n counter += 1\n elsif executed_code_list[counter] != ''\n flag = true\n end\n end\n x = counter\n while x < executed_code_list.length\n temp = executed_code_list[x]\n temp = temp.strip\n student_code << executed_code_list[x] unless temp.empty?\n\n x += 1\n end\n student_code\nend",
"def strip_whitespace\n code.gsub!(WHITESPACE_REGEX, ' ')\n\n code\n end",
"def normalize_coding_system(code)\n lookup = {\n \"lnc\" => \"LOINC\",\n \"loinc\" => \"LOINC\",\n \"cpt\" => \"CPT\",\n \"cpt-4\" => \"CPT\",\n \"snomedct\" => \"SNOMEDCT\",\n \"snomed-ct\" => \"SNOMEDCT\",\n \"rxnorm\" => \"Rxnorm\",\n \"icd9-cm\" => \"ICD9\",\n \"icd9\" => \"ICD9\"\n }\n codingsystem = lookup[code.xpath('./CodingSystem')[0].content.downcase]\n if(codingsystem)\n code.xpath('./CodingSystem')[0].content = codingsystem\n end\n end",
"def lstrip!() end",
"def strip_trailing_whitespace(code)\n code.gsub(/[ \\t]+$/, '')\n end",
"def normalize_coding_system(code)\n lookup = {\n \"lnc\" => \"LOINC\",\n \"loinc\" => \"LOINC\",\n \"cpt\" => \"CPT\",\n \"cpt-4\" => \"CPT\",\n \"snomedct\" => \"SNOMED-CT\",\n \"snomed-ct\" => \"SNOMED-CT\",\n \"rxnorm\" => \"RxNorm\",\n \"icd9-cm\" => \"ICD-9-CM\",\n \"icd9\" => \"ICD-9-CM\",\n \"icd10-cm\" => \"ICD-9-CM\",\n \"icd10\" => \"ICD-9-CM\",\n \"cvx\" => \"CVX\",\n \"hcpcs\" => \"HCPCS\"\n\n }\n codingsystem = lookup[code.xpath('./ccr:CodingSystem')[0].content.downcase]\n if(codingsystem)\n code.xpath('./ccr:CodingSystem')[0].content = codingsystem\n end\n end",
"def pre_proccess(text)\n text.to_s.strip.gsub(/[[:space:]]+/, ' ').gsub(/\\s{2,}/, ' ')\n end",
"def scrub\n self.gsub(/[^a-zA-Z\\s0-9\\.]/, '').gsub(/\\t/, ' ').gsub(/\\r/, ' ').gsub(/\\s\\s/, '').lstrip.rstrip\n end",
"def normalize_coding_system(code)\n lookup = {\n \"lnc\" => \"LOINC\",\n \"loinc\" => \"LOINC\",\n \"cpt\" => \"CPT\",\n \"cpt-4\" => \"CPT\",\n \"snomedct\" => \"SNOMEDCT\",\n \"snomed-ct\" => \"SNOMEDCT\",\n \"rxnorm\" => \"RxNorm\",\n \"icd9-cm\" => \"ICD-9-CM\",\n \"icd9\" => \"ICD-9-CM\"\n }\n codingsystem = lookup[code.at_xpath('./ccr:CodingSystem').content.downcase]\n if(codingsystem)\n code.at_xpath('./ccr:CodingSystem').content = codingsystem\n end\n end",
"def test_remove_spaces\r\n\t\tisbn_number = '123 4X67 890'\r\n\t\tassert_equal('1234X67890', remove_spaces(isbn_number))\r\n\tend",
"def strip() end",
"def isWhiteSpace(code)\n return true if (code >= 0x2000 && code <= 0x200A)\n case code\n when 0x09, # \\t\n 0x0A, # \\n\n 0x0B, # \\v\n 0x0C, # \\f\n 0x0D, # \\r\n 0x20,\n 0xA0,\n 0x1680,\n 0x202F,\n 0x205F,\n 0x3000\n return true\n end\n return false\n end",
"def has_spaces?(two_letter_code)\n !LANGUAGES_WITHOUT_SPACES.include?(two_letter_code.to_sym)\n end",
"def code_for_solr\n return FinderHelper.strip(manifestation_code)\n end",
"def normalize(code); end",
"def trim_whitespace; end",
"def sanitize_barcodes(barcodes)\n barcodes.split(/\\s+/).compact_blank.compact.uniq\n end",
"def fix_mac!\n if self =~ /\\bMac[A-Za-z]{2,}[^acizj]\\b/ || self =~ /\\bMc/\n gsub!(/\\b(Ma?c)([A-Za-z]+)/) { |_| Regexp.last_match[1] + Regexp.last_match[2].capitalize }\n\n # Fix Mac exceptions\n %w[\n MacEdo MacEvicius MacHado MacHar MacHin MacHlin MacIas MacIulis MacKie\n MacKle MacKlin MacKmin MacKmurdo MacQuarie MacLise MacKenzie\n ].each { |mac_name| substitute!(/\\b#{mac_name}/, mac_name.capitalize) }\n end\n\n self # Allows chaining\n end",
"def clean_up_movie_name(value)\n value[0] = '' if value[0] == '*'\n value[-1] = '' if value[-1] == '#'\n value.strip\nend",
"def strong_strip\n reverse.gsub(/^\\p{Zs}+|^\\p{Cf}+/, '').reverse.gsub(/^\\p{Zs}+|^\\p{Cf}+/, '')\n end",
"def strip_space!\n replace self.gsub(/:\\s*/, \":\").gsub(/\\n/, \"\").gsub(/\\s+/, \" \").gsub(/(\\/\\*).*?(\\*\\/)/, \"\")\n end",
"def normalize_whitespace(input); end",
"def sanitize_name(name)\n name.gsub(/\\//,\"\").gsub(/\\s/, \"-\").downcase\nend",
"def remove_excess_whitespace_from_name\n self.name = name&.split&.join(' ')\n end",
"def remove_company_type(name)\n return name.gsub(/(^|\\s)(gmbh|ag)($|\\s)/i, ' ').strip\n end",
"def get_incguard_from_incfname incfname\n incfname.gsub(/[^\\w]+/, \"_\").upcase\nend",
"def remove_system_internals(text_to_process)\n code_location = File.dirname(self.code.path).gsub(\"\\\\\", '/') + '/'\n text_to_process.gsub!(\"\\\\\", '/')\n text_to_process.gsub(code_location, '')\n end",
"def test_removes_spaces\n\t\tassert_equal(true,check_isbn_length(\"123 123 123 4\"))\n\tend",
"def strip_blanks\n self.title = self.title.strip\n self.code = self.code.strip\n self.complement_title = self.complement_title.strip\n end",
"def clean(line)\n ### FILL IN YOUR CODE HERE\n n = line.downcase\n c = n.gsub(/[^a-zA-z]/,\" \")\n end",
"def evaporate\n self.gsub(/\\s/, '')\n end",
"def strip_naked\n return self if blank?\n self.downcase.strip.gsub(/([\\s]{2,})/, ' ')\n end",
"def test_remove_dashes_spaces\r\n\t\tisbn_number = '123-4X67 890'\r\n\t\tassert_equal('1234X67890', remove_dashes_spaces(isbn_number))\r\n\tend",
"def clean_name(name)\n return remove_company_type(remove_postcode(name))\n end",
"def initialize_name_and_code\n self.code ||= self.name.gsub(\" \", \"_\").downcase\n self.name ||= self.code.gsub(\"_\", \" \").capitalize\n end",
"def normalize\n self.clean_extra_spaces.split.map{|w| w.size>2 ? w.capitalize : w.downcase}.join(' ')\n end",
"def clean_name\n clean_name = name.strip\n clean_name.gsub!(/\\s+/,'_')\n clean_name.gsub!(/[^0-9A-Za-z_-]/, '_')\n clean_name\n end",
"def format_input(text)\n\t\ttext.downcase.gsub(' ', '').gsub('\\\\', '') \n\tend",
"def normalize_other_name(other_name)\n other_name.to_s.unicode_normalize(:nfc).normalize_whitespace.squish.tr(\" \", \"_\")\n end",
"def extra_clean_str(str)\n str = str.downcase.gsub @extra_ua_filter, ''\n str = str.gsub(/[^\\x20-\\x7F]/, '')\n str.strip\n end",
"def strip_and_downcase_name\n if name.present?\n name.strip!\n name.downcase!\n end\n end",
"def safe_name\n name.to_s.gsub(/[^a-zA-Z0-9 _\\-:\\.]/, '').gsub(/:/, ' - ').gsub(/ +/, ' ')\n end",
"def code_with_trailing_space\n code.chomp+\" \"\n end",
"def lstrip\n `return self.replace(/^\\s*/, '');`\n end",
"def lstrip\n `return self.replace(/^\\s*/, '');`\n end",
"def processed_content\n # Ignores anything that is not a letter or number\n to_s.scan(/[a-z\\d]/i).join.downcase\n end",
"def clean_for_comp( address = '' )\n address.upcase.gsub /[^0-9A-Z]/, ''\nend",
"def cleanup_name(name)\n return name if name.nil?\n\n name = name.to_s.strip\n return \"\" if name == \"0.0\"\n return \"\" if name == \"0\"\n return \"\" if name == \".\"\n return \"\" if name.include?(\"N/A\")\n\n name = name.tr(\";\", \"'\")\n name.gsub(%r{ */ *}, \"/\")\n end",
"def space_before()\n #This is a stub, used for indexing\n end",
"def _prepareName ( name )\n name.to_s.gsub /[\\W]/, ''\n end",
"def repo_name_without_whitespace(repo_name)\n repo_name.strip\n end",
"def clean_content(content)\n content.gsub(/[^a-zA-Z ]/, '').squeeze(' ').downcase.strip\n end",
"def remove_initial_and_format_change(info) # removes middle initial for space_names and pipe_names AND puts comma_names into nested arrays\n if !info[0].include?(\"|\") && !info[0].include?(\",\")\n info.map! {|element| element.split(\" \")}\n info.each {|element| element.slice!(-4)} \n info\n elsif info[0].include?(\"|\")\n info.map! {|element| element.split(\" | \")}\n info.each {|element| element.slice!(-4)} \n info\n else\n info.map! {|element| element.split(\", \" )} \n end\nend",
"def sanitize_name(name)\n name.gsub!(' ', '_')\n name.downcase!\n name.gsub!('-', '_')\n name.gsub!('\\'', '_')\n name\n end",
"def clean_input(input)\n input.strip\n end",
"def special_action_code\n \"\".ljust_trim 2\n end",
"def trimmed_target_market_code(target_market_code)\n target_market_code[0,2]\n end",
"def is_white(c)\n # c =~ /\\s/\n [\"\\t\", \" \"].include?(c)\nend",
"def cleanse_data!\n self.tag = tag.to_s&.strip&.upcase\n self.shl_case_number = shl_case_number.to_s&.strip&.upcase\n end",
"def remove_trailing_spaces(source)\n for_outstrings_of(source) do |str|\n str.gsub! /\\s+/im, ' '\n str.gsub! /\\s*(=|\\+|\\-|<|>|\\?|\\|\\||&&|\\!|\\{|\\}|,|\\)|\\(|;|\\]|\\[|:|\\*|\\/)\\s*/im, '\\1'\n str.gsub! /;(\\]|\\)|\\}|\\.|\\?|:)/, '\\1' # removing wrong added semicolons\n str.gsub /([^\\d\\w_\\$]typeof)\\s+([\\w\\d\\$_]+)/, '\\1(\\2)'\n end\n end",
"def rstrip() end",
"def strip_strings\n self.name = name.strip\n end",
"def strip_strings\n self.name = name.strip\n end",
"def cleanup_surname(name)\n if name.length > 4\n name.gsub!(/^Mc(\\w+)/) { |s| \"Mc#{$1.capitalize}\" }\n name.gsub!(/^Mac(\\w+)/) { |s| \"Mac#{$1.capitalize}\" }\n name.gsub!(/^Mac(\\w+)/) { |s| \"Mac#{$1.capitalize}\" }\n name.gsub!(/^Osh(\\w+)/) { |s| \"O'sh#{$1}\" }\n name.gsub!(/^Van(\\w+)/) { |s| \"Van#{$1.capitalize}\" }\n name.gsub!(/^Von(\\w+)/) { |s| \"Von#{$1.capitalize}\" } \n# name.gsub!(/^Dev(\\w+)/) { |s| \"DeV#{$1}\" } \n end\n name\n end",
"def test_base_whitespace\n assert_equal 'white space.jpg',\n tested('\"white space.jpg\"').source_names.first\n end",
"def post_code_for(val, optional: false)\n return nil if val.nil? && optional\n match = val.match(/\\A\\s*(\\S+)\\s*(\\d\\w\\w)\\s*\\z/)\n return val.slice(0,7) unless match\n spaces = 4 - match[1].length\n val = \"#{match[1]}#{' ' * spaces}#{match[2]}\"\n val.slice(0,7)\n end",
"def trim_name_whitespace!\r\n self.name.strip!\r\n end",
"def format_code strip = true\n format_ruby self.code, strip\n end",
"def strip_color_codes(text); end",
"def createValidName(inname)\r\n outname = inname.gsub(/[\\s\\/\\\\?*#+]/,'') # Remove illegal chars (replace with underscore).\r\n outname.gsub!(/_+/,\"_\") # Replace consecutive uscores with single uscore.\r\n outname.gsub!(/\\./,\"-\") # Replace period with dash.\r\n\r\n outname\r\n end",
"def sanitize_filename(filename)\n perform_ipad_renaming(filename\n .gsub('Grey', 'Gray') # some Apple devices have \"Grey\" instead of \"Gray\" color -> unify\n .gsub(' - Portrait', '') # iPads Pro include Portrait and Landscape - we just need Portrait; Landscape filtered via DEVICES_TO_SKIP\n .gsub(' - ', ' ') # Google Pixel device names are separated from their colors by a dash -> remove\n .gsub('Note10', 'Note 10') # Samsung Galaxy Note 10 is missing a space in \"Note10\"\n .gsub('Mi Mix Alpha Front', 'Mi Mix Alpha')) # Xiaomi Mi Mix Alpha contains the words \"Front\", \"Back\" and \"Side\" => back and side are filtered via DEVICES_TO_SKIP, \"Front\" removed from the name here\nend",
"def cleanup_distro_name(word)\n # manjaro,arch\n word.gsub!(/[\"']/,'')\n word.gsub!(/,.*+/, '')\n # ubuntu debian\n word.gsub(/ [a-zA-Z]+/, '')\n end",
"def clean_text\n text.tr(\"'@_\", '').gsub(/\\W/, ' ').gsub(/[0-9]/, '').downcase\n end",
"def _strip_order_value(order_value='')\n ((order_value =~ %r{\\A[\\+\\-]} ? order_value.slice(1..-1) : order_value) || '').downcase.strip\n end",
"def sanitized_value(value)\n value.to_s.gsub(/\\s/, \"_\").gsub(/[^-[[:word:]]]/, \"\").mb_chars.downcase.to_s\n end",
"def remove_whitespace\n self.name = self.name.strip\n self.phone = self.phone.strip\n end",
"def strip_prefix(name)\n name.sub /^[xX][rR](_?)/, \"\"\nend",
"def sanitize_name\n if ['Gene List', 'Cluster'].include?(self.file_type)\n self.name.strip!\n end\n end",
"def skip_space; end",
"def create_run_code\n @index = self.short_description.index(\"ip\")\n @end_index = self.short_description.index(\"and bin_id\")\n @result = self.short_description[@index + 1, @end_index - @index]\n @string = @result.gsub(/[ipa:,]/,' ')\n @array = @string.split(/\\s*/)\n @prod_run_code =\"\"\n @array.each do |char|\n if(char==\":\")\n @prod_run_code.concat(\"\")\n elsif(char==\"\")\n @prod_run_code.concat(\"\")\n elsif(char==\",\")\n @prod_run_code.concat(\"\")\n else\n @prod_run_code.concat(char)\n end\n end\n self.run_code = @prod_run_code.strip()\nend",
"def processed_content\n letters_and_digits.downcase\n end",
"def clean_zipcodes(original)\n zipcode = \"#{INVALID_ZIPCODE}#{original}\"\n zipcode = zipcode[-5..-1]\n return zipcode\n end",
"def normalize_name\n @normalize_name ||= begin\n return '' if name.empty?\n\n exclude = %w[corporation institute organization university\n all and of the].join('|')\n tmp = name.dup\n tmp.gsub!(/#{exclude}/i, '')\n tmp.gsub!(/\\s+/, ' ')\n tmp.strip!\n tmp.downcase # it's not case sensitive\n end\n end",
"def line_clean_up(x)\n\t\tx=x.lstrip\n\t\tx=x.gsub(/[a-zA-Z\\]\\'\\\"{\\d]+=[a-zA-Z\\[\\'\\\"{\\d]+/){|x| x.split(\"=\").join(\" = \")}\n\t\t#or equal is failing to work in the same way\n\t\t#x=x.gsub(/[a-zA-Z\\]\\'\\\"{\\d]+=[a-zA-Z\\[\\'\\\"{\\d]+/){|x| x.split(\"||=\").join(\" ||= \")}\n\t\treturn x\n\tend",
"def normalize_as_postal_code(text)\n NKF.nkf('-w -Z1', text).strip.gsub(/-/, '') if text\n end",
"def code_parts\n @code_parts ||= code.to_s.split(\"-\")\n end",
"def unnormalize(code)\n code = code.gsub(/<text:line-break\\/>/, \"\\n\")\n code = code.gsub(/<text:tab\\/>/, \"\\t\")\n code = code.gsub(/<text:s(\\/|(\\s[^>]*))>/, \" \")\n return REXML::Text.unnormalize(code)\n end",
"def rstrip!() end",
"def check_register_names_for_spaces(regs)\n regs.each do |reg|\n abort(\"register has no name: #{reg}\") unless reg.has_key? 'name'\n name = reg[\"name\"]\n if (name.include? \" \")\n puts \"Register has a space in name: #{name}\"\n end\n end\nend",
"def clean_name(name)\n if name.present?\n name.gsub(/(\\s|-|\\.|,)/,'')\n end\n end"
] |
[
"0.64976764",
"0.6289234",
"0.6256693",
"0.62021565",
"0.61930275",
"0.6148575",
"0.613461",
"0.61342424",
"0.6071139",
"0.6060686",
"0.60496193",
"0.6047944",
"0.6047377",
"0.6003417",
"0.5969678",
"0.5964537",
"0.5934929",
"0.5926174",
"0.5852012",
"0.58064336",
"0.5792042",
"0.5784311",
"0.57538515",
"0.5753786",
"0.5747463",
"0.5741026",
"0.5719772",
"0.5709242",
"0.56995803",
"0.56830144",
"0.56738883",
"0.5662887",
"0.56592995",
"0.5644083",
"0.5637747",
"0.5624322",
"0.56193453",
"0.5610764",
"0.5610077",
"0.56077594",
"0.56053066",
"0.55860996",
"0.558517",
"0.5572987",
"0.55728364",
"0.55676514",
"0.55620664",
"0.55602556",
"0.55562305",
"0.5555699",
"0.55439126",
"0.5526652",
"0.5526043",
"0.5516631",
"0.5516631",
"0.55090636",
"0.55047315",
"0.550217",
"0.5493133",
"0.54863375",
"0.5484856",
"0.54822147",
"0.54791623",
"0.54679996",
"0.5467099",
"0.5457041",
"0.5451197",
"0.54449844",
"0.54406667",
"0.54368716",
"0.5436162",
"0.54348564",
"0.54348564",
"0.5431153",
"0.54270595",
"0.5425904",
"0.54227513",
"0.54182786",
"0.541671",
"0.5416408",
"0.54157525",
"0.5409573",
"0.5401259",
"0.5399665",
"0.539945",
"0.5394542",
"0.5392053",
"0.5389748",
"0.53846073",
"0.5379137",
"0.5372597",
"0.53660315",
"0.5364172",
"0.53626555",
"0.5356996",
"0.5355267",
"0.535311",
"0.5345143",
"0.5330949",
"0.5328543"
] |
0.6046308
|
13
|
allows you to push an array/row to the 2 dimensional array. all rows must be the same width
|
def << (val)
    # only arrays may be pushed as rows; check the type before touching val.size
    raise "Only add rows/arrays, got #{val.class}" unless val.is_a?(Array)
    # if this is the first row, set the width based on the size of the first array pushed
    if @h == 0
      @w = val.size
    elsif val.size != @w
      raise "Only add rows/arrays the same size as the width(#{@w})"
    end
    @data[@h] = val
    @h += 1
  end
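A hypothetical usage sketch — the wrapper class name (Array2D here) and its constructor initializing @data = [], @w = 0, @h = 0 are assumptions, not from the source. The first row pushed fixes the width for every later row:
grid = Array2D.new
grid << [1, 2, 3]   # first push sets the width to 3
grid << [4, 5, 6]   # ok: same width
grid << [7, 8]      # raises "Only add rows/arrays the same size as the width(3)"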
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def import_array(rows)\n raise ArgumentError, \"Can only work with arrays\" unless rows.is_a?(Array)\n if self.labels.size > 1 and rows.dimensions == 1\n self.add_item(rows)\n else\n # self.items = self.items + rows\n rows.each do |row|\n self.add_item(row)\n end\n end\n end",
"def push_row(other_row)\n\t\t# initialize the new array\n\t\tnew_row = Array.new(@headers.length)\n\t\t\n\t\t# Iterate headers, building our new row\n\t\t0.upto @headers.length-1 do |index|\t\t\n\t\t\t\n\t\t\t# Put the value into the corresponding column in our CSV\n\t\t\tnew_row[index] = other_row[ @headers[index] ]\n\t\tend\n\t\t\n\t\t# Add this row into our matrix\n\t\t@matrix<< new_row\n\tend",
"def add_rows! *rows\r\n matrix_new = add_rows *rows\r\n @rows = matrix_new.to_a\r\n end",
"def create_row(r)\n row = Array.new\n self.board.each { |a|\n row << a[r]\n }\n row\n end",
"def <<( other )\n case other\n when Array\n if multi_array?( other )\n all.all?(&:empty?) ? @data = other : @data += other\n else\n all.all?(&:empty?) ? @data = [ other ] : @data << other\n end\n when Hash ; @data += _convert_hash( other )\n when Sheet ; empty? ? @data = other.data.all.dup : @data += other.data.dup.no_headers\n when Row ; @data << other.to_a.dup\n when Column ; @data.map!.with_index { |row, i| row << other[ i+1 ] }\n else ; @data[0] << other\n end\n calc_dimensions\n self\n end",
"def row; reshape(1, length); end",
"def push(*rows)\n rows.each { |row| self << row }\n self\n end",
"def col_arrays\n #puts self.inspect\n cols = []\n (MIN_ROW_POSITION..MAX_ROW_POSITION).each do |i|\n cols[i] = []\n (MIN_COL_POSITION..MAX_COL_POSITION).each do |j|\n cols[i].push(@board[j][i])\n end\n end \n cols\n end",
"def create_position_array()\n array_2D = []\n array_1D = []\n\n (0...@row).each_with_index do |value, row_index|\n (0...@col).each_with_index { |value, col_index| array_1D.append(value+(row_index*@col)) }\n array_2D.append(array_1D)\n array_1D = []\n end\n\n return array_2D\n end",
"def double_array(array)\n array.concat(array)\nend",
"def yale_nd_row_as_array i\n yale_nd_row(i, :array)\n end",
"def my_transpose\n new_array = []\n\n (0...self.length).each do |row|\n new_row = []\n\n (0...self.length).each do |col|\n new_row << self[col][row]\n \n end\n new_array << new_row\n end\n new_array\n end",
"def add_rows *rows\r\n matrix_a = self.to_a\r\n\r\n rows.each {|m|\r\n if m.column_size != self.column_size\r\n raise ArgumentError, \"number of columns should be identical\"\r\n end\r\n\r\n matrix_a += m.to_a\r\n }\r\n\r\n Matrix.rows(matrix_a)\r\n end",
"def using_concat(array1, array2)\n array1 = array1.push(*array2)\n\nend",
"def rows\n Array.new self\n end",
"def matrix_addition(matrix_a, matrix_b)\n addition = Array.new(matrix_a.length) {Array.new(matrix_a[0].length)}\n\n matrix_a.each_with_index do |row, row_i|\n row.each_with_index do |col, col_i|\n addition[row_i][col_i] = matrix_a[row_i][col_i] + matrix_b[row_i][col_i]\n end\n end\n \n addition\nend",
"def double_array(array)\n array + array\nend",
"def to_ary_rows\n ary = Array.new\n rowa = Array.new\n rowi = 0\n each_index do |row, col|\n if rowi != row\n rowi = row\n ary << rowa\n rowa = Array.new\n end\n\n rowa << self[row, col]\n end\n\n ary << rowa\n ary\n end",
"def push_array(arr_t, arr_v)\n arr_data = arr_t.zip(arr_v)\n @data.concat(arr_data)\n @data.sort_by! { |d| d[0] }\n end",
"def matrix(input)\n # shortcut for making matrix\n #array = Array.new(input[0].length) {Array.new}\n new_array= []\n \n #creating structure\n #get number of columns to create number of rows\n input[0].length.times do\n new_array.push([]) \n end\n puts \"new_array #{new_array}\"\n\n i = 0 \n # i represents col for input\n # i represents row for new_array\n # [row][col]\n # input[j][i]\n # new_array[i][j]\n while i < input[0].length\n j = 0 \n # j represents row for input\n # j represents col for new_array\n # [row][col]\n # input[j][i]\n # new_array[i][j]\n while j < input.length\n \n value = input[j][i]\n #puts \"value #{input[j][i]}\"\n new_array[i].push(value) #i represents column, adding values into columns\n j += 1\n end\n # puts \"new_array #{new_array}\"\n i += 1\n end\n return new_array\nend",
"def add_to_array(x, y)\n combined_array = x\n combined_array.push(y)\nend",
"def array_concat(array_1, array_2)\n array_2.each do |x|\n array_1[array_1.length] = x\n end\n return array_1\nend",
"def matrix(x, y, z)\n sub_array = []\n y.times do\n sub_array.push(z);\n end\n #sub_array\n array = [];\n x.times do\n array.push(sub_array)\n end\n array\nend",
"def beer(arr, x, y)\n\tbuild = Array.new\n\tarr << x << y \nend",
"def array_concat(array_1, array_2)\n array_1.push(*array_2) \nend",
"def push_array\n <<-CODE\n t1 = stack_pop();\n j = N2I(array_get_total(t1));\n for(j--; j >= 0; j--) {\n stack_push(array_get(state, t1, j));\n }\n CODE\n end",
"def from_arrays(array2d)\n @bandwidth = BandMatrixFactory.calculate_bandwidth(array2d)\n @row_size = array2d.size\n @column_size = array2d.first.size\n \n @data = Array.new(@row_size){Array.new(@bandwidth*2+1,0)}\n \n DelegateMatrix.iterate_matrix(array2d, Proc.new do |x,y,v|\n self.[]=(x,y,v)\n end)\n end",
"def alternate\n return [] if size.zero?\n return [[self[0]]] if size == 1 \n populate_alternate_arrays\n end",
"def copy_tg(tg)\n result = [Array.new(tg[0].size),Array.new(tg[1].size)]\n tg[0].each_index do |n|\n result[0][n] = Array.new(tg[0][n])\n end\n tg[1].each_index do |e|\n result[1][e] = Array.new(tg[1][e])\n end\n result\nend",
"def mergerows(row1,row2)\n\t\tif row2 >= @text.length\n\t\t\treturn\n\t\tend\n\t\tcol = @text[row1].length\n\t\t@text[row1] = @text[row1].dup\n\t\t@text[row1] += @text[row2]\n\t\t@text.delete_at(row2)\n\tend",
"def make_board\n board = Array.new(8) { Array.new(8) { Array.new(2) } }\n board.each_with_index do |row, row_i|\n row.each_with_index { |column, column_i| column[0], column[1] = row_i, column_i }\n end\n end",
"def append_row(*args)\n begin_row\n args.each do |arg|\n append(arg)\n end\n end_row\n end",
"def beer(arr, x, y)\n build = Array.new \n arr << x << y \nend",
"def write_line(arr)\n @sheet.row(@current_row).concat arr\n next_row\n end",
"def test_with_irregular_array2D2; show([[0.5], [0.1,0.9]]) end",
"def to_row \n if ndim != 1\n raise \"ndim should be 1\"\n end\n return self[1,:%]\n end",
"def add_row(pixels)\n @width = pixels.length if @width.nil?\n @rows << pixels.map { |c| @color_map[c] } # TODO: truncate & fill to force width?\n end",
"def test_with_irregular_array2D1; show([[0.1,0.9], [0.5]]) end",
"def push(sdata)\n row = Hash.new\n @columns.each do |column,value|\n nvalue = sdata.shift()\n row.store(column,nvalue)\n row.store(column,value) if row[column].nil?\n end\n @data.push(row)\n end",
"def square_arrays\n squares = []\n org_row = 0\n org_col = 0\n (0..8).each do |i|\n squares[i] = []\n (org_row..org_row+2).each do |k|\n (org_col..org_col+2).each do |j|\n squares[i].push(@board[k][j])\n end \n end\n if org_row == 6\n org_col += 3 \n org_row = 0\n else\n org_row += 3 \n end\n end \n squares\n end",
"def multidimArray\n # fill multidimensional\n puts \"----------- Multi-dimensional arrays -------------- \"\n empty_table = Array.new(3) { Array.new(3) }\n Array({:a => \"a\", :b => \"b\"}) #=> [[:a, \"a\"], [:b, \"b\"]]\n number_array = [ [1,2,3], [4,5,6] ]\n puts number_array.to_s\nend",
"def grid(n, m)\n Array.new(n) { Array.new(n) } # If you attempted to write this as Array.new(n, Array.new(m)) the contents would be repeated for each array rather\nend",
"def matrix_addition(matrix1, matrix2)\n new_matrix = Array.new(matrix1.length) { Array.new(matrix1[0].length) }\n\n (0...matrix1.length).each do |i|\n (0...matrix1[i].length).each do |j|\n new_matrix[i][j] = matrix1[i][j] + matrix2[i][j]\n end\n end\n\n new_matrix\nend",
"def my_transpose\n output = Array.new(self.length) {Array.new(self.length)}\n\n self.each_with_index do |row, row_i|\n row.each_with_index do |col, col_i|\n output[col_i][row_i] = self[row_i][col_i]\n end\n end\n output\n end",
"def append_row!(*args)\r\n insert_row!(*args)\r\n end",
"def array_concat(array_1, array_2)\n\n\n newArray = Array.new\n counter = 0\n\n (array_1.length).times do |x|\n newArray[counter] = array_1[x]\n counter = counter + 1\n end\n\n (array_2.length).times do |y|\n newArray[counter] = array_2[y]\n counter = counter + 1\n end\n\n newArray\nend",
"def concat(array,array2)\n array << array2\n return array\nend",
"def from_array_of_arrays(rows, hlines: false, **types)\n headers = []\n if !hlines\n # Take the first row as headers\n # Second row et seq as data\n headers = rows[0].map(&:to_s).map(&:as_sym)\n first_data_row = 1\n elsif rows[1].nil?\n # Use first row 0 as headers\n # Row 1 is an hline\n # Row 2 et seq are data\n headers = rows[0].map(&:to_s).map(&:as_sym)\n first_data_row = 2\n else\n # Synthesize headers\n # Row 0 et seq are data\n headers = (1..rows[0].size).to_a.map { |k| \"col_#{k}\".as_sym }\n first_data_row = 0\n end\n result = new(*headers, **types)\n rows[first_data_row..-1].each do |row|\n if row.nil?\n unless hlines\n msg = 'found an hline in input: try setting hlines true'\n raise UserError, msg\n end\n result.mark_boundary\n next\n end\n row = row.map { |s| s.to_s.strip }\n hash_row = Hash[headers.zip(row)]\n result << hash_row\n end\n result.normalize_boundaries\n result\n end",
"def my_transpose\n row_length = length\n col_length = first.length\n transposed = Array.new(col_length) { Array.new(row_length) }\n\n (0...row_length).each do |row|\n (0...col_length).each do |col|\n transposed[col][row] = self[row][col]\n end\n end\n transposed\n end",
"def insert_array(data, start_cell, direction = 'right')\n direction = get_direction(direction)\n row_offset = 0\n col_offset = 0\n\n data.each do |val|\n @ws.range(start_cell).offset(row_offset, col_offset).value = val\n case direction\n when DOWN\n row_offset += 1\n when UP\n row_offset += -1\n when TORIGHT\n col_offset += 1\n when TOLEFT\n col_offset += -1\n end\n end\n end",
"def array_concat(array_1, array_2)\n # Your code here\n #Iterate over the length of array\n array_2.each do |section|\n #add each section of the array to the other\n array_1.push(section)\n end\n # Output the combination of both the array\n return array_1\nend",
"def hstack(arrays)\n klass = (self==NArray) ? NArray.array_type(arrays) : self\n nd = 0\n arys = arrays.map do |a|\n a = klass.cast(a)\n nd = a.ndim if a.ndim > nd\n a\n end\n dim = (nd >= 2) ? 1 : 0\n concatenate(arys,axis:dim)\n end",
"def array_1(array1)\n array1.shift\n array1.push(6)\n return array1\nend",
"def add_row(row)\n case\n when row.is_a?(Array)\n @rows << Hash[@parameters.zip(row.collect { |value| value.strip })]\n @row_elements << Row.new(\"|#{row.join('|')}|\")\n when row.is_a?(Hash)\n @rows << row.each_value { |value| value.strip! }\n @row_elements << Row.new(\"|#{ordered_row_values(row).join('|')}|\")\n else\n raise(ArgumentError, \"Can only add row from a Hash or an Array but received #{row.class}\")\n end\n end",
"def make_combined_matrix(array_one, array_two)\n result = []\n\n for i in 0..array_one.length-1\n row = []\n for j in 0.. array_two.length-1\n row.push(array_two[j] + array_one[i])\n end\n result.push(row)\n end\n\n return result\nend",
"def merge_arrays (first, second)\n\nlarge_array = []\n\n 11.times do |i|\n smaller_array = []\n smaller_array << first[i]\n smaller_array << second[i]\n large_array << smaller_array\n end\n return large_array\n\nend",
"def add_to_array(x, y, z)\n array = []\n array.push(x, y, z)\n array.flatten!\n p array\nend",
"def array_with_two_elements\n @my_two_array=array_with_two_elements \nend",
"def matrix_addition(matrix_1, matrix_2)\n new_matrix = Array.new(matrix_1.length) {Array.new(matrix_1.first.length)}\n new_matrix.each_with_index do |array, i1|\n array.each_with_index do |el, i2|\n new_matrix[i1][i2] = matrix_1[i1][i2] + matrix_2[i1][i2]\n end\n end\n new_matrix\nend",
"def add_row(numbers)\n row = []\n row.push(numbers.split(\" \").map!{|j| j.to_i})\n row.flatten!\n @arr << row \nend",
"def yale_row_as_array i\n ary = yale_nd_row(i, :array)\n return ary if i >= self.shape[1] || self[i,i].nil? || self[i,i] == 0\n ary << i\n end",
"def add_row(row)\n if row.size < @size\n row.fill('', row.size...@size)\n elsif row.size > @size\n @heading.fill('', @size...row.size)\n @body.each { |r| r.fill('', @size...row.size) }\n @size = row.size\n end\n @body << row\n end",
"def get_oneDArray(shape,twoDArray)\n oneDArray = Java::double[shape[0]*shape[1]].new\n index = 0\n (0...shape[0]).each do |i|\n (0...shape[1]).each do |j|\n oneDArray[index] = twoDArray[i][j]\n index+=1\n end\n end\n oneDArray\n end",
"def insert_array(column, value)\n @columns << column\n @values << Translate.escape_array(value)\n end",
"def push_array(arr, destination)\n arr.each { |x| destination.push(x) }\nend",
"def push(values)\n underlying_array.push(values)\n values\n end",
"def index_arr_multiple_add(arr, index)\n # add any two elements to the arr at the index\n return arr.push(index,arr)\n return arr.push(index,arr)\nend",
"def stack_push_array(a)\n\n Lib.lua_createtable(@pointer, a.size, 0)\n # since we already know the size of the table...\n\n a.each_with_index do |e, i|\n stack_push(i + 1)\n stack_push(e)\n Lib.lua_settable(@pointer, -3)\n end\n end",
"def right(r, sc, ec, array, final_array) #r for row, sc start column, end column\n (sc..ec).each do |num|\n final_array << array[r][num]\n end\nend",
"def make_columns(rows)\n columns = Array.new(rows.size) { [] }\n # make an empty nested array to represent columns.\n # iterate through the rows, for each row...\n # place the number into the new nested array evenly across the subarrays\n rows.each do |row|\n row.each_with_index do |num, col_idx|\n columns[col_idx] << num\n end\n end\n columns\nend",
"def row(r)\n return matrix[r].map{|e| e.dup}\n end",
"def generate_rows(array)\n rows = []\n array.each do |line|\n row = [line]\n array.each do |column|\n row << line * column\n end\n rows << row\n end\n\n rows\n end",
"def matrix_addition(m1, m2)\n height = m1.length\n width = m1[0].length\n result = Array.new(height) { [0] * width }\n\n (0...height).each do |row|\n (0...width).each do |col|\n result[row][col] = m1[row][col] + m2[row][col]\n end\n end\n\n result\nend",
"def increment_size\n\t\tincremented_size = row_count + 1\n\n\t\t@rows = Array.new(incremented_size){Array.new(incremented_size, 0)}\t# adding a row and a column to the current Matrix\n\t\t@row_count = incremented_size\t# incrementing the counter of rows\n\t\t@column_count = incremented_size\t# incrementing the counter of columns\n\tend",
"def setup_with_rows(row_contents)\n @fm = Array.new\n\n raise 'Invalid input to create fmatrix from rows' unless row_contents.kind_of?(Array) && row_contents.length > 0 && row_contents[0].kind_of?(Array) && row_contents[0].length > 0\n expected_row_size = row_contents[0].length\n row_contents.each do |row|\n raise 'Provided rows are not all the same size!' unless row.length == expected_row_size\n @fm.concat(row)\n end\n\n @rows = row_contents.length\n @cols = row_contents[0].length\n end",
"def board_2d\n # slice the array into groups of 4 to create 2d-array\n @board.enum_slice(4).to_a\n end",
"def matrix_addition (arr1, arr2)\n new_arr = []\n arr1.each.with_index do|subarr1, idx1|\n new_sub = []\n subarr1.each.with_index do |num1, idx2|\n new_sub << arr1[idx1][idx2] + arr2[idx1][idx2]\n end\n new_arr << new_sub\n end\n\n new_arr\nend",
"def array_concat(array_1, array_2)\n newarray = Array.new\n for i in 0..(array_1.length-1)\n newarray[i] = array_1[i]\n end\n for j in 0..(array_2.length-1)\n newarray[array_1.length+j] = array_2[j]\n end\n return newarray\nend",
"def matrix_addition(m1, m2)\n rows = m1.length\n cols = m1[0].length\n result = Array.new(rows) { Array.new(cols, 0)}\n (0...rows).each do |row|\n (0...cols).each do |col|\n result[row][col] = m1[row][col] + m2[row][col]\n end\n end\n \n result\nend",
"def create_empty_array\n @outcome_array = Array.new(find_number_of_rows+1) { Array.new(find_number_of_columns+1) }\nend",
"def format_rows(src_rows, tg_width, filled = nil)\r\n r_length = src_rows.first.size\r\n\r\n if r_length > tg_width\r\n src_rows.map! do |row|\r\n row.data = row[0..tg_width - 1]\r\n end\r\n elsif r_length < tg_width\r\n src_rows.map! do |row|\r\n row.fill_with_nil(tg_width - r_length)\r\n end\r\n end\r\n\r\n src_rows\r\n end",
"def column_stack(arrays)\n arys = arrays.map do |a|\n a = cast(a)\n case a.ndim\n when 0; a[:new,:new]\n when 1; a[true,:new]\n else; a\n end\n end\n concatenate(arys,axis:1)\n end",
"def turn\n\t\tnew_grid = Array.new(@size + 1) { Array.new(@size + 1)}\n\t\t0.upto(@size) do |row|\n\t\t\t0.upto(@size) do |column|\n\t\t\t\tnew_grid[column][@size - row] = @grid[row][column]\n\t\t\tend\n\t\tend\n\t\t@grid = new_grid\n\tend",
"def set_values(array)\n @row = array\n end",
"def resize\n array = Array.new(@array.size * 2)\n\n (@left..@right).each do |i|\n array[i] = @array[i]\n end\n\n @array = array\n end",
"def generate(num_rows)\n ans = [[1], [1, 1]]\n return [] if num_rows == 0\n return [[1]] if num_rows == 1\n return ans if num_rows == 2\n \n for i in 3..num_rows do \n curr_arr = [1] * i\n for j in 1..i - 2 do\n curr_arr[j] = ans[i - 2][j - 1] + ans[i - 2][j]\n end\n ans << curr_arr\n end\n ans\nend",
"def add array\n #$log.debug \"tabular got add #{array.count} #{array.inspect} \" if $log\n @list ||= []\n @list << array\n end",
"def make_matrixarray\n\t\tmatrix = []\n\t\t@speakers.each do |this_speaker|\n\t\t\tmatrix << row(this_speaker)\n\t\tend\n\t\t#return matrix array\n\t\treturn matrix\n\tend",
"def array_concat(array_1, array_2)\n\n array_2.each do |a2_element|\n array_1.push(a2_element)\n end\n\n return array_1\n\nend",
"def reshape(one_dim)\n (0...row_length).inject([]) do |rows, row_index|\n start_index = row_index * col_length\n end_index = (row_index+1) * col_length\n range = (start_index)...(end_index)\n rows << one_dim[range]\n end\n end",
"def push_ary ary\n ary.each do |element|\n push element\n end\n end",
"def add_row( group_id, row, number_to_transpose = 0 )\n @groups[group_id - 1] ||= []\n @groups[group_id - 1] << MiltonsMachine::Core::ForteSet.new(row).transpose_mod12(number_to_transpose)\n @max_group_index = @groups.length - 1\n @max_column_index = @groups[0][0].length - 1\n end",
"def add array\n $log.debug \"tabular got add #{array.count} #{array.inspect} \" if $log\n @list ||= []\n @list << array\n end",
"def fill_2d(obj = nil)\r\n # find longest\r\n longest = self.max { |a,b| a.length <=> b.length }.size\r\n self.each do |row|\r\n row[longest-1] = obj if row.size < longest # fill with nulls\r\n end\r\n return self\r\n end",
"def row(y)\n src.xsize.times.map { |x| src[x, y] }\n end",
"def vstack(arrays)\n arys = arrays.map do |a|\n _atleast_2d(cast(a))\n end\n concatenate(arys,axis:0)\n end",
"def [] x, y\r\n @rows[y][x]\r\n end",
"def make_grid\n @grid = Array.new(4){Array.new(4)}\n end",
"def transpose(mtrx)\n new_mtrx = Array.new(3) { Array.new(3) }\n new_mtrx.each_with_index do |sub_arr, idx|\n sub_arr.each_with_index do |element, inner_idx|\n new_mtrx[idx][inner_idx] = mtrx[inner_idx][idx]\n end\n end\n new_mtrx\nend",
"def my_transpose(trans_arr)\n i, j = 0, 1\n array_copy = trans_arr.dup\n (i...trans_arr.length-1).each do |index_one|\n (j...trans_arr.length).each do |index_two|\n array_copy[index_one][index_two], array_copy[index_two][index_one] =\n array_copy[index_two][index_one], array_copy[index_one][index_two]\n end\n end\n array_copy\n end"
] |
[
"0.67068654",
"0.65762824",
"0.6492256",
"0.6334116",
"0.61582327",
"0.6142815",
"0.61379886",
"0.6136306",
"0.6105991",
"0.595282",
"0.59520125",
"0.59357345",
"0.5921715",
"0.5878791",
"0.58428836",
"0.5832875",
"0.58272946",
"0.58116895",
"0.5807732",
"0.5802863",
"0.5792804",
"0.5791342",
"0.57887274",
"0.57883525",
"0.5784579",
"0.577766",
"0.57768065",
"0.57759565",
"0.57653683",
"0.5753716",
"0.5750638",
"0.5748892",
"0.5748217",
"0.5744582",
"0.5744373",
"0.57437086",
"0.57413536",
"0.573811",
"0.57353026",
"0.5709225",
"0.57048285",
"0.56995475",
"0.5666506",
"0.56641495",
"0.56624275",
"0.56588244",
"0.5656524",
"0.56555605",
"0.5645198",
"0.56394356",
"0.5638323",
"0.563279",
"0.5625182",
"0.5623563",
"0.5620407",
"0.56117463",
"0.560829",
"0.55884963",
"0.5578773",
"0.55704314",
"0.55669355",
"0.5564635",
"0.55563235",
"0.5556068",
"0.55375737",
"0.5535497",
"0.5532144",
"0.5530735",
"0.5528719",
"0.5527047",
"0.55170375",
"0.55115885",
"0.55115813",
"0.5507918",
"0.55043083",
"0.55002296",
"0.5492106",
"0.5491276",
"0.54862744",
"0.5480908",
"0.54807234",
"0.5479469",
"0.5478902",
"0.54707116",
"0.54569453",
"0.5453706",
"0.5441312",
"0.5440294",
"0.5438105",
"0.54336715",
"0.54291075",
"0.5419538",
"0.5416403",
"0.5415168",
"0.5413886",
"0.5410631",
"0.5403992",
"0.5403117",
"0.54018587",
"0.54015034"
] |
0.72871757
|
0
|
this yields each x,y index pair of the grid
|
def each_index
@data.each_index do |y|
@data[y].each_index do |x|
yield x, y
end
end
end
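A minimal usage sketch (hypothetical, not from the source: the Grid wrapper and its @data contents are assumed for illustration). It shows each_index visiting every x,y pair row by row:

grid = Grid.new([[1, 2], [3, 4]])   # assumed wrapper that stores the array in @data
grid.each_index { |x, y| print "(#{x},#{y}) " }
# => (0,0) (1,0) (0,1) (1,1)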
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def hash\n\t\t\"#{@x}#{@y}\".hash\n\tend",
"def hash\n [group, x, y].hash\n end",
"def hash_keys\n [:x, :y]\n end",
"def hash\n @points.inject(0) { |sum, point| sum += point.x + point.y }\n end",
"def hash_code\n prime = 31\n result = 1\n result = prime * result + x\n result = prime * result + y\n return result;\n end",
"def hashed_coord(x, y)\n num = 0\n num += 65536 if x < 0\n num += 65536 * y\n num += x\n num\nend",
"def hash\n code = 17\n code = 37*code + @x.hash\n code = 37*code + @y.hash\n # Add lines like this for each significant instance variable\n code # Return the resulting code\n end",
"def hash\n [self[0], self[1]].hash\n end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def to_h\n { x: x, y: y }\n end",
"def hash(*) end",
"def to_h\n { x: @x, y: @y }\n end",
"def hash\n\t\t[@a, @b, self.class::D].hash\n\tend",
"def hash\n source.hash ^ target.hash\n end",
"def hash() source.hash ^ target.hash; end",
"def hash\n from_end.hash + to_end.hash + overlap.hash + complement_overlap.to_s.hash\n end",
"def hash\n size.hash ^ rank.hash\n end",
"def hash() source.hash ^ (target.hash+1); end",
"def hash() source.hash ^ (target.hash+1); end",
"def hash\n [height, is_column_break, width, x, y].hash\n end",
"def hash()\n #This is a stub, used for indexing\n end",
"def hash\r\n @offset.hash ^ @previous_offset.hash ^ @numerator_or_time.hash ^ @denominator.hash\r\n end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash\n from_end.hash + to_end.hash + overlap.hash + reverse_overlap.to_s.hash\n end",
"def hash\n self.class.hash ^ left.hash ^ right.hash\n end",
"def hash=(_arg0); end",
"def hash\n return name.hash ^ direction.hash ^ lhs.hash ^ rhs.hash\n end",
"def hash\n self.begin.hash ^ self.end.hash\n end",
"def hash\n @offset.hash ^ @previous_offset.hash ^ @timestamp.hash\n end",
"def hash\n @offset.hash ^ @previous_offset.hash ^ @numerator_or_time.hash ^ @denominator.hash\n end",
"def hash\n [ max_cfvo , max_color , mid_cfvo , mid_color , min_cfvo , min_color ].hash\n end",
"def hash(key); end",
"def to_hashed(tile_data)\n tokenized = tile_data.split(\",\").map{|x| x.to_i}\n tiles = []\n i = 0\n while i < tokenized.length do\n x = x_coord(tokenized[i])\n y = y_coord(tokenized[i])\n index = tokenized[i+1]\n tiles << {x: x, y: y, index: index}\n i += 3\n end\n tiles\nend",
"def hash\r\n a = 0\r\n @id.each_byte {|c| a += c.to_i}\r\n (a + @paired.to_i) * HASH_PRIME\r\n end",
"def hash_code; end",
"def geohash(key, member); end",
"def hash\r\n @row + @column\r\n end",
"def hash\n swap\n scatter\n completed_string\n end",
"def hash\n values.hash ^ known_data.hash\n end",
"def to_hash() end",
"def hash\n [amount.hash, currency.hash].hash\n end",
"def hash\n [amount.hash, currency.hash].hash\n end",
"def hash\n [cents.hash, currency.hash].hash\n end",
"def hash\n [cents.hash, currency.hash].hash\n end",
"def hash\n num = @high << 64\n num |= @low\n num.hash\n end",
"def hash\n @vector\n end",
"def solution2\n n = 1000\n\n hsh = Hash.new([])\n\n (1..n).each do |x|\n (1..n).each do |y|\n hsh[x**3 + y**3] += [x, y]\n end\n end\n\n hsh.values\nend",
"def hash\n [day, from, to].hash\n end",
"def hash\n value = 0\n my_rows = @rows\n r_size = my_rows.size\n for i in 0..r_size-1 do\n a_row = my_rows[i]\n a_size = a_row.size\n for j in 0..a_size-1 do\n value ^= a_row[j].hash\n end\n end\n return value\n end",
"def hash\n @real.hash ^ @image.hash\n end",
"def arrhash(arg)\n the_hash = {}\n arg.each_with_index do |x,y|\n the_hash[y] = x\n end\n puts the_hash\nend",
"def hash\n self.class.hash ^ operand.hash\n end",
"def to_h\n { x: x, y: y, z: z }\n end",
"def hash\n @offset.hash ^ @previous_offset.hash ^ @numerator_or_time.hash ^ @denominator.hash\n end",
"def arrhash (arg)\n the_hash = {}\n arg.each_with_index do |x,y|\n the_hash[y] = x\n end\n puts the_hash\nend",
"def cache_ids_xy(x,y)\n (ids = @exy_cache[[x,y]]) ? ids : []\n end",
"def hash\n [first_name, last_name, address_one, address_two, city, state, zip, phone, email, country_code].hash\n end",
"def hash(*args, **_arg1, &block); end",
"def hash\n h = @e.nil? ? 0 : @e\n h = (h << 1) ^ @r.hash\n h = (h << 1) ^ @v.hash\n end",
"def hash\n [include_zero, max, min, scale].hash\n end",
"def hash_values\n [@modulus, @remainder]\n end",
"def hash\n [@offset, @previous_offset, @timestamp_value].hash\n end",
"def hash\n @hash || @hash = (value.hash * -1)\n end",
"def hash\n end",
"def hash\n end",
"def hash\n end",
"def hash\n @matrix\n end",
"def hash\n @hash || calculate_hash!\n end",
"def hash\n @identifier.hash ^ \n (@latitude ? @latitude.numerator.hash ^ @latitude.denominator.hash : @latitude_numerator.hash ^ @latitude_denominator.hash) ^\n (@longitude ? @longitude.numerator.hash ^ @longitude.denominator.hash : @longitude_numerator.hash ^ @longitude_denominator.hash) ^\n @description.hash\n end",
"def do_hash(input)\n a = OpenSSL::Digest.hexdigest(\"SHA224\", input).to_i % 19\n b = OpenSSL::Digest.hexdigest(\"SHA512\", input).to_i % 19\n [a, b]\n end",
"def calculate_coordinates\n (\n egde(@x1, @y1, @x2, @y1) +\n egde(@x2, @y1, @x2, @y2) +\n egde(@x2, @y2, @x1, @y2) +\n egde(@x1, @y2, @x1, @y1)\n ).uniq\n end",
"def hash\n [oct, pc].hash\n end",
"def hash\n [rank, suit].hash\n end",
"def hash\n Zlib.crc32(to_a.map(&:to_s).sort.to_s)\n end",
"def hash\n excl = @excl ? 1 : 0\n hash = excl\n hash ^= @begin.hash << 1\n hash ^= @end.hash << 9\n hash ^= excl << 24;\n # Are we throwing away too much here for a good hash value distribution?\n return hash & Fixnum::MAX\n end",
"def to_h\n { x: @x, y: @y, z: @z }\n end",
"def coords; {:x => @x, :y => @y} end",
"def hash()\n #This is a stub, used for indexing\nend",
"def hash\n ([graph_name] + operands).hash\n end",
"def hash\n lists.inject({}){ |hash, p| hash[p[0]] ||= []; hash[p[0]] << p[1]; hash }\n end",
"def hash\n [first_row_index, first_cell_index, last_row_index, last_cell_index, allow_splitting].hash\n end",
"def hash\n [compute, group_by, index, multi_compute, search].hash\n end",
"def hash\n @symbols.hash + 37*positive?.hash\n end",
"def hash\n source_position.hash\n end",
"def make_coordinate_hash\n @matrix.each_with_index do |row, x|\n row.each_with_index do |move, y|\n move = Coordinate.new(x,y)\n @coordinates[[x,y]] = move\n end\n end\n end",
"def hash256(hex)\n binary = [hex].pack(\"H*\")\n hash1 = Digest::SHA256.digest(binary)\n hash2 = Digest::SHA256.digest(hash1)\n result = hash2.unpack(\"H*\")[0]\n return result\nend"
] |
[
"0.8494066",
"0.77073634",
"0.75714606",
"0.72874975",
"0.7216309",
"0.70593494",
"0.70473903",
"0.70212257",
"0.6825307",
"0.6825307",
"0.6825307",
"0.6825307",
"0.6825307",
"0.6825307",
"0.6825307",
"0.6822532",
"0.6785041",
"0.66861725",
"0.6421113",
"0.62906164",
"0.6266734",
"0.6265496",
"0.6260882",
"0.62478346",
"0.62478346",
"0.6223653",
"0.6212755",
"0.62033635",
"0.62014705",
"0.62014705",
"0.62014705",
"0.62014705",
"0.62014705",
"0.62014705",
"0.62014705",
"0.62014705",
"0.62014705",
"0.62014705",
"0.6195962",
"0.6183783",
"0.61811996",
"0.61799294",
"0.613111",
"0.6130885",
"0.612821",
"0.6117835",
"0.61056036",
"0.61018735",
"0.60907537",
"0.607171",
"0.6062574",
"0.6046257",
"0.6031698",
"0.6012472",
"0.6006196",
"0.59895635",
"0.59895635",
"0.5988327",
"0.5988327",
"0.59845155",
"0.59761107",
"0.596189",
"0.5960658",
"0.59540534",
"0.5951391",
"0.59475434",
"0.5943647",
"0.59416455",
"0.59392774",
"0.59391993",
"0.59373593",
"0.59126765",
"0.5910131",
"0.59033376",
"0.58736455",
"0.5834537",
"0.5831419",
"0.58286977",
"0.58118784",
"0.58118784",
"0.58118784",
"0.5808497",
"0.5805265",
"0.5794127",
"0.5791372",
"0.57903624",
"0.5789876",
"0.5784272",
"0.57810533",
"0.5768799",
"0.5745879",
"0.57432806",
"0.5725833",
"0.57164353",
"0.5715725",
"0.5713224",
"0.5708962",
"0.57008845",
"0.5699828",
"0.5684786",
"0.56772107"
] |
0.0
|
-1
|
this yields each x,y coordinate together with its value
|
def each_with_index
@data.each_index do |y|
@data[y].each_with_index do |val, x|
      yield x, y, val
end
end
end
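A hedged companion sketch (same assumed Grid wrapper as above); each_with_index yields the stored value alongside its coordinates:

grid = Grid.new([[1, 2], [3, 4]])
grid.each_with_index { |x, y, val| print "(#{x},#{y})=#{val} " }
# => (0,0)=1 (1,0)=2 (0,1)=3 (1,1)=4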
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def hash\n\t\t\"#{@x}#{@y}\".hash\n\tend",
"def hash\n [group, x, y].hash\n end",
"def hash\n @points.inject(0) { |sum, point| sum += point.x + point.y }\n end",
"def hash_keys\n [:x, :y]\n end",
"def to_h\n { x: x, y: y }\n end",
"def to_h\n { x: @x, y: @y }\n end",
"def hash\n [height, is_column_break, width, x, y].hash\n end",
"def hash\n [self[0], self[1]].hash\n end",
"def hash\n [@offset, @previous_offset, @timestamp_value].hash\n end",
"def hash\r\n @row + @column\r\n end",
"def hash\n [ max_cfvo , max_color , mid_cfvo , mid_color , min_cfvo , min_color ].hash\n end",
"def hash\n code = 17\n code = 37*code + @x.hash\n code = 37*code + @y.hash\n # Add lines like this for each significant instance variable\n code # Return the resulting code\n end",
"def hashed_coord(x, y)\n num = 0\n num += 65536 if x < 0\n num += 65536 * y\n num += x\n num\nend",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def hash() end",
"def to_h\n { x: x, y: y, z: z }\n end",
"def hash\n [value].hash\n end",
"def hash\n [value].hash\n end",
"def hash\n values.hash ^ known_data.hash\n end",
"def hash\n @hash || @hash = (value.hash * -1)\n end",
"def hash\n value_id.hash\n end",
"def hash\n [include_zero, max, min, scale].hash\n end",
"def hash(*) end",
"def to_hash\n\t\t{type: \"Point\", coordinates: [@longitude, @latitude]}\n\tend",
"def coords; {:x => @x, :y => @y} end",
"def hash\n\t\tvalue.hash\n\tend",
"def to_h\n { x: @x, y: @y, z: @z }\n end",
"def hash\n @vector\n end",
"def to_hash\n \t{:type=>\"Point\", :coordinates=>[ @longitude, @latitude ]}\n end",
"def hash\n [first_row_index, first_cell_index, last_row_index, last_cell_index, allow_splitting].hash\n end",
"def make_coordinate_hash\n @matrix.each_with_index do |row, x|\n row.each_with_index do |move, y|\n move = Coordinate.new(x,y)\n @coordinates[[x,y]] = move\n end\n end\n end",
"def hash_code\n prime = 31\n result = 1\n result = prime * result + x\n result = prime * result + y\n return result;\n end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash; end",
"def hash\n source_position.hash\n end",
"def hash\n @value.hash\n end",
"def hash\n @matrix\n end",
"def hash\r\n @offset.hash ^ @previous_offset.hash ^ @numerator_or_time.hash ^ @denominator.hash\r\n end",
"def hash()\n #This is a stub, used for indexing\n end",
"def hash\n @offset.hash ^ @previous_offset.hash ^ @numerator_or_time.hash ^ @denominator.hash\n end",
"def hash\n value.hash\n end",
"def hash\n [self.start, self.end, self.label].hash\n end",
"def to_hashed(tile_data)\n tokenized = tile_data.split(\",\").map{|x| x.to_i}\n tiles = []\n i = 0\n while i < tokenized.length do\n x = x_coord(tokenized[i])\n y = y_coord(tokenized[i])\n index = tokenized[i+1]\n tiles << {x: x, y: y, index: index}\n i += 3\n end\n tiles\nend",
"def hash\n\t\t[@a, @b, self.class::D].hash\n\tend",
"def hash\n [self.class, self.val, self.attribute].hash\n end",
"def hash\n size.hash ^ rank.hash\n end",
"def geohash(key, member); end",
"def hash\n [day, from, to].hash\n end",
"def hash\n @offset.hash ^ @previous_offset.hash ^ @numerator_or_time.hash ^ @denominator.hash\n end",
"def hash_values\n [@modulus, @remainder]\n end",
"def values_hash\n @_values.dup\n end",
"def hash\n swap\n scatter\n completed_string\n end",
"def hash\n [first_name, last_name, address_one, address_two, city, state, zip, phone, email, country_code].hash\n end",
"def to_hash() end",
"def values_hash\n hashify(:value)\n end",
"def hash\n @offset.hash ^ @previous_offset.hash ^ @timestamp.hash\n end",
"def hash\n @identifier.hash ^ \n (@latitude ? @latitude.numerator.hash ^ @latitude.denominator.hash : @latitude_numerator.hash ^ @latitude_denominator.hash) ^\n (@longitude ? @longitude.numerator.hash ^ @longitude.denominator.hash : @longitude_numerator.hash ^ @longitude_denominator.hash) ^\n @description.hash\n end",
"def hash\n @data.dup.concat([@ts]).hash\n end",
"def hash\n from_end.hash + to_end.hash + overlap.hash + reverse_overlap.to_s.hash\n end",
"def arrhash(arg)\n the_hash = {}\n arg.each_with_index do |x,y|\n the_hash[y] = x\n end\n puts the_hash\nend",
"def hash\n from_end.hash + to_end.hash + overlap.hash + complement_overlap.to_s.hash\n end",
"def arrhash (arg)\n the_hash = {}\n arg.each_with_index do |x,y|\n the_hash[y] = x\n end\n puts the_hash\nend",
"def hash\n value = 0\n my_rows = @rows\n r_size = my_rows.size\n for i in 0..r_size-1 do\n a_row = my_rows[i]\n a_size = a_row.size\n for j in 0..a_size-1 do\n value ^= a_row[j].hash\n end\n end\n return value\n end",
"def hash\n [compute, group_by, index, multi_compute, search].hash\n end",
"def to_hash\n {\n :type =>\"Point\",\n :coordinates => [@longitude, @latitude]\n }\n end",
"def hash\n [auto_points, teleop_points, foul_points, adjust_points, total_points, robot1_auto, robot2_auto, robot3_auto, rotor1_auto, rotor2_auto, auto_fuel_low, auto_fuel_high, auto_mobility_points, auto_rotor_points, auto_fuel_points, teleop_fuel_points, teleop_fuel_low, teleop_fuel_high, teleop_rotor_points, k_pa_ranking_point_achieved, teleop_takeoff_points, k_pa_bonus_points, rotor_bonus_points, rotor1_engaged, rotor2_engaged, rotor3_engaged, rotor4_engaged, rotor_ranking_point_achieved, tech_foul_count, foul_count, touchpad_near, touchpad_middle, touchpad_far].hash\n end",
"def hash_code; end",
"def hash\n raw = [name, type, values.join('/')].join(' ')\n Digest::MD5.hexdigest(raw)\n end",
"def hash\n\t\t[@id].hash\n\tend",
"def hash(key); end",
"def hash\n [rank, suit].hash\n end",
"def hash\n self.begin.hash ^ self.end.hash\n end",
"def hash\n end",
"def hash\n end",
"def hash\n end",
"def solution2\n n = 1000\n\n hsh = Hash.new([])\n\n (1..n).each do |x|\n (1..n).each do |y|\n hsh[x**3 + y**3] += [x, y]\n end\n end\n\n hsh.values\nend",
"def to_hash\n call\n @hash = @value\n @hash\n end",
"def hash\n [height, id, is_mobile, name, width].hash\n end",
"def hash=(_arg0); end",
"def hash\n @__set.to_a.hash\n end",
"def hash\n expr.hash\n end",
"def hash\n h = @e.nil? ? 0 : @e\n h = (h << 1) ^ @r.hash\n h = (h << 1) ^ @v.hash\n end",
"def output_hash; end",
"def hash\n num = @high << 64\n num |= @low\n num.hash\n end",
"def to_hash\n values\n end",
"def to_h\n { left: left.to_h, bottom: bottom.to_h, tip: tip.to_h,\n top: top.to_h, right: right.to_h }\n end",
"def hash\n [amount.hash, currency.hash].hash\n end",
"def hash\n [amount.hash, currency.hash].hash\n end"
] |
[
"0.7937157",
"0.7896213",
"0.7623533",
"0.74290603",
"0.71898675",
"0.7082582",
"0.6993556",
"0.6979675",
"0.6861034",
"0.6823482",
"0.6798832",
"0.6769189",
"0.66762924",
"0.65341127",
"0.65341127",
"0.65341127",
"0.65341127",
"0.65341127",
"0.65341127",
"0.65341127",
"0.6531921",
"0.65200645",
"0.65200645",
"0.65118504",
"0.6485655",
"0.6452496",
"0.6430662",
"0.64192474",
"0.6410179",
"0.6397047",
"0.6392922",
"0.63729113",
"0.63570195",
"0.63539714",
"0.6345561",
"0.633693",
"0.6334565",
"0.63343585",
"0.63343585",
"0.63343585",
"0.63343585",
"0.63343585",
"0.63343585",
"0.63343585",
"0.63343585",
"0.63343585",
"0.63343585",
"0.6329255",
"0.63255376",
"0.6319536",
"0.63162833",
"0.6269283",
"0.62583125",
"0.62337494",
"0.6219837",
"0.6193296",
"0.61869836",
"0.6173724",
"0.61691",
"0.61452484",
"0.6132019",
"0.612933",
"0.6127344",
"0.6102274",
"0.6097376",
"0.6096356",
"0.608963",
"0.6080367",
"0.6067237",
"0.60596937",
"0.60480934",
"0.60357684",
"0.6034545",
"0.60300577",
"0.6027701",
"0.60207224",
"0.6011554",
"0.60095775",
"0.60079235",
"0.60001683",
"0.59915495",
"0.5990929",
"0.5975625",
"0.5969631",
"0.5950792",
"0.59496915",
"0.59496915",
"0.59496915",
"0.5938412",
"0.59330183",
"0.5926881",
"0.59139514",
"0.5912635",
"0.5902675",
"0.59024835",
"0.58685005",
"0.5864532",
"0.5853584",
"0.5851152",
"0.58486146",
"0.58486146"
] |
0.0
|
-1
|
yields each row of the slice at position x,y with width w and height h
|
def slice_rows(x, y, w, h)
  (y..y + h - 1).each do |i|
    yield @data[i][x..x + w - 1]
  end
end
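A hedged example (Grid wrapper assumed as before): slicing a 2x2 window starting at x=1, y=0 out of a 3x3 grid yields each row of that window in turn:

grid = Grid.new([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
grid.slice_rows(1, 0, 2, 2) { |row| p row }
# prints [2, 3] then [5, 6]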
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def clip_to(x, y, w, h, &rendering_code); end",
"def slice_row inimage, rowbeg, rowend, outimage\n m_begin \"slice_row\"\n img = get_image(inimage)\n slice = img.excerpt(0, rowbeg, img.columns, rowend-rowbeg)\n put_image(outimage, slice)\n m_end \"slice_row\"\n end",
"def cut_white_space_edges\n x1_array = Array.new\n x2_array = Array.new\n y1_array = Array.new\n y2_array = Array.new\n \n @groups.each do |g|\n x1_array << g.x_pos\n x2_array << g.x_pos + g.width\n y1_array << g.y_pos\n y2_array << g.y_pos + g.height\n end\n \n if @vertical\n #Normal position of the image (up)\n if values_bits[Constants::V_Image_position]< 4.5 \n @height = y2_array.max + @upper_margin\n #Alternative position(down)\n else \n new_height = @height - (y1_array.min-@upper_margin)\n \n @groups.each do |g|\n g.y_pos -= (@height - new_height)\n end\n @main_image.y_pos -= (@height - new_height)\n \n @height = new_height\n end\n else\n #Normal position of the image (left)\n if values_bits[Constants::V_Image_position]< 4.5\n @width = x2_array.max + @sides_margin\n #Alternative position of the image (right)\n else \n new_width = @width - (x1_array.min-@sides_margin)\n \n @groups.each do |g|\n g.x_pos -= (@width - new_width)\n end\n @main_image.x_pos -= (@width - new_width)\n \n @width = new_width\n end\n end\n end",
"def slice *args\n b, l = get_beginning_and_length *args\n if l.nil?\n self.at b\n else\n e = b + l - 1\n r = self.to_avi\n r.frames.each_with_index do |f, i|\n unless i >= b && i <= e\n f.data = nil\n end\n end\n r.frames\n end\n end",
"def draw_horizontal(row: 0, start_col: 0, end_col: 0, color: \"R\")\n image.map!.with_index do |row_array, row_num|\n if row == row_num\n row_array.map.with_index do |pixel, col_num|\n if col_num >= start_col && col_num <= end_col\n color\n else\n pixel\n end\n end\n else\n row_array\n end\n end\n save_state\n self\n end",
"def split\n sw = (w / 2.0).round\n sh = (h / 2.0).round\n return Rect.new(x, y, sw, sh),\n Rect.new(x + sw, y, sw, sh),\n Rect.new(x, y + sh, sw, sh),\n Rect.new(x + sw, y + sh, sw, sh)\n end",
"def slice(table)\n head, body, array_h, array_b = 4.times.map { [] }\n index = 0\n first_column = 0\n second_column = 1\n\n (0...table.original_columns * table.original_rows).each do |col_index|\n row = table.rows[index]\n array_h += [row[first_column]]\n array_b += [row[second_column]]\n\n if col_index % table.original_columns == 2\n head << array_h\n body << array_b\n array_h, array_b = [], []\n end\n index += 1\n end\n [head, body]\n end",
"def with_crop(left, top, right, bottom); end",
"def cropped\n lines.map { |line| columns(line) }\n end",
"def layout_slices_in_sprite(sprite, opts)\n # The position is the position in the layout direction. In vertical mode\n # (the usual) it is the Y position.\n pos = 0\n\n # Adds some padding that will be painted with a pattern so that it is apparent that\n # CSS is wrong.\n # NOTE: though this is only in debug mode, we DO need to make sure it is on a 2px boundary.\n # This makes sure 2x works properly.\n padding = @options[:pad_sprites_for_debugging] ? 2 : 0\n\n # The position within a row. It starts at 0 even if we have padding,\n # because we always just add padding when we set the individual x/y pos.\n inset = 0\n\n # The length of the row. Length, when layout out vertically (the usual), is the height\n row_length = 0\n\n # The size is the current size of the sprite in the non-layout direction;\n # for example, in the usual, vertical mode, the size is the width.\n #\n # Usually, this is computed as a simple max of itself and the width of any\n # given slice. However, when repeating, the least common multiple is used,\n # and the smallest items are stored as well.\n size = 1\n largest_size = nil\n used_area = 0\n\n is_horizontal = sprite[:use_horizontal_layout]\n\n # Figure out slice width/heights. We cannot rely on slicing to do this for us\n # because some images may be being passed through as-is.\n sprite[:slices].each {|slice|\n # We must find a canvas either on the slice (if it was actually sliced),\n # or on the slice's file. Otherwise, we're in big shit.\n canvas = slice[:canvas] || slice[:file][:canvas]\n\n # TODO: MAKE A BETTER ERROR.\n unless canvas\n throw \"Could not sprite image \" + slice[:path] + \"; if it is not a PNG, make sure you have rmagick installed\"\n end\n\n # RMagick has a different API than ChunkyPNG; we have to detect\n # which one we are using, and use the correct API accordingly.\n if canvas.respond_to?('columns')\n slice_width = canvas.columns\n slice_height = canvas.rows\n else\n slice_width = canvas.width\n slice_height = canvas.height\n end\n\n # slice_length = is_horizontal ? slice_width : slice_height\n slice_size = is_horizontal ? slice_height : slice_width\n\n # When repeating, we must use the least common multiple so that\n # we can ensure the repeat pattern works even with multiple repeat\n # sizes. However, we should take into account how much extra we are\n # adding by tracking the largest size item as well.\n if slice[:repeat] != \"no-repeat\"\n size = size.lcm slice_size\n else\n size = [size, slice_size + padding * 2].max\n end\n\n largest_size = size if largest_size.nil?\n largest_size = [size, largest_size].max\n\n slice[:slice_width] = slice_width.to_i\n slice[:slice_height] = slice_height.to_i\n }\n\n # Sort slices from widest/tallest (dependent on is_horizontal) or is_vertical\n # NOTE: This means we are technically sorting reversed\n sprite[:slices].sort! {|a, b|\n # WHY <=> NO WORK?\n if is_horizontal\n b[:slice_height] <=> a[:slice_height]\n else\n b[:slice_width] <=> a[:slice_width]\n end\n }\n\n sprite[:slices].each do |slice|\n # We must find a canvas either on the slice (if it was actually sliced),\n # or on the slice's file. Otherwise, we're in big shit.\n canvas = slice[:canvas] || slice[:file][:canvas]\n\n slice_width = slice[:slice_width]\n slice_height = slice[:slice_height]\n\n slice_length = is_horizontal ? slice_width : slice_height\n slice_size = is_horizontal ? 
slice_height : slice_width\n\n if slice[:repeat] != \"no-repeat\" or inset + slice_size + padding * 2 > size or not @options[:optimize_sprites]\n pos += row_length\n inset = 0\n row_length = 0\n end\n\n # We have extras for manual tweaking of offsetx/y. We have to make sure there\n # is padding for this (on either side)\n #\n # We have to add room for the minimum offset by adding to the end, and add\n # room for the max by adding to the front. We only care about it in our\n # layout direction. Otherwise, the slices are flush to the edge, so it won't\n # matter.\n if slice[:min_offset_x] < 0 and is_horizontal\n slice_length -= slice[:min_offset_x]\n elsif slice[:min_offset_y] < 0 and not is_horizontal\n slice_length -= slice[:min_offset_y]\n end\n\n if slice[:max_offset_x] > 0 and is_horizontal\n pos += slice[:max_offset_x]\n elsif slice[:max_offset_y] > 0 and not is_horizontal\n pos += slice[:max_offset_y]\n end\n\n slice[:sprite_slice_x] = (is_horizontal ? pos : inset)\n slice[:sprite_slice_y] = (is_horizontal ? inset : pos)\n\n # add padding for x, only if it a) doesn't repeat or b) repeats vertically because it has horizontal layout\n if slice[:repeat] == \"no-repeat\" or slice[:repeat] == \"repeat-y\"\n slice[:sprite_slice_x] += padding\n end\n\n if slice[:repeat] == \"no-repeat\" or slice[:repeat] == \"repeat-x\"\n slice[:sprite_slice_y] += padding\n end\n\n slice[:sprite_slice_width] = slice_width\n slice[:sprite_slice_height] = slice_height\n\n inset += slice_size + padding * 2\n\n # We pad the row length ONLY if it is a repeat-x, repeat-y, or no-repeat image.\n # If it is \"repeat\", we do not pad it, because it should be processed raw.\n row_length = [slice_length + (slice[:repeat] != \"repeat\" ? padding * 2 : 0), row_length].max\n\n # In 2X, make sure we are aligned on a 2px grid.\n # We correct this AFTER positioning because we always position on an even grid anyway;\n # we just may leave that even grid if we have an odd-sized image. We do this after positioning\n # so that the next loop knows if there is space.\n if opts[:x2]\n row_length = (row_length.to_f / 2).ceil * 2\n inset = (inset.to_f / 2).ceil * 2\n end\n\n used_area += slice_size * slice_length\n end\n pos += row_length\n\n total_area = pos * largest_size\n\n if total_area > 300000 # an arbritrarily large sprite size at which we could begin to worry about the efficiency\n efficiency = (used_area / total_area.to_f) * 100\n sprite[:efficiency] = efficiency\n end\n\n sprite[:width] = is_horizontal ? pos : size\n sprite[:height] = is_horizontal ? size : pos\n\n end",
"def draw_horizontal_segment value, column_from, column_to, row\n raise ArgumentError.new(\"There is no image\") if self.data == []\n raise ArgumentError.new(\"column or row out of bound\") if 1 > column_from || column_from > self.data.column_count || 1 > column_to || column_to > self.data.column_count || 1 > row || row > self.data.row_count\n (column_from..column_to).each {|column| self.data.send(:[]=, row-1, column-1, value)}\n end",
"def remove_filled_rows\n (0...@board_height).each do |row|\n index_range = row * @board_width...(row + 1) * @board_width\n if @board[index_range].reduce(:+) == @board_width\n @board[index_range] = Array.new(10, 0)\n end\n end\n end",
"def crop_rows!\n if crop\n local_copy.in_place :tail, \"+#{crop.first}\"\n local_copy.in_place :head, (crop.last - crop.first + 1)\n end\n end",
"def grid\n rows = []\n (1..self.height).to_a.each do |h|\n rows << row(h)\n end\n rows.reverse\n end",
"def compose_slice_on_canvas(target, slice, x, y, width, height)\n source_canvas = slice[:canvas] || slice[:file][:canvas]\n source_width = slice[:sprite_slice_width]\n source_height = slice[:sprite_slice_height]\n\n top = 0\n left = 0\n\n # Repeat the pattern to fill the width/height.\n while top < height do\n left = 0\n\n while left < width do\n if target.respond_to?(:replace!)\n target.replace!(source_canvas, left + x, top + y)\n else\n target.composite!(source_canvas, left + x, top + y)\n end\n\n left += source_width\n end\n\n top += source_height\n end\n\n end",
"def crop!(x,y,width,height)\n @x, @y, @w, @h = x, y, width, height\n self\n end",
"def reset(cols, rows)\n\n\t@cols = cols.to_i.abs\n\t@rows = rows.to_i.abs\n\t@created = true\n\n\tx = 0\n\tuntil x > @rows\n\t y = 0\n\t until y > @cols\n\t\tset_pixel(\"#{y}-#{x}\", COLOR_WHITE)\n\t\ty = y+1\n\t end\n\t x = x+1\n\tend\n\n end",
"def crop(x, y, w, h)\n clone.crop!(x, y, w, h)\n end",
"def resize(w, h)\r\n @x2 = @x1 + w\r\n @y2 = @y1 + h\r\n end",
"def collapse(length_pixel = 20)\n new_pixels_available = @length_pixel - length_pixel\n @length_pixel = length_pixel\n\n # Distribute the pixels that became available of the other slices\n self.class.sketch.slices.reject{|s| s == self}.each do |slice|\n slice.length_pixel += (new_pixels_available.to_f/(self.class.sketch.slices.length - 1)).round\n end\n self.class.sketch.slices.each_with_index do |slice, i|\n if i == 0\n slice.start_pixel = 1\n else\n slice.start_pixel = self.class.sketch.slices[i-1].stop_pixel + 1\n end\n slice.stop_pixel = slice.start_pixel + slice.length_pixel - 1\n slice.range_pixel = Range.new(slice.start_pixel, slice.stop_pixel)\n slice.resolution = slice.length_pixel.to_f/slice.length_bp\n slice.format_resolution\n slice.fetch_sequence\n end\n\n self.class.sketch.buffer_images[:zoomed] = self.class.sketch.draw_zoomed_buffer\n self.class.sketch.buffer_images[:information_panel] = self.class.sketch.draw_information_panel\n end",
"def row(shape, y)\n n, col = 16, %w(firebrick darkorange white saddlebrown black).shuffle\n (1..n - 1).each do |x|\n col.rotate! # cycle through the colors\n (options = shape.options.clone)[:fill] = col[0]\n (s = shape.clone).options = options\n canvas << s.move(@width / n * x, y)\n end\nend",
"def subsample(*args)\n rx, ry, rw, rh = *Rect.extract(args.singularize)\n result = factory2.new(rw, rh, default: src.default)\n result.ysize.times do |y|\n dy = y + ry\n result.xsize.times do |x|\n result[x, y] = src[x + rx, dy]\n end\n end\n result\n end",
"def magick_crop_rect\n [x1, y1, x2-x1, y2-y1]\n end",
"def draw_vertical_segment value, column, row_from, row_to\n raise ArgumentError.new(\"There is no image\") if self.data == []\n raise ArgumentError.new(\"column or row out of bound\") if 1 > column || column > self.data.column_count || 1 > row_from || row_from > self.data.row_count || 1 > row_to || row_to > self.data.row_count\n (row_from..row_to).each {|row| self.data.send(:[]=, row-1, column-1, value)}\n end",
"def split_rows\n\t\t@board.values.each_slice(15).to_a\n\tend",
"def hflip\n g = Grid.new\n self.each do |point,v|\n g[Point.new( @width - point.x - 1, point.y )] = v \n end\n g\n end",
"def crop!(x, y, w, h)\n prefull, orig = model.actual_dimensions(:prefull), model.actual_dimensions\n ratio = [orig[0].to_f / prefull[0], orig[1].to_f / prefull[1]].min\n x, y, w, h = [x, y, w, h].map { |n| (n * ratio).floor }\n manipulate! do |img|\n img.crop \"#{w}x#{h}+#{x}+#{y}\"\n img = yield(img) if block_given?\n img\n end\n end",
"def remove_header_and_footer_rows(results_rows)\n results_rows[2..-4]\n end",
"def get_grids(x, y, width, height)\n x = (x*10)-5\n y = (y*10)-5\n end_x= x+(width*10)-1\n end_y= y+(height*10) -1\n return [[y, x], [end_y, end_x]]\n end",
"def get_grids(x, y, width, height)\n x = (x*10)-5\n y = (y*10)-5\n end_x= x+(width*10)-1\n end_y= y+(height*10) -1\n return [[y, x], [end_y, end_x]]\n end",
"def slice!(&block)\n if ! @overwrite && File.directory?(@destination) && ! Dir[\"@{@destination}/*\"].empty?\n raise \"Output directory #{@destination} already exists!\"\n @overwrite ? Rails.logger.warn(msg) : raise(msg)\n end\n\n image = @tile_source.dup\n orig_width, orig_height = image.columns, image.rows\n\n # iterate over all levels (= zoom stages)\n max_level(orig_width, orig_height).downto(0) do |level|\n width, height = image.columns, image.rows\n\n current_level_dir = File.join(@destination, level.to_s)\n FileUtils.mkdir_p(current_level_dir)\n\n if block_given?\n yield current_level_dir\n end\n\n # iterate over columns\n x, col_count = 0, 0\n while x < width\n # iterate over rows\n y, row_count = 0, 0\n while y < height\n dest_path = File.join(current_level_dir, \"#{col_count}_#{row_count}.#{@tile_format}\")\n tile_width, tile_height = tile_dimensions(x, y, @tile_size, @tile_overlap)\n\n save_cropped_image(image, dest_path, x, y, tile_width, tile_height, @tile_quality)\n\n y += (tile_height - (2 * @tile_overlap))\n row_count += 1\n end\n x += (tile_width - (2 * @tile_overlap))\n col_count += 1\n end\n\n image.resize!(0.5)\n end\n\n image.destroy!\n end",
"def show\n @pixels.each_slice(@height) { |row| puts row.join(\",\") + \"\\n\" }\n end",
"def edge_pixels(type)\r\n type == :end ? x = 31 : x = 0\r\n [].tap do |pixels|\r\n (0..@slice.dimension.height-1).to_a.each do |p| \r\n pixels << Pixel.new(ChunkyPNG::Color.r(@slice[x,p]), ChunkyPNG::Color.g(@slice[x,p]), \r\n ChunkyPNG::Color.b(@slice[x,p]))\r\n end\r\n end\r\n end",
"def flip_horizontally!\n for y in 0..((height - 1) >> 1) do\n other_y = height - (y + 1)\n other_row = row(other_y)\n replace_row!(other_y, row(y))\n replace_row!(y, other_row)\n end\n self\n end",
"def vertically_striped(unstriped)\n output = unstriped.dup\n row_width = output[0].length\n\n # Zero out top and bottom 2 rows\n output[0] = [0] * row_width\n output[1] = [0] * row_width\n output[-1] = [0] * row_width\n output[-2] = [0] * row_width\n output\n end",
"def horizontalLines\n (0...@height).inject([]) { |arr, row| arr << @modified.row(row) }\n end",
"def rect(a, b, screen)\n screen[0...b].each do |row|\n row[0...a] = (\"#\"*a).split(\"\")\n end\n\n screen\nend",
"def prepend_width\n diff = @hull_size[1]\n @hull.each.with_index do |row, i|\n @hull[i] = Array.new(diff) + row\n end\n @hull_size[1] += diff\n @col += diff\n end",
"def each_slice(slice_width, &block)\n max = size\n p = 0\n while p < max\n yield self[p...p+slice_width]\n p += slice_width\n end\n end",
"def draw_grid\n print_cap\n (0...@vertical).each do |vert_index|\n (0..@v_size).each do |block_height|\n (0...@horizontal).each do |horizontal_index|\n if block_height == (@v_size/2)\n print \"|\" + \" \"*(@h_size/2)\n \"#{print @contents[horizontal_index][vert_index]}|\"\n print \" \"*(@h_size/2-1)\n else\n print \"|\"; print \" \"*@h_size\n end\n end\n print \"|\\n\" \n \n end\n print_cap\n end\n end",
"def prepend_height\n diff = @hull_size[0]\n new_hull = Array.new(diff) { Array.new(@hull_size[1]) }\n new_hull += @hull\n @hull = new_hull\n @hull_size[0] += diff\n @row += diff\n end",
"def draw_image_stretch(image, x1, y1, x2, y2, x3, y3, x4, y4, color)\n end",
"def prepare_strip\r\n @strip = []\r\n for i in 0...@totalFrames\r\n bitmap = Bitmap.new(@width,@height)\r\n bitmap.stretch_blt(Rect.new(0,0,@width,@height),@bitmaps[i],Rect.new(0,0,@width,@height))\r\n @strip.push(bitmap)\r\n end\r\n end",
"def crop!(x, y, w, h)\n ptr = self.class.create_image_ptr(w, h, alpha_blending?)\n ::GD2::GD2FFI.send(:gdImageCopy, ptr, image_ptr, 0, 0, x.to_i, y.to_i, w.to_i, h.to_i)\n init_with_image(ptr)\n end",
"def cut\n rmagick_img.crop_resized!(@x, @y, gravity)\n end",
"def endRow; @row + @height - 1; end",
"def del\r\n @@tiles.delete_at(@tileno)\r\n @x1,@y1,@x2,@y2,@inclusive,@id,@tileno=nil\r\n end",
"def clean\n (1..@rows).each do |x|\n (1..@cols).each do |y|\n self.cleanCell(x, y)\n end\n end\n end",
"def clear\n grid.map do |row|\n row.map! do |pixel|\n pixel = DEFAULT_FILL\n end\n end\n end",
"def border_slicer(board)\n board.pop()\n board.shift()\n \n row = 0\n while row < self.height do\n board[row].pop()\n board[row].shift()\n row += 1\n end\n \n return board\n end",
"def skip_sample_layout\n lyt = []\n make_skip_start_array(@group_size).each do |j|\n cols = Array.new(@columns) { |c| c }\n cols.each { |c| @group_size.times { |i| lyt << [i*2 + j, c] } }\n end\n lyt\n end",
"def remove_row_selection_interval ix0, ix1\n @anchor_selection_index = ix0\n @lead_selection_index = ix1\n arr = @selected_indices.dup # to un highlight\n @selected_indices.delete_if {|x| x >= ix0 and x <= ix1 }\n arr.each {|i| @obj.fire_row_changed(i) }\n lse = ListSelectionEvent.new(ix0, ix1, @obj, :DELETE)\n @obj.fire_handler :LIST_SELECTION_EVENT, lse\n end",
"def sections\n (1..section_size).inject([]) do |array, row_offset|\n (1..section_size).inject(array) do |array, column_offset|\n array << section_coordinates(row_offset - 1, column_offset - 1)\n end\n end\n end",
"def rows\n cells_ordered = cells.order(:y => :desc)\n (0..height).map { |row_id| cells.where(y: row_id) }\n end",
"def each\n (0...height).each do |y|\n row = (0...width).each_with_object(Array.new(width)) do |x, r|\n r[x] = get_pixel(x, y)\n end\n yield row\n end\n end",
"def blur(row_index,col_index)\n update_cell(row_index+1, col_index,1)\n update_cell(row_index,col_index+1, 1)\n update_cell(row_index,col_index-1, 1)\n update_cell(row_index-1, col_index,1)\n end",
"def slice_images(opts) \n slices = @slices\n output = \"\"\n\n slices.each do |name, slice|\n # If we modify the canvas, we'll place the modified canvas here.\n # Otherwise, consumers will use slice[:file] [:canvas] or [:contents]\n # to get the original data as needed.\n slice[:canvas] = nil\n \n # In any case, if there is one, we need to get the original file and canvas;\n # this process also tells us if the slice is 2x, etc.\n canvas = canvas_for slice, opts\n\n # Check if a canvas is required\n must_slice = (slice[:left] != 0 or slice[:right] != 0 or slice[:top] != 0 or slice[:bottom] != 0)\n if must_slice or slice[:x2]\n if canvas.nil?\n throw \"Chance could not load file '#{slice[:path]}'.\" +\n \"If it is not a PNG, RMagick is required to slice or use @2x mode.\"\n end\n\n f = slice[:proportion]\n\n # RMagick or ChunkyPNG? 'columns' is RMagick\n if canvas.respond_to?('columns')\n canvas_width = canvas.columns\n canvas_height = canvas.rows\n else\n canvas_width = canvas.width\n canvas_height = canvas.height\n end\n\n if must_slice\n rect = nil\n\n # The math that uses canvas_width and canvas_height needs to return numbers that,\n # when multiplied by f, are valid. So, divide by f first.\n rect = slice_rect(slice, canvas_width / f, canvas_height / f)\n\n if not rect.nil?\n \n ## CHECK CACHE ##\n file = file_for(slice, opts)\n cached_canvas = Chance::Instance::Slicing.get_canvas_from_cache(file, rect)\n if cached_canvas\n slice[:canvas] = cached_canvas\n else\n slice[:canvas] = canvas.crop(rect[:left] * f, rect[:top] * f, rect[:width] * f, rect[:height] * f)\n Chance::Instance::Slicing.add_canvas_to_cache(slice[:canvas], file, rect)\n end\n \n canvas_width = rect[:height] * f\n canvas_height = rect[:width] * f\n end\n end\n\n slice[:target_width] = canvas_width / f\n slice[:target_height] = canvas_height / f\n end\n \n \n\n end\n \n end",
"def delrows(row1,row2)\n\t\t@text[row1..row2] = []\n\tend",
"def generate!\n total_width = self.column_width + self.gutter_width\n height = @baseline_height\n RVG::dpi = 100\n\n width_in_inches = (total_width.to_f/RVG::dpi).in\n height_in_inches = (height.to_f/RVG::dpi).in\n rvg = RVG.new(width_in_inches, height_in_inches).viewbox(0, 0, total_width, height) do |canvas|\n canvas.background_fill = \"white\"\n end\n\n white = ChunkyPNG::Color.from_hex(\"ffffff\")\n background = ChunkyPNG::Color.from_hex(\"e8effb\")\n line = ChunkyPNG::Color.from_hex(\"e9e9e9\")\n\n png = ChunkyPNG::Image.new(total_width, height, white)\n png.rect(0, 0, column_width - 1, height, background, background)\n png.rect(0, height - 1, total_width, height - 1, line, line)\n\n FileUtils.mkdir(self.output_path) unless File.exists?(self.output_path)\n png.save(File.join(self.output_path, \"grid.png\"))\n end",
"def wrap\n max_h, max_w = w.h, w.w\n\n self.x = max_w if x < 0\n self.y = max_h if y < 0\n\n self.x = 0 if x > max_w\n self.y = 0 if y > max_h\n end",
"def modified_sample_layout\n lyt = []\n make_modified_start_array(@group_size).each do |j|\n cols = Array.new(@columns) { |c| c }\n cols.each { |c| @group_size.times { |i| lyt << [i + j, c] } }\n end\n lyt\n end",
"def build_grid size\n self.tiles.clear\n size.to_i.times do |row|\n size.to_i.times do |column|\n self.tiles.build({\n x: column,\n y: row\n })\n end\n end\n end",
"def build_spans on_range\n # TODO: rewrite in terms of each_row\n @spans = Array.new @size[1]\n\n @size[1].times do |y|\n spans = []\n left = (@size[1]-y-1)*@size[0]\n start = nil\n\n @size[0].times do |x|\n d = on_range.include?(@pixels[left+x])\n\n if !start && d\n start = x\n elsif start && !d\n spans << [start, x]\n start = nil\n end\n end\n\n spans << [start, @size[0]] if start\n @spans[y] = spans\n end\n end",
"def at(h, w)\n raise \"width outside of range\" if w < 0 || w >= width\n raise \"height outside of range\" if h < 0 || h >= height\n\n case @orientation\n when 'N'\n w = width - 1 - w\n h = height - 1 - h\n when 'E'\n w, h = height - 1 - h, w\n when 'W'\n w, h = h, width - 1 - w\n end\n\n return @grid[h][w]\n end",
"def rect(col, row)\n row.times do |row_index|\n col.times do |col_index|\n @data[row_index][col_index] = true\n end\n end\n end",
"def blur(n)\n\t\tn.times do\n\t\t\ttransform(one_index)\n\t\tend\n#\t\tupdate_cell(row_index+1,col_index,1)\n#\t\tupdate_cell(row_index,col_index+1,1)\n#\t\tupdate_cell(row_index,col_index-1,1)\n#\t\tupdate_cell(row_index-1,col_index,1)\n\tend",
"def render_transparent_background(columns, rows)\n Magick::Image.new(columns, rows) do |img|\n img.background_color = 'transparent'\n end\n end",
"def each_row_slice(slice_size = 100, batch = ETL::Batch.new)\n slice = []\n each_row(batch) do |row_in|\n slice << row_in\n if slice.length >= slice_size\n yield slice\n slice = []\n end\n end\n yield slice if slice.length > 0\n end",
"def slice!(p0, *rest) end",
"def slice!(p0, *rest) end",
"def cal_slider_pos_arr(item_size, w, h)\n r = []\n page_size = (item_size.to_f/(h*w).to_f).ceil\n (1..page_size).each do |page|\n offset = (page-1) * (h*w)\n (1..w).each do |i|\n (1..h).each do |j|\n r<<(offset+i+(j-1)*w-1)\n end\n end\n end\n r\n end",
"def rect(wide, tall)\n tall.times do |row|\n wide.times do |col|\n @display[row][col] = '#'\n end\n end\n self\n end",
"def space\n canvas = (1..@height).to_a.collect {|_| [:empty] * @width}\n @crates.each do |sc|\n range_y = (sc.min_y..sc.max_y)\n range_x = (sc.min_x..sc.max_x)\n sc.fill_coordinates.each do |x, y|\n canvas[y][x] = :filled\n end\n end\n canvas\n end",
"def grid(path, rows=10, cols=10)\n push\n rows.times do |row|\n tx = (row+1) * (self.height / rows) - (self.height / rows) / 2\n cols.times do |col|\n ty = (col+1) * (self.width / cols) - (self.width / cols) / 2\n push\n translate(tx, ty)\n draw(path)\n pop\n end\n end\n pop\n end",
"def []=(pos, figure)\n x, y = pos\n @rows[x][y] = figure\n end",
"def trim!(border = pixels.first)\n x1 = [*0...width].index { |c| column(c).uniq != [border] }\n x2 = [*0...width].rindex { |c| column(c).uniq != [border] }\n y1 = [*0...height].index { |r| row(r).uniq != [border] }\n y2 = [*0...height].rindex { |r| row(r).uniq != [border] }\n\n crop! x1, y1, x2 - x1 + 1, y2 - y1 + 1\n end",
"def sub_horizontal_line(start, length)\n offset = byte_offset start\n data[offset .. (offset + (length*pixel_byte_size)-1)]\n end",
"def cut(cols)\n # result = GSL::Matrix.alloc(size1, 1)\n # cols.each do |col| # problem: 1. matix view to matrix 2. initial result in horzcatting - cannot init a matrix with zero columns\n # result = result.horzcat(self.column(col).to_m(size2, 1)) \n # end\n # Matrix.from_gsl(result)\n cols = cols.dup\n first_col = cols.shift\n result = clone.submatrix(nil, first_col..first_col)\n cols.each do |col|\n result = result.horzcat submatrix(nil, col..col)\n end\n ::Matrix.from_gsl(result)\n end",
"def remove(row, column)\n # We have to do four things:\n #\n # 1. Make sure \"row\" is in bounds\n # 2. Make sure \"column\" is in bounds\n # 3. Make sure there's a piece at (row, column) to remove\n # 4. If all the above check out, remove the appropriate piece\n end",
"def compact_rows!\n @data.delete_if { |ar| ar.all? { |el| el.to_s.empty? } || ar.empty? }\n calc_dimensions\n end",
"def shifted_grid_at(t)\n grid = grid_at(t)\n shifted_grid = [[]]\n\n (0...@height).each do |i|\n shifted_grid[i] = grid.map {|n| n[i] }.flatten\n end\n shifted_grid\n end",
"def row(y)\n src.xsize.times.map { |x| src[x, y] }\n end",
"def initialize(width, height)\n @width = width\n @height = height\n @rows = [0] * height\n end",
"def trim_data\n @buffer.keys.each do |k| \n diff = @buffer[k].count - (1.0 * @scene_width / @x_mul).ceil.to_i\n @buffer[k] = @buffer[k].drop(diff) if diff > 0\n end\n end",
"def slice(*args)\n dup.slice!(*args)\n end",
"def row; reshape(1, length); end",
"def remove_ranges\n $spriteset.show_ranges(false)\n end",
"def de_populate_rows\n @rows.each_with_index do |row, y|\n row.each_index do |x|\n @rows[y][x] = nil\n end\n end\n end",
"def build_row(row_id)\n (0..width).each do |col_id|\n cells.create(y: row_id, x: col_id, random_alive: true)\n end\n end",
"def extent; @bounds.diagonal; end",
"def grid\n \t\t\tfinal, y = Array.new, 0\n \t\t\t@@axis.fetch(:y).times do\n \t\t\t\tfinal[y], x = Array.new, 0\n \t\t\t\t@@axis.fetch(:x).times do\n \t\t\t\t\tfinal[y][x] = init_coord(x, y)\n \t\t\t\t\tx += 1\n \t\t\t\tend\n \t\t\t\ty += 1\n\t\t\tend\n\t\t\tfinal.reverse\n\t\tend",
"def clear_full_rows\n\n\t\t# Clear all rows that are full\n\t\t@collision_map.each_with_index do |row, index|\n\t\t\t@collision_map.delete_at(index) if is_row_full?(row)\n\t\tend\n\n\t\tcleared_rows = @size_y - @collision_map.size\n\n\t\t# Create empty rows until the field is back to its original size\n\t\twhile @collision_map.size < @size_y\n\t\t\t@collision_map.unshift( Array.new(@size_x) { |i| { value: false, color: nil }})\n\t\tend\t\t\n\n\t\treturn cleared_rows\n\tend",
"def initialize(x, y, width, height)\n super\n @data = []\n self.index = 0\n activate\n end",
"def slice! *args\n b, l = get_beginning_and_length *args\n head, sliced, tail = ()\n sliced = l.nil? ? self.slice(b) : self.slice(b, l)\n head = self.slice(0, b)\n l = 1 if l.nil?\n tail = self.slice((b + l)..-1)\n self.clear\n self.concat head + tail\n sliced\n end",
"def cut_containing_sprites(rect)\n left = rect[0]\n bottom = rect[1]\n right = left + rect[2]\n top = bottom + rect[3]\n\n [\n # 1\n {\n x: 0,\n source_x: 0,\n y: 0,\n source_y: 0,\n w: left,\n source_w: left,\n h: top,\n source_h: top,\n path: @name\n },\n # 2\n {\n x: 0,\n source_x: 0,\n y: top,\n source_y: top,\n w: right,\n source_w: right,\n h: @height - top,\n source_h: @height - top,\n path: @name\n },\n # 3\n {\n x: right,\n source_x: right,\n y: bottom,\n source_y: bottom,\n w: @width - right,\n source_w: @width - right,\n h: @height - bottom,\n source_h: @height - bottom,\n path: @name\n },\n # 4\n {\n x: left,\n source_x: left,\n y: 0,\n source_y: 0,\n w: @width - left,\n source_w: @width - left,\n h: bottom,\n source_h: bottom,\n path: @name\n }\n ]\n end",
"def slice_rect(slice, image_width, image_height)\n left = slice[:left]\n top = slice[:top]\n bottom = slice[:bottom]\n right = slice[:right]\n width = slice[:width]\n height = slice[:height]\n\n rect = {}\n\n if not left.nil?\n rect[:left] = left\n\n # in this case, it must be left+width or left+right, or left-to-end\n if not right.nil?\n rect[:width] = image_width - right - left\n elsif not width.nil?\n rect[:width] = width\n else\n # then this is left-to-end\n rect[:width] = image_width - left\n end\n elsif not right.nil?\n # in this case it must be right+width or right-to-end\n if not width.nil?\n rect[:left] = image_width - width - right\n rect[:width] = width\n else\n rect[:left] = image_width - right\n rect[:width] = right\n end\n else\n rect[:left] = 0\n rect[:width] = image_width\n end\n\n if not top.nil?\n rect[:top] = top\n\n # in this case, it must be top+height or top+bottom or top-to-bottom\n if not bottom.nil?\n rect[:height] = image_height - bottom - top\n elsif not height.nil?\n rect[:height] = height\n else\n rect[:height] = image_height - top\n end\n elsif not bottom.nil?\n # in this case it must be bottom+height\n if not height.nil?\n rect[:top] = image_height - height - bottom\n rect[:height] = height\n else\n rect[:top] = image_height - bottom\n rect[:height] = bottom\n end\n else\n rect[:top] = 0\n rect[:height] = image_height\n end\n\n if rect[:left] == 0 and rect[:top] == 0 and rect[:width] == image_width and rect[:height] == image_height\n return nil\n end\n\n return rect\n end",
"def slice_column inimage, colbeg, colend, outimage\n m_begin \"slice_column\"\n img = get_image(inimage)\n slice = img.excerpt(colbeg, 0, colend-colbeg, img.rows)\n put_image(outimage, slice)\n m_end \"slice_column\"\n end",
"def reset\n @output = Array.new(Vedeu.height + 1) do |y|\n Array.new(Vedeu.width + 1) do |x|\n Vedeu::Models::Cell.new(position: [y, x])\n end\n end\n end",
"def place_pawn_first_rows\n 8.times { |col| self[[1, col]] = Pawn.new(self, :black, [1, col]) }\n 8.times { |col| self[[6, col]] = Pawn.new(self, :white, [6, col]) }\n end",
"def draw\n @width = @image.columns\n @height = @image.rows\n gc = Magick::Draw.new\n gc.stroke(@color)\n \n #draw horizontal lines\n (0...@height).step(cell_height) do |i|\n gc.line(0,i,@width,i)\n i += cell_height\n end\n \n #draw vertical lines\n (0...@width).step(cell_width) do |i|\n gc.line(i,0,i,@height)\n \n end\n gc.draw(@image)\n end"
] |
[
"0.60578823",
"0.60256624",
"0.6024005",
"0.6016192",
"0.580435",
"0.577119",
"0.5696392",
"0.56555814",
"0.5630477",
"0.5623385",
"0.5590841",
"0.55618393",
"0.5539382",
"0.55052227",
"0.5497365",
"0.54956174",
"0.5484765",
"0.5481666",
"0.5448207",
"0.54435617",
"0.5434807",
"0.54301304",
"0.53435993",
"0.53416336",
"0.5325309",
"0.53220785",
"0.53083515",
"0.5306811",
"0.52823734",
"0.52823734",
"0.5263692",
"0.5257688",
"0.5246097",
"0.5237591",
"0.5221317",
"0.52208257",
"0.52049786",
"0.5198964",
"0.5188017",
"0.51837295",
"0.51819813",
"0.51749367",
"0.5171165",
"0.516567",
"0.51587903",
"0.5146125",
"0.5140782",
"0.5136586",
"0.5125836",
"0.5117913",
"0.50925726",
"0.50907916",
"0.50855577",
"0.5084331",
"0.5072627",
"0.50575197",
"0.5051088",
"0.5049516",
"0.504615",
"0.5043224",
"0.50432026",
"0.5035163",
"0.50301754",
"0.50282013",
"0.50254375",
"0.5021523",
"0.5013827",
"0.501303",
"0.5011171",
"0.5011171",
"0.49980065",
"0.49750873",
"0.4972229",
"0.49589205",
"0.49581295",
"0.49537486",
"0.49421436",
"0.49365258",
"0.49341005",
"0.49323225",
"0.49314144",
"0.492866",
"0.49248573",
"0.49194065",
"0.49180204",
"0.49166796",
"0.49150884",
"0.49135584",
"0.49124733",
"0.49108064",
"0.4903289",
"0.49027932",
"0.49021247",
"0.49019706",
"0.48967156",
"0.48894385",
"0.4887773",
"0.4885394",
"0.487967",
"0.48755115"
] |
0.77835596
|
0
|
simplified version of fill that only takes one argument
|
def fill(x)
@data.each do |a|
a.fill x
end
end
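A hedged example (Grid wrapper assumed as before): fill overwrites every cell in place by delegating to Array#fill row by row:

grid = Grid.new([[1, 2], [3, 4]])
grid.fill(0)
# @data is now [[0, 0], [0, 0]]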
|
{
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
}
|
[
"def fill(*args, &block)\n ary = self.to_a\n ary.fill *args, &block\n self.replace ary\n end",
"def fill(value)\n map! { |_| value }\n self\n end",
"def fill\n \t@fill\n end",
"def new_fill(value)\n self.class.new(*shape).fill(value)\n end",
"def fill\n return @fill\n end",
"def fill(n = T.unsafe(nil)); end",
"def fill!(values)\n values = values.to_a if values.respond_to? :to_a\n if nrows == 1 then\n raise \"values must have #{ncols} elements\" unless values.length == ncols\n elsif ncols == 1 then\n raise \"values must have #{nrows} elements\" unless values.length == nrows\n else\n values = values.flatten(1) if values.length > 0 and values[0].is_a? Array\n raise \"values must have #{nrows}x#{ncols} (= #{nrows*ncols}) elements\" unless values.length == nrows*ncols\n end\n nrows.times do |rowid|\n row = @rows[@rownames.keys[rowid]]\n colnames.each.with_index do |colname, j| row[colname] = values[rowid*ncols + j] end\n end\n self\n end",
"def fill=(value)\n @fill = value\n end",
"def fill_with(default=nil, keys=nil)\n a = self.clone\n a.fill_with!(default, keys)\n a\n end",
"def parse_fill(_)\n []\n end",
"def update_fill\n end",
"def fill_2d(obj = nil)\r\n # find longest\r\n longest = self.max { |a,b| a.length <=> b.length }.size\r\n self.each do |row|\r\n row[longest-1] = obj if row.size < longest # fill with nulls\r\n end\r\n return self\r\n end",
"def fill d, w\n if d == 0\n return {}\n end\n h = {}\n w.times do |i|\n h[i] = fill(d - 1, w)\n end\n h\nend",
"def fill(slots, &block)\n @just_started = true\n to_lazy_pairs(slots.lazy).flat_map(&method(:try_fill_pair)).each(&block)\n end",
"def pad!(array, min_size, value = nil) #destructive\n\n # Your code here\narray.fill(value, array.length..(min_size-1))\nend",
"def fill(pid, seg, seg_id)\n @filled = true; @pid = pid; @seg = seg; @seg_id = seg_id;\n self\n end",
"def fillarc(*)\n super\n end",
"def pad!(array, min_size, value = nil) #destructive\n array.fill(value, array.length...min_size)\nend",
"def fill n, value\n pz = @pz.dup\n pz[n] = value\n Sudoku.new pz\n end",
"def fill!(value_)\n raise TableLockedError if @parent\n @vals.fill(value_)\n self\n end",
"def fill_spec\n zeros = count_zeros # how many to fill\n begin\n last_zeros = zeros\n (0..8).each{|i|\n (0..8).each{|j|\n next if @board[i][j] != 0 # skip filled spaces\n choices = find_choices(i, j)\n raise \"Illegal Board #{i+1} #{j+1}\" if choices.length == 0\n @board[i][j] = choices[0] if choices.length == 1\n }\n }\n zeros = count_zeros\n # if filled some, possibly others are now fully specified\n end while ((zeros > 0) && (last_zeros > zeros))\n end",
"def fill( len, row, memo )\n return memo[row] if memo[row]\n\n total = 0\n if len <= row\n total += row - len + 1\n (row - len).downto( len ) do |sub|\n total += fill( len, sub, memo )\n end\n end\n\n memo[row] = total\n end",
"def fill\n self.each {|image| image.fill}\n end",
"def fill(params)\n p params\n\n (instance_variables - [:@id]).each { |prop|\n at_property = \"#{prop}\"\n ary_property = prop.to_s.delete('@')\n\n # p prop, prop.to_s.delete('@'), at_property, params[ary_property]\n\n if (params[ary_property])\n data = clean_str(params[ary_property])\n if (ary_property == 'price')\n instance_variable_set(at_property, data.to_f)\n else\n instance_variable_set(at_property, data)\n end\n end\n }\n\n self\n end",
"def user_fill (element, data)\r\n begin\r\n key_processor(element)\r\n ****_fill(@selector, @locator, data)\r\n rescue Exception => e\r\n raise e.message\r\n raise e.backtrace.inspect\r\n end\r\n end",
"def populate(blanks, fills)\n blanks.each_with_index do |day, index|\n if fills[day.strftime(\"%b %d %Y\")]\n blanks[index] = \"[ #{day.to_i * 1000}, #{fills[day.strftime(\"%b %d %Y\")]}]\"\n @ruby_data << [day.to_i * 1000, fills[day.strftime(\"%b %d %Y\")]]\n else\n blanks[index] = nil\n end\n end\n blanks.compact.join(', ')\n end",
"def fill_rect(*args)\n raise \"not implemented\"\n end",
"def pad!(array, min_size, value = nil)\nreturn array.fill(value, array.length..(min_size-1))\nend",
"def pad_refact!(array, min_size, value = nil)\n array.fill(value, array.length..min_size)\nend",
"def try_fill_pair(pair)\n result = []\n result << pair.first if @just_started\n @just_started = false\n\n result << fill_pair(*pair) if gap?(*pair)\n result << pair.last\n result\n end",
"def block_fill(x, y)\n marked = Array.new(@image.height) { Array.new(@image.width) }\n find_block_info_recursive(marked, 0, @dp, @cc, x, y, x, y)\n end",
"def what_are_filled_in( a=5 , b=4 , c=3 , d=2 , e=1 )\n \"#{a} #{b} #{c} #{d} #{e}\"\nend",
"def pad!(array, min_size, value = nil) #destructive\n (min_size <= array.length || min_size == 0)? array:array.fill(value, array.length..min_size-1)\nend",
"def what_are_filled_in(a=5, b=4, c=3, d=2, e=1)\n \"#{a} #{b} #{c} #{d} #{e}\"\nend",
"def f4(x:, x: nil); end",
"def fill_imageblock(*args)\n @p.fill_imageblock(self, *args)\n end",
"def pad(array, min_size, value = nil) #non-destructive\n # Your code here\n pad!(array.dup, min_size, value)\nend",
"def set_blank_values!(locale = base_locale, &fill_with)\n blank_keys = find_blank_keys locale\n list = blank_keys.zip fill_with.call(blank_keys)\n data[locale] = data[locale].deep_merge(list_to_tree(list))\n end",
"def pad!(array, min_size, value = nil)\n array.fill(value, array.length, min_size-array.length)\nend",
"def flood_fill(x, y, color)\n x_i, y_i = pixel_to_index(x, y)\n original_color = bitmap[y_i][x_i]\n flood_fill_helper(x, y, original_color, color)\n end",
"def test_fill_in\n assert_equal(0, @sudoku_1.rows[2][4])\n assert_equal(0, @sudoku_1.columns[4][2])\n assert_equal(0, @sudoku_1.squares[1][7])\n @sudoku_1.fill_in(2, 4, 1)\n assert_equal(1, @sudoku_1.rows[2][4])\n assert_equal(1, @sudoku_1.columns[4][2])\n assert_equal(1, @sudoku_1.squares[1][7])\n end",
"def prepare_fills # :nodoc:\n fills = {}\n index = 2 # Start from 2. See above.\n\n # Add the default fills.\n fills['0:0:0'] = 0\n fills['17:0:0'] = 1\n\n # Store the DXF colors separately since them may be reversed below.\n @dxf_formats.each do |format|\n next unless format.pattern != 0 || format.bg_color != 0 || format.fg_color != 0\n\n format.has_dxf_fill(true)\n format.dxf_bg_color = format.bg_color\n format.dxf_fg_color = format.fg_color\n end\n\n @xf_formats.each do |format|\n # The following logical statements jointly take care of special cases\n # in relation to cell colours and patterns:\n # 1. For a solid fill (_pattern == 1) Excel reverses the role of\n # foreground and background colours, and\n # 2. If the user specifies a foreground or background colour without\n # a pattern they probably wanted a solid fill, so we fill in the\n # defaults.\n #\n if format.pattern == 1 && ne_0?(format.bg_color) && ne_0?(format.fg_color)\n format.fg_color, format.bg_color = format.bg_color, format.fg_color\n elsif format.pattern <= 1 && ne_0?(format.bg_color) && eq_0?(format.fg_color)\n format.fg_color = format.bg_color\n format.bg_color = 0\n format.pattern = 1\n elsif format.pattern <= 1 && eq_0?(format.bg_color) && ne_0?(format.fg_color)\n format.bg_color = 0\n format.pattern = 1\n end\n\n key = format.get_fill_key\n\n if fills[key]\n # Fill has already been used.\n format.fill_index = fills[key]\n format.has_fill(false)\n else\n # This is a new fill.\n fills[key] = index\n format.fill_index = index\n format.has_fill(true)\n index += 1\n end\n end\n\n @fill_count = index\n end",
"def pad!(array, min_size, value=nil)\n array.fill(value, array.length...min_size)\nend",
"def pad(fill_value = -1, data = @raw.clone)\n data = _pad data, 0, fill_value, 0\n _pad data, -1, fill_value, 6\n end",
"def reducx(*values)\n @assign = values.compact.reduce({ }, :merge).keep_if &value_present\n self\n end",
"def fillrect(*)\n super\n end",
"def fill_all test \n\t\t$logger.info \"entered\"\n\n\t\t(0...ai.rows).each do |row|\n\t\t\t(0...ai.cols).each do |col|\n\t\t\t\tfill_square test, ai.map[row][col]\n\t\t\t\tFiber.yield\n\t\t\tend\n\t\tend\n\tend",
"def fill(row, column, colour, memo = [])\n start_colour = self[row, column]\n self[row, column] = colour\n\n memo.push([row, column]) # adds coordinates to memo\n p memo\n (row - 1..row + 1).each do |i|\n (column - 1..column + 1).each do |j|\n next unless valid_coordinates?(i, j)\n next if memo.include? [i, j]\n next unless self[i, j] == start_colour\n\n fill(i, j, colour, memo)\n end\n end\n end",
"def get_fill_properties(fill) # :nodoc:\n return { :_defined => 0 } unless fill\n\n fill[:_defined] = 1\n\n fill\n end",
"def flood_fill(matrix, x, y ,z)\n\n # Bounds check\n if [x,y,z].select{|n| n<0 || n>=matrix.length}.size > 0\n return\n end\n # Do nothing check\n if ['X','Z'].include?(matrix[x][y][z])\n return\n end\n # To fill check\n if matrix[x][y][z] ==\"Y\"\n matrix[x][y][z] = \"Z\"\n puts \"#{x},#{y},#{z}\"\n end\n # Recurrsive call\n for x_d in -1..1\n for y_d in -1..1\n for z_d in -1..1\n if x+x_d!=x && y+y_d!=0 && z+z_d!=0\n flood_fill(matrix, x+x_d, y+y_d, z+z_d)\n end\n end\n end\n end\nend",
"def reducx(*values)\n @assign = values.compact.reduce({ }, :merge!).keep_if &value_present\n self\n end",
"def fill_square(n, set, idx)\n set[idx] = n\n end",
"def ordered_fill(page, *fields)\n f_o_i false, nil, page, *fields\n end",
"def set_fill\n @fill = Fill.find(params[:id])\n end",
"def fill_region(x, y, colour)\n return if invalid?(x,y)\n x = xform(x)\n y = xform(y)\n target_colour = @matrix.element(y, x)\n @matrix.flood_fill(x, y, target_colour, colour)\n end",
"def _reduce_194(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def _reduce_194(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def repeat(min = T.unsafe(nil), max = T.unsafe(nil)); end",
"def _reduce_194(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n\n result\nend",
"def fill_quad(x1:, y1:, x2:, y2:, x3:, y3:, x4:, y4:, color: nil, colour: nil)\n fill_polygon coordinates: [x1, y1, x2, y2, x3, y3, x4, y4], color: color || colour\n end",
"def fill(cx, cy, half_width, half_height)\n surface = cy\n\n ((cx - half_width)..(cx + half_width)).each { |x|\n ((cy - half_height)..(cy + half_height)).each { |y|\n put_block(x, y, @block_map[:dirt].dup)\n }\n }\n\n put_block(0, 0, @block_map[:rock].dup)\n end",
"def fill_with char\n fill_from_with(0 , char)\n end",
"def compact!\n i = 0\n lim = self.__size\n while i < lim\n break if self.__at(i)._equal?(nil)\n i += 1\n end\n return nil if i._equal?(lim)\n\n fill_idx = i\n while i < lim\n el = self.__at(i)\n unless el._equal?(nil)\n self.__at_put(fill_idx, el )\n fill_idx += 1\n end\n i += 1\n end\n self.__size = fill_idx\n self\n end",
"def fill(pattern)\n @style[:fill] = pattern(pattern)\n end",
"def fi_request(*_)\n force_break\n self.fill = true\n end",
"def array\n \t(1..size).map{ |i| \"#{fill}\" }\n end",
"def pad!(array, min_size, value = nil) #destructive\r\n (array.length..min_size - 1).each { |i| array[i] = value}\r\n array\r\nend",
"def fill_row(x1, x2, y, colour)\n return if invalid?(x1,y) || invalid?(x2,y)\n x1.upto(x2) do |i|\n colour(i,y,colour)\n end\n end",
"def _reduce_211(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def pad(array, min_size, value = nil) #non-destructive\n new_array = Array.new\n new_array += array\n new_array.fill(value, array.length...min_size)\nend",
"def _reduce_211(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n\n result\nend",
"def _reduce_196(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n\n result\nend",
"def _reduce_196(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def _reduce_196(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def _reduce_196(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def _reduce_192(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def pad!(array, min_size, value = nil) \n array.fill(value, array.length...min_size) unless min_size <= array.length\n array\nend",
"def pad!(array, min_size, value = nil)\n if min_size <= array.length\n return array\n else\n difference = min_size - array.length\n array.fill(value, array.length, difference)\n end\nend",
"def fill(colorspec)\n primitive \"fill #{enquote(colorspec)}\"\n end",
"def pad!(array, min_size, value = nil)\n#destructive\n\n length = array.length\n\n if length >= min_size\n return p array\n\n elsif min_size == 0\n p array = []\n return p array = []\n\n else\n\n while array.length < min_size\n if value != nil\n array << value\n else\n array << nil\n end\n end\n return p array\n end\nend",
"def dry_schema_args(method, value)\n if method == :filled\n value\n elsif method == :array\n value[:array].instance_of?(Hash) ? create_schema(value[:array]) : value[:array]\n elsif method == :hash\n create_schema(value)\n end\n end",
"def fill(model, item_path, args)\n fill_properties(model, item_path, args)\n end",
"def pad!(array, min_size, value = nil) #destructive\n pad_size(array, min_size).times { array.push(value) }\n return array\nend",
"def _reduce_188(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def _reduce_188(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def pad(array, min_size, value=nil)\n array.dup.fill(value, array.length...min_size)\nend",
"def fill_from_object(object)\n self\n end",
"def _reduce_188(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n\n result\nend",
"def fill\n\t\t# Go through each item\n\t\t@items.length.times do |i|\n\t\t\t@items.length.times do |offset|\n\t\t\t\tleft = i - offset\n\t\t\t\tright = i + offset\n\n\t\t\t\t# The equation is the following:\n\t\t\t\t# value of 46 given to the item staying on the current item\n\t\t\t\t# As you move right and left item, the value is given by 50 - (distance from center note + 2)^2\n\t\t\t\tval = 50 - ((offset + 1) ** 2)\n\t\t\t\tval = 0 if val < 0\n\n\t\t\t\tif left >= 0\n\t\t\t\t\t@matrix[[i, left]] = val\n\t\t\t\tend\n\n\t\t\t\tif right < @items.length\n\t\t\t\t\t@matrix[[i, right]] = val\n\t\t\t\tend\n\t\t\tend\n\t\tend\n\tend",
"def fill_array rows, cols, num, val\n num = 0 if num < 0\n array = Array.new(rows) { Array.new(cols) { -1 } }\n (0...num).each { |i|\n row = (i / cols).floor\n col = i % cols\n array[row][col] = val\n }\n array\n end",
"def compact=(_arg0); end",
"def fill_color(color)\n end",
"def nf_request(*_)\n force_break if self.fill\n self.fill = false\n end",
"def assignment(arr)\n yield(arr)\nend",
"def pad!(array, min_size, value = nil) #destructive\n # Your code here\n if array.length < min_size\n x = min_size - array.length\n x.times do array << value\n end\n end\n return array\nend",
"def _reduce_171(val, _values, result)\n result = @builder.assign(val[0], val[1], val[2])\n \n result\nend",
"def pad!(array, min_size, value = nil) #destructive\n difference = (min_size - array.length)\n difference.times {\n array << value}\n return array\nend",
"def pad!(array, min_size, value = nil) #destructive\n length = array.length\n if length >= min_size\n return array\n else\n until array.length == min_size\n return array.push(value = nil)\n end\n end \nend",
"def pad!(array, min_size, value = nil) #destructive\n if array.length >= min_size\n return array\n else\n for i in array.length...min_size\n array[i] = value\n end\n end\nend",
"def pad!(array, min_size, value = nil) #destructive\n array_size = array.size\n until array_size >= min_size\n array << value\n array_size += 1\n end\n return array\nend"
] |
[
"0.7889927",
"0.67792344",
"0.6765239",
"0.6664033",
"0.64098614",
"0.6363014",
"0.6114919",
"0.60925",
"0.5948215",
"0.5917927",
"0.58302677",
"0.57890606",
"0.5772848",
"0.5731747",
"0.5695932",
"0.55723214",
"0.55303895",
"0.5490103",
"0.54730046",
"0.5456026",
"0.544089",
"0.53577685",
"0.5323584",
"0.52902573",
"0.5284311",
"0.52623516",
"0.52548385",
"0.5254145",
"0.52450925",
"0.52405727",
"0.52302116",
"0.52236336",
"0.52147347",
"0.5189261",
"0.5181813",
"0.5123457",
"0.51205796",
"0.51121444",
"0.5100254",
"0.5091221",
"0.50735205",
"0.5059632",
"0.5050269",
"0.50293106",
"0.5026988",
"0.5021597",
"0.50104266",
"0.50101167",
"0.49968985",
"0.49884376",
"0.4987646",
"0.49850076",
"0.49809065",
"0.49685872",
"0.49676484",
"0.49658623",
"0.49658623",
"0.4962468",
"0.4960193",
"0.49567658",
"0.4956531",
"0.4953932",
"0.4952488",
"0.49466583",
"0.4943102",
"0.49373868",
"0.4932463",
"0.4932413",
"0.49157342",
"0.49155378",
"0.49124205",
"0.49119875",
"0.49112168",
"0.49112168",
"0.49112168",
"0.49070185",
"0.48983487",
"0.48976406",
"0.4897126",
"0.4893814",
"0.48857602",
"0.48796853",
"0.4877964",
"0.48751688",
"0.48751688",
"0.4872656",
"0.48632443",
"0.48614812",
"0.48560166",
"0.4854183",
"0.48507485",
"0.48497787",
"0.4831312",
"0.48260027",
"0.48153743",
"0.48125857",
"0.48088986",
"0.48048767",
"0.48009953",
"0.4797542"
] |
0.7154743
|
1
|