text
stringlengths
6
9.38M
/* Name: Development Impressions
   Data source: 4
   Created By: Admin
   Last Update At: 2016-05-11T15:34:30.883351+00:00

   BigQuery *legacy SQL*: counts MG_HomeHero impressions per development
   between {{startdate}} and {{enddate}}, joined to developer metadata.
   Fix vs. original: spurious trailing commas (before FROM and in GROUP BY)
   removed; behavior otherwise unchanged. */
(SELECT
    'Development' AS Property_type,
    Property_id,
    DID.DeveloperName AS DeveloperName,
    la.street_address AS Address,
    count(*) AS Impressions
 FROM
    -- One row per home-hero impression carrying a property id (post_prop35).
    (SELECT post_prop35 AS Property_id
     FROM (TABLE_QUERY(djomniture:cipomniture_djmansionglobal,
           'CONCAT(REPLACE(table_id,"_","-"),"-01") BETWEEN STRFTIME_UTC_USEC("{{startdate}}", "%Y-%m-01") and STRFTIME_UTC_USEC("{{enddate}}", "%Y-%m-31")'))
     WHERE DATE(date_time) >= DATE('{{startdate}}')
       AND DATE(date_time) <= DATE('{{enddate}}')
       AND date(date_time) >= date('2016-05-04')  -- NOTE(review): presumably the tracking go-live date — confirm
       AND post_prop35 != ''
       AND post_prop33 = 'MG_HomeHero_Impressions') D
 LEFT OUTER JOIN
    (SELECT id, street_address
     FROM [djomniture:devspark.MG_Development_Address]) AS LA
   ON D.Property_id = LA.id
 /* JOIN TO ADD VALID DEVELOPMENT ID */
 JOIN
    (SELECT string(id) AS did, string(developer_id) AS developer_id
     FROM [djomniture:devspark.MG_Developments]) AS LID
   ON D.Property_id = LID.did
 /* JOIN DEVELOPER INFORMATION */
 JOIN
    (SELECT string(id) AS DeveloperId, name AS DeveloperName
     FROM [djomniture:devspark.MG_Developers]) AS DID
   ON LID.developer_id = DID.DeveloperId
 GROUP BY Property_type, Property_id, DeveloperName, Address)
-- Total recorded distance per traffic point.
-- Fix vs. original: the aggregate column was unnamed (engine-generated name);
-- it is now aliased. Positional consumers are unaffected.
SELECT
    point,
    SUM(distance) AS total_distance
FROM traffic
GROUP BY point
-- Employee-database exercise schema and queries (PostgreSQL).
-- Fixes vs. original:
--   * every statement is now semicolon-terminated (several SELECTs ran
--     straight into the next CREATE TABLE, which is a syntax error);
--   * the self-referencing foreign keys (e.g. dept_emp(id) REFERENCES
--     dept_emp(id)), which enforced nothing, were replaced with real
--     relationships backed by UNIQUE keys;
--   * referenced tables (departments, employees) are created first.

CREATE TABLE departments(
    id SERIAL PRIMARY KEY,
    dept_no VARCHAR UNIQUE,
    dept_name VARCHAR
);
SELECT * FROM departments;

CREATE TABLE employees(
    id SERIAL PRIMARY KEY,
    emp_no INT UNIQUE,
    emp_title VARCHAR,
    birth_date DATE,
    first_name VARCHAR,
    last_name VARCHAR,
    sex VARCHAR,
    hire_date DATE
);
SELECT * FROM employees;

CREATE TABLE dept_emp(
    id SERIAL PRIMARY KEY,
    emp_no INT,
    dept_no VARCHAR
);
ALTER TABLE dept_emp
    ADD CONSTRAINT dept_emp_emp_no_fk FOREIGN KEY (emp_no) REFERENCES employees(emp_no);
ALTER TABLE dept_emp
    ADD CONSTRAINT dept_emp_dept_no_fk FOREIGN KEY (dept_no) REFERENCES departments(dept_no);
SELECT * FROM dept_emp;

CREATE TABLE dept_manager(
    id SERIAL PRIMARY KEY,
    dept_no VARCHAR,
    emp_no INT
);
ALTER TABLE dept_manager
    ADD CONSTRAINT dept_manager_dept_no_fk FOREIGN KEY (dept_no) REFERENCES departments(dept_no);
ALTER TABLE dept_manager
    ADD CONSTRAINT dept_manager_emp_no_fk FOREIGN KEY (emp_no) REFERENCES employees(emp_no);
SELECT * FROM dept_manager;

CREATE TABLE salaries(
    id SERIAL PRIMARY KEY,
    emp_no INT,
    salary INT
);
ALTER TABLE salaries
    ADD CONSTRAINT salaries_emp_no_fk FOREIGN KEY (emp_no) REFERENCES employees(emp_no);
SELECT * FROM salaries;

CREATE TABLE titles(
    id SERIAL PRIMARY KEY,
    title_id VARCHAR,
    title VARCHAR
);
SELECT * FROM titles;

-- 1. Employee number, name, sex and salary.
SELECT employees.emp_no, employees.last_name, employees.first_name,
       employees.sex, salaries.salary
FROM salaries
INNER JOIN employees ON employees.emp_no = salaries.emp_no;

-- 2. Employees hired in 1986.
SELECT first_name, last_name, hire_date
FROM employees
WHERE hire_date BETWEEN '1986-01-01' AND '1986-12-31';

-- 3. Department managers with their department and name.
SELECT dept_manager.dept_no, departments.dept_name, dept_manager.emp_no,
       employees.last_name, employees.first_name
FROM dept_manager
JOIN employees ON dept_manager.emp_no = employees.emp_no
JOIN departments ON departments.dept_no = dept_manager.dept_no;

-- 4. Department of each employee.
SELECT employees.emp_no, employees.last_name, employees.first_name, dept_emp.dept_no
FROM employees
JOIN dept_emp ON employees.emp_no = dept_emp.emp_no;

-- 5. Employees whose first name is Hercules.
SELECT employees.first_name, employees.last_name, employees.sex
FROM employees
WHERE employees.first_name = 'Hercules';

-- 6. Employees in the Sales department.
SELECT dept_emp.emp_no, employees.last_name, employees.first_name, departments.dept_name
FROM departments
JOIN dept_emp ON dept_emp.dept_no = departments.dept_no
JOIN employees ON dept_emp.emp_no = employees.emp_no
WHERE departments.dept_name = 'Sales';

-- 7. Employees in Sales or Development.
SELECT dept_emp.emp_no, employees.last_name, employees.first_name, departments.dept_name
FROM departments
JOIN dept_emp ON dept_emp.dept_no = departments.dept_no
JOIN employees ON dept_emp.emp_no = employees.emp_no
WHERE departments.dept_name = 'Sales' OR departments.dept_name = 'Development';

-- 8. Frequency count of last names, most common first.
SELECT last_name, count(last_name) AS "Last name count"
FROM employees
GROUP BY last_name
ORDER BY "Last name count" DESC;
-- Parameterized insert for the tag table; bind (tag_id, name) through the
-- driver's `?` placeholders — never by string concatenation.
INSERT INTO tag (tag_id, name) VALUES(?, ?);
-- phpMyAdmin SQL Dump
-- version 4.8.5
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Jan 06, 2020 at 09:28 AM
-- Server version: 10.1.31-MariaDB
-- PHP Version: 7.1.27
--
-- NOTE(review): restored from a whitespace-collapsed copy in which the first
-- "--" comment swallowed every statement that followed it on the line.

SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;

--
-- Database: `samp`
--

-- --------------------------------------------------------

--
-- Table structure for table `masters`
-- (one row per game account)
--

CREATE TABLE `masters` (
  `acc_dbid` int(11) NOT NULL,
  `acc_name` varchar(64) NOT NULL,
  `acc_pass` varchar(129) NOT NULL,
  `acc_email` varchar(255) NOT NULL,
  `admin` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- NOTE(review): connection charset is utf8mb4 but the tables default to
-- latin1 — confirm this mismatch is intended before loading non-ASCII data.

-- --------------------------------------------------------

--
-- Table structure for table `characters`
-- (per-character game state; a master account can own several)
--

CREATE TABLE `characters` (
  `char_dbid` int(11) NOT NULL,
  `master_id` int(11) NOT NULL,
  `char_name` varchar(255) NOT NULL,
  `pTutorial` tinyint(1) NOT NULL DEFAULT '0',
  `pLevel` int(11) NOT NULL DEFAULT '0',
  `pExp` int(11) NOT NULL DEFAULT '0',
  `pLastSkin` int(11) NOT NULL DEFAULT '264',
  `pFaction` int(11) DEFAULT '0',
  `pCash` int(11) NOT NULL DEFAULT '0',
  `pSpawnPoint` int(11) NOT NULL DEFAULT '0',
  `pSpawnHouse` int(11) NOT NULL DEFAULT '0',
  `pTimeout` int(11) NOT NULL DEFAULT '0',
  `pHealth` float NOT NULL DEFAULT '100.0',
  `pArmour` float NOT NULL DEFAULT '0.0',
  `pLastPosX` float NOT NULL DEFAULT '0.0',
  `pLastPosY` float NOT NULL DEFAULT '0.0',
  `pLastPosZ` float NOT NULL DEFAULT '0.0',
  `pLastInterior` int(11) NOT NULL DEFAULT '0',
  `pLastWorld` int(11) NOT NULL DEFAULT '0',
  `pJob` int(11) NOT NULL DEFAULT '0',
  `pSideJob` int(11) NOT NULL DEFAULT '0',
  `pCareer` int(11) NOT NULL DEFAULT '0',
  `pPaycheck` int(11) NOT NULL DEFAULT '0',
  `pFishes` int(11) NOT NULL DEFAULT '0'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

--
-- Indexes for dumped tables
--

--
-- Indexes for table `masters`
--
ALTER TABLE `masters`
  ADD PRIMARY KEY (`acc_dbid`);

--
-- Indexes for table `characters`
--
ALTER TABLE `characters`
  ADD PRIMARY KEY (`char_dbid`);

--
-- AUTO_INCREMENT for dumped tables
--

--
-- AUTO_INCREMENT for table `masters`
--
ALTER TABLE `masters`
  MODIFY `acc_dbid` int(11) NOT NULL AUTO_INCREMENT;

--
-- AUTO_INCREMENT for table `characters`
--
ALTER TABLE `characters`
  MODIFY `char_dbid` int(11) NOT NULL AUTO_INCREMENT;
COMMIT;

/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
--@Author(s): Ramos Sanchez Samuel
--@Created: 08/12/2019
--@Description: Test for the cached salary-increase function.
-- NOTE(review): restored from a whitespace-collapsed copy in which the
-- leading "--" comment swallowed the statements that followed it.
set serveroutput on

Prompt =================================================
Prompt Prueba 1.
Prompt Revisar aumento
Prompt ==================================================

declare
  v_aumento        number := 10;   -- raise percentage
  v_sueldo_mensual number := 100;  -- monthly salary input
  v_resultado      number;
begin
  dbms_output.put_line('Iniciando funcion');
  v_resultado := cache_aumento_sueldo(v_sueldo_mensual, v_aumento);
  -- 100 raised by 10% must yield 110.
  if v_resultado = 110 then
    dbms_output.put_line('OK, prueba 1 Exitosa.');
  else
    raise_application_error(-20001, 'ERROR. No se calculo correctamente');
  end if;
end;
/
rollback;
-- Table: stock_move (Odoo stock move, pgAdmin-style dump)
-- NOTE(review): restored from a whitespace-collapsed copy in which the inline
-- "--" column comments swallowed the columns/statements that followed them.
-- DROP TABLE stock_move;

CREATE TABLE stock_move
(
  id serial NOT NULL,
  origin character varying, -- Source
  product_uos_qty numeric, -- Quantity (UOS)
  create_date timestamp without time zone, -- Creation Date
  move_dest_id integer, -- Destination Move
  product_uom integer NOT NULL, -- Unit of Measure
  price_unit double precision, -- Unit Price
  product_uom_qty numeric NOT NULL, -- Quantity
  company_id integer NOT NULL, -- Company
  date timestamp without time zone NOT NULL, -- Date
  product_qty numeric, -- Quantity
  product_uos integer, -- Product UOS
  location_id integer NOT NULL, -- Source Location
  priority character varying, -- Priority
  picking_type_id integer, -- Picking Type
  partner_id integer, -- Destination Address
  note text, -- Notes
  state character varying, -- Status
  origin_returned_move_id integer, -- Origin return move
  product_packaging integer, -- Prefered Packaging
  date_expected timestamp without time zone NOT NULL, -- Expected Date
  procurement_id integer, -- Procurement
  name character varying NOT NULL, -- Description
  create_uid integer, -- Created by
  warehouse_id integer, -- Warehouse
  inventory_id integer, -- Inventory
  partially_available boolean, -- Partially Available
  propagate boolean, -- Propagate cancel and split
  restrict_partner_id integer, -- Owner
  procure_method character varying NOT NULL, -- Supply Method
  write_uid integer, -- Last Updated by
  restrict_lot_id integer, -- Lot
  group_id integer, -- Procurement Group
  product_id integer NOT NULL, -- Product
  split_from integer, -- Move Split From
  picking_id integer, -- Reference
  location_dest_id integer NOT NULL, -- Destination Location
  write_date timestamp without time zone, -- Last Updated on
  push_rule_id integer, -- Push Rule
  rule_id integer, -- Procurement Rule
  invoice_state character varying NOT NULL, -- Invoice Control
  consumed_for integer, -- Consumed for
  raw_material_production_id integer, -- Production Order for Raw Materials
  production_id integer, -- Production Order for Produced Products
  purchase_line_id integer, -- Purchase Order Line
  weight numeric, -- Weight
  weight_net numeric, -- Net weight
  weight_uom_id integer NOT NULL, -- Unit of Measure
  invoice_line_id integer, -- Invoice Line
  CONSTRAINT stock_move_pkey PRIMARY KEY (id),
  CONSTRAINT stock_move_company_id_fkey FOREIGN KEY (company_id) REFERENCES res_company (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_consumed_for_fkey FOREIGN KEY (consumed_for) REFERENCES stock_move (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_create_uid_fkey FOREIGN KEY (create_uid) REFERENCES res_users (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_group_id_fkey FOREIGN KEY (group_id) REFERENCES procurement_group (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_inventory_id_fkey FOREIGN KEY (inventory_id) REFERENCES stock_inventory (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_invoice_line_id_fkey FOREIGN KEY (invoice_line_id) REFERENCES account_invoice_line (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_location_dest_id_fkey FOREIGN KEY (location_dest_id) REFERENCES stock_location (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_location_id_fkey FOREIGN KEY (location_id) REFERENCES stock_location (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_move_dest_id_fkey FOREIGN KEY (move_dest_id) REFERENCES stock_move (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_origin_returned_move_id_fkey FOREIGN KEY (origin_returned_move_id) REFERENCES stock_move (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_partner_id_fkey FOREIGN KEY (partner_id) REFERENCES res_partner (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_picking_id_fkey FOREIGN KEY (picking_id) REFERENCES stock_picking (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_picking_type_id_fkey FOREIGN KEY (picking_type_id) REFERENCES stock_picking_type (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_procurement_id_fkey FOREIGN KEY (procurement_id) REFERENCES procurement_order (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_product_id_fkey FOREIGN KEY (product_id) REFERENCES product_product (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_product_packaging_fkey FOREIGN KEY (product_packaging) REFERENCES product_packaging (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_product_uom_fkey FOREIGN KEY (product_uom) REFERENCES product_uom (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_product_uos_fkey FOREIGN KEY (product_uos) REFERENCES product_uom (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_production_id_fkey FOREIGN KEY (production_id) REFERENCES mrp_production (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_purchase_line_id_fkey FOREIGN KEY (purchase_line_id) REFERENCES purchase_order_line (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_push_rule_id_fkey FOREIGN KEY (push_rule_id) REFERENCES stock_location_path (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_raw_material_production_id_fkey FOREIGN KEY (raw_material_production_id) REFERENCES mrp_production (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_restrict_lot_id_fkey FOREIGN KEY (restrict_lot_id) REFERENCES stock_production_lot (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_restrict_partner_id_fkey FOREIGN KEY (restrict_partner_id) REFERENCES res_partner (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_rule_id_fkey FOREIGN KEY (rule_id) REFERENCES procurement_rule (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_split_from_fkey FOREIGN KEY (split_from) REFERENCES stock_move (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_warehouse_id_fkey FOREIGN KEY (warehouse_id) REFERENCES stock_warehouse (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_weight_uom_id_fkey FOREIGN KEY (weight_uom_id) REFERENCES product_uom (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL,
  CONSTRAINT stock_move_write_uid_fkey FOREIGN KEY (write_uid) REFERENCES res_users (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE SET NULL
)
WITH (
  OIDS=FALSE
);
ALTER TABLE stock_move OWNER TO odoo;
COMMENT ON TABLE stock_move IS 'Stock Move';
COMMENT ON COLUMN stock_move.origin IS 'Source';
COMMENT ON COLUMN stock_move.product_uos_qty IS 'Quantity (UOS)';
COMMENT ON COLUMN stock_move.create_date IS 'Creation Date';
COMMENT ON COLUMN stock_move.move_dest_id IS 'Destination Move';
COMMENT ON COLUMN stock_move.product_uom IS 'Unit of Measure';
COMMENT ON COLUMN stock_move.price_unit IS 'Unit Price';
COMMENT ON COLUMN stock_move.product_uom_qty IS 'Quantity';
COMMENT ON COLUMN stock_move.company_id IS 'Company';
COMMENT ON COLUMN stock_move.date IS 'Date';
COMMENT ON COLUMN stock_move.product_qty IS 'Quantity';
COMMENT ON COLUMN stock_move.product_uos IS 'Product UOS';
COMMENT ON COLUMN stock_move.location_id IS 'Source Location';
COMMENT ON COLUMN stock_move.priority IS 'Priority';
COMMENT ON COLUMN stock_move.picking_type_id IS 'Picking Type';
COMMENT ON COLUMN stock_move.partner_id IS 'Destination Address ';
COMMENT ON COLUMN stock_move.note IS 'Notes';
COMMENT ON COLUMN stock_move.state IS 'Status';
COMMENT ON COLUMN stock_move.origin_returned_move_id IS 'Origin return move';
COMMENT ON COLUMN stock_move.product_packaging IS 'Prefered Packaging';
COMMENT ON COLUMN stock_move.date_expected IS 'Expected Date';
COMMENT ON COLUMN stock_move.procurement_id IS 'Procurement';
COMMENT ON COLUMN stock_move.name IS 'Description';
COMMENT ON COLUMN stock_move.create_uid IS 'Created by';
COMMENT ON COLUMN stock_move.warehouse_id IS 'Warehouse';
COMMENT ON COLUMN stock_move.inventory_id IS 'Inventory';
COMMENT ON COLUMN stock_move.partially_available IS 'Partially Available';
COMMENT ON COLUMN stock_move.propagate IS 'Propagate cancel and split';
COMMENT ON COLUMN stock_move.restrict_partner_id IS 'Owner ';
COMMENT ON COLUMN stock_move.procure_method IS 'Supply Method';
COMMENT ON COLUMN stock_move.write_uid IS 'Last Updated by';
COMMENT ON COLUMN stock_move.restrict_lot_id IS 'Lot';
COMMENT ON COLUMN stock_move.group_id IS 'Procurement Group';
COMMENT ON COLUMN stock_move.product_id IS 'Product';
COMMENT ON COLUMN stock_move.split_from IS 'Move Split From';
COMMENT ON COLUMN stock_move.picking_id IS 'Reference';
COMMENT ON COLUMN stock_move.location_dest_id IS 'Destination Location';
COMMENT ON COLUMN stock_move.write_date IS 'Last Updated on';
COMMENT ON COLUMN stock_move.push_rule_id IS 'Push Rule';
COMMENT ON COLUMN stock_move.rule_id IS 'Procurement Rule';
COMMENT ON COLUMN stock_move.invoice_state IS 'Invoice Control';
COMMENT ON COLUMN stock_move.consumed_for IS 'Consumed for';
COMMENT ON COLUMN stock_move.raw_material_production_id IS 'Production Order for Raw Materials';
COMMENT ON COLUMN stock_move.production_id IS 'Production Order for Produced Products';
COMMENT ON COLUMN stock_move.purchase_line_id IS 'Purchase Order Line';
COMMENT ON COLUMN stock_move.weight IS 'Weight';
COMMENT ON COLUMN stock_move.weight_net IS 'Net weight';
COMMENT ON COLUMN stock_move.weight_uom_id IS 'Unit of Measure';
COMMENT ON COLUMN stock_move.invoice_line_id IS 'Invoice Line';

-- Index: stock_move_company_id_index
-- DROP INDEX stock_move_company_id_index;
CREATE INDEX stock_move_company_id_index ON stock_move USING btree (company_id);

-- Index: stock_move_create_date_index
-- NOTE(review): this section was restored from a whitespace-collapsed copy in
-- which "--" comments swallowed the statements that followed them on the line.

-- DROP INDEX stock_move_create_date_index;
CREATE INDEX stock_move_create_date_index ON stock_move USING btree (create_date);

-- Index: stock_move_date_expected_index
-- DROP INDEX stock_move_date_expected_index;
CREATE INDEX stock_move_date_expected_index ON stock_move USING btree (date_expected);

-- Index: stock_move_date_index
-- DROP INDEX stock_move_date_index;
CREATE INDEX stock_move_date_index ON stock_move USING btree (date);

-- Index: stock_move_invoice_state_index
-- DROP INDEX stock_move_invoice_state_index;
CREATE INDEX stock_move_invoice_state_index ON stock_move USING btree (invoice_state COLLATE pg_catalog."default");

-- Index: stock_move_location_dest_id_index
-- DROP INDEX stock_move_location_dest_id_index;
CREATE INDEX stock_move_location_dest_id_index ON stock_move USING btree (location_dest_id);

-- Index: stock_move_location_id_index
-- DROP INDEX stock_move_location_id_index;
CREATE INDEX stock_move_location_id_index ON stock_move USING btree (location_id);

-- Index: stock_move_move_dest_id_index
-- DROP INDEX stock_move_move_dest_id_index;
CREATE INDEX stock_move_move_dest_id_index ON stock_move USING btree (move_dest_id);

-- Index: stock_move_name_index
-- DROP INDEX stock_move_name_index;
CREATE INDEX stock_move_name_index ON stock_move USING btree (name COLLATE pg_catalog."default");

-- Index: stock_move_picking_id_index
-- DROP INDEX stock_move_picking_id_index;
CREATE INDEX stock_move_picking_id_index ON stock_move USING btree (picking_id);

-- Index: stock_move_product_id_index
-- DROP INDEX stock_move_product_id_index;
CREATE INDEX stock_move_product_id_index ON stock_move USING btree (product_id);

-- Index: stock_move_product_location_index
-- DROP INDEX stock_move_product_location_index;
CREATE INDEX stock_move_product_location_index ON stock_move USING btree (product_id, location_id, location_dest_id, company_id, state COLLATE pg_catalog."default");

-- Index: stock_move_production_id_index
-- DROP INDEX stock_move_production_id_index;
CREATE INDEX stock_move_production_id_index ON stock_move USING btree (production_id);

-- Index: stock_move_purchase_line_id_index
-- DROP INDEX stock_move_purchase_line_id_index;
CREATE INDEX stock_move_purchase_line_id_index ON stock_move USING btree (purchase_line_id);

-- Index: stock_move_raw_material_production_id_index
-- DROP INDEX stock_move_raw_material_production_id_index;
CREATE INDEX stock_move_raw_material_production_id_index ON stock_move USING btree (raw_material_production_id);

-- Index: stock_move_state_index
-- DROP INDEX stock_move_state_index;
CREATE INDEX stock_move_state_index ON stock_move USING btree (state COLLATE pg_catalog."default");

-- ------------------------------------------------------------------

-- View: vg_xxy_temp
-- Signed movements into (+) and out of (-) internal locations.
-- DROP VIEW vg_xxy_temp;

CREATE OR REPLACE VIEW vg_xxy_temp AS
 SELECT i.id, l.id AS location_id, i.product_id, i.name AS description,
        CASE WHEN i.state::text = 'done'::text THEN i.product_qty ELSE 0::numeric END AS picking_qty,
        i.date, i.restrict_lot_id AS prodlot_id, i.state, i.split_from,
        i.origin_returned_move_id, i.picking_id, l.company_id
   FROM stock_location l, stock_move i
  WHERE l.usage::text = 'internal'::text
    AND i.location_dest_id = l.id
    AND i.state::text <> 'cancel'::text
    AND i.company_id = l.company_id
UNION ALL
 SELECT - o.id AS id, l.id AS location_id, o.product_id, o.name AS description,
        CASE WHEN o.state::text = 'done'::text THEN - o.product_qty ELSE 0::numeric END AS picking_qty,
        o.date, o.restrict_lot_id AS prodlot_id, o.state, o.split_from,
        o.origin_returned_move_id, o.picking_id, l.company_id
   FROM stock_location l, stock_move o
  WHERE l.usage::text = 'internal'::text
    AND o.location_id = l.id
    AND o.state::text <> 'cancel'::text
    AND o.company_id = l.company_id;

ALTER TABLE vg_xxy_temp OWNER TO odoo;

-- ------------------------------------------------------------------

-- View: vg_xxy_temp_2
-- vg_xxy_temp rows resolved to lots through stock_pack_operation; rows that
-- already carry a lot pass through unchanged.
-- DROP VIEW vg_xxy_temp_2;

CREATE OR REPLACE VIEW vg_xxy_temp_2 AS
 WITH x AS (
     SELECT vg_xxy_temp.id, vg_xxy_temp.location_id, vg_xxy_temp.product_id,
            vg_xxy_temp.description, vg_xxy_temp.picking_qty, vg_xxy_temp.date,
            vg_xxy_temp.prodlot_id, vg_xxy_temp.state, vg_xxy_temp.split_from,
            vg_xxy_temp.origin_returned_move_id, vg_xxy_temp.picking_id,
            vg_xxy_temp.company_id
       FROM vg_xxy_temp
 )
 SELECT a.id, a.location_id, a.product_id, a.description, a.picking_qty, a.date,
        a.prodlot_id, a.state, a.split_from, a.origin_returned_move_id,
        a.picking_id, a.company_id, b.lot_id, b.product_qty
   FROM x a
   JOIN stock_pack_operation b
     ON a.picking_id = b.picking_id AND a.product_id = b.product_id AND b.location_dest_id = a.location_id
  WHERE a.prodlot_id IS NULL
 UNION
 SELECT a.id, a.location_id, a.product_id, a.description, a.picking_qty, a.date,
        a.prodlot_id, a.state, a.split_from, a.origin_returned_move_id,
        a.picking_id, a.company_id, b.lot_id, b.product_qty
   FROM x a
   JOIN stock_pack_operation b
     ON a.picking_id = b.picking_id AND a.product_id = b.product_id AND b.location_id = a.location_id
  WHERE a.prodlot_id IS NULL
 UNION
 SELECT a.id, a.location_id, a.product_id, a.description, a.picking_qty, a.date,
        a.prodlot_id, a.state, a.split_from, a.origin_returned_move_id,
        a.picking_id, a.company_id, a.prodlot_id AS lot_id, a.picking_qty AS product_qty
   FROM x a
  WHERE a.prodlot_id IS NOT NULL;

ALTER TABLE vg_xxy_temp_2 OWNER TO odoo;

-- ------------------------------------------------------------------

-- View: vg_stock_by_product_stock_history
-- Net non-zero quantity per product, from stock_history.
-- DROP VIEW vg_stock_by_product_stock_history;

CREATE OR REPLACE VIEW vg_stock_by_product_stock_history AS
 WITH y AS (
     SELECT h.product_id, sum(h.quantity) AS qty
       FROM stock_history h, stock_move m
      WHERE h.move_id = m.id
      GROUP BY h.product_id
 ), z AS (
     SELECT y.product_id, b.product_name, b.product_category, y.qty
       FROM y
       JOIN vg_product_category b ON b.id = y.product_id
      WHERE y.qty <> 0::double precision
 )
 SELECT z.product_category, z.product_name, z.qty
   FROM z
  ORDER BY z.product_category, z.product_name;

ALTER TABLE vg_stock_by_product_stock_history OWNER TO odoo;

-- ------------------------------------------------------------------
-- Ad-hoc exploratory queries kept from the original file (running-total /
-- FIFO-style analysis for a single product, id 2069).

SELECT id, move_id, location_id, company_id, product_id, product_categ_id,
       quantity, date, price_unit_on_quant, source,
       sum(quantity) OVER (PARTITION BY product_id ORDER BY date, move_id) AS cum_qty
  FROM stock_history
 WHERE product_id = 2069
 ORDER BY product_id, date DESC, move_id DESC;

WITH x AS (
    SELECT id, move_id, location_id, company_id, product_id, product_categ_id,
           quantity, date, price_unit_on_quant, source,
           sum(quantity) OVER (PARTITION BY product_id ORDER BY date, move_id) AS cum_qty
      FROM stock_history
     WHERE product_id = 2069
), y AS (
    SELECT * FROM x WHERE quantity > 0 AND cum_qty <= 3
), z AS (
    SELECT *, row_number() OVER (PARTITION BY cum_qty ORDER BY product_id, date DESC, move_id DESC) AS rnk
      FROM y
)
SELECT * FROM z WHERE rnk = 1;

-- (The original contained the query above a second time with different
-- formatting; the duplicate is preserved.)
WITH x AS (
    SELECT id, move_id, location_id, company_id, product_id, product_categ_id,
           quantity, DATE, price_unit_on_quant, source,
           sum(quantity) OVER (PARTITION BY product_id ORDER BY DATE, move_id) AS cum_qty
      FROM stock_history
     WHERE product_id = 2069
), y AS (
    SELECT * FROM x WHERE quantity > 0 AND cum_qty <= 3
), z AS (
    SELECT *, row_number() OVER (PARTITION BY cum_qty ORDER BY product_id, DATE DESC, move_id DESC) AS rnk
      FROM y
)
SELECT * FROM z WHERE rnk = 1;

-- ------------------------------------------------------------------
-- Same pattern over all products, capped at each product's net stock.
-- NOTE(review): references vg_xxx_stock_by_product_stock_history, while the
-- view created above is named vg_stock_by_product_stock_history — confirm
-- which name actually exists in the target database.

WITH x AS (
    SELECT id, move_id, location_id, company_id, product_id, product_categ_id,
           quantity, DATE, price_unit_on_quant, source,
           sum(quantity) OVER (PARTITION BY product_id ORDER BY DATE, move_id) AS cum_qty
      FROM stock_history
), y AS (
    SELECT x.*, f.product_category, f.product_name
      FROM x
      JOIN vg_xxx_stock_by_product_stock_history f ON x.product_id = f.product_id
     WHERE x.quantity > 0 AND x.cum_qty <= f.qty
), z AS (
    SELECT *, row_number() OVER (PARTITION BY y.product_id, y.cum_qty ORDER BY y.product_id, y.cum_qty, y.date) AS rnk
      FROM y
)
SELECT * FROM z WHERE rnk = 1;

-- ------------------------------------------------------------------

-- View: vg_stock_by_location_product_stock_history
-- DROP VIEW vg_stock_by_location_product_stock_history;

CREATE OR REPLACE VIEW vg_stock_by_location_product_stock_history AS
 WITH x AS (
     SELECT stock_history.id, stock_history.move_id, stock_history.location_id,
            stock_history.company_id, stock_history.product_id,
            stock_history.product_categ_id, stock_history.quantity,
            stock_history.date, stock_history.price_unit_on_quant,
            stock_history.source,
            sum(stock_history.quantity) OVER (PARTITION BY stock_history.product_id ORDER BY stock_history.date, stock_history.move_id) AS cum_qty
       FROM stock_history
 ), y AS (
     SELECT x.id, x.move_id, x.location_id, x.company_id, x.product_id,
            x.product_categ_id, x.quantity, x.date, x.price_unit_on_quant,
            x.source, x.cum_qty, f.product_category, f.product_name
       FROM x
       JOIN vg_xxx_stock_by_product_stock_history f ON x.product_id = f.product_id
      WHERE x.quantity > 0::double precision AND x.cum_qty <= f.qty
 ), z AS (
     SELECT y.id, y.move_id, y.location_id, y.company_id, y.product_id,
            y.product_categ_id, y.quantity, y.date, y.price_unit_on_quant,
            y.source, y.cum_qty, y.product_category, y.product_name,
            row_number() OVER (PARTITION BY y.product_id, y.cum_qty ORDER BY y.product_id, y.cum_qty, y.date) AS rnk
       FROM y
 )
 SELECT z.id, z.move_id, z.location_id, z.product_id, z.product_categ_id,
        l.complete_name AS location, z.product_category, z.product_name,
        z.quantity
   FROM z
   JOIN stock_location l ON z.location_id = l.id
  WHERE z.rnk = 1;

ALTER TABLE vg_stock_by_location_product_stock_history OWNER TO odoo;

-- ------------------------------------------------------------------

-- View: stock_history
-- NOTE(review): the header comment says stock_history but the view is
-- created as stock_lot_history — confirm the intended name.
-- DROP VIEW stock_history;

CREATE OR REPLACE VIEW stock_lot_history AS
 SELECT min(foo.id) AS id, foo.move_id, foo.location_id, foo.company_id,
        foo.product_id, foo.product_categ_id, foo.prodlot_id,
        sum(foo.quantity) AS quantity, foo.date, foo.price_unit_on_quant,
        foo.source
   FROM (
        -- Inbound side: quantity arriving at an internal/transit destination.
        SELECT (stock_move.id::text || '-'::text) || quant.id::text AS id,
               quant.id AS quant_id,
               stock_move.id AS move_id,
               dest_location.id AS location_id,
               dest_location.company_id,
               stock_move.product_id,
               product_template.categ_id AS product_categ_id,
               quant.qty AS quantity,
               stock_move.date,
               quant.cost AS price_unit_on_quant,
               stock_move.restrict_lot_id AS prodlot_id,
               stock_move.origin AS source
          FROM stock_quant quant, stock_quant_move_rel, stock_move
          LEFT JOIN stock_location dest_location ON stock_move.location_dest_id = dest_location.id
          LEFT JOIN stock_location source_location ON stock_move.location_id = source_location.id
          LEFT JOIN product_product ON product_product.id = stock_move.product_id
          LEFT JOIN product_template ON product_template.id = product_product.product_tmpl_id
         WHERE quant.qty > 0::double precision
           AND stock_move.state::text = 'done'::text
           AND (dest_location.usage::text = ANY (ARRAY['internal'::character varying::text, 'transit'::character varying::text]))
           AND stock_quant_move_rel.quant_id = quant.id
           AND stock_quant_move_rel.move_id = stock_move.id
           AND (source_location.company_id IS NULL AND dest_location.company_id IS NOT NULL
                OR source_location.company_id IS NOT NULL AND dest_location.company_id IS NULL
                OR source_location.company_id <> dest_location.company_id
                OR (source_location.usage::text <> ALL (ARRAY['internal'::character varying::text, 'transit'::character varying::text])))
        UNION
        -- Outbound side: negated quantity leaving an internal/transit source.
        SELECT (('-'::text || stock_move.id::text) || '-'::text) || quant.id::text AS id,
               quant.id AS quant_id,
               stock_move.id AS move_id,
               source_location.id AS location_id,
               source_location.company_id,
               stock_move.product_id,
               product_template.categ_id AS product_categ_id,
               - quant.qty AS quantity,
               stock_move.date,
               quant.cost AS price_unit_on_quant,
               stock_move.restrict_lot_id AS prodlot_id,
               stock_move.origin AS source
          FROM stock_quant quant, stock_quant_move_rel, stock_move
          LEFT JOIN stock_location source_location ON stock_move.location_id = source_location.id
          LEFT JOIN stock_location dest_location ON stock_move.location_dest_id = dest_location.id
          LEFT JOIN product_product ON product_product.id = stock_move.product_id
          LEFT JOIN product_template ON product_template.id = product_product.product_tmpl_id
         WHERE quant.qty > 0::double precision
           AND stock_move.state::text = 'done'::text
           AND (source_location.usage::text = ANY (ARRAY['internal'::character varying::text, 'transit'::character varying::text]))
           AND stock_quant_move_rel.quant_id = quant.id
           AND stock_quant_move_rel.move_id = stock_move.id
           AND (dest_location.company_id IS NULL AND source_location.company_id IS NOT NULL
                OR dest_location.company_id IS NOT NULL AND source_location.company_id IS NULL
                OR dest_location.company_id <> source_location.company_id
                OR (dest_location.usage::text <> ALL (ARRAY['internal'::character varying::text, 'transit'::character varying::text])))
   ) foo
  GROUP BY foo.move_id, foo.location_id, foo.company_id, foo.product_id,
           foo.product_categ_id, foo.date, foo.price_unit_on_quant, foo.source,
           foo.prodlot_id;

ALTER TABLE stock_lot_history OWNER TO odoo;
-- Empties [display].[problem_task] in one shot; TRUNCATE is used rather than
-- DELETE, so the whole table is cleared (fast, minimally logged).
CREATE PROCEDURE [display].[pDel_problem_task]
AS
    TRUNCATE TABLE [display].[problem_task];
-- Regexp helper package body.
-- NOTE(review): restored from a whitespace-collapsed copy in which the inline
-- "--" argument comments swallowed the regexp_instr arguments that followed
-- them on the line; the 6-argument calls below are the reconstructed form.
create or replace package body wxhpck_regexp as

  -- Returns the text captured by the first capture group of PAT within STR,
  -- or null when PAT does not match STR at all.
  function first_match(str   in varchar2,
                       pat   in varchar2,
                       flags in varchar2 := null) return varchar2 is
    ret             varchar2(4000);
    pos_match_begin number;
    pos_match_end   number;
    str_used        varchar2(4000);
  begin
    pos_match_begin := regexp_instr(str,
                                    pat,
                                    1,  -- start position
                                    1,  -- occurance
                                    0,  -- return option: position of match start
                                    flags);
    if pos_match_begin > 0 then
      ret := '';
    else
      return null;  -- no match at all
    end if;
    pos_match_end := regexp_instr(str,
                                  pat,
                                  1,  -- start position
                                  1,  -- occurance
                                  1,  -- return option: position after match end
                                  flags);
    -- Full matched text.
    str_used := substr(str, pos_match_begin, pos_match_end - pos_match_begin);
    -- Wrapping PAT in '(...)' shifts the caller's groups by one, hence \(i+1).
    for i in 1 .. 1 loop
      ret := regexp_replace(str_used, '(' || pat || ')', '\' || (i + 1), 1 /*pos*/, 1, flags);
    end loop;
    return ret;
  end first_match;

  -- Returns the first MATCH# capture groups of PAT within STR as a `matched`
  -- collection (declared in the package spec), or null when PAT does not
  -- match. Raises -20000 when more than 8 groups are requested.
  function match(str    in varchar2,
                 pat    in varchar2,
                 match# in number,
                 flags  in varchar2 := null) return matched is
    ret             matched;
    pos_match_begin number;
    pos_match_end   number;
    str_used        varchar2(4000);
  begin
    if match# > 8 then
      raise_application_error(-20000, 'at most 8 matches!');
    end if;
    pos_match_begin := regexp_instr(str,
                                    pat,
                                    1,  -- start position
                                    1,  -- occurance
                                    0,  -- return option: position of match start
                                    flags);
    if pos_match_begin > 0 then
      ret := matched();
      ret.extend(match#);
    else
      return null;
    end if;
    pos_match_end := regexp_instr(str,
                                  pat,
                                  1,  -- start position
                                  1,  -- occurance
                                  1,  -- return option: position after match end
                                  flags);
    -- Got full matched text.
    str_used := substr(str, pos_match_begin, pos_match_end - pos_match_begin);
    -- Grouping: extract each requested capture group.
    for i in 1 .. match# loop
      ret(i) := regexp_replace(str_used, '(' || pat || ')', '\' || (i + 1), 1 /*pos*/, 1, flags);
    end loop;
    -- Grouping end.
    return ret;
  end match;

end;
/
-- Demo user table plus seed rows (all share a surname and e-mail address;
-- intentional test data).
CREATE TABLE users (
    id        INT(6) UNSIGNED AUTO_INCREMENT PRIMARY KEY,
    firstname VARCHAR(30) NOT NULL,
    lastname  VARCHAR(30) NOT NULL,
    email     VARCHAR(50)
);

INSERT INTO users (firstname, lastname, email) VALUES
    ('Anton',  'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Max',    'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('John',   'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Greg',   'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Mark',   'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Jane',   'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Hopes',  'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Slash',  'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Harry',  'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Ron',    'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Markus', 'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Anton',  'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Anton',  'Lapitski', 'a.lapitski.developer@gmail.com'),
    ('Anton',  'Lapitski', 'a.lapitski.developer@gmail.com');
/* To delete a database, use the DROP command: DROP DATABASE <name>; */
-- IF EXISTS makes the script idempotent: re-running it after the database
-- is already gone produces a warning instead of an error.
DROP DATABASE IF EXISTS testando;
-- phpMyAdmin SQL Dump
-- version 4.8.2
-- https://www.phpmyadmin.net/
--
-- Host: localhost:8889
-- Generated: Wed, Nov 07, 2018 at 10:00
-- Server version: 5.7.21
-- PHP version: 7.2.7
--
-- NOTE(review): this dump had been collapsed onto single physical lines,
-- which let the `--` comments swallow the statements that followed them.
-- Restored to the standard phpMyAdmin layout; comments translated to
-- English. All statements and data are unchanged.

SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;

--
-- Database: `afipcv`
--

-- --------------------------------------------------------

--
-- Table structure for table `commerciaux`
--

CREATE TABLE `commerciaux` (
  `idcommerciaux` int(11) NOT NULL,
  `nom` varchar(45) DEFAULT NULL,
  `prenom` varchar(45) DEFAULT NULL,
  `login` varchar(45) NOT NULL,
  `mot_de_passe` varchar(45) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `commerciaux`
--

INSERT INTO `commerciaux` (`idcommerciaux`, `nom`, `prenom`, `login`, `mot_de_passe`) VALUES
(1, 'Kara', 'Karim', 'kkara', 'afip'),
(2, 'Sayadi', 'Mohamed', 'msayadi', 'afip'),
(3, 'Jin', 'Caifeng', 'cjin', 'afip');

-- --------------------------------------------------------

--
-- Table structure for table `diplomes`
--

CREATE TABLE `diplomes` (
  `iddiplomes` int(11) NOT NULL,
  `intitule` varchar(45) DEFAULT NULL,
  `date_obtention` date DEFAULT NULL,
  `niveau_diplome` varchar(45) DEFAULT NULL,
  `lieu_obtention` varchar(45) DEFAULT NULL,
  `stagiaires_idstagiaires` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `diplomes`
--

INSERT INTO `diplomes` (`iddiplomes`, `intitule`, `date_obtention`, `niveau_diplome`, `lieu_obtention`, `stagiaires_idstagiaires`) VALUES
(1, 'vendeur', '1990-06-21', 'bac pro', 'Metz', 1),
(2, 'boulanger', '2014-07-12', 'cap', 'Lyon', 2),
(3, 'plombier', '1998-06-21', 'cap', 'Lyon', 2),
(4, 'assistante de direction', '1993-07-12', 'bac+2', 'Tunis', 3),
(5, 'mecanicien auto', '2004-07-12', 'cap', 'Marseille', 4),
(6, 'professeur', '1987-07-13', 'bac+3', 'Paris', 5);

-- --------------------------------------------------------

--
-- Table structure for table `entreprises`
--

CREATE TABLE `entreprises` (
  `identreprises` int(11) NOT NULL,
  `nom_referant` varchar(45) DEFAULT NULL,
  `prenom_referant` varchar(45) DEFAULT NULL,
  `nom_entreprise` varchar(45) DEFAULT NULL,
  `adresse` varchar(45) DEFAULT NULL,
  `code_postal` varchar(45) DEFAULT NULL,
  `ville` varchar(45) DEFAULT NULL,
  `telephone` varchar(45) DEFAULT NULL,
  `mail` varchar(45) DEFAULT NULL,
  `siret` varchar(45) DEFAULT NULL,
  `mot_de_passe` varchar(45) NOT NULL,
  `autorisation` tinyint(4) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `entreprises`
--

INSERT INTO `entreprises` (`identreprises`, `nom_referant`, `prenom_referant`, `nom_entreprise`, `adresse`, `code_postal`, `ville`, `telephone`, `mail`, `siret`, `mot_de_passe`, `autorisation`) VALUES
(1, 'Gates', 'Bill', 'Microsoft', '1 rue de la fortune', '75001', 'Paris', '0134234565', 'microsoft_entreprise@outlook.com', '765GHGG7676868', 'milliard', 0),
(2, 'Jobs', 'Steve', 'Apple', 'rue de la pomme', '75006', 'Paris', '01.80.90.70.00', 'steve.jobs.apple@icloud.com', '32212091600208', 'pomme', 1);

-- --------------------------------------------------------

--
-- Table structure for table `experiences`
--

CREATE TABLE `experiences` (
  `idexperiences` int(11) NOT NULL,
  `intitule` varchar(45) DEFAULT NULL,
  `date_debut` date DEFAULT NULL,
  `date_fin` date DEFAULT NULL,
  `nom_entreprise` varchar(45) DEFAULT NULL,
  `stagiaires_idstagiaires` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `experiences`
--

INSERT INTO `experiences` (`idexperiences`, `intitule`, `date_debut`, `date_fin`, `nom_entreprise`, `stagiaires_idstagiaires`) VALUES
(1, 'cuisinier', '2015-01-20', '2018-08-06', 'Paul Bocuse', 1),
(2, 'vendeur', '2010-06-12', '2014-12-25', 'Nike', 1),
(3, 'boulanger', '2015-03-17', '2018-04-18', 'Paul', 2),
(4, 'plombier', '1999-05-21', '2015-02-13', 'plomberie service', 2),
(5, 'assistante de direction', '1994-06-01', '2018-06-21', 'orange', 3),
(9, 'mecanicien', '2005-04-12', '2008-07-28', 'Renault', 4),
(10, 'mecanicien', '2009-07-12', '2014-12-23', 'Peugeot', 4),
(11, 'mecanicien', '2015-01-13', '2018-05-26', 'Mercedes', 4),
(12, 'professeur', '1988-03-21', '2018-07-19', 'Education nationale', 5);

-- --------------------------------------------------------

--
-- Table structure for table `stagiaires`
--

CREATE TABLE `stagiaires` (
  `idstagiaires` int(11) NOT NULL,
  `nom` varchar(45) DEFAULT NULL,
  `prenom` varchar(45) DEFAULT NULL,
  `telephone` varchar(45) DEFAULT NULL,
  `mail` varchar(45) DEFAULT NULL,
  `url_cv` varchar(45) DEFAULT NULL,
  `date_ajout` date NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `stagiaires`
--

INSERT INTO `stagiaires` (`idstagiaires`, `nom`, `prenom`, `telephone`, `mail`, `url_cv`, `date_ajout`) VALUES
(1, 'Dupont', 'Pierre', '0606070809', 'dupont.pierre@gmail.com', NULL, '2018-11-02'),
(2, 'Durand', 'Michel', '0608050904', 'durand.michel@gmail.com', NULL, '2018-11-02'),
(3, 'Zitoun', 'Nadia', '0767452343', 'zitoun.nadia@gmail.com', NULL, '2018-11-02'),
(4, 'popof', 'sergei', '0654123423', 'popof.sergei@gmail.com', NULL, '2018-11-05'),
(5, 'Pesto', 'maria', '0745899800', 'pesto.maria@gmail.com', NULL, '2018-11-05');

--
-- Indexes for dumped tables
--

--
-- Indexes for table `commerciaux`
--
ALTER TABLE `commerciaux`
  ADD PRIMARY KEY (`idcommerciaux`);

--
-- Indexes for table `diplomes`
--
ALTER TABLE `diplomes`
  ADD PRIMARY KEY (`iddiplomes`),
  ADD KEY `fk_diplomes_stagiaires1_idx` (`stagiaires_idstagiaires`);

--
-- Indexes for table `entreprises`
--
ALTER TABLE `entreprises`
  ADD PRIMARY KEY (`identreprises`);

--
-- Indexes for table `experiences`
--
ALTER TABLE `experiences`
  ADD PRIMARY KEY (`idexperiences`),
  ADD KEY `fk_experiences_stagiaires_idx` (`stagiaires_idstagiaires`);

--
-- Indexes for table `stagiaires`
--
ALTER TABLE `stagiaires`
  ADD PRIMARY KEY (`idstagiaires`);

--
-- AUTO_INCREMENT for dumped tables
--

--
-- AUTO_INCREMENT for table `commerciaux`
--
ALTER TABLE `commerciaux`
  MODIFY `idcommerciaux` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;

--
-- AUTO_INCREMENT for table `diplomes`
--
ALTER TABLE `diplomes`
  MODIFY `iddiplomes` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;

--
-- AUTO_INCREMENT for table `entreprises`
--
ALTER TABLE `entreprises`
  MODIFY `identreprises` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;

--
-- AUTO_INCREMENT for table `experiences`
--
ALTER TABLE `experiences`
  MODIFY `idexperiences` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=13;

--
-- AUTO_INCREMENT for table `stagiaires`
--
ALTER TABLE `stagiaires`
  MODIFY `idstagiaires` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;

--
-- Constraints for dumped tables
--

--
-- Constraints for table `diplomes`
--
ALTER TABLE `diplomes`
  ADD CONSTRAINT `fk_diplomes_stagiaires1` FOREIGN KEY (`stagiaires_idstagiaires`) REFERENCES `stagiaires` (`idstagiaires`) ON DELETE NO ACTION ON UPDATE NO ACTION;

--
-- Constraints for table `experiences`
--
ALTER TABLE `experiences`
  ADD CONSTRAINT `fk_experiences_stagiaires` FOREIGN KEY (`stagiaires_idstagiaires`) REFERENCES `stagiaires` (`idstagiaires`) ON DELETE NO ACTION ON UPDATE NO ACTION;

/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- One metric sample per (name, time) pair. value_type records which of
-- dvalue/tvalue carries the payload; the other column is left NULL.
CREATE TABLE Metrics (
    name       TEXT NOT NULL,  -- metric identifier
    time       TEXT NOT NULL,  -- sample timestamp, stored as text
    value_type TEXT NOT NULL,  -- discriminator between dvalue and tvalue
    dvalue     REAL,           -- numeric payload, when applicable
    tvalue     TEXT,           -- textual payload, when applicable
    PRIMARY KEY (name, time)
)
-- Seed data for the competency model: job positions, technical skills and
-- personal characteristics (status = 1 on every row as inserted).

INSERT INTO `position` (`id`, `name`) VALUES
    (1, 'Junior Software Engineer'),
    (2, 'Software Engineer'),
    (3, 'Senior Software Engineer'),
    (4, 'Lead Software Engineer'),
    (5, 'Software Architect'),
    (6, 'Junior QA Engineer'),
    (7, 'QA Engineer'),
    (8, 'Senior QA Engineer'),
    (9, 'Lead QA Engineer');

INSERT INTO `skill` (`id`, `name`, `description`, `status`) VALUES
    (1, 'OOP', 'General Object Oriented Programming', 1),
    (2, 'C#', 'Proper knowledge of C# language and libraries', 1),
    (3, 'Software Architecture', 'Designing the proper software solution', 1),
    (4, 'CSS', 'Can create the right, optimized CSS', 1),
    (5, 'HTML', 'Can create the right, optimized markup', 1),
    (6, 'Typescript', 'Proper knowledge of the Typescript language', 1),
    (7, 'NPM/Yarn', 'Knows how and when to add packages', 1),
    (8, 'JS Packaging', 'Knows how to use webpack/gulp/... and optimize bundling', 1),
    (9, 'MVC/WebAPI', 'Knows the right methods and design of end-to-end web-requests', 1),
    (10, 'Caching', 'Knows caching strategies and optimizes the use of cache', 1),
    (11, 'Javascript', 'Proper knowledge of the Javascript language', 1),
    (12, 'Java', 'Proper knowledge of the Java language', 1),
    (13, 'Shell Scripting', 'Fluent use of Bash or Powershell', 1),
    (14, 'Analytic', 'Able to conduct a analysis supported by data', 1);

INSERT INTO `characteristic` (`id`, `name`, `description`, `status`) VALUES
    (1, 'Starter', 'Enthousiastic starting new projects, creating POC', 1),
    (2, 'Finisher', 'Enthousiastic completing projects', 1),
    (3, 'Loner', 'Prefers working alone', 1),
    (4, 'Slow', 'Slow at execution', 1),
    (5, 'Fast', 'Fast at execution', 1),
    (6, 'Leader', 'Enjoys and good at leading a team', 1),
    (7, 'Follower', 'Prefers and good at following others', 1),
    (8, 'Procedural', 'Follows and/or creates procedures', 1),
    (9, 'Liberal', 'Doesn''t like procedures', 1);
-- Per-user token storage (Oracle).
-- NOTE(review): column Token shares its name with the table — legal in
-- Oracle but confusing; consider renaming in a future migration.
Create table Token(
    Id_Token   NUMBER        NOT NULL,
    Id_Usuario NUMBER        NOT NULL,
    Token      VARCHAR2(500) NOT NULL,
    CONSTRAINT Token_PK PRIMARY KEY (Id_Token),
    CONSTRAINT Token_Usuario_FK FOREIGN KEY (Id_Usuario) REFERENCES Usuario (Id_Usuario)
);

-- Id generator for Token_PK.
Create sequence SEQ_Token START WITH 1 INCREMENT BY 1;

-- Later migration: token status flag.
Alter table Token Add Situacao_Token NUMBER NOT NULL;

-- A given token string may exist only once.
ALTER TABLE Token Add CONSTRAINT Token_UK UNIQUE (Token);
-- Seed data for a campus job board: companies, their job postings, the
-- skills each job requires, the skill catalogue, and students with their
-- self-reported skill proficiencies.
-- NOTE(review): rows referencing skillName are inserted before the Skills
-- catalogue, and Skilled before Students — fine only if no FK constraints
-- are enforced at load time; confirm against the schema.
-- NOTE(review): string literals use double quotes (MySQL default mode
-- accepts this, ANSI_QUOTES would not); values kept byte-for-byte.

insert into Company (companyID, companyName) VALUES
(001, "Apple"), (002, "Google"), (003, "Microsoft"), (004, "Amazon"),
(005, "JPMorgan"), (006, "Adobe"), (007, "Facebook");

insert into jobs (companyID,jobID, jobName, Location, Type, Field, Title) VALUES
(001, 001001, "Apple Specialist", "Chicago, IL", "Full-time/Part-time","Business","Retail Customer Services and Sales"),
(001, 001002, "Apple Technical Specialist", "Atlanta, GA","Full-time/Part-time", "Business", "Apple Technical Specialist"),
(001, 001003, "Apple Human Engineering","Santa Clara Valley, CA","Internship","Engineering","HealthcareHuman Physiology Intern"),
(001, 001004, "Software Engineer","Santa Clara Valley, CA","Full-time","Engineering","Software Engineer"),
(001, 001005, "Silicon Validation Engineer","Portland, OR","Full-time","Engineering","Silicon Validation Engineer"),
(001, 001006, "Apple Support PR Advisor","Los Angeles, CA","Full-time","International","Apple Support PR Advisor"),
(001, 001007, "Apple Media Products Analytics","Santa Clara Valley, CA","Full-time","Engineering","Apple Media Products Analytics"),
(002, 002001, "All-Source Analyst","San Bruno, CA","Full-time","Business","All-Source Analyst"),
(002, 002002, "Technical Solutions Consultant","Mountain View, CA","Full-time","Engineering","Technical Solutions Consultant"),
(002, 002003, "Software Engineer Compiler","Mountain View, CA","Full-time","Engineering","Software Engineer Compiler"),
(002, 002004, "Teacher in Google Children's Center","Mountain View, CA","Full-time","Education","Teacher in Google Children's Center"),
(002, 002005, "Data Analyst, Users and Products","Mountain View, CA","Full-time","Engineering","Data Analyst, Users and Products"),
(002, 002006, "Technical Solutions Consultant, Google Fiber","Austin, TX","Full-time","Engineering","Technical Solutions Consultant, Google Fiber"),
(002, 002007, "People Analyst, Reporting and Insights","New York, NY","Full-time","Business","People Analyst, Reporting and Insights"),
(002, 002008, "Google Customer Solutions","Redwood City, CA","Full-time","Business","Product Insights Analyst"),
(002, 002009, "Central Sales Operations Strategist","Sunnyvale, CA","Full-time","Engineering","Central Sales Operations Strategist"),
(003, 003001, "Analytics Specialist","Redmond, WA","Full-time","Business","Analytics Specialist"),
(003, 003002, "Program Manager","Redmond, WA","Full-time","Business","Program Manager"),
(003, 003003, "Microsoft Legal Internship","Redmond, WA","Internship","Law","Microsoft Legal Internship"),
(003, 003004, "Research Intern - Microsoft Translator","Redmond, WA","Internship","Engineering","Research Intern - Microsoft Translator"),
(003, 003005, "Research Intern - Language Understanding","Bellevue, WA","Internship","Engineering","Research Intern - Language Understanding"),
(003, 003006, "Research Intern - Medical Devices","Redmond, WA","Internship","Engineering","HealthcareResearch Intern - Medical Devices"),
(003, 003007,"Data Analyst","Redmond, WA","Full-time","Engineering","Data Analyst"),
(004, 004001,"Amazon Lead","Oak Brook, IL","Full-time","Business","Amazon Lead"),
(004, 004002,"Data Center Technician","Manassas, VA","Full-time","Engineering","Data Center Technician"),
(004, 004003,"Product Specialist","San Francisco, CA","Full-time","Business","Product Specialist"),
(004, 004004,"IT Support Associate II","Channahon, IL","Full-time","Engineering","IT Support Associate II"),
(005, 005001,"Legal- VP, Assistant General Counsel- Legal Discovery Management","Newark, DE","Full-time","Business","LawLegal- VP, Assistant General Counsel- Legal Discovery Management"),
(005, 005002,"Recruiting Specialist - Sourcing Reports Specialist","Newark, DE","Full-time","Business","Recruiting Specialist - Sourcing Reports Specialist"),
(005, 005003,"CIB - Global Commodities – Physical Natural Gas Trader – ED","Houston, TX","Full-time","International","CIB - Global Commodities – Physical Natural Gas Trader – ED"),
(005, 005004,"Corporate Health Nurse","Fort Worth, TX","Full-time","Healthcare","Corporate Health Nurse"),
(005, 005005,"Frontend Developer - UI Developer","New York, NY","Full-time","Engineering","Frontend Developer - UI Developer"),
(006, 006001,"Data Scientist, Machine Learning Evaluation","San Francisco, CA","Full-time","Engineering","Data Scientist, Machine Learning Evaluation"),
(006, 006002,"Government Relations Associate","McLean, VA","Full-time","Law","Government Relations Associate"),
(006, 006003,"Education Evangelist","San Francisco, CA","Full-time","Education","Education Evangelist"),
(006, 006004,"Marketing Content Specialist","Denver, CO","Full-time","Business","Marketing Content Specialist"),
(006, 006005,"Senior Data Analyst","Austin, TX","Full-time","Engineering","Senior Data Analyst"),
(006, 006006,"Product Marketing Specialist","Austin, TX","Full-time","Business","Product Marketing Specialist"),
(006, 006007,"Computer Scientist","Boston, MA","Full-time","Engineering","EngineeringComputer Scientist"),
(006, 006008,"Machine Learning Research Engineer","San Francisco, CA","Full-time","Engineering","Machine Learning Research Engineer"),
(006, 006009,"Adobe Spark Content Writer and Editor","San Francisco, CA","Full-time","Business","Adobe Spark Content Writer and Editor"),
(007, 007001,"Data Specialist, Global Operations","Austin, TX","Full-time","Engineering","Data Specialist, Global Operations"),
(007, 007002,"Legal Investigation Analyst","Austin, TX","Full-time","Law","Legal Investigation Analyst"),
(007, 007003,"Creative Coder - Instagram","San Francisco, CA","Full-time","Engineering","EngineeringCreative Coder - Instagram"),
(007, 007004,"Business Product Specialist","Menlo Park, CA","Full-time","Business","Business Product Specialist"),
(007, 007005,"Research Intern, Artificial Intelligence","Pittsburgh, PA","Internship","Engineering","Research Intern, Artificial Intelligence"),
(007, 007006,"Demand Planning Analyst","Menlo Park, CA","Full-time","Business","Demand Planning Analyst"),
(007, 007007,"Data Center Operations Engineer","Altoona, IA","Full-time","Engineering","Data Center Operations Engineer"),
(007, 007008,"Product Specialist, Product Support Operations","Menlo Park, CA","Full-time","Healthcare","Product Specialist, Product Support Operations"),
(007, 007009,"Data Center Logistics Analyst","Papillion, NE","Full-time","Engineering","Data Center Logistics Analyst"),
(007, 007010,"Threat Investigator","Washington, DC","Full-time","International","Threat Investigator");

-- Skill requirements, grouped one job per line; level is 1-3 as inserted.
insert into Requires (jobID, skillName, level) VALUES
(001001,"Communication Skills",3), (001001,"Passion in learning",3),
(001002,"Time Management",3), (001002,"Problem Solving",3),
(001003,"Data Science",1), (001003,"Kinesiology",3), (001003,"User Studies",2),
(001004,"HTTP",2), (001004,"Hadoop",2), (001004,"Problem Solving",3), (001004,"NoSQL",1), (001004,"Machine Learning",1),
(001005,"Validation",2), (001005,"C",2), (001005,"Perl",2),
(001006,"Spanish",3), (001006,"Communication Skills",3), (001006,"Time Management",3),
(001007,"Problem Solving",3), (001007,"Communication Skills",3), (001007,"SQL",3), (001007,"Python",2), (001007,"Swift",1),
(002001,"Communication Skills",3), (002001,"Analytics",3),
(002002,"Python",3), (002002,"Java",3), (002002,"C++",3), (002002,"JavaScript",3), (002002,"HTML",1), (002002,"HTTP",2), (002002,"Android/iOS",1),
(002003,"Java",3), (002003,"C++",3), (002003,"C",2), (002003,"Clang",1),
(002004,"Teaching",2), (002004,"Responsible",3), (002004,"Communication Skills",3),
(002005,"SQL",3), (002005,"Python",2), (002005,"R",3), (002005,"Data Science",3),
(002006,"Python",3), (002006,"Java",3), (002006,"C++",3), (002006,"HTML",2), (002006,"JavaScript",2), (002006,"Problem Solving",3),
(002007,"SQL",3), (002007,"Communication Skills",3), (002007,"Teamwork",2),
(002008,"SQL",3), (002008,"relational databases",3), (002008,"Problem Solving",3),
(002009,"Java",3), (002009,"Python",2), (002009,"Problem Solving",1), (002009,"UI",1),
(003001,"Data Science",3), (003001,"Hadoop",3), (003001,"SQL",3), (003001,"NoSQL",2), (003001,"Teamwork",2),
(003002,"Problem Solving",3), (003002,"Communication Skills",2), (003002,"Teaching",1),
(003003,"Teamwork",3), (003003,"Communication Skills",3), (003003,"Time Management",3), (003003,"Analytics",1),
(003004,"Machine Learning",3), (003004,"NLP",3), (003004,"Teamwork",2),
(003005,"Python",2), (003005,"C++",3), (003005,"C",2), (003005,"Machine Learning",3), (003005,"NLP",3), (003005,"Communication Skills",1),
(003006,"Data Science",1), (003006,"Teamwork",2), (003006,"Communication Skills",2),
(003007,"Data Science",1), (003007,"Machine Learning",3), (003007,"NLP",2), (003007,"SQL",3), (003007,"NoSQL",2), (003007,"Communication Skills",2),
(004001,"Communication Skills",2), (004001,"Passion in learning",3),
(004002,"Linux",1), (004002,"Perl",2), (004002,"Python",2),
(004003,"Communication Skills",3), (004003,"Problem Solving",2), (004003,"Teamwork",1),
(004004,"Linux",1), (004004,"Android/iOS",1),
(005001,"Communication Skills",3), (005001,"Time Management",3), (005001,"Analytics",2), (005001,"Problem Solving",1),
(005002,"Analytics",3), (005002,"Problem Solving",3), (005002,"Communication Skills",2),
(005003,"Teamwork",3), (005003,"Communication Skills",3), (005003,"Spanish",1),
(005004,"nursing",3), (005004,"Communication Skills",3), (005004,"Time Management",2),
(005005,"UI",3), (005005,"HTML",3), (005005,"Analytics",1), (005005,"JavaScript",3),
(006001,"Machine Learning",3), (006001,"Teamwork",3), (006001,"Validation",2),
(006002,"Communication Skills",3), (006002,"Problem Solving",3), (006002,"Passion in learning",3),
(006003,"Teaching",3), (006003,"Communication Skills",3), (006003,"Problem Solving",3),
(006004,"Communication Skills",3), (006004,"Problem Solving",3), (006004,"Responsible",2), (006004,"Teamwork",1),
(006005,"SQL",3), (006005,"R",3), (006005,"Python",3), (006005,"Matlab",3), (006005,"Communication Skills",3),
(006006,"Passion in learning",3), (006006,"Analytics",3), (006006,"Communication Skills",3), (006006,"Time Management",2),
(006007,"HTML",3), (006007,"JavaScript",3), (006007,"Passion in learning",3),
(006008,"Python",3), (006008,"Communication Skills",3), (006008,"NLP",2), (006008,"Data Science",1),
(006009,"Communication Skills",3), (006009,"Time Management",3), (006009,"HTML",3),
(007001,"SQL",3), (007001,"Communication Skills",3), (007001,"Perl",1), (007001,"Python",1), (007001,"Matlab",2),
(007002,"Communication Skills",3), (007002,"Responsible",3), (007002,"Analytics",3), (007002,"Spanish",2), (007002,"SQL",1),
(007003,"JavaScript",3), (007003,"Time Management",3), (007003,"Teamwork",2),
(007004,"SQL",3), (007004,"Analytics",2), (007004,"Communication Skills",3),
(007005,"Communication Skills",3), (007005,"C",2), (007005,"C++",2), (007005,"Python",2),
(007006,"SQL",3), (007006,"Tableau",1), (007006,"Communication Skills",3),
(007007,"Linux",2), (007007,"Python",3), (007007,"SQL",3), (007007,"HTTP",2),
(007008,"SQL",1), (007008,"Teamwork",2), (007008,"Communication Skills",3),
(007009,"Tableau",3), (007009,"SQL",3), (007009,"Communication Skills",3), (007009,"Data Science",1),
(007010,"Communication Skills",3), (007010,"Time Management",3), (007010,"Data Science",2), (007010,"Problem Solving",2);

-- Skill catalogue.
insert into Skills (skillName) VALUES
("Communication Skills"), ("Passion in learning"), ("Time Management"),
("Problem Solving"), ("Data Science"), ("Kinesiology"), ("User Studies"),
("HTTP"), ("Hadoop"), ("NoSQL"), ("Machine Learning"), ("Validation"),
("C"), ("Perl"), ("Spanish"), ("SQL"), ("Python"), ("Swift"),
("Analytics"), ("Java"), ("C++"), ("JavaScript"), ("HTML"), ("Clang"),
("Teaching"), ("Responsible"), ("R"), ("Teamwork"), ("relational databases"),
("UI"), ("NLP"), ("Linux"), ("Android/iOS"), ("nursing"), ("Matlab"),
("Tableau"), ("CPA"), ("Tax Software Programs"), ("Microsoft Offices"),
("Leadership"), ("State License / Certification"), ("Medical Degree"),
("Patience"), ("Compassion"), ("Organizational"), ("Written Skills"),
("Multitasking"), ("Presentation");

-- Student skill proficiencies, grouped one student per line.
insert INTO Skilled (UIN, skillName, proficiency) VALUES
(656966152, "C++", 3), (656966152, "SQL", 3), (656966152, "NoSQL", 3),
(675513612, "Microsoft Offices", 4), (675513612, "Analytics", 3), (675513612, "R", 4), (675513612, "SQL", 1), (675513612, "Communication Skills", 3), (675513612, "Teamwork", 4), (675513612, "Leadership", 2),
(674425016, "R", 3), (674425016, "Problem Solving", 4), (674425016, "Matlab", 4),
(663626291, "Microsoft Offices", 4), (663626291, "Analytics", 2), (663626291, "R", 3), (663626291, "SQL", 2), (663626291, "Communication Skills", 4), (663626291, "Teamwork", 4), (663626291, "Leadership", 3),
(650621898, "C", 2),
(658738942, "Leadership", 4),
(663636408, "Microsoft Offices", 4), (663636408, "Analytics", 3), (663636408, "R", 3), (663636408, "Communication Skills", 3), (663636408, "Teamwork", 4), (663636408, "Leadership", 2),
(659099777, "Python", 3), (659099777, "Java", 3), (659099777, "C", 3), (659099777, "C++", 2), (659099777, "R", 3), (659099777, "NLP", 2),
(677584175, "R", 3), (677584175, "SQL", 2), (677584175, "Microsoft Offices", 4), (677584175, "Teamwork", 4), (677584175, "Communication Skills", 3), (677584175, "Analytics", 3),
(661561134, "Teamwork", 3), (661561134, "Microsoft Offices", 3), (661561134, "Communication Skills", 3),
(671701797, "Microsoft Offices", 4), (671701797, "Teamwork", 3), (671701797, "Analytics", 2), (671701797, "Communication Skills", 3), (671701797, "Leadership", 2),
(659460467, "Writing", 4), (659460467, "Social Media", 4), (659460467, "Communication Skills", 5),
(668786319, "Python", 3), (668786319, "Android/iOS", 2), (668786319, "Matlab", 4),
(665975450, "Microsoft Offices", 5), (665975450, "Communication Skills", 4), (665975450, "Leadership", 4),
(670855078, "Python", 3),
(668147079, "Microsoft Offices", 4), (668147079, "Communication Skills", 2), (668147079, "Teamwork", 4), (668147079, "Leadership", 3),
(654310580, "Python", 3), (654310580, "Java", 4), (654310580, "C++", 3), (654310580, "SQL", 2), (654310580, "R", 2);

insert INTO Students (UIN, studentName, Gender, Standing, Major, GPA) VALUES
(656966152, "Cheng Lu", "Male", "Master", "Industrial Engineering", 3.85),
(675513612, "Zhongyu Wu", "Female", "Senior", "Stat & Econ", 3.89),
(674425016, "Chenxi Yan", "Male", "PhD", "Mechanical engineering", 3.87),
(663626291, "Qiuning Ge", "Female", "Senior", "Statistics", 3.74),
(650621898, "Jinrui Hu", "Male", "Freshman", "ECE", 3.95),
(658738942, "Zhendong Hou", "Male", "Freshman", "Undeclared", 3.55),
(663636408, "Hao Wang", "Female", "Junior", "Statistics", 3.97),
(659099777, "Yu Hou", "Female", "Junior", "Stat+CS", 3.91),
(677584175, "Han Xiao", "Female", "Senior", "Stat & Econ", 3.81),
(661561134, "Duoduo Qi", "Female", "Junior", "Psychology & Economics", 3.93),
(671701797, "Nanxi Chen", "Female", "Freshman", "Undeclared", 3.85),
(659460467, "Yubo Xia", "Male", "Freshman", "Undeclared", 3.81),
(668786319, "Peter", "Male", "Freshman", "engineering physics", 4.0),
(665975450, "Jiashu Luo", "Male", "Freshman", "Undeclared", 3.61),
(670855078, "Jack", "Male", "Freshman", "Undeclared", 3.7),
(668147079, "Song Wang", "Male", "Sophomore", "ACE", 3.78),
(654310580, "Yuchen Zeng", "Female", "Junior", "Stat+CS", 3.7);
-- Read permission on the view for user usr_relatorio.
-- Fix: the original had this comment on the SAME line as the GRANT, which
-- commented the statement out entirely; the comment now has its own line
-- (and is translated from Portuguese).
GRANT SELECT ON TABLE public.vw_mat_lista TO usr_relatorio;
-- Seed data for `orderitem`: 128 rows, one statement per row as in the
-- original load script.
-- NOTE(review): INSERT without a column list — the 5-value order is assumed
-- to match the table's column order; confirm against the `orderitem` DDL.
INSERT INTO `orderitem` VALUES (1, 3, 249, 1, 71);
INSERT INTO `orderitem` VALUES (2, 1, 358, 2, 51);
INSERT INTO `orderitem` VALUES (3, 1, 18000, 3, 94);
INSERT INTO `orderitem` VALUES (4, 1, 119, 4, 5);
INSERT INTO `orderitem` VALUES (5, 1, 358, 5, 51);
INSERT INTO `orderitem` VALUES (6, 1, 299, 5, 57);
INSERT INTO `orderitem` VALUES (7, 2, 598, 6, 57);
INSERT INTO `orderitem` VALUES (8, 1, 299, 7, 57);
INSERT INTO `orderitem` VALUES (9, 1, 299, 9, 57);
INSERT INTO `orderitem` VALUES (10, 1, 18000, 11, 94);
INSERT INTO `orderitem` VALUES (11, 1, 358, 12, 42);
INSERT INTO `orderitem` VALUES (12, 1, 299, 13, 57);
INSERT INTO `orderitem` VALUES (13, 1, 9900, 14, 11);
INSERT INTO `orderitem` VALUES (14, 2, 598, 15, 57);
INSERT INTO `orderitem` VALUES (15, 1, 343, 16, 81);
INSERT INTO `orderitem` VALUES (16, 1, 299, 16, 57);
INSERT INTO `orderitem` VALUES (17, 2, 686, 17, 81);
INSERT INTO `orderitem` VALUES (18, 1, 34, 18, 87);
INSERT INTO `orderitem` VALUES (19, 2, 358, 19, 51);
INSERT INTO `orderitem` VALUES (20, 1, 114, 20, 1);
INSERT INTO `orderitem` VALUES (21, 1, 114, 23, 1);
INSERT INTO `orderitem` VALUES (22, 1, 114, 25, 1);
INSERT INTO `orderitem` VALUES (23, 2, 228, 26, 1);
INSERT INTO `orderitem` VALUES (24, 1, 343, 27, 81);
INSERT INTO `orderitem` VALUES (25, 4, 598, 28, 57);
INSERT INTO `orderitem` VALUES (26, 1, 149.5, 29, 57);
INSERT INTO `orderitem` VALUES (27, 1, 343, 30, 81);
INSERT INTO `orderitem` VALUES (28, 1, 149.5, 30, 57);
INSERT INTO `orderitem` VALUES (29, 2, 686, 31, 81);
INSERT INTO `orderitem` VALUES (30, 1, 343, 33, 81);
INSERT INTO `orderitem` VALUES (31, 1, 99, 34, 21);
INSERT INTO `orderitem` VALUES (32, 1, 343, 35, 81);
INSERT INTO `orderitem` VALUES (33, 1, 343, 37, 81);
INSERT INTO `orderitem` VALUES (34, 1, 149.5, 37, 57);
INSERT INTO `orderitem` VALUES (35, 1, 16200, 38, 94);
INSERT INTO `orderitem` VALUES (36, 1, 149.5, 40, 57);
INSERT INTO `orderitem` VALUES (37, 1, 343, 40, 81);
INSERT INTO `orderitem` VALUES (38, 1, 41.5, 42, 68);
INSERT INTO `orderitem` VALUES (39, 1, 343, 44, 81);
INSERT INTO `orderitem` VALUES (40, 1, 16200, 45, 94);
INSERT INTO `orderitem` VALUES (41, 1, 16200, 46, 94);
INSERT INTO `orderitem` VALUES (42, 2, 32400, 47, 94);
INSERT INTO `orderitem` VALUES (43, 1, 149.5, 49, 57);
INSERT INTO `orderitem` VALUES (44, 1, 16200, 50, 94);
INSERT INTO `orderitem` VALUES (45, 1, 343, 52, 81);
INSERT INTO `orderitem` VALUES (46, 1, 149.5, 54, 57);
INSERT INTO `orderitem` VALUES (47, 1, 16200, 55, 94);
INSERT INTO `orderitem` VALUES (48, 1, 149.5, 57, 57);
INSERT INTO `orderitem` VALUES (49, 2, 83, 59, 68);
INSERT INTO `orderitem` VALUES (50, 1, 41.5, 61, 68);
INSERT INTO `orderitem` VALUES (51, 10, 1495, 63, 57);
INSERT INTO `orderitem` VALUES (52, 1, 41.5, 65, 68);
INSERT INTO `orderitem` VALUES (53, 1, 41.5, 67, 68);
INSERT INTO `orderitem` VALUES (54, 2, 83, 69, 68);
INSERT INTO `orderitem` VALUES (55, 1, 41.5, 71, 68);
INSERT INTO `orderitem` VALUES (56, 1, 41.5, 72, 68);
INSERT INTO `orderitem` VALUES (57, 1, 41.5, 73, 68);
INSERT INTO `orderitem` VALUES (58, 1, 41.5, 75, 68);
INSERT INTO `orderitem` VALUES (59, 2, 83, 77, 68);
INSERT INTO `orderitem` VALUES (60, 1, 41.5, 79, 68);
INSERT INTO `orderitem` VALUES (61, 1, 41.5, 81, 68);
INSERT INTO `orderitem` VALUES (62, 1, 41.5, 83, 68);
INSERT INTO `orderitem` VALUES (63, 1, 41.5, 85, 98);
INSERT INTO `orderitem` VALUES (64, 1, 343, 85, 81);
INSERT INTO `orderitem` VALUES (65, 1, 149.5, 85, 57);
INSERT INTO `orderitem` VALUES (66, 10, 3430, 87, 81);
INSERT INTO `orderitem` VALUES (67, 2, 299, 89, 57);
INSERT INTO `orderitem` VALUES (68, 1, 149.5, 91, 56);
INSERT INTO `orderitem` VALUES (69, 1, 179, 93, 45);
INSERT INTO `orderitem` VALUES (70, 2, 32400, 94, 94);
INSERT INTO `orderitem` VALUES (71, 1, 114, 96, 1);
INSERT INTO `orderitem` VALUES (72, 2, 172, 98, 2);
INSERT INTO `orderitem` VALUES (73, 2, 119, 100, 3);
INSERT INTO `orderitem` VALUES (74, 2, 119, 102, 4);
INSERT INTO `orderitem` VALUES (75, 2, 119, 104, 5);
INSERT INTO `orderitem` VALUES (76, 1, 59.5, 106, 9);
INSERT INTO `orderitem` VALUES (77, 1, 79, 108, 10);
INSERT INTO `orderitem` VALUES (78, 1, 4950, 110, 11);
INSERT INTO `orderitem` VALUES (79, 1, 168, 112, 8);
INSERT INTO `orderitem` VALUES (80, 1, 59.5, 114, 3);
INSERT INTO `orderitem` VALUES (81, 1, 4950, 116, 12);
INSERT INTO `orderitem` VALUES (82, 1, 16200, 118, 94);
INSERT INTO `orderitem` VALUES (83, 1, 114, 119, 1);
INSERT INTO `orderitem` VALUES (84, 1, 114, 121, 1);
INSERT INTO `orderitem` VALUES (85, 198, 11781, 123, 4);
INSERT INTO `orderitem` VALUES (86, 1, 59.5, 125, 4);
INSERT INTO `orderitem` VALUES (87, 1, 171.5, 126, 81);
INSERT INTO `orderitem` VALUES (88, 1, 171.5, 129, 81);
INSERT INTO `orderitem` VALUES (89, 1, 171.5, 130, 81);
INSERT INTO `orderitem` VALUES (90, 1, 171.5, 132, 81);
INSERT INTO `orderitem` VALUES (91, 2, 343, 134, 81);
INSERT INTO `orderitem` VALUES (92, 1, 343, 136, 104);
INSERT INTO `orderitem` VALUES (93, 30, 5145, 137, 104);
INSERT INTO `orderitem` VALUES (94, 2, 343, 139, 104);
INSERT INTO `orderitem` VALUES (95, 4, 686, 140, 81);
INSERT INTO `orderitem` VALUES (96, 1, 299, 142, 57);
INSERT INTO `orderitem` VALUES (97, 2, 598, 143, 57);
INSERT INTO `orderitem` VALUES (98, 1, 171.5, 143, 104);
INSERT INTO `orderitem` VALUES (99, 3, 514.5, 145, 81);
INSERT INTO `orderitem` VALUES (100, 1, 18000, 147, 94);
INSERT INTO `orderitem` VALUES (101, 2, 343, 148, 81);
INSERT INTO `orderitem` VALUES (102, 1, 171.5, 149, 81);
INSERT INTO `orderitem` VALUES (103, 4, 686, 150, 81);
INSERT INTO `orderitem` VALUES (104, 1, 343, 151, 104);
INSERT INTO `orderitem` VALUES (105, 1, 343, 152, 81);
INSERT INTO `orderitem` VALUES (106, 4, 1372, 153, 104);
INSERT INTO `orderitem` VALUES (107, 2, 686, 153, 81);
INSERT INTO `orderitem` VALUES (108, 4, 686, 154, 81);
INSERT INTO `orderitem` VALUES (109, 2, 343, 155, 81);
INSERT INTO `orderitem` VALUES (110, 1, 171.5, 156, 81);
INSERT INTO `orderitem` VALUES (111, 1, 171.5, 158, 81);
INSERT INTO `orderitem` VALUES (112, 2, 343, 159, 81);
INSERT INTO `orderitem` VALUES (113, 2, 343, 160, 104);
INSERT INTO `orderitem` VALUES (114, 2, 343, 162, 81);
INSERT INTO `orderitem` VALUES (115, 2, 343, 164, 81);
INSERT INTO `orderitem` VALUES (116, 2, 343, 164, 106);
INSERT INTO `orderitem` VALUES (117, 1, 358, 164, 51);
INSERT INTO `orderitem` VALUES (118, 5, 857.5, 165, 81);
INSERT INTO `orderitem` VALUES (119, 2, 343, 167, 81);
INSERT INTO `orderitem` VALUES (120, 2, 343, 168, 81);
INSERT INTO `orderitem` VALUES (121, 2, 343, 169, 81);
INSERT INTO `orderitem` VALUES (122, 4, 686, 171, 81);
INSERT INTO `orderitem` VALUES (123, 2, 343, 171, 86);
INSERT INTO `orderitem` VALUES (124, 2, 343, 172, 81);
INSERT INTO `orderitem` VALUES (125, 5, 1432, 176, 51);
INSERT INTO `orderitem` VALUES (126, 2, 343, 178, 81);
INSERT INTO `orderitem` VALUES (127, 2, 343, 181, 81);
INSERT INTO `orderitem` VALUES (128, 1, 171.5, 182, 81);
-- modified parts of crspmerge.sas -- see https://wrds-web.wharton.upenn.edu/wrds/support/code_show.cfm?path=CRSP/crspmerge.sas -- You need to load the string and replace -- {STAT_DATE} {END_DATE} {S} -- dates should be ISO-8601 format, YYYY-MM-DD (maybe?) -- {S}sf is the stock data -- {S}se is the events data -- {S}senames is further meta data? -- stock data select permco, permno, date, prc, shrout, ret, retx from crsp.{S}sf where date between '{STAT_DATE}' and '{END_DATE}' and permno in (select distinct permno from crsp.{S}senames WHERE '{END_DATE}' >= NAMEDT and '{STAT_DATE}' <= NAMEENDT) order by permno, date; -- event data select a.date, a.permno, a.exchcd from crsp.{S}se as a, (select distinct c.permno, min(c.namedt) as minnamedt from (select permno, namedt, nameendt from crsp.{S}senames WHERE '{END_DATE}'>= NAMEDT and '{STAT_DATE}'<= NAMEENDT) as c group by c.permno) as b where a.date >= b.minnamedt and a.date <= '{END_DATE}' and -- notice the not null check on exchcd a.permno = b.permno and a.exchcd IS NOT NULL order by a.permno, a.date;
CREATE TABLE users( username_hash character(64), password_hash character(64), session_id character(36) ); INSERT INTO users(username_hash, password_hash) VALUES ('4084b412a7bfd7bbca50812b0df4470e0413ad3335ac4af36b91c0d3d8172ff6', '4084b412a7bfd7bbca50812b0df4470e0413ad3335ac4af36b91c0d3d8172ff6');
create table EMPLOYEE as select j.emplid, min(p.first_name) as first_name, min(p.last_name) as last_name, min(e.email_addr) as email_addr, listagg(j.jobcode, ' | ') within group (order by j.jobcode) as job_code, listagg(j.DESCR, ' | ') within group (order by j.jobcode) as job_description, listagg(j.descrshort, ' | ') within group (order by j.jobcode) as job_abbr, listagg(j.JOB_FUNCTION, ' | ') within group (order by j.jobcode) as job_function, listagg(j.JOB_DESCR, ' | ') within group (order by j.jobcode) as job_title, listagg(j.GRADE, ' | ') within group (order by j.jobcode) as job_grade from CMSCOMMON.SFO_INSTR_JOBTYPE_INFO_MV j join CMSCOMMON.SFO_EF_PERSON_NAME_MV p on p.emplid=j.emplid join CMSCOMMON.SFO_EMAILADR_MV e on e.emplid=p.emplid and e.addr_type='OCMP' where (j.DEPTID like '3610' or j.DEPTID like '3611') and j.descrshort not like 'IF 12' group by j.emplid order by last_name, first_name
-- Oracle_48_Lab 6 (Exercises with Answers)_SET-Operators -------- --1. Produce a list of ALL rows showing order number by combining data from SALES and SALES_HISTORY table. SELECT * FROM SALES S,SALES_HISTORY H ORDER BY S.ORDER_ID,H.ORDER_ID; --2. Produce a list of DISTINCT rows from SALES and SALES_HISTORY table using column order number. SELECT DISTINCT(SALES.ORDER_ID) FROM SALES UNION SELECT DISTINCT(SALES_HISTORY.ORDER_ID) FROM SALES_HISTORY; --3. Produce a list of COMMON rows between SALES and SALES_HISTORY table using column order number. Select * from SALES intersect Select * from SALES_HISTORY where SALES_HISTORY.SALESPERSON_ID != SALES_HISTORY.SALESPERSON_ID; --4. Produce a list of rows which are present in SALES table and are not present in SALES_HISTORY table using -- column order number. SELECT * FROM SALES MINUS SELECT * FROM SALES_HISTORY;
create database ProjetoBanco use ProjetoBanco create table dados ( id int , nome varchar (50) ) select * from dados
CREATE FUNCTION hpx_nside(integer) RETURNS integer AS '$libdir/cdshealpix', 'hpx_nside' LANGUAGE C STRICT IMMUTABLE; CREATE FUNCTION hpx_hash(integer, double precision, double precision) RETURNS bigint AS '$libdir/cdshealpix', 'hpx_hash' LANGUAGE C STRICT IMMUTABLE; CREATE FUNCTION hpx_center(integer, bigint) RETURNS float8[] AS '$libdir/cdshealpix', 'hpx_center' LANGUAGE C STRICT IMMUTABLE;
Create Procedure mERP_SP_InsertRecdRptAbstract(@DocumentID nVarchar(100), @ReceivedDate nVarchar(255), @FromCompanyID nVarchar(255)) As Begin Insert Into tbl_mERP_RecdRptAckAbstract ( DocumentID, ReceivedDate, CompanyID) Values (@DocumentID, @ReceivedDate, @FromCompanyID) Select @@IDENTITY End
ALTER TABLE TYPE_EXPORT_ANALYTICS ADD COLUMN BOT_RELATED bool not null default false; comment on column TYPE_EXPORT_ANALYTICS.BOT_RELATED is 'Bot related'; UPDATE TYPE_EXPORT_ANALYTICS SET BOT_RELATED = true WHERE TEA_CD = 'UNKNOWN_MESSAGES'; UPDATE TYPE_EXPORT_ANALYTICS SET BOT_RELATED = false WHERE TEA_CD = 'SESSIONS';
-- Recreate and seed a `user` table with jazz-vocalist names as sample data.
-- NOTE(review): `user` is a reserved word in several engines (fine unquoted in
-- MySQL, but would need quoting in Postgres/MSSQL).
DROP TABLE IF EXISTS user; CREATE TABLE user ( uid INT AUTO_INCREMENT PRIMARY KEY, name VARCHAR(250) NOT NULL, last VARCHAR(250) NOT NULL );
-- Multi-row seed insert (first/last name pairs).
INSERT INTO user (name, last) VALUES ('Billie', 'Holiday'), ('Sarah', 'Vaughan'), ('Ella', 'Fitzgerald'), ('Nat King', 'Cole'), ('Louis', 'Armstrong'), ('Nina', 'Simone'), ('Dinah', 'Washington'), ('Frank', 'Sinatra'), ('Carmen', 'McRae'), ('Johnny', 'Hartman'), ('Etta', 'Jones'), ('Tony', 'Bennet'), ('Ernestine', 'Anderson'), ('Betty', 'Carter'), ('Diana', 'Krall'), ('Cassandra', 'Wilson'), ('Shirley', 'Horn'), ('Blossom', 'Dearie'), ('Nancy', 'Wilson'), ('Abbey', 'Lincoln'), ('Astrud', 'Gilberto'), ('Chet', 'Baker'), ('Billy', 'Eckstine'), ('Peggy', 'Lee'), ('Chris', 'Connor'), ('June', 'Christy'), ('Etta', 'James'), ('Freddy', 'Cole'), ('Ray', 'Charles'), ('Harry', 'Connick Jr'), ('Natalie', 'Cole'), ('Lee', 'Wiley'), ('Annie', 'Ross'), ('Jon', 'Hendricks'), ('Michael', 'Buble'), ('Mildred', 'Bailey'), ('Betty', 'Roche'), ('Mel', 'Torme'), ('Jeri', 'Southern'), ('Diane', 'Schuur'), ('Rosemary', 'Clooney'), ('Mark', 'Murphy'), ('Chaka', 'Khan'), ('Helen', 'Merril'), ('Carol', 'Sloane'), ('Dee Dee', 'Bridgewater'), ('Mary', 'Stallings'), ('Gloria', 'Lynne'), ('Jimmy', 'Rushing'), ('Lena', 'Horne'), ('Eartha', 'Kitt'), ('Al', 'Jarreau'), ('Ricki Lee', 'Jones'), ('Diane', 'Reeves'), ('George', 'Benson'), ('Alberta', 'Hunter'), ('Nneena', 'Freelon'), ('Jack', 'Teagarden'), ('Norah', 'Jones'), ('Stacy', 'Kent'), ('Ethel', 'Waters'), ('Karrin', 'Alyson'), ('Sheila', 'Jordan'), ('Jo', 'Stafford'), ('Ella Mae', 'Morse'), ('Eddie', 'Jefferson'), ('Oscar', 'Brown Jr'), ('Joe', 'Williams'), ('Teddy', 'Grace'), ('Ernie', 'Andrews'), ('Jimmy', 'Scott'), ('Fats', 'Waller'), ('Cab', 'Calloway'), ('Leon', 'Thomas'), ('Cleo', 'Laine'), ('Maxine', 'Sullivan'), ('Vanessa', 'Rubin'), ('Louis', 'Jordan'), ('Aaron', 'Neville'), ('Lou', 'Rawls'), ('Marlena', 'Shaw'), ('Ivie', 'Anderson'), ('Ruth', 'Brown'), ('King', 'Pleasure'), ('Jimmy', 'Witherspoon'), ('John', 'Pizzarelli'), ('Johnny', 'Mercer'), ('Susannah', 'Mccorkle'), ('Madeleine', 'Peyroux'), ('Helen', 'Humes'), ('Lavern', 'Butler'), ('Connee', 'Boswell'), ('Bob', 'Dorough'), ('Jamie', 'Cullum'), ('Kevin', 'Mahogany'), ('Kurt', 'Elling'), ('Dr', 'John'), ('Big Joe', 'Turner');
CREATE DATABASE temp_relaciones_uno_a_uno_cascada CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; use temp_relaciones_uno_a_uno_cascada; CREATE TABLE user ( id int(11) NOT NULL AUTO_INCREMENT, username varchar(255) DEFAULT NULL, password varchar(255) DEFAULT NULL, PRIMARY KEY (id) )ENGINE = INNODB; CREATE TABLE admins ( user_id int(11) NOT NULL AUTO_INCREMENT, `e-mail` varchar(255) DEFAULT NULL, PRIMARY KEY (user_id) )ENGINE = INNODB; ALTER TABLE user add CONSTRAINT FK_user_admin_user_id FOREIGN KEY (id) REFERENCES admins (user_id) ON DELETE CASCADE ON UPDATE CASCADE; ALTER TABLE admins add CONSTRAINT FK_admin_user_id FOREIGN KEY (user_id) REFERENCES user (id) ON DELETE CASCADE ON UPDATE CASCADE; SET FOREIGN_KEY_CHECKS = 0; INSERT INTO user VALUES(1,'user','Berruezin23'); INSERT INTO user VALUES(2,'editor','Berruezin23'); INSERT INTO user VALUES(3,'admin','Berruezin23'); INSERT INTO admins VALUES(1,'davidberruezo@davidberruezo.com'); INSERT INTO admins VALUES(2,'hola@davidberruezo.com'); INSERT INTO admins VALUES(3,'adios@davidberruezo.com'); # Test delete on cascade SET FOREIGN_KEY_CHECKS = 1; delete from user where id = 3; SET FOREIGN_KEY_CHECKS = 0; INSERT INTO user VALUES(3,'admin','Berruezin23'); INSERT INTO admins VALUES(3,'adios@davidberruezo.com'); SET FOREIGN_KEY_CHECKS = 1;
-- Expense report pivoted by month: per expense type (tipo_despesa), total
-- count/value plus a count/value pair for each month of 2019, computed via
-- conditional SUM(CASE ...) with half-open [first-of-month, first-of-next) ranges.
-- NOTE(review): the WHERE clause has a lower bound (>= '2019-01-01') but no
-- upper bound, so qtde_total/valor_total include any rows from 2020 onward
-- while the monthly columns ignore them -- confirm whether an upper bound
-- (dt_cnf_usr < '2020-01-01') was intended.
-- NOTE(review): st_dsp = 3 filters on an expense-status code; its meaning is
-- defined in the sc_cad.tbl_dmn domain table queried below.
select tdp.nm_tdp as tipo_despesa, count(*) as qtde_total, sum(dsp.vl_dsp) as valor_total,
sum(case when dsp.dt_cnf_usr >= '2019-01-01' and dsp.dt_cnf_usr < '2019-02-01' then 1 else 0 end) as qtde_jan,
sum(case when dsp.dt_cnf_usr >= '2019-01-01' and dsp.dt_cnf_usr < '2019-02-01' then dsp.vl_dsp else 0 end) as valor_jan,
sum(case when dsp.dt_cnf_usr >= '2019-02-01' and dsp.dt_cnf_usr < '2019-03-01' then 1 else 0 end) as qtde_fev,
sum(case when dsp.dt_cnf_usr >= '2019-02-01' and dsp.dt_cnf_usr < '2019-03-01' then dsp.vl_dsp else 0 end) as valor_fev,
sum(case when dsp.dt_cnf_usr >= '2019-03-01' and dsp.dt_cnf_usr < '2019-04-01' then 1 else 0 end) as qtde_mar,
sum(case when dsp.dt_cnf_usr >= '2019-03-01' and dsp.dt_cnf_usr < '2019-04-01' then dsp.vl_dsp else 0 end) as valor_mar,
sum(case when dsp.dt_cnf_usr >= '2019-04-01' and dsp.dt_cnf_usr < '2019-05-01' then 1 else 0 end) as qtde_abr,
sum(case when dsp.dt_cnf_usr >= '2019-04-01' and dsp.dt_cnf_usr < '2019-05-01' then dsp.vl_dsp else 0 end) as valor_abr,
sum(case when dsp.dt_cnf_usr >= '2019-05-01' and dsp.dt_cnf_usr < '2019-06-01' then 1 else 0 end) as qtde_mai,
sum(case when dsp.dt_cnf_usr >= '2019-05-01' and dsp.dt_cnf_usr < '2019-06-01' then dsp.vl_dsp else 0 end) as valor_mai,
sum(case when dsp.dt_cnf_usr >= '2019-06-01' and dsp.dt_cnf_usr < '2019-07-01' then 1 else 0 end) as qtde_jun,
sum(case when dsp.dt_cnf_usr >= '2019-06-01' and dsp.dt_cnf_usr < '2019-07-01' then dsp.vl_dsp else 0 end) as valor_jun,
sum(case when dsp.dt_cnf_usr >= '2019-07-01' and dsp.dt_cnf_usr < '2019-08-01' then 1 else 0 end) as qtde_jul,
sum(case when dsp.dt_cnf_usr >= '2019-07-01' and dsp.dt_cnf_usr < '2019-08-01' then dsp.vl_dsp else 0 end) as valor_jul,
sum(case when dsp.dt_cnf_usr >= '2019-08-01' and dsp.dt_cnf_usr < '2019-09-01' then 1 else 0 end) as qtde_ago,
sum(case when dsp.dt_cnf_usr >= '2019-08-01' and dsp.dt_cnf_usr < '2019-09-01' then dsp.vl_dsp else 0 end) as valor_ago,
sum(case when dsp.dt_cnf_usr >= '2019-09-01' and dsp.dt_cnf_usr < '2019-10-01' then 1 else 0 end) as qtde_set,
sum(case when dsp.dt_cnf_usr >= '2019-09-01' and dsp.dt_cnf_usr < '2019-10-01' then dsp.vl_dsp else 0 end) as valor_set,
sum(case when dsp.dt_cnf_usr >= '2019-10-01' and dsp.dt_cnf_usr < '2019-11-01' then 1 else 0 end) as qtde_out,
sum(case when dsp.dt_cnf_usr >= '2019-10-01' and dsp.dt_cnf_usr < '2019-11-01' then dsp.vl_dsp else 0 end) as valor_out,
sum(case when dsp.dt_cnf_usr >= '2019-11-01' and dsp.dt_cnf_usr < '2019-12-01' then 1 else 0 end) as qtde_nov,
sum(case when dsp.dt_cnf_usr >= '2019-11-01' and dsp.dt_cnf_usr < '2019-12-01' then dsp.vl_dsp else 0 end) as valor_nov,
sum(case when dsp.dt_cnf_usr >= '2019-12-01' and dsp.dt_cnf_usr < '2020-01-01' then 1 else 0 end) as qtde_dez,
sum(case when dsp.dt_cnf_usr >= '2019-12-01' and dsp.dt_cnf_usr < '2020-01-01' then dsp.vl_dsp else 0 end) as valor_dez
from sc_cap.tbl_dsp dsp inner join sc_cap.tbl_tdp tdp on dsp.cd_tdp = tdp.cd_tdp
where dsp.dt_cnf_usr >= '2019-01-01' and dsp.st_dsp = 3
group by tdp.nm_tdp order by tipo_despesa;
-- Lookup: domain values for the ST_DSP status column (interprets st_dsp = 3 above).
select * from sc_cad.tbl_dmn where nm_cmp_dmn = 'ST_DSP';
-- Ad-hoc inspection of the attendance(?) table -- NOTE(review): confirm tbl_atd's contents.
select * from sc_cap.tbl_atd
CREATE TABLE IF NOT EXISTS users( id SERIAL PRIMARY KEY, username text, password text ); CREATE TABLE IF NOT EXISTS tweets( id SERIAL PRIMARY KEY, username text, tweet text );
-- mysqldump-style schema dump for point_vente / visite / situation / produit /
-- etape. The /*!NNNNN ... */ wrappers are MySQL conditional comments: executed
-- only by servers of at least version NNNNN, ignored as comments elsewhere.
-- The leading SETs save session state and disable unique/FK checks so tables
-- can be created in any order; the trailing SETs restore everything.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
-- Point of sale: owned by a client (user_id FK); geo fields are mixed types
-- (latitude varchar vs longitude decimal) -- NOTE(review): likely unintended.
DROP TABLE IF EXISTS `point_vente`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `point_vente` ( `id` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `user_id` int(11) DEFAULT NULL, `nom` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `type` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `nomGerant` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `telGerant` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `tel` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `pays` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `ville` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `adresse` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `quartier` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `description` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `latitude` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `longitude` decimal(10,6) DEFAULT NULL, `date` date DEFAULT NULL, `created_at` date DEFAULT NULL, PRIMARY KEY (`id`), KEY `IDX_2BBFAADFA76ED395` (`user_id`), CONSTRAINT `FK_2BBFAADFA76ED395` FOREIGN KEY (`user_id`) REFERENCES `client` (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Visit record for a point of sale; boolean-ish tinyint(1) flags per checklist
-- item. NOTE(review): user_id has an index but no FK constraint here.
DROP TABLE IF EXISTS `visite`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `visite` ( `id` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `point_vente_id` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `user_id` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `map` tinyint(1) DEFAULT NULL, `pre` tinyint(1) DEFAULT NULL, `aff` tinyint(1) DEFAULT NULL, `exc` tinyint(1) DEFAULT NULL, `vpt` tinyint(1) DEFAULT NULL, `sapp` tinyint(1) DEFAULT NULL, `commentaire` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `fp` int(11) DEFAULT NULL, `rpp` tinyint(1) DEFAULT NULL, `rpd` tinyint(1) DEFAULT NULL, `date` date NOT NULL, `week` int(11) NOT NULL, `week_text` varchar(255) COLLATE utf8_unicode_ci NOT NULL, PRIMARY KEY (`id`), KEY `IDX_B09C8CBBEFA24D68` (`point_vente_id`), KEY `IDX_B09C8CBBA76ED395` (`user_id`), CONSTRAINT `FK_B09C8CBBEFA24D68` FOREIGN KEY (`point_vente_id`) REFERENCES `point_vente` (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Per-product situation observed during a visit (stock levels, display flags).
DROP TABLE IF EXISTS `situation`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `situation` ( `id` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `produit_id` int(11) NOT NULL, `visite_id` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `map` tinyint(1) DEFAULT NULL, `pre` tinyint(1) DEFAULT NULL, `aff` tinyint(1) DEFAULT NULL, `rpp` tinyint(1) DEFAULT NULL, `rpd` tinyint(1) DEFAULT NULL, `stock` int(11) DEFAULT NULL, `stockg` int(11) DEFAULT NULL, `mvj` int(11) DEFAULT NULL, `ecl` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `IDX_EC2D9ACAF347EFB` (`produit_id`), KEY `IDX_EC2D9ACAC1C5DC59` (`visite_id`), CONSTRAINT `FK_EC2D9ACAC1C5DC59` FOREIGN KEY (`visite_id`) REFERENCES `visite` (`id`), CONSTRAINT `FK_EC2D9ACAF347EFB` FOREIGN KEY (`produit_id`) REFERENCES `produit` (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Product; concurent_id is a self-reference (its competing product).
DROP TABLE IF EXISTS `produit`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `produit` ( `id` int(11) NOT NULL AUTO_INCREMENT, `concurent_id` int(11) DEFAULT NULL, `nom` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `dossier` varchar(255) COLLATE utf8_unicode_ci NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `UNIQ_29A5EC276C6E55B5` (`nom`), UNIQUE KEY `UNIQ_29A5EC27D1D4B111` (`concurent_id`), CONSTRAINT `FK_29A5EC27D1D4B111` FOREIGN KEY (`concurent_id`) REFERENCES `produit` (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Itinerary step; suivant_id is a unique self-reference (next step),
-- forming a linked list of steps per user.
DROP TABLE IF EXISTS `etape`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `etape` ( `id` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `user_id` int(11) DEFAULT NULL, `suivant_id` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `date` date NOT NULL, `heure` time NOT NULL, `latitude` decimal(10,6) DEFAULT NULL, `longitude` decimal(5,2) DEFAULT NULL, `type` varchar(255) COLLATE utf8_unicode_ci NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `UNIQ_285F75DD9C2BB0CC` (`suivant_id`), KEY `IDX_285F75DDA76ED395` (`user_id`), CONSTRAINT `FK_285F75DD9C2BB0CC` FOREIGN KEY (`suivant_id`) REFERENCES `etape` (`id`), CONSTRAINT `FK_285F75DDA76ED395` FOREIGN KEY (`user_id`) REFERENCES `client` (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
-- Restore the session state saved at the top of the dump.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- phpMyAdmin SQL Dump
-- version 4.9.3
-- https://www.phpmyadmin.net/
--
-- Host: localhost:8889
-- Generation Time: Nov 27, 2020 at 10:16 AM
-- Server version: 5.7.26
-- PHP Version: 7.4.2
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `db_coopers`
--
-- --------------------------------------------------------
--
-- Table structure for table `tbl_cars`
--
-- NOTE(review): `price` is a display string ('FROM £20,830*'), not a numeric
-- column -- it cannot be sorted or aggregated numerically as stored.
CREATE TABLE `tbl_cars` ( `ID` int(11) NOT NULL, `model` varchar(30) NOT NULL, `price` varchar(20) NOT NULL, `description` text NOT NULL, `features` varchar(350) NOT NULL, `img1` varchar(20) NOT NULL, `img2` varchar(20) NOT NULL, `img3` varchar(20) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
--
-- Dumping data for table `tbl_cars`
--
-- Text columns embed literal \r\n sequences and unicode (✓, £) for display.
INSERT INTO `tbl_cars` (`ID`, `model`, `price`, `description`, `features`, `img1`, `img2`, `img3`) VALUES (1, 'MINI CONVERTIBLE', 'FROM £20,830*', 'STAY OPEN.\r\nOpen-aired, open-ended adventure, with an electric roof that opens in just 18 seconds.', '✓ Fully-electric roof\r\n✓ Rear Parking Distance Control\r\n✓ Full front and rear LED Lights', 'miniconvertible.jpg', 'miniconvertible1.jpg', 'miniconvertible2.jpg'), (2, 'MINI 3-DOOR HATCH', 'FROM £16,400*', 'EXPLORE EVERY CORNER.\r\nMaster of the city, complete with iconic go-kart feeling and low-centre-of-gravity handling.\r\n\r\nVoted Auto Trader’s Most Fun Car to Drive 2019.\r\n\r\n', '✓ Full & Front rear LED Lights\r\n✓ 6.5” Colour Infotainment Screen\r\n✓ Intelligent Emergency call (MINI E-call)\r\n✓ DAB Digital Tuner\r\n✓ Bluetooth Connectivity\r\n\r\n', 'mini3.jpg', 'mini3-1.jpg', 'mini3-2.jpg'), (3, 'MINI 5-DOOR HATCH', 'FROM £17,100*', 'MORE ROOM TO FIT MORE IN.\r\nAll the hallmarks of a classic MINI – from legendary design to iconic go-kart handling – with space to fit more in. \r\nVoted Auto Trader’s Most Fun Car to Drive 2019.', '✓ Full & Front rear LED Lights\r\n✓ 6.5” Colour Infotainment Screen\r\n✓ Intelligent Emergency Call (MINI E-call)', 'mini5.jpg', 'mini5-1.jpg', 'mini5-2.jpg');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `tbl_cars`
--
ALTER TABLE `tbl_cars` ADD PRIMARY KEY (`ID`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `tbl_cars`
--
ALTER TABLE `tbl_cars` MODIFY `ID` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
-- Restore session character-set/collation state saved above.
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- Seed script for a school database (teachers, subjects, courses, students).
-- Each section starts with a sanity SELECT; the `#DELETE ...` fragments are
-- commented-out cleanup statements kept for manual reruns. Inline `#` labels
-- quote the Spanish-language data values they describe and are left verbatim.
SELECT * FROM maestro; #DELETE FROM maestro;
INSERT INTO maestro (idMaestro, idAsignatura) VALUES (1, 131); #Matemáticas 3ºA
INSERT INTO maestro (idMaestro, idAsignatura) VALUES (1, 211); #Conocimiento del medio 1ºA
INSERT INTO maestro (idMaestro, idAsignatura) VALUES (1, 441); #Religiones 4ºA
INSERT INTO maestro (idMaestro, idAsignatura) VALUES (1, 331); #Lengua Castellana 3ºA
INSERT INTO maestro (idMaestro, idAsignatura) VALUES (2, 641); #Historia 2ºA
#DELETE FROM maestro WHERE (idMaestro = 1 and idAsignatura = 211);
-- Subjects: idAsignatura encodes subject digit + year + group; optativa = elective flag.
SELECT * FROM asignatura; #DELETE FROM asignatura;
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (111, "Matemáticas 1ºA", 0); # 1--> matemáticas 1ºA
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (121, "Matemáticas 2ºA", 0); # 1--> matemáticas 2ºA
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (131, "Matemáticas 3ºA", 0); # 1--> matemáticas 3ºA
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (132, "Matemáticas 3ºB", 0); # 1--> matemáticas 3ºB
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (133, "Matemáticas 3ºC", 0); # 1--> matemáticas 3ºC
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (211, "Conocimiento del medio 1ºA", 0); # 2--> conocimiento del medio 1ºA
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (331, "Lengua Castellana 3ºA", 0); # 3 --> lengua 3ºA
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (441, "Religiones 4ºA", 1); # 4 --> religiones 4ºA, optativa
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (521, "Educación física 2ºA", 0); # 5-->educación física 2ºA
INSERT INTO asignatura (idAsignatura, nombre, optativa) VALUES (641, "Historia 4ºA", 0); # 6 --> historia 4ºA
-- Courses (year + group); idCurso = year digit + group digit.
SELECT * FROM curso; #DELETE FROM curso;
INSERT INTO curso (idCurso, nombre) VALUES (11, "1ºA");
INSERT INTO curso (idCurso, nombre) VALUES (21, "2ºA");
INSERT INTO curso (idCurso, nombre) VALUES (31, "3ºA");
INSERT INTO curso (idCurso, nombre) VALUES (32, "3ºB");
INSERT INTO curso (idCurso, nombre) VALUES (33, "3ºC");
INSERT INTO curso (idCurso, nombre) VALUES (41, "4ºA");
-- Junction table: which subjects are taught in which course.
SELECT * FROM asignaturasencurso; #DELETE FROM asignaturasencurso;
INSERT INTO asignaturasencurso (idCurso, idAsignatura) VALUES (31, 131); #3ºA --> matemáticas
INSERT INTO asignaturasencurso (idCurso, idAsignatura) VALUES (32, 132); #3ºB --> matemáticas
INSERT INTO asignaturasencurso (idCurso, idAsignatura) VALUES (33, 133); #3ºC --> matemáticas
INSERT INTO asignaturasencurso (idCurso, idAsignatura) VALUES (31, 331); #3ºA --> lengua
INSERT INTO asignaturasencurso (idCurso, idAsignatura) VALUES (41, 641); #4ºA --> historia
INSERT INTO asignaturasencurso (idCurso, idAsignatura) VALUES (41, 441); #4ºA --> religión
INSERT INTO asignaturasencurso (idCurso, idAsignatura) VALUES (11, 111); #1ºA --> matemáticas
INSERT INTO asignaturasencurso (idCurso, idAsignatura) VALUES (11, 211); #1ºA --> conocimiento del medio
INSERT INTO asignaturasencurso (idCurso, idAsignatura) VALUES (21, 521); #2ºA --> ed.física
-- Students.
-- NOTE(review): fechaNacimiento values are DD/MM/YYYY strings; a MySQL DATE
-- column expects YYYY-MM-DD, so these would be rejected or zeroed -- confirm
-- the column's type (probably VARCHAR here).
SELECT * FROM alumno; #DELETE FROM alumno;
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (1, "Álex", "Tintor Seva", "03652145G", "26/09/1997");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (2, "Laura", "Río Jordan", "25632458A", "10/12/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (3, "Geadalu", "Grecia", "05965873B", "27/12/2001");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (4, "Alleria", "Brisaveloz", "05962684Q", "07/12/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (5, "Juan José", "Jiménez Prieto", "96542365Z", "26/09/1997");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (6, "Elwynn", "Pueblo Márquez", "02659874S", "14/05/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (7, "Raquel", "López Cantabria", "05632985J", "10/12/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (8, "Lucas", "Hernández Cobos", "06895465I", "10/05/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (9, "María José", "Castro Zurugaia", "06321569K", "01/12/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (10, "Ruth", "Campos Ramos", "06895642H", "05/03/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (11, "Diego", "Torres Casas", "03265987F", "09/05/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (12, "Michael", "Jordan", "03269432F", "06/10/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (13, "Varian", "Wrynn", "03653569B", "19/12/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (14, "Anduin", "Lothar", "02145459H", "30/03/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (15, "Inés", "Domínguez Vázquez", "06984563B", "01/01/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (16, "Pablo", "Ruiz Ciudad", "02145638F", "02/06/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (17, "Absol", "Moreno Santander", "06895312D", "03/04/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (18, "Angela", "Ziegler", "03697415F", "05/11/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (19, "Elena", "Alonso Romero", "03265945A", "06/11/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (20, "Ysera", "Muñoz Álvarez", "05632189M", "05/04/2000");
INSERT INTO alumno (idAlumno, nombre, apellidos, dni, fechaNacimiento) VALUES (21, "Alexstrasza", "Muñoz Álvarez", "03265955A", "10/10/2000");
SELECT * FROM prueba; #DELETE FROM prueba; 
-- Seed data continued: assessments (prueba), grades (nota), final grades
-- (notafinal), student-subject enrolment and competencies, plus sample joins.
-- peso = weight of the assessment; trimestre = term 1-3.
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (1, 131, "Examen 1 Matemáticas", "EX1MA", "01/01/2001", 1, 8);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (2, 131, "Examen 2 Matemáticas", "EX2MA", "01/01/2001", 1, 2);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (3, 131, "Examen 3 Matemáticas", "EX3MA", "01/01/2001", 2, 5);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (4, 131, "Examen 4 Matemáticas", "EX4MA", "01/01/2001", 2, 6);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (5, 131, "Examen 5 Matemáticas", "EX5MA", "01/01/2001", 3, 5.5);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (6, 131, "Examen 6 Matemáticas", "EX6MA", "01/01/2001", 3, 4);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (7, 131, "Prueba Monomios", "PRUMO", "01/01/2001", 2, 0);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (8, 131, "Trabajo Ecuaciones", "ECUAC", "01/01/2001", 3, 0.5);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (9, 641, "Examen 1 Historia", "EX1HI", "01/01/2001", 1, 5);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (10, 641, "Examen 2 Historia", "EX2HI", "01/01/2001", 2, 2);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (11, 641, "Examen 3 Historia", "EX3HI", "01/01/2001", 3, 3);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (12, 331, "Trabajo 1 Lengua", "EX1LE", "01/01/2001", 1, 1);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (13, 331, "Trabajo 2 Lengua", "EX2LE", "01/01/2001", 2, 2);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (14, 331, "Trabajo 3 Lengua", "EX3LE", "01/01/2001", 2, 3);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (15, 331, "Trabajo 4 Lengua", "EX4LE", "01/01/2001", 3, 1);
INSERT INTO prueba (idPrueba, idAsignatura, titulo, etiqueta, fecha, trimestre, peso) VALUES (16, 331, "Trabajo 5 Lengua", "EX5LE", "01/01/2001", 3, 3);
-- Individual grades per student/assessment, with free-text teacher comments.
SELECT * FROM nota; #DELETE FROM nota;
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (4, 1, 6.7, "Muy bien");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (4, 2, 6.4, "Un poco flojo ej. 3");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (4, 3, 5.7, "Necesita repasar");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (4, 4, 4.2, "");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (4, 5, 5, "");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (4, 6, 7.2, "");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (5, 1, 10, "");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (5, 2, 9.5, "Tiene potencial");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (5, 3, 10, "");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (5, 4, 9.3, "");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (5, 5, 9.8, "Creo que copió del compañero");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (5, 6, 10, "");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (13, 5, 6, "");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (13, 6, 6, "");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (14, 1, 10, "Mejor que el anterior");
INSERT INTO nota (idAlumno, idPrueba, nota, comentario) VALUES (14, 2, 10, "Sigue mejorando");
-- Sample queries (implicit comma joins): all grades with student + assessment,
-- then term-1 grades for student 4 written two equivalent ways.
SELECT * FROM alumno a, nota n, prueba p WHERE a.idAlumno = n.idAlumno AND n.idPrueba = p.idPrueba;
SELECT nota FROM nota n, prueba p, alumno a WHERE a.idAlumno = n.idAlumno AND n.idPrueba = p.idPrueba AND p.trimestre = 1 AND a.idAlumno = 4;
SELECT nota FROM nota n, prueba p, alumno a WHERE a.idAlumno = 4 AND a.idAlumno = n.idAlumno AND n.idPrueba = p.idPrueba AND p.trimestre = 1;
-- Final per-term and overall grades per student/subject.
SELECT * FROM notafinal; #DELETE FROM notafinal;
INSERT INTO notafinal (idAlumno, idAsignatura, notaTrimestre1, notaTrimestre2, notaTrimestre3, notaFinal, comentario) VALUES (4, 131, 10, 9, 9.5, 10, "¡Muy buenas notas!");
INSERT INTO notafinal (idAlumno, idAsignatura, notaTrimestre1, notaTrimestre2, notaTrimestre3, notaFinal, comentario) VALUES (5, 131, 5, 9, 4, 4, "Curso suspenso.");
INSERT INTO notafinal (idAlumno, idAsignatura, notaTrimestre1, notaTrimestre2, notaTrimestre3, notaFinal, comentario) VALUES (6, 131, 1, 7, 3, 5, "Conseguido.");
INSERT INTO notafinal (idAlumno, idAsignatura, notaTrimestre1, notaTrimestre2, notaTrimestre3, notaFinal, comentario) VALUES (7, 131, 2, 4, 4, 4, "Nota para el claustro: preguntar por repetición de curso");
-- Enrolment junction table: which student takes which subject.
SELECT * FROM alumnosporasignatura; #DELETE FROM alumnosporasignatura;
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (1, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (1, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (2, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (2, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (3, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (3, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (4, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (4, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (5, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (5, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (6, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (6, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (7, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (7, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (10, 133);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (11, 133);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (12, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (12, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (13, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (13, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (14, 131);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (14, 331);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (15, 211);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (16, 211);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (17, 211);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (18, 521);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (19, 521);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (20, 441);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (20, 641);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (21, 441);
INSERT INTO alumnosporasignatura (idAlumno, idAsignatura) VALUES (21, 641);
-- Competencies catalogue. The last INSERT below is cut off by the end of this
-- file chunk and continues beyond it.
SELECT * FROM competencia; #DELETE FROM competencia;
INSERT INTO competencia (idCompetencia, nombre, descripcion) VALUES (1, "Ecuaciones cuadráticas sencillas", "Comprender las ecuaciones cuadráticas y saber resolverlas.");
INSERT INTO competencia (idCompetencia, nombre, descripcion) VALUES (2, "Datos, poblaciones, estadística", "Comprender la estadística básica, qué es una población y qué ejercicios básicos se pueden hacer con los datos.");
INSERT INTO competencia (idCompetencia, nombre, descripcion) VALUES (3, "Teorema 
de Pitágoras", "Comprender y saber aplicar el Teorema de Pitágoras en triángulos rectángulos."); INSERT INTO competencia (idCompetencia, nombre, descripcion) VALUES (4, "Transformaciones en el plano", "Saber y comprender cómo se transpone una figura en un plano."); INSERT INTO competencia (idCompetencia, nombre, descripcion) VALUES (5, "Gráficas y tablas", "Saber identificar e interpretar los distintos tipos de gráficas y realizarlas con una tabla"); INSERT INTO competencia (idCompetencia, nombre, descripcion) VALUES (6, "Comunicación lingüística", "Comunicarse correctamente tanto verbalmente como por escrito."); INSERT INTO competencia (idCompetencia, nombre, descripcion) VALUES (7, "Conocimiento de la lengua", "Conocimiento de la lengua castellana"); SELECT * FROM competenciasporasignatura; INSERT INTO competenciasporasignatura (idCompetencia, idAsignatura) VALUES (1, 131); INSERT INTO competenciasporasignatura (idCompetencia, idAsignatura) VALUES (2, 131); INSERT INTO competenciasporasignatura (idCompetencia, idAsignatura) VALUES (3, 131); INSERT INTO competenciasporasignatura (idCompetencia, idAsignatura) VALUES (4, 131); INSERT INTO competenciasporasignatura (idCompetencia, idAsignatura) VALUES (5, 131); INSERT INTO competenciasporasignatura (idCompetencia, idAsignatura) VALUES (6, 331); INSERT INTO competenciasporasignatura (idCompetencia, idAsignatura) VALUES (7, 331); SELECT * FROM competenciasporprueba; INSERT INTO competenciasporprueba (idCompetencia, idPrueba) VALUES (1, 31); INSERT INTO competenciasporprueba (idCompetencia, idPrueba) VALUES (1, 34); INSERT INTO competenciasporprueba (idCompetencia, idPrueba) VALUES (2, 31); INSERT INTO competenciasporprueba (idCompetencia, idPrueba) VALUES (2, 33); INSERT INTO competenciasporprueba (idCompetencia, idPrueba) VALUES (3, 33); SELECT * FROM datossesion; #DELETE FROM datossesion; INSERT INTO datossesion (idMaestro, contraseña, nombre) VALUES (1, "1234", "Lucía Calzado"); INSERT INTO datossesion (idMaestro, 
contraseña, nombre) VALUES (2, "1234", "Montserrat Piedrabuena");
-- Staff and parking-lot schema.
CREATE TABLE employee (
    id INTEGER PRIMARY KEY,
    name VARCHAR(64) NOT NULL,
    -- FIX: the original used int(4); the integer display width never limited
    -- the stored range and is deprecated (removed in MySQL 8.0.17+), so plain
    -- INTEGER is equivalent.
    age INTEGER NOT NULL
);

-- NOTE(review): parkingBoyId looks like a foreign key to an employee /
-- parking-boy table not declared here -- confirm and add a FOREIGN KEY
-- constraint if that table exists.
CREATE TABLE parkingLot (
    id INTEGER PRIMARY KEY,
    availablePositionCount INTEGER NOT NULL,
    capacity INTEGER NOT NULL,
    parkingBoyId INTEGER NOT NULL
);
/* SIDE NOTE: compare query performance with EXPLAIN before and after indexing. */

/* accident_index is the join key between accident and vehicles; indexing it on
   both sides speeds up the joins below.
   FIX: the original gave BOTH indexes the same name (accident_index). That is
   legal in MySQL (per-table namespace) but collides on engines with a
   schema-wide index namespace (PostgreSQL, SQLite) and is confusing either
   way, so each index now carries its table name. */
CREATE INDEX accident_accident_index ON accident(accident_index);
CREATE INDEX vehicles_accident_index ON vehicles(accident_index);

/* Accident severity and total accidents per vehicle type.
   FIX: the original selected a.accident_severity but grouped only by the
   vehicle type (GROUP BY 1), which errors under ONLY_FULL_GROUP_BY and shows
   an indeterminate severity otherwise; both non-aggregate columns are now in
   the GROUP BY, and positional references are replaced with names. */
SELECT vt.vehicle_type AS 'Vehicle Type',
       a.accident_severity AS 'Severity',
       COUNT(vt.vehicle_type) AS 'Number of Accidents'
FROM accident a
JOIN vehicles v ON a.accident_index = v.accident_index
JOIN vehicle_types vt ON v.vehicle_type = vt.vehicle_code
GROUP BY vt.vehicle_type, a.accident_severity
ORDER BY a.accident_severity, COUNT(vt.vehicle_type);

/* Average severity by vehicle type. */
SELECT vt.vehicle_type AS 'Vehicle Type',
       AVG(a.accident_severity) AS 'Average Severity',
       COUNT(vt.vehicle_type) AS 'Number of Accidents'
FROM accident a
JOIN vehicles v ON a.accident_index = v.accident_index
JOIN vehicle_types vt ON v.vehicle_type = vt.vehicle_code
GROUP BY vt.vehicle_type
ORDER BY AVG(a.accident_severity), COUNT(vt.vehicle_type);

/* Average severity and total accidents for motorcycles only.
   NOTE: the leading-wildcard LIKE cannot use an index; acceptable here because
   vehicle_types is a small lookup table. */
SELECT vt.vehicle_type AS 'Vehicle Type',
       AVG(a.accident_severity) AS 'Average Severity',
       COUNT(vt.vehicle_type) AS 'Number of Accidents'
FROM accident a
JOIN vehicles v ON a.accident_index = v.accident_index
JOIN vehicle_types vt ON v.vehicle_type = vt.vehicle_code
WHERE vt.vehicle_type LIKE '%otorcycle%'
GROUP BY vt.vehicle_type
ORDER BY AVG(a.accident_severity), COUNT(vt.vehicle_type);
-- spGetAllEntries: return every row from Vowels.
DROP PROCEDURE IF EXISTS spGetAllEntries;
DELIMITER //
CREATE PROCEDURE spGetAllEntries()
BEGIN
    SELECT * FROM Vowels;
END //
DELIMITER ;

-- spGetEntry: return the Vowels row matching the given id.
-- FIX: the original parameters were named exactly like the table columns
-- (idVowels, text, ...). Inside a stored routine MySQL resolves an unqualified
-- name to the routine parameter, a documented source of silent bugs; the p_
-- prefix removes the ambiguity. Callers pass arguments positionally, so the
-- rename is backward compatible.
DROP PROCEDURE IF EXISTS spGetEntry;
DELIMITER //
CREATE PROCEDURE spGetEntry(IN p_idVowels VARCHAR(256))
BEGIN
    SELECT * FROM Vowels WHERE Vowels.idVowels = p_idVowels;
END //
DELIMITER ;

-- spInsertEntry: insert a Vowels row keyed by a fresh UUID, or update the
-- existing row when a key collides.
DROP PROCEDURE IF EXISTS spInsertEntry;
DELIMITER //
CREATE PROCEDURE spInsertEntry(
    IN p_idUsers VARCHAR(256),
    IN p_text VARCHAR(256),
    IN p_totalVowels INT,
    IN p_lineData VARCHAR(5000)
)
BEGIN
    -- A fresh UUID is generated for idVowels, so the ON DUPLICATE KEY branch
    -- can only fire on some OTHER unique key of Vowels (presumably idUsers)
    -- -- confirm such a key exists, otherwise the upsert branch is dead code.
    INSERT INTO Vowels (Vowels.idVowels, Vowels.idUsers, Vowels.text, Vowels.totalVowels, Vowels.lineData, Vowels.dateUpdated)
    VALUES (UUID(), p_idUsers, p_text, p_totalVowels, p_lineData, NOW())
    ON DUPLICATE KEY UPDATE
        Vowels.text = p_text,
        Vowels.totalVowels = p_totalVowels,
        Vowels.lineData = p_lineData,
        Vowels.dateUpdated = NOW();
END //
DELIMITER ;
-- phpMyAdmin SQL Dump
-- version 5.1.1
-- https://www.phpmyadmin.net/
--
-- Host: localhost:8889
-- Generated: 2021-10-06 13:37
-- Server version: 5.7.34
-- PHP version: 7.4.21

SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;

--
-- Database: `gs_db`
--

--
-- Table structure for table `gs_user_table`
--
CREATE TABLE `gs_user_table` (
  `id` int(12) NOT NULL,
  `name` varchar(64) NOT NULL,
  `lid` varchar(128) NOT NULL,
  `lpw` varchar(64) NOT NULL,
  `kanri_flg` int(1) NOT NULL,
  `life_flg` int(1) NOT NULL,
  `indate` datetime NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumped data for table `gs_user_table`
-- NOTE(review): lpw appears to hold plain-text passwords -- should be hashed.
--
INSERT INTO `gs_user_table` (`id`, `name`, `lid`, `lpw`, `kanri_flg`, `life_flg`, `indate`) VALUES
(1, 'test1', '11111', 'aaaaaaa', 0, 0, '2021-10-05 21:16:17'),
(3, 'erksdughrt', '33333', 'cccccc', 1, 1, '2021-10-05 21:50:47');

--
-- Indexes for table `gs_user_table`
--
ALTER TABLE `gs_user_table` ADD PRIMARY KEY (`id`);

--
-- AUTO_INCREMENT for table `gs_user_table`
--
ALTER TABLE `gs_user_table` MODIFY `id` int(12) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
COMMIT;

/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- phpMyAdmin SQL Dump
-- version 2.10.3
-- http://www.phpmyadmin.net
--
-- Host: localhost
-- Generation Time: Feb 26, 2013 at 12:15 AM
-- Server version: 5.0.51
-- PHP Version: 5.2.6

SET SQL_MODE="NO_AUTO_VALUE_ON_ZERO";

--
-- Database: `db_buddies`
--

--
-- Table structure for table `tbl_buddies` (friendship pairs + status code)
--
CREATE TABLE `tbl_buddies` (
  `sender` varchar(45) NOT NULL,
  `receiver` varchar(45) NOT NULL,
  `status` int(10) unsigned NOT NULL,
  `message` varchar(45) default NULL,
  PRIMARY KEY USING BTREE (`sender`, `receiver`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;

INSERT INTO `tbl_buddies` VALUES ('prash', 'admin', 2, 'Nothing');
INSERT INTO `tbl_buddies` VALUES ('prash', 'shru6', 2, 'Nothing');
INSERT INTO `tbl_buddies` VALUES ('shru6', 'admin', 2, 'Nothing');

--
-- Table structure for table `tbl_offlinemsg` (messages queued while offline)
--
CREATE TABLE `tbl_offlinemsg` (
  `id` int(10) unsigned NOT NULL auto_increment,
  `msg_from` varchar(45) NOT NULL,
  `msg_to` varchar(45) NOT NULL,
  `msg` varchar(150) NOT NULL,
  `time` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP,
  PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=1 ;

--
-- Table structure for table `tbl_register` (user accounts)
-- NOTE(review): passwords (and the redundant `repeat` column) are stored in
-- plain text -- store a salted hash instead. Schema left unchanged because the
-- application depends on it.
--
CREATE TABLE `tbl_register` (
  `name` varchar(45) NOT NULL,
  `username` varchar(45) NOT NULL,
  `password` varchar(45) NOT NULL,
  `repeat` varchar(45) NOT NULL,
  `emailid` varchar(45) NOT NULL,
  `online` int(10) unsigned default NULL,
  PRIMARY KEY (`username`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;

INSERT INTO `tbl_register` VALUES ('administrator', 'pob', '1234', 'admin', 'admin@admin.com', 0);
INSERT INTO `tbl_register` VALUES ('lucky', 'lucky6', 'shruti', 'shruti', 'lucky@yahoo.com', 0);
INSERT INTO `tbl_register` VALUES ('prashant', 'prash', 'prashant', 'prashant', 'yohoprashant@yahoo.com', 0);
INSERT INTO `tbl_register` VALUES ('sfasf', 'safasf', 'aa', 'aa', '', 0);
INSERT INTO `tbl_register` VALUES ('shruti', 'shru6', 'shruti', 'shruti', '', 1);
INSERT INTO `tbl_register` VALUES ('cazcz', 'sxcsa', 'ssds', 'dscx', 'sds', 0);
-- MySQL Script generated by MySQL Workbench
-- Thu May 28 14:51:34 2015
-- Model: New Model    Version: 1.0
-- MySQL Workbench Forward Engineering

SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES';

-- -----------------------------------------------------
-- Schema mecanico (rebuilt from scratch on every run)
-- -----------------------------------------------------
DROP SCHEMA IF EXISTS `mecanico` ;
CREATE SCHEMA IF NOT EXISTS `mecanico` DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci ;
USE `mecanico` ;

-- -----------------------------------------------------
-- Table `mecanico`.`Vehiculo` -- vehicles serviced by the shop
-- -----------------------------------------------------
DROP TABLE IF EXISTS `mecanico`.`Vehiculo` ;
CREATE TABLE IF NOT EXISTS `mecanico`.`Vehiculo` (
  `IdVehiculo` INT NOT NULL AUTO_INCREMENT,
  `Placa` VARCHAR(8) NOT NULL,
  `Marca` VARCHAR(45) NOT NULL,
  `Linea` VARCHAR(45) NOT NULL,
  `Modelo` INT NOT NULL,
  `Color` VARCHAR(45) NOT NULL,
  `Kilometraje` INT NOT NULL,
  PRIMARY KEY (`IdVehiculo`))
ENGINE = InnoDB;

CREATE UNIQUE INDEX `Placa_UNIQUE` ON `mecanico`.`Vehiculo` (`Placa` ASC);
CREATE UNIQUE INDEX `IdVehiculo_UNIQUE` ON `mecanico`.`Vehiculo` (`IdVehiculo` ASC);

-- -----------------------------------------------------
-- Table `mecanico`.`Cliente` -- customers
-- -----------------------------------------------------
DROP TABLE IF EXISTS `mecanico`.`Cliente` ;
CREATE TABLE IF NOT EXISTS `mecanico`.`Cliente` (
  `IdCliente` INT NOT NULL AUTO_INCREMENT,
  `Cedula` VARCHAR(20) NOT NULL,
  `Nombres` VARCHAR(50) NOT NULL,
  `Apellidos` VARCHAR(50) NOT NULL,
  `Movil` VARCHAR(12) NOT NULL,
  `Telefono` VARCHAR(12) NOT NULL,
  `Email` VARCHAR(50) NOT NULL,
  `Direccion` VARCHAR(45) NOT NULL,
  PRIMARY KEY (`IdCliente`))
ENGINE = InnoDB;

CREATE UNIQUE INDEX `IdCliente_UNIQUE` ON `mecanico`.`Cliente` (`IdCliente` ASC);
CREATE UNIQUE INDEX `Cedula_UNIQUE` ON `mecanico`.`Cliente` (`Cedula` ASC);

-- -----------------------------------------------------
-- Table `mecanico`.`Mecanico` -- mechanics on staff
-- -----------------------------------------------------
DROP TABLE IF EXISTS `mecanico`.`Mecanico` ;
CREATE TABLE IF NOT EXISTS `mecanico`.`Mecanico` (
  `IdMecanico` INT NOT NULL AUTO_INCREMENT,
  `Cedula` VARCHAR(20) NOT NULL,
  `Nombres` VARCHAR(50) NOT NULL,
  `Apellidos` VARCHAR(50) NOT NULL,
  `Telefono` VARCHAR(12) NOT NULL,
  `Direccion` VARCHAR(45) NOT NULL,
  PRIMARY KEY (`IdMecanico`))
ENGINE = InnoDB;

CREATE UNIQUE INDEX `Cedula_UNIQUE` ON `mecanico`.`Mecanico` (`Cedula` ASC);
CREATE UNIQUE INDEX `IdMecanico_UNIQUE` ON `mecanico`.`Mecanico` (`IdMecanico` ASC);

-- -----------------------------------------------------
-- Table `mecanico`.`Orden_trabajo` -- work orders linking vehicle, customer
-- and mechanic; rows cascade away when any referenced party is deleted.
-- -----------------------------------------------------
DROP TABLE IF EXISTS `mecanico`.`Orden_trabajo` ;
CREATE TABLE IF NOT EXISTS `mecanico`.`Orden_trabajo` (
  `IdOrden` INT NOT NULL AUTO_INCREMENT,
  `Numero_Orden` VARCHAR(50) NOT NULL,
  `Fecha_Ingreso` DATETIME NOT NULL,
  `Descripcion_cliente` VARCHAR(1000) NOT NULL,
  `Descripcion_taller` VARCHAR(1000) NOT NULL,
  `Fecha_salida` DATETIME NOT NULL,
  `Cambio_aceite` TINYINT(1) NOT NULL,
  `Cambio_correa` TINYINT(1) NOT NULL,
  `Cambio_embrague` TINYINT(1) NOT NULL,
  `Cambio_Frenos` TINYINT(1) NOT NULL,
  `IdVehiculo` INT NOT NULL,
  `IdCliente` INT NOT NULL,
  `IdMecanico` INT NOT NULL,
  `Total` DECIMAL(10,2) NULL,
  PRIMARY KEY (`IdOrden`, `IdVehiculo`, `IdCliente`, `IdMecanico`),
  CONSTRAINT `fk_Orden_trabajo_Vehiculo`
    FOREIGN KEY (`IdVehiculo`)
    REFERENCES `mecanico`.`Vehiculo` (`IdVehiculo`)
    ON DELETE CASCADE
    ON UPDATE CASCADE,
  CONSTRAINT `fk_Orden_trabajo_Cliente`
    FOREIGN KEY (`IdCliente`)
    REFERENCES `mecanico`.`Cliente` (`IdCliente`)
    ON DELETE CASCADE
    ON UPDATE CASCADE,
  CONSTRAINT `fk_Orden_trabajo_Mecanico`
    FOREIGN KEY (`IdMecanico`)
    REFERENCES `mecanico`.`Mecanico` (`IdMecanico`)
    ON DELETE CASCADE
    ON UPDATE CASCADE)
ENGINE = InnoDB;

CREATE INDEX `fk_Orden_trabajo_Vehiculo` ON `mecanico`.`Orden_trabajo` (`IdVehiculo` ASC);
CREATE INDEX `fk_Orden_trabajo_Cliente` ON `mecanico`.`Orden_trabajo` (`IdCliente` ASC);
CREATE INDEX `fk_Orden_trabajo_Mecanico` ON `mecanico`.`Orden_trabajo` (`IdMecanico` ASC);
CREATE UNIQUE INDEX `Numero_Orden_UNIQUE` ON `mecanico`.`Orden_trabajo` (`Numero_Orden` ASC);
CREATE UNIQUE INDEX `IdOrden_UNIQUE` ON `mecanico`.`Orden_trabajo` (`IdOrden` ASC);

-- -----------------------------------------------------
-- Table `mecanico`.`Detalle_orden` -- line items of a work order
-- -----------------------------------------------------
DROP TABLE IF EXISTS `mecanico`.`Detalle_orden` ;
CREATE TABLE IF NOT EXISTS `mecanico`.`Detalle_orden` (
  `idDetalle` INT NOT NULL AUTO_INCREMENT,
  `Cantidad` INT NOT NULL,
  `IdOrden` INT NOT NULL,
  PRIMARY KEY (`idDetalle`, `IdOrden`),
  CONSTRAINT `fk_Detalle_orden_Orden_trabajo`
    FOREIGN KEY (`IdOrden`)
    REFERENCES `mecanico`.`Orden_trabajo` (`IdOrden`)
    ON DELETE CASCADE
    ON UPDATE CASCADE)
ENGINE = InnoDB;

CREATE UNIQUE INDEX `id_UNIQUE` ON `mecanico`.`Detalle_orden` (`idDetalle` ASC);
CREATE INDEX `fk_Detalle_orden_Orden_trabajo` ON `mecanico`.`Detalle_orden` (`IdOrden` ASC);

-- -----------------------------------------------------
-- Table `mecanico`.`Refacciones_Servicios` -- parts/services on a line item.
-- NOTE(review): `IdRefacccion` (triple "c") looks like a typo, but renaming
-- it would break existing queries, so it is kept as-is.
-- -----------------------------------------------------
DROP TABLE IF EXISTS `mecanico`.`Refacciones_Servicios` ;
CREATE TABLE IF NOT EXISTS `mecanico`.`Refacciones_Servicios` (
  `IdRefacccion` INT NOT NULL AUTO_INCREMENT,
  `Descripcion` VARCHAR(200) NOT NULL,
  `Cantidad_stock` INT NOT NULL,
  `Precio_unidad` DECIMAL(10,2) NOT NULL,
  `Marca` VARCHAR(50) NOT NULL,
  `IdDetalle` INT NOT NULL,
  PRIMARY KEY (`IdRefacccion`, `IdDetalle`),
  CONSTRAINT `fk_Refacciones_Servicios_Detalle_orden`
    FOREIGN KEY (`IdDetalle`)
    REFERENCES `mecanico`.`Detalle_orden` (`idDetalle`)
    ON DELETE CASCADE
    ON UPDATE CASCADE)
ENGINE = InnoDB;

CREATE UNIQUE INDEX `id_UNIQUE` ON `mecanico`.`Refacciones_Servicios` (`IdRefacccion` ASC);
CREATE INDEX `fk_Refacciones_Servicios_Detalle_orden` ON `mecanico`.`Refacciones_Servicios` (`IdDetalle` ASC);

-- -----------------------------------------------------
-- Table `mecanico`.`Usuario` -- application logins.
-- NOTE(review): Password VARCHAR(8) implies short plain-text passwords --
-- store a salted hash instead (kept as-is; the application depends on it).
-- -----------------------------------------------------
DROP TABLE IF EXISTS `mecanico`.`Usuario` ;
CREATE TABLE IF NOT EXISTS `mecanico`.`Usuario` (
  `IdLogin` INT NOT NULL AUTO_INCREMENT,
  `Login` VARCHAR(50) NOT NULL,
  `Password` VARCHAR(8) NOT NULL,
  `Tipo` VARCHAR(1) NOT NULL COMMENT '1 - Admin\n2 - Normal',
  `Estado` VARCHAR(1) NOT NULL COMMENT 'A - Activo\nI - Inactivo',
  PRIMARY KEY (`IdLogin`))
ENGINE = InnoDB;

CREATE UNIQUE INDEX `Login_UNIQUE` ON `mecanico`.`Usuario` (`Login` ASC);
CREATE UNIQUE INDEX `IdLogin_UNIQUE` ON `mecanico`.`Usuario` (`IdLogin` ASC);

SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
-- Role catalogue and the user <-> role junction table.
create table roles (
    id SERIAL primary key,
    name varchar not null
);

-- Role names must be unique.
ALTER TABLE roles ADD CONSTRAINT uk_roles_name UNIQUE (name);

create table users_roles (
    id SERIAL primary key,
    user_id integer NOT NULL,
    role_id integer NOT NULL
);

-- Each (user, role) pair may be assigned at most once.
ALTER TABLE users_roles ADD CONSTRAINT uk_users_roles UNIQUE (user_id, role_id);
ALTER TABLE users_roles ADD CONSTRAINT fk_users_roles_users FOREIGN KEY (user_id) REFERENCES application_user(id);
ALTER TABLE users_roles ADD CONSTRAINT fk_users_roles_roles FOREIGN KEY (role_id) REFERENCES roles(id);

-- Seed the role catalogue.
INSERT INTO roles(name) VALUES ('ROLE_DEANOFFICER');
INSERT INTO roles(name) VALUES ('ROLE_ADMIN');
INSERT INTO roles(name) VALUES ('ROLE_NAVCH_METHOD');

-- NOTE(review): assumes application_user id 1 exists and that
-- ROLE_DEANOFFICER received serial id 1 -- confirm against the user seed.
INSERT INTO users_roles(user_id, role_id) VALUES (1, 1);
-- Seed data: three resumes keyed by fixed UUIDs (names are data, kept verbatim).
INSERT INTO resume (uuid, full_name)
VALUES
    ('7de882da-02f2-4d16-8daa-60660aaf4071', 'Евгений'),
    ('a97b3ac3-3817-4c3f-8a5f-178497311f1d', 'Дмитрий'),
    ('dd0a70d1-5ed3-479a-b452-d5e04f21ca73', 'Николай');
-- One row per library branch, keyed to its local authority and library type.
create table schemas_libraries (
    id serial not null,
    local_authority_code character (9),
    name character varying (250),
    address_1 character varying (250),
    address_2 character varying (250),
    address_3 character varying (250),
    postcode character varying (9),
    statutory boolean not null,
    unique_property_reference_number numeric,
    library_type_id integer,
    year_opened numeric (4),
    year_closed numeric (4),
    monday_staffed_hours character varying (100),
    tuesday_staffed_hours character varying (100),
    wednesday_staffed_hours character varying (100),
    thursday_staffed_hours character varying (100),
    friday_staffed_hours character varying (100),
    saturday_staffed_hours character varying (100),
    sunday_staffed_hours character varying (100),
    monday_unstaffed_hours character varying (100),
    tuesday_unstaffed_hours character varying (100),
    wednesday_unstaffed_hours character varying (100),
    thursday_unstaffed_hours character varying (100),
    friday_unstaffed_hours character varying (100),
    saturday_unstaffed_hours character varying (100),
    sunday_unstaffed_hours character varying (100),
    special_hours text,
    colocated boolean not null,
    colocated_with character varying (250),
    notes text,
    url text,
    email_address text,
    constraint pk_schemaslibraries_id primary key (id),
    constraint fk_schemaslibraries_localauthoritycode foreign key (local_authority_code) references schemas_local_authority (code),
    constraint fk_schemaslibraries_librarytypeid foreign key (library_type_id) references schemas_library_type (id)
);

-- FIX: the primary key constraint already creates a unique index named
-- pk_schemaslibraries_id, so the extra unique index the original built on (id)
-- was fully redundant (extra write cost per row). Cluster directly on the
-- PK's index instead.
cluster schemas_libraries using pk_schemaslibraries_id;

-- Supports lookups/joins by local authority.
create index idx_schemaslibraries_local_authority_code on schemas_libraries (local_authority_code);
-- One row per employee_salary record, enriched with the owning person and the
-- creation / termination audit trail. The LEFT JOINs keep salaries that have
-- not been ended: ended_by / ended_at come back NULL for those rows.
SELECT
    es.id,
    es.tenant_id,
    es.employee_id,
    es.payday,
    es.amount,
    employee.person_id AS person_id,
    es_cb.user_id AS created_by,
    es_cb.created_at AS created_at,
    ese_cb.user_id AS ended_by,
    ese_cb.created_at AS ended_at
FROM employee_salary es
INNER JOIN created_by es_cb ON es_cb.id = es.id
INNER JOIN employee ON employee.id = es.employee_id
LEFT JOIN employee_salary_end ese ON ese.salary_id = es.id
LEFT JOIN created_by ese_cb ON ese_cb.id = ese.id
-- Product catalogue (H2: IDENTITY is an auto-increment PK column).
CREATE TABLE product (
    id IDENTITY,
    product_name VARCHAR(255),
    -- FIX: the original declared product_price DOUBLE; binary floating point
    -- cannot represent monetary amounts exactly -- use an exact decimal type.
    product_price DECIMAL(12, 2),
    image_url VARCHAR(255),
    is_active BOOLEAN,
    description VARCHAR(255),
    CONSTRAINT pk_product_id PRIMARY KEY (id)
);

CREATE TABLE sports (
    id IDENTITY,
    sport_name VARCHAR(255),
    image_url VARCHAR(255),
    is_active BOOLEAN,
    description VARCHAR(255),
    CONSTRAINT pk_sport_id PRIMARY KEY (id)
);

-- NOTE(review): the script creates sports and then immediately drops it --
-- this looks like leftover scratch work. The drop is kept (net effect
-- preserved) but guarded so reruns do not fail; delete both statements if the
-- table was never meant to exist.
DROP TABLE IF EXISTS sports;
-- BUG FIX: the original read "CREATE TABLE `ei2030`.(" -- the table name after
-- the schema qualifier was missing, a hard syntax error. The column set
-- (point, distance, ...) matches the `traffic` table queried elsewhere in this
-- project ("SELECT point, SUM(distance) FROM traffic GROUP BY point"), so the
-- table is named accordingly.
CREATE TABLE `ei2030`.`traffic` (
    `id` INT NOT NULL AUTO_INCREMENT,
    `date` DATE NOT NULL,
    `time` TIME NOT NULL,
    `direction` VARCHAR(8) NOT NULL,
    `point` VARCHAR(128) NOT NULL,
    `distance` INT NOT NULL,
    `elapse` INT NOT NULL,
    PRIMARY KEY (`id`)
) ENGINE = InnoDB;
/*!50003 SET character_set_client = utf8 */ ;
/*!50003 SET character_set_results = utf8 */ ;
/*!50003 SET collation_connection = utf8_general_ci */ ;

-- Migration: add billing/notification columns. The COMMENT literals are
-- stored column metadata and are kept verbatim.
ALTER TABLE `bills` ADD COLUMN type_id int(2) DEFAULT 0 COMMENT '0:租户账单,1:运营商账单';
ALTER TABLE `notifications` ADD COLUMN status_msg varchar(255) DEFAULT NULL COMMENT '状态消息';
ALTER TABLE `operators` ADD COLUMN amount decimal(10,2) DEFAULT '0.00' COMMENT '帐户余额';

-- NOTE(review): only this statement is schema-qualified (`monit`.`packages`)
-- -- confirm the tables above live in the same schema. `data_ratention` looks
-- like a typo for data_retention, but renaming it would break existing
-- readers, so it is flagged rather than changed.
ALTER TABLE `monit`.`packages`
    ADD COLUMN `multi_regional` int(11),
    ADD COLUMN `report` int(11),
    ADD COLUMN `customer_support` VARCHAR(50),
    ADD COLUMN `data_ratention` int(11),
    ADD COLUMN `sms_num` int(11),
    ADD COLUMN `special_features` VARCHAR(50);
#------------------------------------------------------------
# MySQL script: sales-rep expense tracking schema.
#------------------------------------------------------------

#------------------------------------------------------------
# Table: businessman -- sales representatives.
# NOTE(review): com_mdp appears to hold a plain-text password -- store a
# salted hash instead (schema kept as-is; the application depends on it).
#------------------------------------------------------------
CREATE TABLE businessman(
        com_id             INT AUTO_INCREMENT NOT NULL,
        com_mdp            VARCHAR(80) NOT NULL,
        com_nom            VARCHAR(80) NOT NULL,
        com_prenom         VARCHAR(80) NOT NULL,
        com_datenaissance  DATE NOT NULL,
        com_ville          VARCHAR(80) NOT NULL,
        com_num            VARCHAR(80) NOT NULL,
        com_mail           VARCHAR(80) NOT NULL,
        com_sexe           VARCHAR(80) NOT NULL,
        CONSTRAINT businessman_PK PRIMARY KEY (com_id)
)ENGINE=InnoDB;

#------------------------------------------------------------
# Table: customers -- clients managed by the representatives.
#------------------------------------------------------------
CREATE TABLE customers(
        customer_id         INT AUTO_INCREMENT NOT NULL,
        customer_firstname  VARCHAR(50) NOT NULL,
        customer_lastname   VARCHAR(50) NOT NULL,
        customer_etp        VARCHAR(50) NOT NULL,
        customer_town       VARCHAR(50) NOT NULL,
        customer_num        VARCHAR(50) NOT NULL,
        customer_mail       VARCHAR(50) NOT NULL,
        CONSTRAINT customers_PK PRIMARY KEY (customer_id)
)ENGINE=InnoDB;

#------------------------------------------------------------
# Table: expense -- expense claims filed by a representative.
# NOTE(review): expense_buisnessman (sic) duplicates the com_id FK but has no
# constraint of its own, and DOUBLE is a lossy type for monetary amounts --
# both kept as-is to avoid breaking the application, but worth revisiting.
#------------------------------------------------------------
CREATE TABLE expense(
        expense_id           INT AUTO_INCREMENT NOT NULL,
        expense_buisnessman  INT NOT NULL,
        expense_date         DATE NOT NULL,
        expense_type         VARCHAR(40) NOT NULL,
        expense_mission      VARCHAR(40) NOT NULL,
        expense_tva          DOUBLE NOT NULL,
        expense_amountHT     DOUBLE NOT NULL,
        expense_amountTTC    DOUBLE NOT NULL,
        expense_validate     INT NOT NULL,
        com_id               INT NOT NULL,
        CONSTRAINT expense_PK PRIMARY KEY (expense_id),
        CONSTRAINT expense_businessman_FK FOREIGN KEY (com_id) REFERENCES businessman(com_id)
)ENGINE=InnoDB;

#------------------------------------------------------------
# Table: enterprise -- employing companies.
# NOTE(review): a NOT NULL expense_id on enterprise forces every company to
# reference exactly one expense -- an odd direction for this relation; confirm
# against the data model.
#------------------------------------------------------------
CREATE TABLE enterprise(
        etp_id               INT AUTO_INCREMENT NOT NULL,
        etp_username         VARCHAR(40) NOT NULL,
        etp_pass             VARCHAR(40) NOT NULL,
        etp_siret            VARCHAR(80) NOT NULL,
        etp_dirigeant        VARCHAR(40) NOT NULL,
        etp_siege            VARCHAR(40) NOT NULL,
        etp_ville            VARCHAR(40) NOT NULL,
        etp_num              VARCHAR(40) NOT NULL,
        etp_mail             VARCHAR(40) NOT NULL,
        etp_dateinscription  DATE NOT NULL,
        etp_nbcommerciaux    INT NOT NULL,
        expense_id           INT NOT NULL,
        CONSTRAINT enterprise_PK PRIMARY KEY (etp_id),
        CONSTRAINT enterprise_expense_FK FOREIGN KEY (expense_id) REFERENCES expense(expense_id)
)ENGINE=InnoDB;

#------------------------------------------------------------
# Table: employ -- junction: which company employs which representative.
#------------------------------------------------------------
CREATE TABLE employ(
        com_id  INT NOT NULL,
        etp_id  INT NOT NULL,
        CONSTRAINT employ_PK PRIMARY KEY (com_id,etp_id),
        CONSTRAINT employ_businessman_FK FOREIGN KEY (com_id) REFERENCES businessman(com_id),
        CONSTRAINT employ_enterprise0_FK FOREIGN KEY (etp_id) REFERENCES enterprise(etp_id)
)ENGINE=InnoDB;

#------------------------------------------------------------
# Table: manage -- junction: which representative manages which customer.
#------------------------------------------------------------
CREATE TABLE manage(
        customer_id  INT NOT NULL,
        com_id       INT NOT NULL,
        CONSTRAINT manage_PK PRIMARY KEY (customer_id,com_id),
        CONSTRAINT manage_customers_FK FOREIGN KEY (customer_id) REFERENCES customers(customer_id),
        CONSTRAINT manage_businessman0_FK FOREIGN KEY (com_id) REFERENCES businessman(com_id)
)ENGINE=InnoDB;
-- Blog schema (SQL Server): categories, users, articles, comments.
create table category (
    id int identity(1, 1) not null primary key,
    name varchar(30) not null,
    createdAt datetime not null default getdate()
);

-- NOTE(review): password is stored in plain text and email is capped at 20
-- characters (too short for many real addresses) -- store a salted hash and
-- widen the column. Left unchanged because the application depends on the
-- current schema.
create table users (
    id int identity(1, 1) not null primary key,
    name varchar(10) not null,
    password varchar(20) not null,
    email varchar(20) not null,
    status int default 0 not null,
    createdAt datetime not null default getdate()
);

-- FIX: the article/comment definitions were missing statement terminators and
-- used the TEXT type, which is deprecated in SQL Server; varchar(max) is the
-- documented replacement.
create table article (
    id int identity(1, 1) not null primary key,
    categoryId int not null,
    userId int not null,
    title varchar(300) not null,
    content varchar(max) not null,
    status int default 0 not null,
    createdAt datetime not null default getdate(),
    updatedAt datetime
);

create table comment (
    id int identity(1, 1) not null primary key,
    articleId int not null,
    userId int not null,
    content varchar(max) not null,
    status int default 0 not null,
    createdAt datetime not null default getdate(),
    updatedAt datetime
);

-- Seed categories.
insert into category(name) values ('test');
insert into category(name) values ('Programmer');
insert into category(name) values ('java');
insert into category(name) values ('ruby');
-- Bulk-clears TB_DOCUMENTO in DB_PROFILE_DATA2 with FK checks and triggers
-- temporarily disabled (dynamic SQL so this batch compiles even when the
-- target database is not attached).
EXEC ('ALTER TABLE DB_PROFILE_DATA2.dbo.TB_DOCUMENTO NOCHECK CONSTRAINT ALL');
EXEC ('ALTER TABLE DB_PROFILE_DATA2.dbo.TB_DOCUMENTO_AUX NOCHECK CONSTRAINT ALL');
EXEC ('ALTER TABLE DB_PROFILE_DATA2.dbo.TB_DOCUMENTO DISABLE TRIGGER ALL');

EXEC ('DELETE FROM DB_PROFILE_DATA2.dbo.TB_DOCUMENTO ');

-- Re-enable everything that was disabled above.
-- BUG FIX: the original disabled constraints on TB_DOCUMENTO_AUX but never
-- re-enabled them, leaving that table permanently unprotected.
-- NOTE(review): plain CHECK CONSTRAINT ALL re-enables the constraints without
-- re-validating existing rows (they stay "untrusted" to the optimizer); use
-- "WITH CHECK CHECK CONSTRAINT ALL" instead if revalidation is wanted.
EXEC ('ALTER TABLE DB_PROFILE_DATA2.dbo.TB_DOCUMENTO CHECK CONSTRAINT ALL ');
EXEC ('ALTER TABLE DB_PROFILE_DATA2.dbo.TB_DOCUMENTO_AUX CHECK CONSTRAINT ALL');
EXEC ('ALTER TABLE DB_PROFILE_DATA2.dbo.TB_DOCUMENTO ENABLE TRIGGER ALL');

-- Sanity check: row count after the purge (expected 0).
EXEC ('select COUNT (*) from DB_PROFILE_DATA2.dbo.TB_DOCUMENTO');
-- Tear down the Yelp schema. Oracle's CASCADE CONSTRAINTS lets each table be
-- dropped even while foreign keys in other tables still reference it, so the
-- drop order does not matter.
drop table YELP_USER cascade constraints;
drop table REVIEWS cascade constraints;
drop table NATIVE_ATTRIBUTE cascade constraints;
drop table NATIVE_CATEGORY cascade constraints;
drop table BUSINESS cascade constraints;
drop table BUSINESS_CATEGORY cascade constraints;
drop table BUSINESS_SUB_CATEGORY cascade constraints;
-- Which countries generate the most invoices? One row per billing country,
-- most active first.
SELECT
    BillingCountry,
    COUNT(*) AS Invoices
FROM Invoice
GROUP BY BillingCountry
ORDER BY Invoices DESC;
/*
Runtime: 667 ms, faster than 93.05% of MySQL submissions
Memory: 0 B, less than 100.00% of MySQL submissions
Test cases passed: 14 / 14
*/
# Write your MySQL query statement below
# Highest-paid employee(s) per department. RANK() (not ROW_NUMBER()) keeps
# every employee tied for the department's top salary.
with ranked as (
    select
        name,
        salary,
        departmentid,
        rank() over (partition by departmentid order by salary desc) as rnk
    from Employee
)
select
    d.name as Department,
    r.name as Employee,
    r.salary as Salary
from ranked r
join department d on r.departmentid = d.id
where r.rnk = 1
use codeup_test_db;

-- Cleanup: remove unwanted rows from albums.
-- Fix: dropped the committed commented-out SELECT fragments that shadowed each
-- DELETE; the DELETE statements themselves are unchanged. Preview affected
-- rows with a SELECT using the same WHERE clause before running.

-- Albums released after 1991.
delete from albums where release_date > 1991;

-- Any album whose genre mentions Disco.
delete from albums where genre like "%Disco%";

-- All Whitney Houston albums.
delete from albums where artist like "%Whitney Houston%";
-- Change date: 2012-09-18
-- Author: Ye Aijun
-- Requirement no.: XD-JD03-027  System function - add borrowing-entity column
-- Parameter setup:
-- Add BORROW_CORP_ID to CMS_LOAN_INFO and backfill it from NET_CODE.
ALTER TABLE CMS_LOAN_INFO ADD BORROW_CORP_ID VARCHAR2(4);
-- Intentional full-table backfill: every existing loan gets its network code
-- as the initial borrowing-entity id.
UPDATE CMS_LOAN_INFO SET BORROW_CORP_ID = NET_CODE;
COMMIT;

-- Change date: 2012-09-18
-- Author: Ye Aijun
-- Requirement no.: XD-JD03-027  System function - add interest-payment method
-- Parameter setup:
-- Add PAY_INTEREST to CMS_PROVIDE_LOAN_INFO and default existing rows to '001'.
ALTER TABLE CMS_PROVIDE_LOAN_INFO ADD PAY_INTEREST VARCHAR2(3);
-- Intentional full-table backfill with payment-method code '001'
-- (code meaning defined elsewhere -- confirm against the code table).
UPDATE CMS_PROVIDE_LOAN_INFO SET PAY_INTEREST = '001';
COMMIT;
-- Returns the id and name of every active item category at the requested
-- tree level.
-- @Level: category depth to filter on (compared against ItemCategories.Level).
-- Improvements: added SET NOCOUNT ON (suppresses DONE_IN_PROC chatter for
-- callers), statement terminator, and consistent formatting. Result set and
-- interface are unchanged.
Create Procedure mERP_Sp_GetCategories (@Level int)
As
Begin
    SET NOCOUNT ON;

    select
        CategoryID,
        Category_name
    from ItemCategories
    where Active = 1
      and Level = @Level;
End
# Exercise 9
# Show name, district & population of 3 Mexican cities where population >= 100000,
# sorted by population from high to low.
# Fix: the predicate was written "> =" (space between > and =), which is a
# MySQL syntax error; it must be ">=".
SELECT city.name, district, city.population
FROM city
INNER JOIN country ON city.countrycode = country.code
WHERE country.name = "Mexico" AND city.population >= 100000
ORDER BY city.population DESC
LIMIT 3;

# Show the names of the Dutch cities, plus the name, continent and region of
# the country.
SELECT city.name, country.name, country.continent, country.region
FROM city
INNER JOIN country ON city.countrycode = country.code
WHERE country.name = "Netherlands";

# Show the names of the Belgian cities with population < 1000000, plus the
# name, continent and region of the country.
SELECT city.name, country.name, country.continent, country.region
FROM city
INNER JOIN country ON city.countrycode = country.code
WHERE country.name = "Belgium" AND city.population < 1000000;

# Show the African cities whose country became independent before 1945, plus
# the country's name and year of independence; sort by country name, then by
# year of independence.
# Fix: "African cities" means continent = "Africa"; there is no country named
# "Africa", so the original filter matched no rows.
SELECT city.name, country.name, indepyear
FROM city
INNER JOIN country ON city.countrycode = country.code
WHERE country.continent = "Africa" AND indepyear < 1945
ORDER BY country.name, indepyear;

# Next exercise (query follows below): Asian cities where life expectancy is
# above 60 and the city population exceeds 300000; also show the country name
# and life expectancy, sorted by country name then life expectancy descending.
# Asian cities where the country's life expectancy is above 60 and the city
# population exceeds 300000; sorted by country name, then life expectancy
# high to low.
# Fixes: the exercise asks for life expectancy HIGHER than 60 (the original
# used "< 60"), and in the world database "Asia" is a continent value, not a
# region value, so region = "Asia" matched nothing.
SELECT city.name, country.name, country.lifeexpectancy
FROM city
INNER JOIN country ON city.countrycode = country.code
WHERE country.continent = "Asia" AND lifeexpectancy > 60 AND city.population > 300000
ORDER BY country.name, lifeexpectancy DESC;

# Cities in countries with a surface area below 100, plus the country's name
# and surface area; sorted by country name, then surface area ascending.
SELECT city.name, country.name, surfacearea
FROM city
INNER JOIN country ON city.countrycode = country.code
WHERE surfacearea < 100
ORDER BY country.name, surfacearea;

# African countries, their official language and the percentage of the
# population speaking it; sorted by country name.
SELECT name, language, percentage
FROM country
INNER JOIN countrylanguage ON country.code = countrylanguage.countrycode
WHERE continent = "Africa" AND isofficial = "T"
ORDER BY name;

# French cities with population > 400000, plus the country's name, continent,
# region and official language.
SELECT city.name, country.name, continent, region, language
FROM city
INNER JOIN country ON country.code = city.countrycode
INNER JOIN countrylanguage ON country.code = countrylanguage.countrycode
WHERE country.name = "France" AND isofficial = "T" AND city.population > 400000;

##################################################################################################################
# Next exercise: German cities with population < 1000000, plus the official
# language spoken there.
# Worked solution (kept commented, as in the handout): German cities with
# population < 1000000 and their official language.
# SELECT city.name, language FROM countrylanguage
# INNER JOIN (city INNER JOIN country ON city.countrycode = country.code)
# ON countrylanguage.countrycode=city.countrycode
# WHERE country.name="Germany" AND isofficial="T" AND city.population < 1000000 #
##################################################################################################################

# City name and life expectancy for Gent.
SELECT city.name, lifeexpectancy
FROM city
INNER JOIN country ON country.code = city.countrycode
WHERE city.name = "Gent";

# Name and government form for Berlin, Paris, London and Peking.
SELECT city.name, governmentform
FROM city
INNER JOIN country ON country.code = city.countrycode
WHERE city.name IN ("Berlin", "Paris", "London", "Peking");

# Name and head of state for the cities of Angola.
SELECT city.name, headofstate
FROM city
INNER JOIN country ON country.code = city.countrycode
WHERE country.name = "Angola";

# Every country with its cities, sorted by country name.
SELECT country.name, city.name
FROM city
INNER JOIN country ON country.code = city.countrycode
ORDER BY country.name;

# Name, style and brewery of the beers starting with the letter a.
# Fixes: the original nested join referenced `bieren` inside the inner join's
# ON clause, which MySQL (>= 5.0.12) rejects; the joins are now flattened
# (inner joins commute, so the result is the same).
SELECT bieren.naam, soort, brouwers.brnaam
FROM bieren
INNER JOIN soorten ON bieren.soortnr = soorten.soortnr
INNER JOIN brouwers ON bieren.brouwernr = brouwers.brouwernr
WHERE bieren.naam LIKE "a%";

# Name, style and brewery of the beers whose style contains the letter t,
# sorted by brewery then by beer name.
# Fix: replaced positional ORDER BY 3,1 with explicit column names.
SELECT bieren.naam, soort, brouwers.brnaam
FROM bieren
INNER JOIN soorten ON bieren.soortnr = soorten.soortnr
INNER JOIN brouwers ON bieren.brouwernr = brouwers.brouwernr
WHERE soort LIKE "%t%"
ORDER BY brouwers.brnaam, bieren.naam;

# Next exercise: beers from breweries with a turnover above 100000, sorted by
# beer name.
# Name, style and brewery of the beers from breweries with a turnover above
# 100000, sorted alphabetically by beer name.
# Fixes: positional ORDER BY 1 replaced with the column name; the nested join
# referenced `bieren` inside the inner join's ON clause (rejected by MySQL
# >= 5.0.12) and has been flattened -- inner joins commute, same result.
SELECT bieren.naam, soort, brouwers.brnaam
FROM bieren
INNER JOIN soorten ON bieren.soortnr = soorten.soortnr
INNER JOIN brouwers ON bieren.brouwernr = brouwers.brouwernr
WHERE brouwers.omzet > 100000
ORDER BY bieren.naam;

# Overview of all brewers and the (unique) styles they brew, sorted by brewer
# alphabetically and then by style alphabetically.
# Fix: the original sorted only by the first column (ORDER BY 1) even though
# the exercise asks for brewer THEN style; both sort keys are now explicit.
SELECT DISTINCT brouwers.brnaam, soort
FROM bieren
INNER JOIN soorten ON bieren.soortnr = soorten.soortnr
INNER JOIN brouwers ON bieren.brouwernr = brouwers.brouwernr
ORDER BY brouwers.brnaam, soort;
-- Export the full gear reference list to /tmp/gears.csv with a header row.
-- Branch 1: one row per gear, with its group's name attached; the gear's
--           sfp_code is preferred over its isscfg code when present.
-- Branch 2: one group-level summary row per gear_group (gear_name is NULL).
-- NOTE(review): UNION also deduplicates; the branches look disjoint, so
-- UNION ALL would likely be equivalent and cheaper -- confirm before changing.
COPY (with query_data as (
    select
        coalesce(gears.sfp_code, gears.isscfg) as isscfg,
        gears.standard_abbreviation,
        gears.name as gear_name,
        gear_groups.name as gear_group,
        gears.description
    from gears
    inner join gear_groups on gears.gear_group_id = gear_groups.id
    UNION
    select
        gear_groups.isscfg,
        gear_groups.standard_abbreviation,
        NULL as gear_name,
        gear_groups.name as gear_group,
        gear_groups.description
    from gear_groups
)
select * from query_data order by gear_group, isscfg)
to '/tmp/gears.csv' with CSV HEADER;
-- Per student (by local id), count the distinct data-entry dates recorded in
-- the 'Wall Street' field-standard rows across all repository tables.
-- Fix: dna_repositories.repository_141 appeared twice in the UNION chain
-- (copy-paste slip; possibly repository_142 was intended -- TODO confirm with
-- the repository owner). The duplicate was redundant anyway, since UNION
-- deduplicates, and has been removed.
WITH ls AS (
    SELECT * FROM (
        SELECT * FROM dna_repositories.repository_100
        UNION SELECT * FROM dna_repositories.repository_132
        UNION SELECT * FROM dna_repositories.repository_133
        UNION SELECT * FROM dna_repositories.repository_134
        UNION SELECT * FROM dna_repositories.repository_135
        UNION SELECT * FROM dna_repositories.repository_136
        UNION SELECT * FROM dna_repositories.repository_137
        UNION SELECT * FROM dna_repositories.repository_138
        UNION SELECT * FROM dna_repositories.repository_139
        UNION SELECT * FROM dna_repositories.repository_140
        UNION SELECT * FROM dna_repositories.repository_141
    ) AS repo_all
    WHERE field_standard = 'Wall Street'
)
SELECT
    s.local_student_id,
    COUNT(DISTINCT field_data_entry_date)
FROM ls
INNER JOIN public.students s ON s.student_id = ls.student_id
GROUP BY s.local_student_id
-- Create (or replace) the synonym hr.departments_org pointing at
-- hr.departments: queries against departments_org resolve to departments.
-- NOTE(review): presumably kept so code using the old object name still
-- works -- confirm which consumers reference departments_org.
CREATE OR REPLACE SYNONYM hr.departments_org FOR hr.departments;
-- Bootstrap the animals database: create it, select it, and define the
-- animals table with an auto-incrementing surrogate key.
CREATE DATABASE animals_db;
USE animals_db;

CREATE TABLE animals (
    id INT AUTO_INCREMENT PRIMARY KEY,
    animal_name VARCHAR(255)
);
--
-- Table structure for table `genome_db_stats`
--
-- table to hold genome level summary statistics
-- Used by Bio::EnsEMBL::Compara::RunnableDB::GenomeCalcStats module
--
CREATE TABLE genome_db_stats (
  genome_db_id       int(10) NOT NULL default '0',  -- genome the statistics describe (presumably FK to genome_db -- confirm)
  data_type          varchar(20) NOT NULL,          -- which statistic family this row summarises
  count              int(10) NOT NULL,              -- number of observations summarised
  mean               double NOT NULL default '0',
  median             double NOT NULL default '0',
  mode               double NOT NULL,
  stddev             double NOT NULL,
  variance           double NOT NULL,
  min                double NOT NULL default '0',
  max                double NOT NULL default '0',
  overlap_count      int(10) NOT NULL default '0',
  -- exactly one summary row per (genome, statistic) pair
  UNIQUE KEY genome_db_id_type (genome_db_id, data_type)
);
-- Seed data for snake_state: one row per observed event, each backdated by a
-- NOW() - INTERVAL offset; time_light_switch is 7 (hour) for every row.

-- Shed cycle events.
INSERT INTO snake_state (time_created, time_light_switch, shed_imminent) VALUES (NOW() - INTERVAL '20 days', 7, 't');
INSERT INTO snake_state (time_created, time_light_switch, shed_complete) VALUES (NOW() - INTERVAL '4 days', 7, 't');

-- Feeding events.
INSERT INTO snake_state (time_created, time_light_switch, rat_offered) VALUES (NOW() - INTERVAL '15 days', 7, 't');
INSERT INTO snake_state (time_created, time_light_switch, rat_ate) VALUES (NOW() - INTERVAL '30 days', 7, 't');
INSERT INTO snake_state (time_created, time_light_switch, rat_ignored) VALUES (NOW() - INTERVAL '16 days', 7, 't');

-- Measurements.
INSERT INTO snake_state (time_created, time_light_switch, snake_length) VALUES (NOW() - INTERVAL '100 days', 7, 50);
INSERT INTO snake_state (time_created, time_light_switch, snake_weight) VALUES (NOW() - INTERVAL '90 days', 7, 10);

-- Waste observations.
INSERT INTO snake_state (time_created, time_light_switch, urate_found) VALUES (NOW() - INTERVAL '20 days', 7, 't');
INSERT INTO snake_state (time_created, time_light_switch, poop_found) VALUES (NOW() - INTERVAL '14 days', 7, 't');
INSERT INTO snake_state (time_created, time_light_switch, poop_found) VALUES (NOW() - INTERVAL '99 days', 7, 't');
INSERT INTO snake_state (time_created, time_light_switch, poop_found) VALUES (NOW() - INTERVAL '37 days', 7, 't');
INSERT INTO snake_state (time_created, time_light_switch, poop_found) VALUES (NOW() - INTERVAL '60 days', 7, 't');
INSERT INTO snake_state (time_created, time_light_switch, poop_found) VALUES (NOW() - INTERVAL '115 days', 7, 't');
-- Daily middleware hardware-mix statistics loader (Oracle).
-- Rebuilds the HW_* rows of APX_ASA.MW_GEN_STATS from the per-product
-- *_GEN_UNIQUE_HOSTS tables (TOMCAT / WL / APACHE), then appends one
-- virtualization-percentage row per product.
--
-- Fix applied in this revision: the three HW_OTHER* inserts contained
-- "model LIKE 'T52%' AND model LIKE 'T54%'" in the middle of an exclusion
-- list. No model can match both patterns, so the OTHER bucket always counted
-- 0 rows. Both predicates are now NOT LIKE, matching the exclusion intent,
-- and the duplicated "model NOT LIKE '%PROLIANT%'" was removed.
--
-- NOTE(review): '%FIRE%' and '%SOLARIS ZONE%' are counted in the SPARC
--   buckets but are NOT excluded from HW_OTHER*; confirm whether they should be.
-- NOTE(review): WL writes 'HW_OTHER' while TOMCAT/APACHE write 'HW_OTHER_HW';
--   left as-is in case downstream reports depend on the existing labels.
-- NOTE(review): the DELETE clears only STAT_TYPE LIKE '%HW_%', so the
--   VIRTUALIZATION rows accumulate one row per run -- confirm intended.
-- NOTE(review): INSERT without a column list relies on the physical column
--   order of MW_GEN_STATS.

DELETE FROM MW_GEN_STATS WHERE STAT_TYPE LIKE '%HW_%';

-- ------------------------------------------------------------------ TOMCAT
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_VIRTUAL INTEL', 'TOMCAT',
  (SELECT COUNT(model) "VIRTUAL INTEL" FROM TOMCAT_GEN_UNIQUE_HOSTS
   WHERE model = 'VMWARE VIRTUAL PLATFORM'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_VIRTUAL SPARC', 'TOMCAT',
  (SELECT COUNT(model) "VIRTUAL SPARC" FROM TOMCAT_GEN_UNIQUE_HOSTS
   WHERE model LIKE 'SPARC T%' OR model LIKE '%SOLARIS ZONE%'), TRUNC(SYSDATE));
-- NOTE(review): the G6 bucket also matches '%PROLIANT%G5%'; looks like a
-- copy-paste remnant from the G5 bucket -- confirm.
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G6', 'TOMCAT',
  (SELECT COUNT(model) "INTEL G6" FROM TOMCAT_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%G5%' OR model LIKE '%G6%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G7', 'TOMCAT',
  (SELECT COUNT(model) "INTEL G7" FROM TOMCAT_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%' AND model LIKE '%G7%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G8', 'TOMCAT',
  (SELECT COUNT(model) "INTEL G8" FROM TOMCAT_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%' AND model LIKE '%GEN8%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G5', 'TOMCAT',
  (SELECT COUNT(model) "INTEL G5" FROM TOMCAT_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%' AND model LIKE '%G5%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_PHYSICAL SPARC', 'TOMCAT',
  (SELECT COUNT(model) "PHYSICAL SPARC" FROM TOMCAT_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%FIRE%' OR model LIKE '%SPARC ENTERPRISE%'
      OR model LIKE 'T52%' OR model LIKE 'T54%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_IBM', 'TOMCAT',
  (SELECT COUNT(model) "IBM" FROM TOMCAT_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%IBM%' OR model LIKE '%POWER%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_OTHER_HW', 'TOMCAT',
  (SELECT COUNT(model) "OTHER_HW" FROM TOMCAT_GEN_UNIQUE_HOSTS
   WHERE model NOT LIKE '%IBM%'
     AND model NOT LIKE '%PROLIANT%'
     AND model NOT LIKE '%VMWARE%'
     AND model NOT LIKE '%SPARC T%'
     AND model NOT LIKE '%SPARC ENTERPRISE%'
     AND model NOT LIKE 'T52%'
     AND model NOT LIKE 'T54%'
     AND model NOT LIKE '%POWER%'), TRUNC(SYSDATE));

-- ---------------------------------------------------------------------- WL
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_VIRTUAL INTEL', 'WL',
  (SELECT COUNT(model) "VIRTUAL INTEL" FROM WL_GEN_UNIQUE_HOSTS
   WHERE model = 'VMWARE VIRTUAL PLATFORM'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_VIRTUAL SPARC', 'WL',
  (SELECT COUNT(model) "VIRTUAL SPARC" FROM WL_GEN_UNIQUE_HOSTS
   WHERE model LIKE 'SPARC T%' OR model LIKE '%SOLARIS ZONE%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G6', 'WL',
  (SELECT COUNT(model) "INTEL G6" FROM WL_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%G5%' OR model LIKE '%G6%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G7', 'WL',
  (SELECT COUNT(model) "INTEL G7" FROM WL_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%' AND model LIKE '%G7%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G8', 'WL',
  (SELECT COUNT(model) "INTEL G8" FROM WL_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%' AND model LIKE '%GEN8%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G5', 'WL',
  (SELECT COUNT(model) "INTEL G5" FROM WL_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%' AND model LIKE '%G5%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_PHYSICAL SPARC', 'WL',
  (SELECT COUNT(model) "PHYSICAL SPARC" FROM WL_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%FIRE%' OR model LIKE '%SPARC ENTERPRISE%'
      OR model LIKE 'T52%' OR model LIKE 'T54%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_IBM', 'WL',
  (SELECT COUNT(model) "IBM" FROM WL_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%IBM%' OR model LIKE '%POWER%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_OTHER', 'WL',
  (SELECT COUNT(model) "OTHER_HW" FROM WL_GEN_UNIQUE_HOSTS
   WHERE model NOT LIKE '%IBM%'
     AND model NOT LIKE '%PROLIANT%'
     AND model NOT LIKE '%VMWARE%'
     AND model NOT LIKE '%SPARC T%'
     AND model NOT LIKE '%SPARC ENTERPRISE%'
     AND model NOT LIKE 'T52%'
     AND model NOT LIKE 'T54%'
     AND model NOT LIKE '%POWER%'), TRUNC(SYSDATE));

-- ------------------------------------------------------------------ APACHE
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_VIRTUAL INTEL', 'APACHE',
  (SELECT COUNT(model) "VIRTUAL INTEL" FROM APACHE_GEN_UNIQUE_HOSTS
   WHERE model = 'VMWARE VIRTUAL PLATFORM'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_VIRTUAL SPARC', 'APACHE',
  (SELECT COUNT(model) "VIRTUAL SPARC" FROM APACHE_GEN_UNIQUE_HOSTS
   WHERE model LIKE 'SPARC T%' OR model LIKE '%SOLARIS ZONE%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G6', 'APACHE',
  (SELECT COUNT(model) "INTEL G6" FROM APACHE_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%G5%' OR model LIKE '%G6%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G7', 'APACHE',
  (SELECT COUNT(model) "INTEL G7" FROM APACHE_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%' AND model LIKE '%G7%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G8', 'APACHE',
  (SELECT COUNT(model) "INTEL G8" FROM APACHE_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%' AND model LIKE '%GEN8%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_INTEL G5', 'APACHE',
  (SELECT COUNT(model) "INTEL G5" FROM APACHE_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%PROLIANT%' AND model LIKE '%G5%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_PHYSICAL SPARC', 'APACHE',
  (SELECT COUNT(model) "PHYSICAL SPARC" FROM APACHE_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%FIRE%' OR model LIKE '%SPARC ENTERPRISE%'
      OR model LIKE 'T52%' OR model LIKE 'T54%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_IBM', 'APACHE',
  (SELECT COUNT(model) "IBM" FROM APACHE_GEN_UNIQUE_HOSTS
   WHERE model LIKE '%IBM%' OR model LIKE '%POWER%'), TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('HW_OTHER_HW', 'APACHE',
  (SELECT COUNT(model) "OTHER_HW" FROM APACHE_GEN_UNIQUE_HOSTS
   WHERE model NOT LIKE '%IBM%'
     AND model NOT LIKE '%PROLIANT%'
     AND model NOT LIKE '%VMWARE%'
     AND model NOT LIKE '%SPARC T%'
     AND model NOT LIKE '%SPARC ENTERPRISE%'
     AND model NOT LIKE 'T52%'
     AND model NOT LIKE 'T54%'
     AND model NOT LIKE '%POWER%'), TRUNC(SYSDATE));

-- Virtualization percentage per product: virtual hosts / all hosts * 100.
-- The comma join of two single-row subqueries is written as an explicit
-- CROSS JOIN (identical result, clearer intent).
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('VIRTUALIZATION', 'APACHE',
  (SELECT ROUND(VIRTUAL / ALL_ * 100) "APACHE VIRTUALIZATION"
   FROM (SELECT COUNT(model) "VIRTUAL" FROM APACHE_GEN_UNIQUE_HOSTS
         WHERE model = 'VMWARE VIRTUAL PLATFORM'
            OR model LIKE 'SPARC T%' OR model LIKE 'SOLARIS ZONE%')
   CROSS JOIN (SELECT COUNT(model) "ALL_" FROM APACHE_GEN_UNIQUE_HOSTS)),
  TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('VIRTUALIZATION', 'WL',
  (SELECT ROUND(VIRTUAL / ALL_ * 100) "WLS VIRTUALIZATION"
   FROM (SELECT COUNT(model) "VIRTUAL" FROM WL_GEN_UNIQUE_HOSTS
         WHERE model = 'VMWARE VIRTUAL PLATFORM'
            OR model LIKE 'SPARC T%' OR model LIKE 'SOLARIS ZONE%')
   CROSS JOIN (SELECT COUNT(model) "ALL_" FROM WL_GEN_UNIQUE_HOSTS)),
  TRUNC(SYSDATE));
INSERT INTO APX_ASA.MW_GEN_STATS VALUES ('VIRTUALIZATION', 'TOMCAT',
  (SELECT ROUND(VIRTUAL / ALL_ * 100) "TOMCAT VIRTUALIZATION"
   FROM (SELECT COUNT(model) "VIRTUAL" FROM TOMCAT_GEN_UNIQUE_HOSTS
         WHERE model = 'VMWARE VIRTUAL PLATFORM'
            OR model LIKE 'SPARC T%' OR model LIKE 'SOLARIS ZONE%')
   CROSS JOIN (SELECT COUNT(model) "ALL_" FROM TOMCAT_GEN_UNIQUE_HOSTS)),
  TRUNC(SYSDATE));

COMMIT;
EXIT;
-- phpMyAdmin SQL Dump
-- version 4.8.5
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Aug 09, 2021 at 01:04 PM
-- Server version: 10.1.38-MariaDB
-- PHP Version: 5.6.40
--
-- Full dump of the `sayu` agro-shop database: three product tables
-- (madawa = pesticides, mbegu = seeds, mbolea = fertilizers -- Swahili) each
-- keyed to a row in `users` via users_id, plus the `users` account table.
-- NOTE(review): quantity and price are free-text VARCHAR (e.g. '20000/=');
-- they cannot be aggregated numerically as stored.
-- NOTE(review): users.password appears to hold plain-text values; confirm
-- hashing happens before insert.

SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;

--
-- Database: `sayu`
--

-- --------------------------------------------------------

--
-- Table structure for table `madawa`
--

CREATE TABLE `madawa` (
  `id` int(11) NOT NULL,
  `users_id` int(11) NOT NULL,
  `name` varchar(200) NOT NULL,
  `quantity` varchar(200) NOT NULL,
  `price` varchar(100) NOT NULL,
  `postdate` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

--
-- Dumping data for table `madawa`
--

INSERT INTO `madawa` (`id`, `users_id`, `name`, `quantity`, `price`, `postdate`) VALUES
(5, 6, 'booster', 'mfuko mmoja', '20000/=', '2021-08-03 12:26:35'),
(6, 6, 'Sumu', 'lita: 1', '22000/=', '2021-08-03 12:26:22'),
(7, 8, 'booster', 'lita: 1', '20000/=', '2021-07-18 19:03:02'),
(8, 6, 'Randam', 'lita: 1', '20000/=', '2021-07-19 06:08:51'),
(9, 12, 'randap', 'mfuko mmoja', '22000/=', '2021-08-07 13:16:49'),
(10, 12, 'booster', 'lita: 1', '23000/=', '2021-08-07 13:17:03'),
(11, 14, 'booster', 'lita: 1', '20000/=', '2021-08-08 23:53:37'),
(12, 15, 'booster', 'lita: 1', '2100/=', '2021-08-09 00:00:47'),
(13, 10, 'Carate', 'lita: 1', '15000/=', '2021-08-09 06:44:21'),
(14, 10, 'roundup', 'lita: 1', '14000/=', '2021-08-09 06:45:43'),
(15, 11, 'rondo', 'lita: 1', '13000/=', '2021-08-09 06:51:56'),
(16, 16, 'Carate', 'lita: 1', '16000/=', '2021-08-09 07:19:06'),
(17, 16, 'roundup', 'lita: 1', '15000/=', '2021-08-09 07:20:01'),
(18, 17, 'rondo', 'lita: 1', '15000/=', '2021-08-09 07:37:08'),
(19, 17, 'Carate', 'lita: 1', '15500/=', '2021-08-09 07:37:55'),
(20, 13, 'roundup', 'lita: 1', '15000/=', '2021-08-09 07:42:10');

-- --------------------------------------------------------

--
-- Table structure for table `mbegu`
--

CREATE TABLE `mbegu` (
  `id` int(11) NOT NULL,
  `users_id` int(11) NOT NULL,
  `name` varchar(200) NOT NULL,
  `quantity` varchar(200) NOT NULL,
  `price` varchar(100) NOT NULL,
  `postdate` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

--
-- Dumping data for table `mbegu`
--

INSERT INTO `mbegu` (`id`, `users_id`, `name`, `quantity`, `price`, `postdate`) VALUES
(1, 6, 'seedco', 'mfuko mmoja', '5200/=', '2021-08-03 12:22:46'),
(3, 8, 'alizeti', 'mfuko mmoja', '4500/=', '2021-08-03 12:22:33'),
(4, 6, 'soya', 'mfuko mmoja', '5520/=', '2021-08-03 08:37:07'),
(5, 12, 'seedco', 'mfuko mmoja', '3200/=', '2021-08-07 13:13:32'),
(6, 12, 'carrot', 'nusu kilo', '5100/=', '2021-08-07 13:13:47'),
(7, 13, 'tomato', 'gramu 50', '5200/=', '2021-08-08 14:14:02'),
(8, 13, 'carrot', 'gramu 50', '6000/=', '2021-08-08 23:13:53'),
(9, 14, 'SEEDCO', 'mfuko mmoja', '57000/=', '2021-08-08 23:53:10'),
(10, 15, 'carrot', 'gramu 50', '5000/=', '2021-08-08 23:59:33'),
(11, 11, 'SEEDCO', 'mfuko mmoja', '58000/=', '2021-08-09 06:49:59'),
(12, 11, 'tomato', 'gramu 50', '52000/=', '2021-08-09 06:51:10'),
(13, 16, 'seedco', 'mfuko mmoja', '3000/=', '2021-08-09 07:16:28'),
(14, 16, 'spinachi', 'gramu 50', '2500/=', '2021-08-09 07:17:36'),
(15, 17, 'maharage', 'plastiki moja ya lita 20', '35000/=', '2021-08-09 07:35:55'),
(16, 17, 'seedco', 'mfuko mmoja', '3000/=', '2021-08-09 07:36:24');

-- --------------------------------------------------------

--
-- Table structure for table `mbolea`
--

CREATE TABLE `mbolea` (
  `id` int(11) NOT NULL,
  `users_id` int(11) NOT NULL,
  `name` varchar(200) NOT NULL,
  `quantity` varchar(200) NOT NULL,
  `price` varchar(100) NOT NULL,
  `postdate` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

--
-- Dumping data for table `mbolea`
--

INSERT INTO `mbolea` (`id`, `users_id`, `name`, `quantity`, `price`, `postdate`) VALUES
(11, 3, 'CAN', 'mfuko mmoja', '57200/=', '2021-07-09 06:47:57'),
(12, 2, 'NPK', 'mfuko mmoja', '58000/=', '2021-07-09 12:11:55'),
(15, 6, 'CAN', 'mfuko mmoja', '53200/=', '2021-08-03 12:19:55'),
(16, 6, 'NPK', 'mfuko mmoja', '58000/=', '2021-08-03 12:21:38'),
(19, 6, 'CAN', 'mfuko mmoja', '52000/=', '2021-07-19 04:04:46'),
(20, 6, 'UREA', 'mfuko mmoja', '57000/=', '2021-08-03 21:23:47'),
(21, 6, 'UREA', 'mfuko mmoja', '57000/=', '2021-08-03 21:58:25'),
(22, 12, 'UREA', 'mfuko mmoja', '57000/=', '2021-08-05 17:17:55'),
(23, 7, 'UREA', 'mfuko mmoja', '53000/=', '2021-08-06 22:39:45'),
(24, 7, 'CAN', 'mfuko mmoja', '57000/=', '2021-08-06 22:38:03'),
(25, 12, 'CAN', 'mfuko mmoja', '57000/=', '2021-08-07 22:05:43'),
(26, 13, 'UREA', 'mfuko mmoja', '58200/=', '2021-08-08 14:08:40'),
(27, 13, 'CAN', 'mfuko mmoja', '59000/=', '2021-08-08 14:08:54'),
(28, 14, 'NPK', 'mfuko mmoja', '10000', '2021-08-08 23:52:17'),
(29, 15, 'UREA', 'mfuko mmoja', '58000/=', '2021-08-08 23:58:52'),
(30, 15, 'DAP', 'mfuko mmoja', '53000/=', '2021-08-08 23:59:11'),
(31, 10, 'DAP', 'mfuko mmoja', '53000/=', '2021-08-09 06:40:18'),
(32, 10, 'CAN', 'mfuko mmoja', '58000/=', '2021-08-09 06:40:48'),
(33, 11, 'NPK', 'mfuko mmoja', '62000/=', '2021-08-09 06:47:43'),
(34, 11, 'UREA', 'mfuko mmoja', '57000/=', '2021-08-09 06:48:41'),
(35, 16, 'CAN', 'mfuko mmoja', '57200/=', '2021-08-09 07:13:18'),
(36, 16, 'DAP', 'mfuko mmoja', '57000/=', '2021-08-09 07:13:51'),
(37, 17, 'UREA', 'mfuko mmoja', '53000/=', '2021-08-09 07:29:49'),
(38, 17, 'CAN', 'mfuko mmoja', '57200/=', '2021-08-09 07:30:16');

-- --------------------------------------------------------

--
-- Table structure for table `users`
--

CREATE TABLE `users` (
  `id` int(11) NOT NULL,
  `fname` varchar(200) NOT NULL,
  `lname` varchar(200) NOT NULL,
  `password` varchar(50) NOT NULL,
  `email` varchar(100) NOT NULL,
  `location` varchar(300) NOT NULL,
  `phonenumber` varchar(11) NOT NULL,
  `shopname` varchar(200) NOT NULL,
  `role` varchar(50) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

--
-- Dumping data for table `users`
--

INSERT INTO `users` (`id`, `fname`, `lname`, `password`, `email`, `location`, `phonenumber`, `shopname`, `role`) VALUES
(6, 'said', 'husen', '123', 'said@gmail.com', 'kabwe', '0758990120', 'kishiri shop', 'admin'),
(7, 'vise', 'zao', '321', 'vise@gmail.com', 'karinga', '0768990911', 'ruparupa shop', 'normal'),
(8, 'yusuph', 'simon', '111111111', 'yusuph@gmail.com', 'mwanjelwa', '0758990120', 'happylife shop', 'admin'),
(9, 'peter', 'zao', '213', 'peter@gmail.com', 'mwanjelwa', '0768990911', 'maperere shop', 'normal'),
(10, 'mati', 'musa', '987', 'mati@gmail.com', 'mwasyoge', '0768990911', 'greenshop', 'normal'),
(11, 'mamba', 'mamba', '111', 'musa@gmail.com', 'mwasyoge', '0624574432', 'mambashop', 'normal'),
(12, 'john', 'john', '222', 'sam@gmail.com', 'soweto', '0768990911', 'makiwa shop', 'normal'),
(13, 'kenge', 'kenge', '123', 'deo@gmail.com', 'mbata', '0624763332', 'deokilimo shop', 'normal'),
(14, 'hussein', 'shaban', '555', 'husseinshan@gmail.com', 'kabwe', '0743339065', 'kasimiri shop', 'normal'),
(15, 'yusuph', 'smon', 'yusuph', 'yusup@mail.com', 'mwanjelwa', '0624673332', 'yusuph shop', 'normal'),
(16, 'masawe', 'noa', '111', 'masawe@gmail.com', 'nyibuko', '0624574432', 'masaweshop', 'normal'),
(17, 'zakayo', 'musa', '222', 'zakayo@gmail.com', 'utukuyu', '0750111111', 'zakashop', 'normal');

--
-- Indexes for dumped tables
--

--
-- Indexes for table `madawa`
--
ALTER TABLE `madawa`
  ADD PRIMARY KEY (`id`);

--
-- Indexes for table `mbegu`
--
ALTER TABLE `mbegu`
  ADD PRIMARY KEY (`id`);

--
-- Indexes for table `mbolea`
--
ALTER TABLE `mbolea`
  ADD PRIMARY KEY (`id`);

--
-- Indexes for table `users`
--
ALTER TABLE `users`
  ADD PRIMARY KEY (`id`);

--
-- AUTO_INCREMENT for dumped tables
--

--
-- AUTO_INCREMENT for table `madawa`
--
ALTER TABLE `madawa`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=21;

--
-- AUTO_INCREMENT for table `mbegu`
--
ALTER TABLE `mbegu`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=17;

--
-- AUTO_INCREMENT for table `mbolea`
--
ALTER TABLE `mbolea`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=39;

--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=18;
COMMIT;

/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- --------------------------------------------------------
-- Host: 127.0.0.1
-- Server version: 10.3.22-MariaDB - mariadb.org binary distribution
-- Operating system: Win64
-- HeidiSQL version: 11.0.0.5958
-- --------------------------------------------------------
-- Phone-book dump: phone_user (people) 1-to-many phone_item (numbers,
-- linked by user_id). Both tables are dropped, recreated and reseeded.

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET NAMES utf8 */;
/*!50503 SET NAMES utf8mb4 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;

-- Dump of structure for table yii.phone_item
DROP TABLE IF EXISTS `phone_item`;
CREATE TABLE IF NOT EXISTS `phone_item` (
  -- owning phone_user.id (no FK constraint declared)
  `user_id` int(11) NOT NULL,
  `number` bigint(11) NOT NULL DEFAULT 0
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- Dumping data for table yii.phone_item: ~3 rows (approximately)
DELETE FROM `phone_item`;
/*!40000 ALTER TABLE `phone_item` DISABLE KEYS */;
INSERT INTO `phone_item` (`user_id`, `number`) VALUES
 (1, 89991234567),
 (2, 89997654321),
 (1, 89991357924);
/*!40000 ALTER TABLE `phone_item` ENABLE KEYS */;

-- Dump of structure for table yii.phone_user
DROP TABLE IF EXISTS `phone_user`;
CREATE TABLE IF NOT EXISTS `phone_user` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
  `surname` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
  `midname` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
  `edit_date` datetime NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=5 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- Dumping data for table yii.phone_user: ~3 rows (approximately)
DELETE FROM `phone_user`;
/*!40000 ALTER TABLE `phone_user` DISABLE KEYS */;
INSERT INTO `phone_user` (`id`, `name`, `surname`, `midname`, `edit_date`) VALUES
 (1, 'Егор', 'Деревянко', 'Петрович', '2021-09-02 01:56:31'),
 (2, 'Пётр', 'Симонов', 'Андреевич', '2021-09-02 22:23:07'),
 (3, 'Алексей', 'Киренец', 'Викторович', '2021-09-02 22:23:33');
/*!40000 ALTER TABLE `phone_user` ENABLE KEYS */;

/*!40101 SET SQL_MODE=IFNULL(@OLD_SQL_MODE, '') */;
/*!40014 SET FOREIGN_KEY_CHECKS=IF(@OLD_FOREIGN_KEY_CHECKS IS NULL, 1, @OLD_FOREIGN_KEY_CHECKS) */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
////////////////////////////////////////////////////////////////////////////////
//
// Reference (lookup) nodes for the PARR graph.
//
// Every section below follows the same pattern: UNWIND a literal
// [uid, name] list, MERGE the node on its uid (so the script is idempotent),
// then SET/refresh its display name.
//
// FIXES applied in this revision (data strings only, uids unchanged):
//   - 'Fisheries Habitat Restoration Initiative ' — trailing space trimmed
//   - 'he total length (m)...'                    — restored leading 'T'
//   - 'activites'            -> 'activities'
//   - 'a aqueduct'           -> 'an aqueduct'
//   - 'hydrometers stations' -> 'hydrometer stations'
//   - 'occurring to ensures' -> 'occurring to ensure'
//   - stray trailing whitespace inside descriptions normalized
//
// DFO Area
//
UNWIND [
    ['FIA', 'Fraser and Interior Area'],
    ['NCA', 'North Coast Area'],
    ['SCA', 'South Coast Area'],
    ['Yukon', 'Yukon']
] AS dfo_area
WITH dfo_area
MERGE (n:DFO_AREA {uid: dfo_area[0]})
SET n.name = dfo_area[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Policy and Program Connections
//
UNWIND [
    ['Species At Risk Act (SARA) Recovery Plans', 'Project supports implementation of priority activities described in federal recovery strategies, action plans or management plans for listed Species at Risk.'],
    ['COSEWIC Assessed Populations', 'Activities targeting species without federal recovery documents that seek to address habitats, threats and other considerations identified in COSEWIC assessments.'],
    ['WSP Implementation', 'Activities contribute to the WSP Implementation Plan at the watershed/CU level to advance Implementation Strategies.'],
    ['Fisheries Act Rebuilding Plans', 'Placeholder to be defined.'],
    ['Southern BC Chinook Initiative', 'Activities directly link to SBC high-level strategic plan that includes trends in aggregated CU and habitat status, limiting factors and threats, objectives, and management strategies.']
] AS pr_policy
WITH pr_policy
MERGE (n:POLICY_PROGRAM {uid: pr_policy[0]})
SET n.name = pr_policy[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// G&C Funding Sources
//
UNWIND [
    ['AFSAR', 'Aboriginal Fund for Species at Risk'],
    ['AHRF', 'Aquatic Habitat Restoration Fund'],
    ['BCSRIF', 'British Columbia Salmon Restoration Innovation Fund'],
    ['CNFASAR', 'Canadian Nature Fund for Aquatic Species at Risk'],
    ['CRF', 'Coastal Restoration Fund'],
    ['FHRI', 'Fisheries Habitat Restoration Initiative'], // FIX: trailing space removed from name
    ['HSP', 'Habitat Stewardship Program for Aquatic Species at Risk'],
    ['IHPP', 'Indigenous Habitat Participation Program'],
    ['SEP', 'Salmon Enhancement Program'],
    ['RFCPP', 'Recreational Fisheries Conservation Partnership Program']
] AS pr_fndsrc
WITH pr_fndsrc
MERGE (n:GC_FUNDING_PROGRAM {uid: pr_fndsrc[0]})
SET n.name = pr_fndsrc[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Project Phase
//
UNWIND [
    ['Proposed', 'A project that has been proposed, but has not undergone the planning and design and feasibility phase.'],
    ['Planning', 'A proposed project in the developmental and technical planning stage, typically undergoing design and feasibility analysis.'],
    ['Active', 'A project that is currently underway and being implemented (i.e. the project is being physically executed including activities such as construction, maintenance, site assessment, etc.).'],
    ['Completed', 'A project that was implemented and completed. This project may or may not be maintained and/or monitored after completion.']
] AS pr_phase
WITH pr_phase
MERGE (n:PROJECT_PHASE {uid: pr_phase[0]})
SET n.name = pr_phase[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Project Primary Activities
//
UNWIND [
    ['Design and Feasibility', 'The development, technical planning, and/or feasibility analysis of a restoration project not in the project implementation stage (i.e. the actual physical application of a project).'],
    ['Decommissioning', 'The planned shut-down or removal of infrastructure, equipment, facilities, etc. from operation or usage.'],
    ['Implementation', 'The physical application of tasks for a project resulting from planning (e.g. habitat or infrastructure construction, earth moving, improvements to physical infrastructure, consultations/workshops).'],
    ['Maintenance', 'The physical application of tasks for a project on existing infrastructure with the intention of maintaining and/or modifying existing efficiency (i.e. not improvement).'],
    ['Stewardship', 'The application of tasks for a project with a large component of community involvement to promote salmon stewardship and salmon watershed conservation (e.g. Stream to Sea Education Program).'],
    ['Research and Monitoring', 'The application of research/monitoring tasks for a project with the intention of data collection to address information gaps. This can include the collection of baseline information to inform design.']
] AS pr_prmact
WITH pr_prmact
MERGE (n:PROJECT_ACTIVITY {uid: pr_prmact[0]})
SET n.name = pr_prmact[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Eco System Types
//
UNWIND [
    ['Freshwater', 'Aquatic ecosystem with naturally occurring water that is neither seawater or brackish. Includes lakes, ponds, rivers, streams, and creeks. Includes Riparian.'],
    ['Estuarine', 'Aquatic ecosystem with naturally occurring water that is brackish and found at the interface where freshwater, usually from river and streams, mix with saltwater from the ocean.'],
    ['Marine', 'Aquatic ecosystem with naturally occurring water that is saltwater']
] AS ecosystyp
WITH ecosystyp
MERGE (n:ECO_SYSTEM_TYPE {uid: ecosystyp[0]})
SET n.name = ecosystyp[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Target Species
// Nodes carry both TARGET_SPECIES and SPECIES labels.
//
UNWIND [
    ['BT', 'Bull Trout (Salvelinus confluentus) targeted in restoration activities.'],
    ['CH', 'Chinook Salmon (Oncorhynchus tshawytscha) targeted in restoration activities.'],
    ['CM', 'Chum Salmon (Oncorhynchus keta) targeted in restoration activities.'],
    ['CO', 'Coho Salmon (Oncorhynchus kisutch) targeted in restoration activities.'],
    ['CT', 'Cutthroat Trout (Oncorhynchus clarkii) targeted in restoration activities.'],
    ['DV', 'Dolly Varden (Salvelinus malma) targeted in restoration activities.'],
    ['PK', 'Pink Salmon (Oncorhynchus gorbuscha) targeted in restoration activities.'],
    ['RB', 'Rainbow Trout (Oncorhynchus mykiss) targeted in restoration activities.'],
    // NOTE(review): genus is usually spelled 'Rhinichthys' — confirm against the
    // source of record before correcting the data value below.
    ['UDC', 'Umatilla Dace (Rhynichthys umatilla) targeted in restoration activities.'],
    ['SG', 'Sturgeon (General) targeted in restoration activities.'],
    ['SK', 'Sockeye Salmon (Oncorhynchus nerka) targeted in restoration activities.'],
    ['SSU', 'Salish Sucker (Catostomus sp.) targeted in restoration activities.'],
    ['ST', 'Steelhead (Oncorhynchus mykiss) targeted in restoration activities.'],
    ['WCT', 'Westslope (Yellowstone) Cutthroat Trout (Oncorhynchus clarki lewisi) targeted in restoration activities.'],
    ['WSG', 'White Sturgeon (Acipenser transmontanus) targeted in restoration activities.'],
    ['Other', 'Other aquatic species targeted in restoration activities provided as a list using the following format: CCA, NP, intertidal bivalves.'] // FIX: 'activites' -> 'activities'
] AS tgtspc
WITH tgtspc
MERGE (n:TARGET_SPECIES:SPECIES {uid: tgtspc[0]})
SET n.name = tgtspc[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Life Stages
//
UNWIND [
    ['Returning adult', 'Migrating adult and spawner stages.'],
    ['Estuarine juvenile', 'Marine and estuarine rearing life stages as juveniles grow into adults.'],
    ['Freshwater juvenile', 'Freshwater rearing and over-wintering life stages including fry, parr, and migrating smolt.'],
    ['Incubation', 'Inter-gravel development phase including the egg and alevin life cycle stages.']
] AS lfestg
WITH lfestg
MERGE (n:LIFE_STAGE {uid: lfestg[0]})
SET n.name = lfestg[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Restoration Activities
//
UNWIND [
    ['Fish passage', 'Removal and/or remediation of obstructions to improve access to habitat above and below those obstructions. Includes maintenance and effectiveness monitoring of fish passage removal structures.'],
    ['Riparian restoration and management', 'Restoration activities focused on re-establishing riparian habitat (e.g. riparian planting, riparian fencing, riparian bank stabilization, invasive species control, treatment, etc.). Includes maintenance and effectiveness monitoring of riparian habitat.'],
    ['Estuarine restoration', 'Restoration activities focused on re-establishing estuarine habitat (e.g. distributary channels, breaching, marsh building, eelgrass planting, invasive species control, etc.). Includes maintenance and effectiveness monitoring of estuarine habitat.'],
    ['Nearshore and marine restoration', 'Restoration activities focused on re-establishing nearshore and marine habitat (e.g. bull-kelp planting, saltmarsh bench creation, shoreline stabilization, invasive species control, etc.). Includes maintenance and effectiveness monitoring of nearshore and marine habitat.'],
    ['Instream structure', 'Restoration activities involving rehabilitation or manipulation of instream habitat through the placement of natural and/or man-made materials (e.g. LWD, rocks, boulders, gravel, instream bank stabilization, etc.) to support channel structure and function. Includes maintenance and effectiveness monitoring of instream habitat.'],
    ['Instream flow', 'Restoration activities focused on re-establishing instream flow regimes (e.g. water storage and releases, reducing water withdrawals, etc). Includes maintenance and effectiveness monitoring of instream flow.'],
    ['Floodplain connectivity', 'Restoration activities that improves floodplain connectivity. For example, activities that include the development of alcoves, side channels, off-channels and groundwater channels that lie adjacent to and connect to the main river stem. Includes maintenance and effectiveness monitoring of floodplain connectivity.'],
    ['Watershed planning and assessment', 'Broad implementation of high-level watershed recovery plans including stakeholder involvement and management action. Includes watershed assessments to identify restoration options and sequencing.'],
    ['Nutrient supplementation', 'Activities focussed on improving the physical, chemical and biological characteristics of freshwater stream and lake habitats (e.g. carcass placement, stream and lake fertilization, etc.).'],
    ['NA', 'No restoration activities were completed.']
] AS resact
WITH resact
MERGE (n:RESTORATION_ACTIVITY {uid: resact[0]})
SET n.name = resact[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Habitat Outcome Metric
//
// NOTE(review): several descriptions below do not match their metric names and
// look pairwise swapped (e.g. 'Stream lengths (km) habitat made accessible'
// describes square-metres maintained; 'Square-metres habitat maintained'
// describes stream lengths). They are reproduced as-is — verify the intended
// name/description pairing against the source of record before re-pairing.
//
UNWIND [
    ['Number of obstructions removed', 'The total number of obstructions removed for fish passage. Obstructions include dams, road crossings, berms, tidal gates, culverts or any other feature that impedes the upstream or downstream movement of fish.'],
    ['Number of fish screens installed', 'The total number of fish screens installed to prevent fish from being drawn into an aqueduct, water intake, dam, or other diversion on a river, lake, or waterway.'], // FIX: 'a aqueduct' -> 'an aqueduct'
    ['Stream lengths (km) habitat made accessible', 'The total square-metres of habitat maintained after fish passage restoration activities have been implemented (e.g. removal or remediation of an obstruction). Obstructions include dams, road crossings, berms, tidal gates, culverts or any other feature that impedes the upstream or downstream movement of fish.'],
    ['Square-metres habitat maintained', 'The total stream lengths (km) of habitat maintained after fish passage restoration activities have been implemented (e.g. removal or remediation of an obstruction). Obstructions include dams, road crossings, berms, tidal gates, culverts or any other feature that impedes the upstream or downstream movement of fish.'],
    ['Stream lengths (m) habitat maintained', 'The total square-metres of habitat monitored after fish passage restoration activities have been implemented (e.g. removal or remediation of an obstruction). Obstructions include dams, road crossings, berms, tidal gates, culverts or any other feature that impedes the upstream or downstream movement of fish.'],
    ['Square-metres habitat monitored', 'The total stream lengths (km) of habitat monitored after fish passage restoration activities have been implemented (e.g. removal or remediation of an obstruction). Obstructions include dams, road crossings, berms, tidal gates, culverts or any other feature that impedes the upstream or downstream movement of fish.'],
    ['Stream lengths (m) habitat monitored', 'The total square-metres of habitat (e.g. riparian, instream, floodplain, estuarine, nearshore, etc.) made accessible for fish passage after the removal or remediation of an obstruction. Obstructions include dams, road crossings, berms, tidal gates, culverts or any other feature that impedes the upstream or downstream movement of fish.'],
    ['Square-metres habitat made accessible', 'The total stream lengths (km) of habitat (e.g. riparian, instream, floodplain etc.) made accessible for fish passage after the removal or remediation of an obstruction. Obstructions include dams, road crossings, berms, tidal gates, culverts or any other feature that impedes the upstream or downstream movement of fish.'],
    ['Square-metres riparian habitat treated', 'The total square-metres of riparian habitat treated through activities such as riparian planting, stand management, riparian fencing, bank stabilization, invasive species control, and riparian treatment.'],
    ['Square-metres riparian habitat created', 'The total square-metres of riparian habitat created through activities such as riparian planting.'],
    ['Stream lengths (m) riparian habitat treated', 'The total stream lengths (m) of riparian habitat treated through activities such as riparian planting, riparian fencing, bank stabilization, invasive species control, and riparian treatment.'],
    ['Stream lengths (m) riparian habitat created', 'The total stream lengths (m) of riparian habitat created through activities such as riparian planting.'],
    ['Square-metres riparian habitat maintained', 'The total square-metres of riparian habitat maintained after riparian restoration activities have been implemented (e.g. riparian planting, riparian fencing, bank stabilization, invasive species control, and riparian treatment).'],
    ['Stream lengths (m) riparian habitat maintained', 'The total stream lengths (m) of riparian habitat maintained after riparian restoration activities have been implemented (e.g. riparian planting, riparian fencing, bank stabilization, invasive species control, and riparian treatment).'],
    ['Square-metres riparian habitat monitored', 'The total square-metres of riparian habitat monitored after riparian restoration activities have been implemented (e.g. riparian planting, riparian fencing, bank stabilization, invasive species control, and riparian treatment).'],
    ['Stream lengths (m) riparian habitat monitored', 'The total stream lengths (m) of riparian habitat monitored after riparian restoration activities have been implemented (e.g. riparian habitat treated or created through riparian planting, riparian fencing, bank stabilization, invasive species control, and riparian treatment).'],
    ['Square-metres estuarine habitat treated', 'The total square-metres of estuarine habitat treated through activities such as distributary channeling, breaching, marsh building, estuarine vegetation transplanting, and invasive species control.'],
    ['Square-metres estuarine habitat created', 'The total square-metres of estuarine habitat created through activities such as marsh building and eelgrass planting.'],
    ['Square-metres estuarine habitat maintained', 'The total square-metres of estuarine habitat maintained after estuarine restoration activities have been implemented (e.g. distributary channeling, breaching, marsh building, eelgrass planting, and invasive species control).'],
    ['Square-metres estuarine habitat monitored', 'The total square-metres of estuarine habitat monitored after estuarine restoration activities have been implemented (e.g. distributary channeling, breaching, marsh building, eelgrass planting, and invasive species control).'],
    ['Square-metres nearshore and marine habitat treated', 'The total square-metres of nearshore and marine habitat treated through activities such as nearshore and marine vegetation transplanting, shoreline stabilization, and invasive species control.'],
    ['Square-metres nearshore and marine habitat created', 'The total square-metres of nearshore and marine habitat created through activities such as bull-kelp planting and saltmarsh bench creation.'],
    ['Length (m) marine shoreline treated', 'The total length (m) of marine shoreline treated through activities such as nearshore and marine vegetation transplanting, saltmarsh bench creation, shoreline stabilization, invasive species control.'], // FIX: restored missing leading 'T'
    ['Length (m) marine shoreline created', 'The total length (m) of marine shoreline created through activities such as bull-kelp planting and saltmarsh bench creation.'],
    ['Square-metres nearshore and marine habitat maintained', 'The total square-metres of nearshore and marine habitat maintained after nearshore/marine restoration activities have been implemented (e.g. bull-kelp planting, saltmarsh bench creation, shoreline stabilization, invasive species control).'],
    ['Lengths (m) marine shoreline maintained', 'The total length (m) of marine shoreline maintained after nearshore/marine restoration activities have been implemented (e.g. bull-kelp planting, saltmarsh bench creation, shoreline stabilization, invasive species control).'],
    ['Square-metres nearshore and marine habitat monitored', 'The total square-metres of nearshore and marine habitat monitored after nearshore/marine restoration activities have been implemented (e.g. bull-kelp planting, saltmarsh bench creation, shoreline stabilization, invasive species control).'],
    ['Length (m) marine shoreline monitored', 'The total length (m) of marine shoreline monitored after nearshore/marine restoration activities have been implemented (e.g. bull-kelp planting, saltmarsh bench creation, shoreline stabilization, invasive species control).'],
    ['Square-metres instream habitat treated', 'The total square-metres of instream habitat treated through activities such as the placement of natural and/or man-made materials (e.g. LWD, rocks, boulders, and gravel) to support channel structure and function.'],
    ['Square-metres instream habitat created', 'The total square-metres of instream habitat created through activities such as the placement of natural and/or man-made materials (e.g. LWD, rocks, boulders, and gravel) to support channel structure and function.'],
    ['Stream lengths (m) instream habitat treated', 'The total stream lengths (m) of instream habitat treated through activities such as the placement of natural and/or man-made materials (e.g. LWD, rocks, boulders, and gravel) to support channel structure and function.'],
    ['Stream lengths (m) instream habitat created', 'The total stream lengths (m) of instream habitat created through activities such as the placement of natural and/or man-made materials (e.g. LWD, rocks, boulders, and gravel) to support channel structure and function.'],
    ['Square-metres instream habitat maintained', 'The total square-metres of instream habitat maintained after instream restoration activities have been implemented (e.g. placement of natural and/or man-made materials such as LWD, rocks, boulders, and gravel).'],
    ['Stream lengths (m) instream habitat maintained', 'The total stream lengths (m) of instream habitat maintained after instream restoration activities have been implemented (e.g. placement of natural and/or man-made materials such as LWD, rocks, boulders, and gravel).'],
    ['Square-metres instream habitat monitored', 'The total square-metres of instream habitat monitored after instream restoration activities have been implemented (e.g. placement of natural and/or man-made materials such as LWD, rocks, boulders, and gravel).'],
    ['Stream lengths (m) instream habitat monitored', 'The total stream lengths (m) of instream habitat monitored after instream restoration activities have been implemented (e.g. placement of natural and/or man-made materials such as LWD, rocks, boulders, and gravel).'],
    ['Number of water use plans developed/implemented', 'The total number of water use plans developed and implemented to manage flow releases during critical flow periods.'],
    ['Number of real-time hydrometer stations installed', 'The total number of real-time hydrometer stations installed measuring water flows, levels, sediment, and temperature.'],
    ['Number of real-time hydrometer stations maintained', 'The total number of real-time hydrometer stations maintained after installment that measure water flows, levels, sediment, and temperature.'], // FIX: 'hydrometers stations' -> 'hydrometer stations'
    ['Square-metres floodplain habitat treated', 'The total square-metres of floodplain habitat treated resulting in improved floodplain connectivity. Floodplain restoration activities may include restoring or building new alcoves, side channels, off-channels, and groundwater channels.'],
    ['Square-metres floodplain habitat created', 'The total square-metres of floodplain habitat created resulting in improved floodplain connectivity. Floodplain restoration activities may include restoring or building new alcoves, side channels, off-channels, and groundwater channels.'],
    ['Square-metres floodplain habitat made accessible', 'The total square-metres of floodplain habitat made accessible through activities such as the removal of an obstruction(s) or the restoration of a floodplain feature such as alcoves, side channels, off-channels, and groundwater channels'],
    ['Square-metres floodplain habitat maintained', 'The total square-metres of floodplain habitat maintained after floodplain restoration activities have been implemented (i.e. after the creation of alcoves, side channels, off-channels, and groundwater channels).'],
    ['Square-metres floodplain habitat monitored', 'The total square-metres of floodplain habitat monitored after floodplain restoration activities have been implemented (i.e. after the creation of alcoves, side channels, off-channels, and groundwater channels).'],
    ['Number of watershed plans and assessments completed', 'The total number of watershed plans and assessments produced through technical committees, working groups, etc.'],
    ['Number of recovery plans completed', 'The total number of recovery plans completed through COSEWIC, SARA, etc. processes.'],
    ['Mass (kg) of fertilizer applied', 'The total mass (kg) of fertilizer applied to a waterbody with the goal of enhancing nutrients and productivity.'],
    ['Volume (L) of fertilizer applied', 'The total volume (L) of fertilizer applied to a waterbody with the goal of enhancing nutrients and productivity.'],
    ['Number of salmon carcasses placed', 'The total number of salmon carcasses placed near a waterbody with the goal of enhancing nutrient and productivity.'],
    ['Biomass (kg) of salmon carcasses placed', 'The total biomass (kg) of salmon carcasses placed near a waterbody with the goal of enhancing nutrient and productivity.']
] AS hbtoutmtr
WITH hbtoutmtr
MERGE (n:HABITAT_OUTCOME_METRIC {uid: hbtoutmtr[0]})
SET n.name = hbtoutmtr[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Socio-Economic Outcome
//
UNWIND [
    ['Number of volunteers involved', 'Number of volunteers involved in the planning and/or implementation of a restoration project.'],
    ['Number of hours of volunteer time donated', 'Number of volunteer hours donated to the planning and/or implementation of a restoration project.'],
    ['Number of volunteer person days donated', 'Number of volunteer days donated to the planning and/or implementation of a restoration project.'],
    ['Number of schools involved', 'Number of schools involved in the planning and/or implementation of a restoration project.'],
    ['Number of classes involved', 'Number of classes involved in the planning and/or implementation of a restoration project.'],
    ['Number of jobs created', 'Number of part-time and/or full-time employment opportunities created that employ indigenous and non-indigenous peoples.'],
    ['Number of employment days created', 'Number of days of part-time and/or full-time employment opportunities created that employ indigenous and non-indigenous peoples.'],
    ['Number of public engagement events', 'Number of days of public engagement events hosted as part of a restoration project. Includes educational/stewardship activities, local stakeholders engagement meetings, etc.'],
    ['Number of people trained', 'Number of people trained as part of a restoration project to support project planning, implementation, maintenance, and/or monitoring activities.']
] AS socecoout
WITH socecoout
MERGE (n:SOCIAL_OUTCOME_METRIC {uid: socecoout[0]})
SET n.name = socecoout[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Monitoring objectives
//
UNWIND [
    ['Baseline information', 'Monitoring to collect baseline information at a proposed restoration site to inform restoration prioritization or to support feasibility analysis.'],
    ['Construction impacts', 'Monitoring conducted during the implementation of a restoration project, typically while construction is occurring to ensure restoration activities are not harming the site during implementation.'], // FIX: 'to ensures' -> 'to ensure'
    ['Infrastructure inspection and design', 'Monitoring to determine whether or not the project was constructed as designed, if the project matches the project plan, and if the structural elements of the projects are in place and functioning.'],
    ['Biological and productivity', 'Monitoring to quantify the productivity of a restored or newly constructed habitat by measuring abundance, density, and production of target fish species (e.g. biomass or numbers of juveniles per unit area), number of juvenile fry or smolts from a spawning channel, condition factors, water quality (e.g. temperature, DO, pH), and other measures.'],
    ['Habitat structure', 'Monitoring to quantify the productivity or change in habitat structure of a restored or newly constructed habitat by measuring abundance, density, and production of vegetation, instream sedimentation, instream LWD, bank stabilization, and other measures.'],
    ['Other', 'Other monitoring objectives you would like to comment on that was not provided in our list.']
] AS monobj
WITH monobj
MERGE (n:MONITORING_OBJECTIVE {uid: monobj[0]})
SET n.name = monobj[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Monitoring Activities
//
UNWIND [
    ['Aerial surveys', 'Monitoring that employs aerial surveys.'],
    ['eDNA', 'Monitoring that employs environmental DNA (eDNA) collection from the environment (e.g. water, soil, feces).'],
    ['Electrofishing', 'Monitoring that employs an electrical current to survey fish.'],
    ['Hydrological modelling', 'Monitoring that employs statistical modeling to simulate water flows and other water characteristics.'],
    ['Invasive species surveys', 'Monitoring that employs invasive species surveys to determine changes in the abundance, density, production, and distribution of invasive species.'],
    ['Physical habitat surveys', 'Monitoring that employs surveys to determine amount of LWD, percent shade, substrate and sediment type, soil quality, and bank/shoreline stability.'],
    ['Vegetation surveys', 'Monitoring that employs vegetation surveys to determine vegetation cover, vegetation diversity, plant survival, and stem density.'],
    ['Nets and traps', 'Monitoring that employs nets (e.g. gill, seine) and traps (e.g. fyke, minnow) to survey fish and fish habitat.'],
    ['Photo point monitoring', 'Monitoring that employs repeated photo point monitoring to determine physical and visual changes at a restoration site.'],
    ['PIT tagging and telemetry', 'Monitoring that employs PIT Tagging and telemetry to track fish movement, fish escapement and returns, and fish counts.'],
    ['Snorkel surveys', 'Monitoring that employs snorkeling to survey fish.'],
    ['Temperature loggers', 'Monitoring that employs temperature loggers to monitor changes in water temperature.'],
    ['Hydrometer installments', 'Monitoring that employs hydrometer data to monitor changes in water flows.'],
    ['Water sampling', 'Monitoring that employs water sampling techniques to monitor changes in water chemistry (e.g. dissolved oxygen, salinity, pH, nutrients) and quality.'],
    ['Qualitative visual assessment', 'Monitoring that employs qualitative visual assessment of a restoration site to determine fish utilization, fish and riparian species, changes to infrastructure, etc.'],
    ['Other', 'Other monitoring activities provided as a list using the following format: Capture-mark-recapture, Underwater video, PIT tagging']
] AS monact
WITH monact
MERGE (n:MONITORING_ACTIVITY {uid: monact[0]})
SET n.name = monact[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Monitoring Designs
// (Unknown/None/Other intentionally have empty display names.)
//
UNWIND [
    ['BA', 'Before-after design'],
    ['CI', 'Control-impact design'],
    ['BACI', 'Before-after-control-impact design'],
    ['Multi-BACI', 'Multiple before-after-control-impact design'],
    ['Unknown', ''],
    ['None', ''],
    ['Other', '']
] AS mondsg
WITH mondsg
MERGE (n:MONITORING_DESIGN {uid: mondsg[0]})
SET n.name = mondsg[1];
//
////////////////////////////////////////////////////////////////////////////////

////////////////////////////////////////////////////////////////////////////////
//
// Seasons Monitored (uid only; the MERGE for this list sets no name property)
//
UNWIND [
    ['Fall'],
    ['Spring'],
    ['Summer'],
    ['Winter']
] AS seamon
WITH seamon MERGE (n:SEASON_MONITORED {uid: seamon[0]}); // //////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////// // // Load PARR project data from the import/ directory // CALL apoc.periodic.iterate( " CALL apoc.load.json('parr_projects_2.1.json') YIELD value AS map RETURN map ", " WITH map CREATE (n:PROJECT {uid: apoc.create.uuid()}) SET n.data_source = map.data_source, n.number_of_sites = map.number_of_sites, n.rru_involvement = map.rru_involvement, n.reporting_fiscal_year = map.reporting_fiscal_year, n.pid = map.project_id, n.name = map.project_name, n.desc = map.project_description, n.goals = map.goals, n.project_duration = map.project_duration, n.year_project_was_initiated = map.year_project_was_initiated, n.year_project_was_last_modified = map.year_project_was_last_modified, n.number_of_indigenous_partners = map.number_of_indigenous_partners, n.location = POINT({latitude:map.latitude_in_decimal_degrees, longitude:map.longitude_in_decimal_degrees, crs: 'WGS-84'}), n.sep_rru_in_kind_contributions = map.sep_rru_in_kind_contributions, n.sep_rru_cash_contributions = map.sep_rru_cash_contributions, n.sep_cip_in_kind_contributions = map.sep_cip_in_kind_contributions, n.sep_cip_cash_contributions = map.sep_cip_cash_contributions, n.was_this_a_g_c_funded_project = map.was_this_a_g_c_funded_project, n.g_c_cash_contributions = map.g_c_cash_contributions, n.other_cash_contributions = map.other_cash_contributions, n.other_in_kind_contributions = map.other_in_kind_contributions, n.amount_of_money_you_spent_on_the_project_within_this_fiscal_year = map.what_was_the_amount_of_money_you_spent_on_the_project_within_this_fiscal_year, n.the_total_cost_of_the_project = map.what_is_the_total_cost_of_the_project, n.outcome_value = map.outcome_value, n.outcome_value_2 = map.outcome_value_2, n.primary_socio_economic_outcome = map.what_was_the_primary_socio_economic_outcome, 
n.value_of_the_primary_socio_economic_outcome = map.provide_the_value_of_the_primary_socio_economic_outcome, n.secondary_socio_economic_outcome = map.what_was_the_secondary_socio_economic_outcome, n.value_of_the_secondary_socio_economic_outcome = map.provide_the_value_of_the_secondary_socio_economic_outcome, n.was_project_monitoring_completed = map.was_project_monitoring_completed, n.number_of_years_of_monitoring_before_restoration = map.number_of_years_of_monitoring_before_restoration, n.number_of_years_of_monitoring_after_restoration = map.number_of_years_of_monitoring_after_restoration, n.key_lessons_learned = n.key_lessons_learned, n.are_sara_listed_aquatic_species_present = map.are_sara_listed_aquatic_species_present, n.are_aquatic_invasive_species_present = map.are_aquatic_invasive_species_present, n.other_information = map.other_information WITH map, n FOREACH (area IN map.dfo_area | MERGE (dfo_area:DFO_AREA {uid: area}) MERGE (dfo_area)<-[:IN_DFO_AREA]-(n) ) FOREACH (p IN map.project_contacts | MERGE (person:CONTACT_PERSON {name: p.name}) SET person.tel = p.tel, person.email = p.email MERGE (person)<-[:HAS_PROJECT_CONTACT]-(n) ) FOREACH (p IN map.project_lead_organization | FOREACH (_ IN CASE p IN ['', 'N/A'] WHEN TRUE THEN [] ELSE [1] END | MERGE (pr_org:ORGANIZATION:LEAD_ORGANIZATION {uid: p}) MERGE (pr_org)<-[:HAS_LEAD_ORGANIZATION]-(n) ) ) FOREACH (p IN map.project_partners | FOREACH (_ IN CASE p IN ['', 'N/A'] WHEN TRUE THEN [] ELSE [1] END | MERGE (pr_ppn:ORGANIZATION {uid: p}) MERGE (pr_ppn)<-[:HAS_PROJECT_PARTNER]-(n) ) ) FOREACH (p IN map.policy_and_program_connections | MERGE (pr_policy:POLICY_PROGRAM {uid: p}) MERGE (pr_policy)<-[:HAS_POLICY_PROGRAM]-(n) ) FOREACH (_ IN CASE map.watershed_name IS NOT NULL AND map.watershed_name <> '' WHEN TRUE THEN [1] ELSE [] END | MERGE (pr_loc:WATERSHED {uid: map.watershed_name}) MERGE (pr_loc)<-[:HAS_WATERSHED]-(n) ) FOREACH (p IN map.g_c_funding_sources | MERGE (pr_fndsrc:GC_FUNDING_SOURCE {uid: p}) MERGE 
(pr_fndsrc)<-[:GC_FUNDING_PROGRAM]-(n) ) FOREACH (p IN map.other_funding_sources | MERGE (pr_ofs:ORGANIZATION {uid: p}) MERGE (pr_ofs)<-[:HAS_OTHER_FUNDING_SOURCE]-(n) ) FOREACH (_ IN CASE map.project_phase IS NOT NULL AND map.project_phase <> '' WHEN TRUE THEN [1] ELSE [] END | MERGE (pr_phase:PROJECT_PHASE {uid: map.project_phase}) MERGE (pr_phase)<-[:HAS_PROJECT_PHASE]-(n) ) FOREACH (p IN map.primary_project_activities | MERGE (pr_prmact:PROJECT_ACTIVITY {uid: p}) MERGE (pr_prmact)<-[:HAS_PRIMARY_ACTIVITY]-(n) ) FOREACH (p IN map.ecosystem_type | MERGE (ecosystyp:ECO_SYSTEM_TYPE {uid: p}) MERGE (ecosystyp)<-[:HAS_ECO_SYSTEM_TYPE]-(n) ) FOREACH (p IN map.target_species | MERGE (target_species:TARGET_SPECIES {uid: p}) MERGE (target_species)<-[:HAS_TARGET_SPECIES]-(n) ) FOREACH (p IN map.life_stage | MERGE (lfestg:LIFE_STAGE {uid: p}) MERGE (lfestg)<-[:HAS_LIFE_STAGE]-(n) ) FOREACH (_ IN CASE map.what_was_the_primary_restoration_activity IS NOT NULL AND map.what_was_the_primary_restoration_activity <> '' WHEN TRUE THEN [1] ELSE [] END | MERGE (resact:RESTORATION_ACTIVITY {uid: map.what_was_the_primary_restoration_activity}) MERGE (resact)<-[:HAS_PRIMARY_RESTORATION_ACTIVITY]-(n) ) FOREACH (_ IN CASE map.outcome_metric IS NOT NULL AND map.outcome_metric <> '' WHEN TRUE THEN [1] ELSE [] END | MERGE (hbtoutmtr:HABITAT_OUTCOME_METRIC {uid: map.outcome_metric}) MERGE (hbtoutmtr)<-[:HAS_PRIMARY_OUTCOME_METRIC]-(n) ) FOREACH (_ IN CASE map.what_was_the_secondary_restoration_activity IS NOT NULL AND map.what_was_the_secondary_restoration_activity <> '' WHEN TRUE THEN [1] ELSE [] END | MERGE (resact:RESTORATION_ACTIVITY {uid: map.what_was_the_secondary_restoration_activity}) MERGE (resact)<-[:HAS_SECONDARY_RESTORATION_ACTIVITY]-(n) ) FOREACH (_ IN CASE map.outcome_metric_2 IS NOT NULL AND map.outcome_metric_2 <> '' WHEN TRUE THEN [1] ELSE [] END | MERGE (hbtoutmtr:HABITAT_OUTCOME_METRIC {uid: map.outcome_metric_2}) MERGE (hbtoutmtr)<-[:HAS_SECONDARY_OUTCOME_METRIC]-(n) ) 
FOREACH (p IN map.monitoring_objectives | MERGE (monobj:MONITORING_OBJECTIVE {uid: p}) MERGE (monobj)<-[:HAS_MONITORING_OBJECTIVE]-(n) ) FOREACH (p IN map.monitoring_activities | MERGE (monact:MONITORING_ACTIVITY {uid: p}) MERGE (monact)<-[:HAS_MONITORING_ACTIVITY]-(n) ) FOREACH (_ IN CASE map.monitoring_design IS NOT NULL AND map.monitoring_design <> '' WHEN TRUE THEN [1] ELSE [] END | MERGE (mondsg:MONITORING_DESIGN {uid: map.monitoring_design}) MERGE (mondsg)<-[:HAS_MONITORING_DESIGN]-(n) ) FOREACH (p IN map.season_monitored | MERGE (seamon:SEASON_MONITORED {uid: p}) MERGE (seamon)<-[:HAS_SEASON_MONITORED]-(n) ) FOREACH (p IN map.life_stage_monitored | MERGE (lfestgmon:LIFE_STAGE {uid: p}) MERGE (lfestgmon)<-[:HAS_LIFE_STAGE_MONITORED]-(n) ) FOREACH (p IN map.other_benefitting_species | MERGE (other_species:SPECIES {uid: p}) MERGE (other_species)<-[:HAS_OTHER_BENEFITTING_SPECIES]-(n) ) ", { batchSize:100, iterateList:true, parallel:false }); //////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////// // // Call the nlp micro service to process organization's name // MATCH (n:ORGANIZATION) WITH n, [apoc.map.fromPairs([['u', 'uid'], ['c', n.uid]])] AS input CALL custom.nlp_import(n, 'http://nlp:8000/process/', apoc.convert.toJson(input)) YIELD result RETURN 1; // //////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////// // // Load PARR_PR name file from the import/ directory // Call the nlp micro service to process the documents. 
//
// Run the nlp micro service over every PROJECT node, in batches of 100.
// The first (read) query streams each node together with a list of {u, c}
// pairs — field name plus field content, with NULL-able text fields coerced
// to '' so the service never receives a Cypher NULL. The second (write)
// query posts that list as JSON to the service via custom.nlp_import.
CALL apoc.periodic.iterate(
" MATCH (n:PROJECT) WITH n, [ apoc.map.fromPairs([['u', 'name'], ['c', n.name]]), apoc.map.fromPairs([['u', 'desc'], ['c', n.desc]]), apoc.map.fromPairs([['u', 'goals'], ['c', CASE n.goals IS NULL WHEN TRUE THEN '' ELSE n.goals END]]), apoc.map.fromPairs([['u', 'primary_socio_economic_outcome'], ['c', CASE n.primary_socio_economic_outcome IS NULL WHEN TRUE THEN '' ELSE n.primary_socio_economic_outcome END]]), apoc.map.fromPairs([['u', 'secondary_socio_economic_outcome'], ['c', CASE n.secondary_socio_economic_outcome IS NULL WHEN TRUE THEN '' ELSE n.secondary_socio_economic_outcome END]]), apoc.map.fromPairs([['u', 'key_lessons_learned'], ['c', CASE n.key_lessons_learned IS NULL WHEN TRUE THEN '' ELSE n.key_lessons_learned END]]) ] AS input RETURN n, input ",
" WITH n, input CALL custom.nlp_import(n, 'http://nlp:8000/process/', apoc.convert.toJson(input)) YIELD result RETURN 1; ",
{ batchSize:100, iterateList:true, parallel:false });
//
// Set project site
//
// A pid shared by more than one PROJECT node means those nodes are
// individual sites of the same project; tag each such node with the extra
// PROJECT_SITE label.
MATCH (n:PROJECT) WITH DISTINCT(n.pid) AS pid, COUNT(n) AS c WITH pid, c WHERE c > 1 MATCH (n:PROJECT {pid: pid}) SET n:PROJECT_SITE;
//
////////////////////////////////////////////////////////////////////////////////
-- Schema for the "datacolle" stock data-collection database.
CREATE DATABASE datacolle CHARACTER SET utf8 COLLATE utf8_general_ci;

-- Symbol master table: one row per tracked stock.
-- The COMMENT 'javatype=...' strings are read by code generation tooling —
-- presumably; confirm before altering them.
CREATE TABLE datacolle.stock_map (
    symbol INT NOT NULL COMMENT 'javatype=int;',
    chineseName varchar(100) NOT NULL COMMENT 'javatype=String;',
    -- Fix: BIT(0) is not a legal MySQL column width (BIT(M) requires
    -- 1 <= M <= 64), so the original DDL fails; a single-bit flag needs
    -- BIT(1). isHSI presumably marks Hang Seng Index membership — confirm.
    isHSI bit(1) NOT NULL DEFAULT b'0' COMMENT 'javatype=boolean;',
    PRIMARY KEY (symbol)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_general_ci;

-- Intraday snapshots: one row per (symbol, capture time), value kept as the
-- raw string the collector received.
CREATE TABLE datacolle.stock_instant_record (
    rowId INT NOT NULL AUTO_INCREMENT COMMENT 'javatype=int;',
    symbol INT NOT NULL COMMENT 'javatype=int;',
    recordDt TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'javatype=String;',
    value varchar(100) NOT NULL COMMENT 'javatype=String;',
    PRIMARY KEY (rowId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_general_ci;

-- End-of-day records: close/last/high/low per symbol, also stored as raw
-- strings (no numeric parsing at ingest time).
CREATE TABLE datacolle.stock_daily_record (
    rowId INT NOT NULL AUTO_INCREMENT COMMENT 'javatype=int;',
    symbol INT NOT NULL COMMENT 'javatype=int;',
    recordDt TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'javatype=String;',
    closeValue varchar(100) NOT NULL COMMENT 'javatype=String;',
    lastValue varchar(100) NOT NULL COMMENT 'javatype=String',
    dayHighValue varchar(100) NOT NULL COMMENT 'javatype=String',
    dayLowValue varchar(100) NOT NULL COMMENT 'javatype=String',
    PRIMARY KEY (rowId)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_general_ci;
-- phpMyAdmin SQL Dump
-- version 4.1.14
-- http://www.phpmyadmin.net
--
-- Host: 127.0.0.1
-- Generation Time: Oct 19, 2014 at 03:25 PM
-- Server version: 5.6.17
-- PHP Version: 5.5.12

-- Dump preamble: disable auto-increment-on-zero, pin the session to UTC,
-- and save the client charset/collation so it can be restored at the end.
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;

--
-- Database: `cl53-dbtopgift`
--
CREATE DATABASE IF NOT EXISTS `cl53-dbtopgift` DEFAULT CHARACTER SET latin1 COLLATE latin1_swedish_ci;
USE `cl53-dbtopgift`;

-- --------------------------------------------------------

--
-- Table structure for table `personalmug`
-- One row per uploaded personalised-mug image; image_uri holds the path to
-- the image file.
--
DROP TABLE IF EXISTS `personalmug`;
CREATE TABLE IF NOT EXISTS `personalmug` (
  `image_id` int(11) NOT NULL AUTO_INCREMENT,
  `image_uri` varchar(255) NOT NULL,
  PRIMARY KEY (`image_id`),
  KEY `image_uri` (`image_uri`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=37 ;

--
-- Dumping data for table `personalmug`
-- NOTE(review): rows 2-4 carry a leading "\n " in the stored path and row 34
-- is an absolute URL while the rest are relative — data inconsistencies
-- preserved verbatim from the dump.
--
INSERT INTO `personalmug` (`image_id`, `image_uri`) VALUES
(2, '\n /img/image_uri/5442708869ddb.jpg'),
(3, '\n /img/image_uri/5442708869ddb.jpg'),
(4, '\n /img/image_uri/5442708869ddb.jpg'),
(5, '../img/image_uri/544288b58f75a.jpg'),
(6, '../img/image_uri/544288b58f75a.jpg'),
(7, '../img/image_uri/54428afa67dce.jpg'),
(8, '../img/image_uri/54428afa67dce.jpg'),
(9, '../img/image_uri/54428afa67dce.jpg'),
(10, '../img/image_uri/54428afa67dce.jpg'),
(11, '../img/image_uri/54428c2da720e.jpg'),
(12, '../img/image_uri/54428c2da720e.jpg'),
(13, '../img/image_uri/54428c2da720e.jpg'),
(14, '../img/image_uri/54428c2da720e.jpg'),
(15, '../img/image_uri/5442922f104cb.jpg'),
(16, '../img/image_uri/544293b4df88e.jpg'),
(17, '../img/image_uri/544294e13bdc0.jpg'),
(18, '../img/image_uri/544294e13bdc0.jpg'),
(19, '../img/image_uri/544294e13bdc0.jpg'),
(20, '../img/image_uri/544294e13bdc0.jpg'),
(21, '../img/image_uri/54429f910c3f1.jpg'),
(22, '../img/image_uri/5442a04837190.jpg'),
(23, '../img/image_uri/5442a1634953f.jpg'),
(24, '../img/image_uri/5442a1634953f.jpg'),
(25, '../img/image_uri/5442a1634953f.jpg'),
(26, '../img/image_uri/5442a1634953f.jpg'),
(27, '../img/image_uri/5442a1634953f.jpg'),
(28, '../img/image_uri/5442a1634953f.jpg'),
(29, '../img/image_uri/5442a33a9e9d4.jpg'),
(30, '../img/image_uri/5442a33a9e9d4.jpg'),
(31, '../img/image_uri/5442a33a9e9d4.jpg'),
(32, '../img/image_uri/5442a33a9e9d4.jpg'),
(33, '../img/image_uri/5442a3c76f36a.jpg'),
(35, '../img/image_uri/5442ccccb4c4e.jpg'),
(36, '../img/image_uri/5443b94a726bc.jpg'),
(34, 'http://toxicfox.com/img/image_uri/5442ab59273f0.jpg');

-- Restore the client charset/collation saved in the preamble.
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- Snapshot the higher-paid employees into a scratch table, adjust the copy,
-- then report the average salary per department.
-- NOTE(review): SELECT * INTO copies every column; an explicit column list
-- would be safer against schema drift, but the Employees schema is not
-- visible here, so the wildcard is kept.

-- Work on a copy so the source table is untouched.
SELECT *
INTO TempTable
FROM Employees
WHERE Salary > 30000;

-- Remove the rows reporting to manager 42 from the snapshot.
DELETE FROM TempTable
WHERE ManagerID = 42;

-- Grant department 1 a flat 5000 raise in the snapshot.
-- (Rewritten from "Salary += 5000": the compound-assignment operator is
-- T-SQL-only; the expanded form is portable and equivalent. Statement
-- terminators added throughout — omitting them is deprecated in T-SQL.)
UPDATE TempTable
SET Salary = Salary + 5000
WHERE DepartmentID = 1;

-- Average salary per department after the adjustments.
SELECT DepartmentID, AVG(Salary) AS AverageSalary
FROM TempTable
GROUP BY DepartmentID;
-- Apr 7, 2010 11:08:21 AM CEST
-- Point the C_POS row with C_POS_ID=100 at bank account 100, stamping the
-- audit columns (Updated / UpdatedBy) with the migration timestamp and user.
UPDATE C_POS SET C_BankAccount_ID=100,Updated=TO_TIMESTAMP('2010-04-07 11:08:21','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE C_POS_ID=100
;
-- phpMyAdmin SQL Dump
-- version 5.0.2
-- https://www.phpmyadmin.net/
--
-- Server: 127.0.0.1
-- Generation time: 13-08-2020 at 21:46:50
-- Server version: 10.4.13-MariaDB
-- PHP version: 7.4.8

-- Dump preamble: open a transaction, pin the session to UTC, and save the
-- client charset/collation so it can be restored at the end.
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;

--
-- Database: `thincrs`
--

-- --------------------------------------------------------

--
-- Structure for table `admin` (administrator accounts).
-- NOTE(review): `contrasena` is a VARCHAR(99) password column with no hint
-- of hashing — confirm the application hashes before storing.
--
CREATE TABLE `admin` ( `id` int(7) NOT NULL, `nombre` varchar(99) NOT NULL, `apellidos` varchar(99) NOT NULL, `contrasena` varchar(99) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- --------------------------------------------------------

--
-- Structure for table `biblioteca` (library check-in/check-out records;
-- `id` references `usuario`, see the FK added below).
--
CREATE TABLE `biblioteca` ( `id` int(7) NOT NULL, `entrada` datetime NOT NULL, `salida` datetime NOT NULL, `noEntrada` int(11) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- --------------------------------------------------------

--
-- Structure for table `horas` (per-day working-hours records).
-- NOTE(review): int(99) is only a display width; storage is still a plain
-- INT.
--
CREATE TABLE `horas` ( `id` int(7) NOT NULL, `entrada` datetime NOT NULL, `salida` datetime NOT NULL, `noDia` int(99) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

-- --------------------------------------------------------

--
-- Structure for table `usuario` (staff members; parent of biblioteca/horas).
--
CREATE TABLE `usuario` ( `id` int(7) NOT NULL, `nombre` varchar(99) NOT NULL, `apellidos` varchar(99) NOT NULL, `puesto` varchar(99) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;

--
-- Indexes for dumped tables
--

--
-- Indexes for table `admin`
--
ALTER TABLE `admin` ADD PRIMARY KEY (`id`);

--
-- Indexes for table `biblioteca`
-- NOTE(review): the index/constraint name `id_bivlioteca` is a typo of
-- "biblioteca" kept as-is because the FK below reuses it.
--
ALTER TABLE `biblioteca` ADD PRIMARY KEY (`noEntrada`), ADD KEY `id_bivlioteca` (`id`);

--
-- Indexes for table `horas`
--
ALTER TABLE `horas` ADD PRIMARY KEY (`noDia`), ADD KEY `id_horas` (`id`);

--
-- Indexes for table `usuario`
--
ALTER TABLE `usuario` ADD PRIMARY KEY (`id`);

--
-- AUTO_INCREMENT for dumped tables
--

--
-- AUTO_INCREMENT for table `admin`
--
ALTER TABLE `admin` MODIFY `id` int(7) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;

--
-- AUTO_INCREMENT for table `biblioteca`
--
ALTER TABLE `biblioteca` MODIFY `noEntrada` int(11) NOT NULL AUTO_INCREMENT;

--
-- AUTO_INCREMENT for table `horas`
--
ALTER TABLE `horas` MODIFY `noDia` int(99) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;

--
-- AUTO_INCREMENT for table `usuario`
--
ALTER TABLE `usuario` MODIFY `id` int(7) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;

--
-- Constraints for dumped tables
--

--
-- Constraints for table `biblioteca`: each record belongs to a usuario row;
-- deleting/renumbering the user cascades here.
--
ALTER TABLE `biblioteca` ADD CONSTRAINT `id_bivlioteca` FOREIGN KEY (`id`) REFERENCES `usuario` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;

--
-- Constraints for table `horas`: same ownership rule as biblioteca.
--
ALTER TABLE `horas` ADD CONSTRAINT `id_horas` FOREIGN KEY (`id`) REFERENCES `usuario` (`id`) ON DELETE CASCADE ON UPDATE CASCADE;
COMMIT;

-- Restore the client charset/collation saved in the preamble.
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- 1. List every employee whose name starts with 'H', 'T' or 'K' and is at
--    most 15 characters long.
--    (Translated from the original Vietnamese task description; "tối đa 15
--    ký tự" means "at most 15 characters".)
SELECT *
FROM nhanVien
WHERE (hoTenNhanVien LIKE 'T%'
    OR hoTenNhanVien LIKE 'H%'
    OR hoTenNhanVien LIKE 'K%')
  -- Fix: "at most 15" is an inclusive bound, so the comparison must be
  -- <= 15; the original used < 15, which wrongly excluded
  -- exactly-15-character names.
  AND CHAR_LENGTH(hoTenNhanVien) <= 15;
-- Oracle constraints lab: builds emp04/emp05/emp06 to demonstrate UNIQUE,
-- NOT NULL, PRIMARY KEY and FOREIGN KEY behaviour, including statements
-- that are EXPECTED to fail. Do not "fix" the failing inserts — they are
-- the lesson.

drop table emp04;
-- emp04: UNIQUE on empno (allows NULL, forbids duplicates), NOT NULL ename.
create table emp04(empno number(4) constraint emp04_empno_uk unique, ename varchar2(10) constraint emp04_ename_nn not null, job varchar2(9), deptno number(2));
-- Returns no rows: Oracle folds unquoted identifiers to upper case, so the
-- dictionary stores 'EMP04', not 'emp04'.
select table_name, constraint_name from user_constraints where table_name = 'emp04';
-- Same query with the upper-case name — this one finds the constraints.
select table_name, constraint_name from user_constraints where TABLE_NAME = 'EMP04';
insert into emp04(empno, ename, job, deptno) values (7499, 'allen', 'manager', 30);
-- Expected to fail: duplicate empno 7499 violates emp04_empno_uk.
insert into emp04(empno, ename, job, deptno) values (7499, 'jones', 'manager', 20);
-- emp05: same shape but with a PRIMARY KEY instead of UNIQUE.
create table emp05(empno number(4) constraint emp05_empno_pk primary key, ename varchar2(10) constraint emp05_ename_nn not null, job varchar(9), deptno number(2));
insert into emp05 values(7499, 'allen', 'salesman', 30);
select * from emp05;
drop table emp05;
-- CTAS with "where 1 = 2" copies the column definitions but no rows —
-- and, notably, no constraints other than NOT NULL.
create table emp05 as select * from emp04 where 1 = 2;
select * from emp05;
-- Add the primary key after the fact.
alter table emp05 add constraint emp05_empno_pk primary key(empno);
insert into emp05 values(7499, 'jones', 'manager', 20);
-- Expected to fail: NULL is not allowed in a primary-key column.
insert into emp05 values(null, 'allen', 'salesman', 30);
select * from emp05;
--insert into emp(empno, ename, deptno)
--values (8000, 'min', 50);
-- Inspect the constraints on the standard DEPT/EMP demo tables.
SELECT TABLE_NAME, CONSTRAINT_TYPE, CONSTRAINT_NAME, R_CONSTRAINT_NAME FROM USER_CONSTRAINTS WHERE TABLE_NAME IN ('DEPT', 'EMP');
-- emp06: adds a foreign key to DEPT at create time.
CREATE TABLE EMP06(EMPNO NUMBER(4) CONSTRAINT EMP06_EMPNO_PK PRIMARY KEY, ENAME VARCHAR2(10) CONSTRAINT EMP06_ENAME_NN NOT NULL, JOB VARCHAR(9), DEPTNO NUMBER(2) CONSTRAINT EMP06_DEPTNO_FK REFERENCES DEPT(DEPTNO));
insert into emp06 values(7499, 'allen', 'salesman', 30);
select * from emp06;
drop table emp06;
-- Rebuild via CTAS: again, PK/FK constraints are NOT copied.
create table emp06 as select * from emp05 where 1 = 2;
select * from emp06;
insert into emp06 values(7499, 'allen', 'salesman', 30);
select * from emp06;
-- Succeeds here (no FK yet) even though department 50 does not exist.
insert into emp06 values(7566, 'joens', 'manager', 50);
drop table emp06;
create table emp06 as select * from emp05 where 1=2;
-- Re-add both constraints explicitly after the CTAS.
alter table emp06 add constraint emp06_empno_pk primary key(empno);
alter table emp06 add constraint emp06_deptno_fk foreign key(deptno) references dept(deptno);
insert into emp06 values(7499, 'allen', 'salesman', 30);
select * from emp06;
-- Expected to fail now: department 50 is not in DEPT yet.
insert into emp06 values(7566, 'joens', 'manager', 50);
-- Create the parent row, after which the same insert succeeds.
insert into dept values(50, 'insa', 'seoul');
insert into emp06 values(7566, 'joens', 'manager', 50);
select * from emp06;
-- Recreate employeeDB from scratch; IF EXISTS makes the drop safe to run on
-- a fresh server, so the script is idempotent.
-- WARNING: dropping the database destroys all existing data in it.
DROP DATABASE IF EXISTS employeeDB;
CREATE DATABASE employeeDB;
-- Conditional search using the WHERE clause
/*
   SELECT col, col, ... FROM table WHERE condition; ORDER BY col;

   1) Operators used in conditions:
      = (equal), <, >, <=, >=, != (not equal)
   2) Developers weak on DB concepts:
      2-1) SELECT * FROM emp;
      2-2) store everything in a collection
      2-3) pick out the wanted values with loops and if statements
   3) Developers who understand the DB:
      3-1) SELECT needed_columns, ... FROM table WHERE condition;
      3-2) the DBMS returns only the needed data (fast)
*/
-- Employees whose salary is at least 3000.
SELECT eno, ename, sal FROM emp WHERE sal>=3000;
-- Details of the employee named '김연아'.
SELECT * FROM emp WHERE ename='김연아';
-- Employees in every department except department 10.
SELECT dno "10번이 아닌 부서", eno,ename FROM emp WHERE dno!=10 ORDER BY dno;
DESC emp;
-- The data types must be matched, as below.
SELECT dno "10번이 아닌 부서", eno,ename FROM emp WHERE dno!='10' ORDER BY dno;
/*
   dno is of type VARCHAR2, while the literal 10 is an integer.
   Comparing the two converts dno on every row that is scanned.
   The result is still correct, but enterprise tables commonly hold tens of
   millions of rows or more, so searching that way degrades speed badly.
*/
-- Find employees whose annual pay (sal*12 + bonus, NULL bonus as 0) is at
-- least 30,000.
-- NOTE(review): the literal '30000' is a string compared against a number —
-- the exact implicit-conversion cost the comment above warns about; confirm
-- whether the quotes were intentional.
DESC emp;
SELECT eno,ename,sal*12+NVL(comm,0) 연봉 FROM emp WHERE sal*12+NVL(comm,0) >='30000' ORDER BY 연봉 DESC;
-- Employees whose bonus is at most 200 (NULL bonus counted as 0).
SELECT eno,ename,comm FROM emp WHERE NVL(comm,0) <= 200;
-- Employees hired from 1996 onward; the comparisons below depend on the
-- session date format, which is what the two ALTER SESSION lines demonstrate.
SELECT * FROM emp WHERE hdate > '1995/12/31';
ALTER SESSION SET nls_date_format='YYYY/MM/DD';
ALTER SESSION SET nls_date_format='YY/MM/DD';
SELECT * FROM emp WHERE hdate > '1995/12/31';
-- Employees whose bonus column is NULL.
-- = and < cannot be used against NULL values;
-- IS NULL     : is it NULL?
-- IS NOT NULL : is it not NULL?
SELECT * FROM emp WHERE comm IS NULL;
-- NOTE(review): commented-out query retained from earlier work; either
-- reinstate it or remove it from version control.
-- SELECT AVG(score) AS average FROM second_table;
-- Seed data for the recruitment platform: candidates, enterprises, skills,
-- contract types, job offers, an administrator, headhunters, cities, and the
-- headhunter/enterprise join table. Positional VALUES lists are used
-- throughout, so column order must match each table's DDL exactly.
-- NOTE(review): several phone numbers are unquoted numeric literals
-- (0987654321, 0102030405, 0607080910) — a leading zero is lost when the
-- value is stored as a number; the headhunter rows quote theirs. Confirm
-- whether the phone columns are numeric or string.

-- Candidates (id NULL for auto-generation).
INSERT INTO candidate VALUES (null,'DRAGNIR','Natsu',0987654321,NOW(),'1 rue de la liberté','natsu.dragnir@orange.fr',null,'Bonjour je suis natsu DRAGNIR ','Nat','pwd','candidate');
INSERT INTO candidate VALUES (null,'NEYMAR','Jean',1234567890,NOW(),'1 rue de PSG', 'jean.neymar@orange.fr',null,'Bonjour je vaux 222 millions €', 'Jean', 'foot','candidate' );

-- Enterprises.
INSERT INTO enterprise VALUES (null,'QUATAR',0102030405, '1 rue du quatar','PARIS','lesrichesduMoyenOrient.com','lesrichesduMoyenOrient@pretentieux.com','Bonjour je possede une carte bleue non je voulais dire une carte dieu',null,'lesrichesduMoyenOrient@twitter.com','lesrichesduMoyenOrient@linkedin.com','Recherche de footballeur sans le bac','enterprise','dieuxDuFoot','petrole');
INSERT INTO enterprise VALUES (null,'FAIRYTAIL',0607080910, '1 rue de Magnolia','MAGNOLIA','fairytail.com','fairytail@pauvre.com','Bonjour je suis une guilde',null,'fairytail@twitter.com','fairytail@linkedin.com','Recherche de mage funs','enterprise','makarof','fee');
INSERT INTO enterprise VALUES (null,'SCIENCEINFUSE',9874561236, '1 rue du savoir','RENNES','lascienceinfuse.com','lascienceinfuse@pretentieux.com','Je sais tout',null,'lascienceinfuse@twitter.com','lascienceinfuse@linkedin.com','Recherche de savoir être ','enterprise','Eisnstein','prof');
INSERT INTO enterprise VALUES (null,'ONEPIECE',8569741252, '1 rue de Redline','FUSCHIA','lespiratesdunouveaumonde.com','lespiratesdunouveaumonde@pirate.com','Bonjour je suis le roi des pirates',null,'lespiratesdunouveaumonde@twitter.com','lespiratesdunouveaumonde@linkedin.com','Recherche de pirate','enterprise','chapeauDePaille','luffy');
INSERT INTO enterprise VALUES (null,'LOL',4578129632, '1 rue de riot games','LOS ANGELES','gamers.com','gamers@nolife.com','Bonjour je recherche des developpers/gamers',null,'gamers@twitter.com','gamers@linkedin.com','Recherche de joueurs très mauvais','enterprise','allstar','nexus');
INSERT INTO enterprise VALUES (null,'TROLL',4525658579, '1 rue du cest pas ici','TROLLCITY','.com','cestpasici@vienspas.com','Bonjour je ne recherche pas des trolls cest faux',null,'cestpasici@twitter.com','cestpasici@linkedin.com','Recheche personne ','enterprise','carleric','non');

-- Skills, grouped as languages ('Langage') and tools ('Logiciel').
INSERT INTO skill VALUES (null,'JAVA','Langage');
INSERT INTO skill VALUES (null,'SQL','Langage');
INSERT INTO skill VALUES (null,'C','Langage');
INSERT INTO skill VALUES (null,'C++','Langage');
INSERT INTO skill VALUES (null,'Python','Langage');
INSERT INTO skill VALUES (null,'HTML','Langage');
INSERT INTO skill VALUES (null,'CSS','Langage');
INSERT INTO skill VALUES (null,'Javascript','Langage');
INSERT INTO skill VALUES (null,'Eclipse','Logiciel');
INSERT INTO skill VALUES (null,'StarUML','Logiciel');
INSERT INTO skill VALUES (null,'Git Bash','Logiciel');

-- Contract types.
INSERT INTO contract_type VALUES (null,'CDD');
INSERT INTO contract_type VALUES (null,'CDI');
INSERT INTO contract_type VALUES (null,'ALTERNANCE');
INSERT INTO contract_type VALUES (null,'CONTRAT DE PROFESSIONALISATION');
INSERT INTO contract_type VALUES (null,'INTERIM');

-- Job offers; the trailing numeric columns reference enterprise/contract
-- rows inserted above — presumably (id, title, ?, description, enterprise_id,
-- contract_type_id, ?); confirm against the job table DDL.
INSERT INTO job VALUES (null,'Développeur',' ','Recherche un développeur java',2,5,null);
INSERT INTO job VALUES (null,'Mage',' ','Recherche un mage de feu',5,null,null);
INSERT INTO job VALUES (null,'Footballeur',' ','Recherche un footballeur qui ne possede pas le bac et qui de preference ne sais pas écrire',3,null,null);
INSERT INTO job VALUES (null,'Prof',' ','Recherche un prof qui a la science infuse',1,null,null);
INSERT INTO job VALUES (null,'Pirate',' ','Recherche un pirate sympa et riche',4,null,null);
INSERT INTO job VALUES (null,'Développeur/gamer',' ','Recherche un développeur qui joue à league of legend et qui rage jamais',3,null,null);
INSERT INTO job VALUES (null,'Troll',' ','On vous prendra pas, pas la peine de postuler',2,null,null);

-- Administrator account (plaintext credentials — test fixture only).
INSERT INTO administrator VALUES (null,'leboss','dujava','lebossdujava@orange.fr','7845126398','admin','admin','admin');

-- Headhunters.
INSERT INTO headhunter Values (null,'Markides','Sacha','1245789656','sacha.markides@orange.fr',null,'sacha.markides@twitter.fr','sacha.markides@linkedin.fr','bonjour je suis un headhunter','root','root','headhunter');
INSERT INTO headhunter Values (null,'Izzar','Yannick','0680913289','yannick.izzar@orange.fr',null,'yannick.izzar@twitter.fr','yannick.izzar@linkedin.fr','bonjour je suis un yannick','nouk','pwd','headhunter');

-- Cities with postcodes.
-- NOTE(review): Toulouse is listed with postcode '64000' (the 64 prefix is
-- Pyrénées-Atlantiques); likely a data typo for '31000' — confirm.
INSERT INTO city VALUES (null,'Paris','75000');
INSERT INTO city VALUES (null,'Lyon','69000');
INSERT INTO city VALUES (null,'Marseille','13000');
INSERT INTO city VALUES (null,'Lille','59000');
INSERT INTO city VALUES (null,'Rennes','35000');
INSERT INTO city VALUES (null,'Nantes','44000');
INSERT INTO city VALUES (null,'Bordeaux','33000');
INSERT INTO city VALUES (null,'Toulouse','64000');

-- Link headhunter 1 to the first two enterprises.
INSERT INTO headhunter_enterprise VALUES (1, 1);
INSERT INTO headhunter_enterprise VALUES (1, 2);
-- @block
-- @conn mappingasia-dev datascience
-- Country reference list: expose the source table's mixed-style column
-- names under consistent snake_case aliases for downstream use. The quoted
-- identifiers are required because the source columns are case-sensitive.
select
    "official_name_en" as "name",
    "UNTERM_English_Formal" as "name_long",
    "ISO3166_1_Alpha_2" as "iso_a2",
    "ISO3166_1_Alpha_3" as "iso_a3",
    "ISO3166_1_numeric" as "iso_numeric",
    "M49" as "unsd_m49",
    "Continent" as "continent",
    "Developed_or_Developing_Countries" as "developed_developing",
    "Languages" as "lang",
    "Region_Code" as region_code,
    "Region_Name" as region_name,
    "Sub_region_Code" as subregion_code,
    "Sub_region_Name" as subregion_name,
    "is_independent" as is_independent
from datascience.country_list
-- Optional filters from earlier runs, kept commented out for reuse:
-- where "Region_Name" = 'Asia'
-- and "Developed_or_Developing_Countries" = 'Developed';
-- Thin wrapper that forwards @Type to the extended stored procedure
-- master.dbo.xp_sqlagent_proxy_account (SQL Agent proxy account lookup),
-- so callers need not reference master directly.
-- NOTE(review): the sp_ prefix is reserved for system procedures and forces
-- a lookup in master first; the name is kept because callers depend on it.
-- NOTE(review): with no batch separator visible here, the trailing
-- SET QUOTED_IDENTIFIER OFF appears to be part of the procedure body rather
-- than a script-level option — confirm against the original script's GO
-- placement.
CREATE Procedure sp_han_Get_sqlagent_proxy_account (@Type nvarchar(100))
as
EXEC master.dbo.xp_sqlagent_proxy_account @Type
SET QUOTED_IDENTIFIER OFF
-- Every employee paired with its department row.
-- Fix: rewritten from the implicit comma join ("FROM employee, department
-- WHERE ...") to an explicit ANSI INNER JOIN — semantically identical, but
-- the join condition can no longer be dropped silently.
-- NOTE(review): SELECT * is kept to preserve the original output columns;
-- an explicit column list would be safer once the schemas are confirmed.
SELECT *
FROM employee
INNER JOIN department
    ON employee.DepartmentID = department.DepartmentID;
-- NOTE(review): a set of commented-out practice queries (against the
-- AdventureWorks-style HumanResources/Sales/Production/Person schemas),
-- retained verbatim; uncomment individually to run, or remove the file from
-- version control if no longer needed.
--SELECT COUNT (DISTINCT JobTitle) --FROM HumanResources.Employee; --SELECT MAX(TaxRate) AS HighestRate --FROM Sales.SalesTaxRate; --SELECT SalesOrderID, SUM(LineTotal) AS SubTotal --FROM Sales.SalesOrderDetail --GROUP BY SalesOrderID --HAVING SUM(LineTotal) > 100000 --ORDER BY SalesOrderID; --SELECT ProductID, Name, Color --FROM Production.Product --WHERE ProductID BETWEEN 725 AND 734; --SELECT p.BusinessEntityID, p.FirstName, p.LastName, pp.PhoneNumber AS Phone --FROM Person.Person AS p --INNER JOIN Person.PersonPhone AS pp -- ON p.BusinessEntityID = pp.BusinessEntityID --WHERE LastName LIKE 'G%' --ORDER BY LastName, FirstName; --SELECT p.Name AS ProductName, -- NonDiscountSales = (OrderQty * UnitPrice), -- Disounts = ((OrderQty * sd.UnitPrice) * UnitPriceDiscount) --FROM Production.Product AS p --INNER JOIN Sales.SalesOrderDetail AS sd -- ON p.ProductID = sd.ProductID --ORDER BY ProductName DESC ; --SELECT ProductID, SpecialOfferID, AVG(UnitPrice) AS [Average Price], SUM(LineTotal) AS SubTotal --FROM Sales.SalesOrderDetail --GROUP BY ProductID, SpecialOfferID --ORDER BY ProductID;
-- Number of actors per nationality.
-- Fix: the bare COUNT(*) column had no name, so its header was
-- engine-generated; naming it gives consumers a stable column to reference.
SELECT
    Nationality,
    COUNT(*) AS actor_count
FROM Actors
GROUP BY Nationality;
-- Grants giving the YKT_PORTAL account access to YKT_CUR campus-card
-- objects: read-only on reference/report tables and views, write access
-- only where the portal maintains the data (message list, card versions,
-- card/account balances, loss-card records, bank cards).
-- NOTE(review): the script consists of two batches and the second repeats
-- most of the first with additions — re-granting is harmless in Oracle, but
-- the duplicates could be collapsed.

-- Batch 1.
grant select on YKT_CUR.T_TRANSDTL to YKT_PORTAL;
grant select on YKT_CUR.T_POSDTL to YKT_PORTAL;
grant select, insert, delete, index on YKT_CUR.T_MSGLIST to YKT_PORTAL;
grant select, insert, delete, index on YKT_CUR.T_CARDVER to YKT_PORTAL;
grant select on YKT_CUR.T_DICTIONARY to YKT_PORTAL;
grant select on YKT_CUR.T_TRANSCODE to YKT_PORTAL;
grant select, update on YKT_CUR.T_CARD to YKT_PORTAL;
grant select on YKT_CUR.T_DEVICE to YKT_PORTAL;
grant select on YKT_CUR.T_PHOTO to YKT_PORTAL;
grant select on YKT_CUR.T_SPECIALTY to YKT_PORTAL;
grant select on YKT_CUR.T_CUSTOMER to YKT_PORTAL;
grant select on YKT_CUR.T_DEPT to YKT_PORTAL;
grant select on YKT_CUR.T_ACCOUNT to YKT_PORTAL;
grant select on YKT_CUR.T_CARDTYPE to YKT_PORTAL;
grant select on ykt_cur.t_feetype to YKT_PORTAL;
grant select on ykt_cur.t_rptposledger to YKT_PORTAL;
grant select on ykt_cur.t_rptshoprakeoff to YKT_PORTAL;
grant select on ykt_cur.t_shop to YKT_PORTAL;
grant select on ykt_cur.v_device to YKT_PORTAL;
grant select on ykt_cur.t_transtype to YKT_PORTAL;
grant select on ykt_cur.t_rptsubjledger to YKT_PORTAL;
grant select on ykt_cur.t_custtype to YKT_PORTAL;
grant select on ykt_cur.t_rptopercash to YKT_PORTAL;
grant select on ykt_cur.t_specialty to YKT_PORTAL;
grant select on ykt_cur.T_RCSTATDATA to YKT_PORTAL;
grant select on ykt_cur.t_rpttermledger to ykt_portal;
grant select on ykt_cur.t_rptdeptledger to ykt_portal;
grant select on ykt_cur.v_term to ykt_portal;
grant select on YKT_CUR.V_TRANSDTL to YKT_PORTAL;
grant select on ykt_cur.t_doordtl to ykt_portal;

-- Batch 2: repeats batch 1 with additional objects (t_area, t_bankcard,
-- t_brastransdtl, loss-card tables, v_accdtl, t_cardbalance, writable
-- t_account, T_RPTSYSSTAT).
grant select on YKT_CUR.T_TRANSDTL to YKT_PORTAL;
grant select on YKT_CUR.T_POSDTL to YKT_PORTAL;
grant select, insert, delete, index on YKT_CUR.T_MSGLIST to YKT_PORTAL;
grant select, insert, delete, index on YKT_CUR.T_CARDVER to YKT_PORTAL;
grant select on YKT_CUR.T_DICTIONARY to YKT_PORTAL;
grant select on YKT_CUR.T_TRANSCODE to YKT_PORTAL;
grant select, update on YKT_CUR.T_CARD to YKT_PORTAL;
grant select on YKT_CUR.T_DEVICE to YKT_PORTAL;
grant select on YKT_CUR.T_PHOTO to YKT_PORTAL;
grant select on YKT_CUR.T_SPECIALTY to YKT_PORTAL;
grant select on YKT_CUR.T_CUSTOMER to YKT_PORTAL;
grant select on YKT_CUR.T_DEPT to YKT_PORTAL;
grant select on YKT_CUR.T_ACCOUNT to YKT_PORTAL;
grant select on YKT_CUR.T_CARDTYPE to YKT_PORTAL;
grant select on ykt_cur.t_feetype to YKT_PORTAL;
grant select on YKT_CUR.t_cardtype to YKT_PORTAL;
grant select on YKT_CUR.t_area to YKT_PORTAL;
grant select, insert, update, delete, index on YKT_CUR.t_bankcard to YKT_PORTAL;
grant select on ykt_cur.t_brastransdtl to ykt_portal;
grant select on ykt_cur.t_rptposledger to YKT_PORTAL;
grant select on ykt_cur.t_rptshoprakeoff to YKT_PORTAL;
grant select on ykt_cur.t_shop to YKT_PORTAL;
grant select on ykt_cur.v_device to YKT_PORTAL;
grant select on ykt_cur.t_transtype to YKT_PORTAL;
grant select on ykt_cur.t_custtype to YKT_PORTAL;
grant select on ykt_cur.t_rptopercash to YKT_PORTAL;
grant select on ykt_cur.t_specialty to YKT_PORTAL;
grant select on ykt_cur.v_term to ykt_portal;
grant select on YKT_CUR.V_TRANSDTL to YKT_PORTAL;
grant select,insert,update,delete,index on ykt_cur.t_losscardinfo to YKT_PORTAL;
grant select,insert,update,delete,index on ykt_cur.t_losscardshop to YKT_PORTAL;
grant select on ykt_cur.v_accdtl to ykt_portal;
grant select,update,insert,delete on ykt_cur.t_cardbalance to ykt_portal;
grant select,update,insert,delete on ykt_cur.t_account to ykt_portal;
grant select on ykt_cur.t_doordtl to ykt_portal;
grant select on YKT_CUR.T_RPTSYSSTAT to ykt_portal;
-- Test fixture: one magazine row plus one keyword row pointing at it.
INSERT INTO magazine(name, number, update_time) VALUES('テスト雑誌', 'テスト号数', CURRENT_TIMESTAMP);
-- NOTE(review): no WHERE clause — this rewrites magazine_id on EVERY row of
-- magazine, not just the row inserted above. Presumably it is meant to pin
-- the fixture's id to 1 so the keyword FK below matches; confirm, and add a
-- WHERE (or insert the id explicitly) if the table can ever hold more than
-- one row.
UPDATE magazine SET magazine_id = 1;
INSERT INTO keyword(magazine_id, word, start_page, update_time) VALUES( 1, 'テスト単語2-1', 1, CURRENT_TIMESTAMP);
-- Schema for the liisincart shop database.
create database liisincart;
use liisincart;

-- Registered users; rol distinguishes customers from administrators.
create table usuario(
    id_usuario int auto_increment,
    Nombre varchar(100),
    correo varchar(200),
    pass varchar(100),
    telefono varchar(20),
    rol varchar(20),
    primary key(id_usuario)
);
select * from usuario;

-- Catalogue items.
-- Fix: a bare DECIMAL in MySQL defaults to DECIMAL(10,0), which would
-- silently truncate the cents off every price; give precio an explicit
-- scale instead.
create table producto(
    id_producto int auto_increment,
    Descripcion varchar(1000),
    Detalles varchar(1000),
    precio decimal(10,2),
    tipo varchar(20),
    primary key(id_producto)
);

-- Suppliers.
create table proveedor(
    id_proveedor int auto_increment,
    Nombre varchar(100),
    Mail varchar(100),
    primary key(id_proveedor)
);

-- User ratings of products.
-- NOTE(review): `nota decimal` also defaults to DECIMAL(10,0); left
-- unchanged because the rating scale is not visible here — confirm and set
-- an explicit precision (e.g. decimal(4,2)).
create table calificacion(
    id_calificacion int auto_increment,
    nota decimal,
    comentario varchar(3000),
    id_producto int,
    id_usuario int,
    foreign key(id_producto) references producto(id_producto),
    foreign key(id_usuario) references usuario(id_usuario),
    primary key(id_calificacion)
);

-- Product images (path on disk).
create table imagen(
    id_imagen int auto_increment,
    ruta varchar(500),
    id_producto int,
    foreign key(id_producto) references producto(id_producto),
    primary key(id_imagen)
);

-- Many-to-many link between products and suppliers.
create table prod_proveedor(
    id_prod_proveedor int auto_increment,
    id_producto int,
    id_proveedor int,
    foreign key(id_producto) references producto(id_producto),
    foreign key(id_proveedor) references proveedor(id_proveedor),
    primary key(id_prod_proveedor)
);

-- Product categories.
-- NOTE(review): the table name `categoría` contains an accented character;
-- kept because application code may already reference it, but an ASCII name
-- would be safer across clients and charsets.
create table categoría(
    id_category int auto_increment,
    nombre varchar(70),
    primary key (id_category)
);

-- Outgoing-mail account credentials.
-- NOTE(review): id_mail is a plain int PK with no auto_increment, and pass
-- stores a credential — confirm both are intentional.
create table Mail(
    id_mail int,
    mail varchar(100),
    pass varchar(200),
    primary key(id_mail)
);
create database if not exists `myMysql`;
use `myMysql`;

# User table.
# NOTE(review): createTime/modifiedTime hold epoch-millisecond strings in
# VARCHAR(20) (see the seed rows below) — a native datetime type would be
# safer, but the application appears to read these as strings; confirm
# before changing.
# NOTE(review): the seed rows store plaintext passwords ('111111'); these
# should be hashed outside of throwaway fixtures.
CREATE TABLE if not exists `user` (
  `id` INT NOT NULL AUTO_INCREMENT COMMENT '用户ID',
  `userName` VARCHAR(255) NOT NULL COMMENT '用户名',
  `password` VARCHAR(255) NOT NULL COMMENT '密码',
  `nickName` VARCHAR(255) NULL COMMENT '昵称',
  `description` LONGTEXT NULL COMMENT '描述',
  `token` LONGTEXT NULL,
  `createTime` VARCHAR(20) NULL COMMENT '创建时间',
  `modifiedTime` VARCHAR(20) NULL COMMENT '修改时间',
  `roles` VARCHAR(255) NULL COMMENT '权限级别',
  `active` INT NOT NULL COMMENT '是否是激活状态 0 是 1 否',
  PRIMARY KEY (`id`),
  UNIQUE INDEX `userName_UNIQUE` (`userName` ASC) VISIBLE)
COMMENT = '用户表';

# Seed accounts: one administrator (roles=1) and one ordinary user (roles=2),
# both active (active=0 means active per the column comment above).
INSERT INTO `user` (`userName`, `password`, `nickName`, `description`, `createTime`, `modifiedTime`, `roles`, `active`) VALUES ('admin001', '111111', '管理员', '管理员的描述', '1551256596246', '1551256596246', '1', 0);
INSERT INTO `user` (`userName`, `password`, `nickName`, `description`, `createTime`, `modifiedTime`, `roles`, `active`) VALUES ('a1', '111111', 'user', 'test user', '1551256596246', '1551256596246', '2', 0);
-- 1077. Project Employees III
--
-- Table Project: (project_id int, employee_id int)
--   (project_id, employee_id) is the primary key; employee_id is a foreign
--   key into Employee.
-- Table Employee: (employee_id int PK, name varchar, experience_years int)
--
-- Task: for every project, report the most experienced employee(s) working
-- on it; when several employees tie on the maximum experience_years, report
-- all of them.
--
-- Example: with employees 1..4 having 3/2/3/2 years and project 1 staffed by
-- {1,2,3}, project 2 by {1,4}, the expected result is
--   (1,1), (1,3), (2,1).
--
-- Source: LeetCode https://leetcode-cn.com/problems/project-employees-iii
-- (problem statement translated from the original Chinese).
--
-- Approach: compute each project's maximum experience in a grouped subquery,
-- then keep the (project, employee) pairs whose experience equals that
-- maximum via a row-value IN — ties are naturally retained.
SELECT p.project_id, p.employee_id
FROM Project AS p
JOIN Employee AS e ON p.employee_id = e.employee_id
WHERE ( p.project_id, e.experience_years ) IN (
    SELECT p1.project_id, max( e1.experience_years )
    FROM Project AS p1
    JOIN Employee AS e1 ON p1.employee_id = e1.employee_id
    GROUP BY p1.project_id
);
-- SQLite schema for a textbook/course platform: users, textbooks, chapters,
-- sections, courses, and class enrollment, with two seed users.
-- fixed: removed two stray empty statements before COMMIT; unified identifier
-- quoting (the script mixed "users" with backticks); INSERTs now carry
-- explicit column lists so they survive schema reordering.
BEGIN TRANSACTION;

-- NOTE(review): `username` is INTEGER PRIMARY KEY AUTOINCREMENT and the seed
-- rows put numeric ids in it -- it acts as a surrogate id, not a login name;
-- confirm intent before renaming.
CREATE TABLE `users` (
    `username`      INTEGER PRIMARY KEY AUTOINCREMENT,
    `password`      TEXT,
    `type`          TEXT,
    `name`          TEXT,
    -- NOTE(review): identifier contains a space and must always be quoted.
    `email address` TEXT
);
INSERT INTO `users` (`username`, `password`, `type`, `name`, `email address`)
VALUES (1, '', 'student', 'sina dee', 'sinadee@gmail.com');
INSERT INTO `users` (`username`, `password`, `type`, `name`, `email address`)
VALUES (2, '', 'content manager', 'brad pitt', 'bpitt@sissymail.com');

CREATE TABLE `textbooks` (
    `isbn`        INTEGER,
    `title`       TEXT,
    `publisher`   TEXT,
    `authors`     TEXT,
    `year`        INTEGER,
    `edition`     TEXT,
    `description` TEXT,
    `isAvailable` TEXT,
    PRIMARY KEY (`isbn`)
);

CREATE TABLE `section` (
    `id`          INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
    `isAvailable` TEXT DEFAULT 'no',
    `name`        TEXT,
    `number`      INTEGER,
    `description` TEXT,
    -- NOTE(review): identifier contains a space; no FK to chapters declared.
    `chapter id`  INTEGER
);

CREATE TABLE `course` (
    `courseCode` TEXT,
    `courseName` TEXT,
    PRIMARY KEY (`courseCode`)
);

-- NOTE(review): PK on username alone limits each student to ONE course;
-- a composite PK (username, courseCode) is likely intended -- confirm.
CREATE TABLE `classlist` (
    `username`   TEXT,
    `courseCode` TEXT,
    PRIMARY KEY (`username`)
);

CREATE TABLE `chapters` (
    `chapter_id`  INTEGER,
    `name`        TEXT,
    `number`      INTEGER,
    `isbn`        INTEGER,
    `description` TEXT,
    `isAvailable` TEXT,
    PRIMARY KEY (`chapter_id`)
);

COMMIT;
-- MySQL Workbench forward-engineering script for `r4_php_db`: users, roles,
-- login throttling, comments with edit history, and audit loggers, plus seed
-- data for the initial admin account.
-- fixed: the `loggers` section header wrongly said `logs`; all seed INSERTs
-- now carry explicit column lists so they survive column reordering.
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES';

-- -----------------------------------------------------
-- Schema r4_php_db (dropped and rebuilt from scratch)
-- -----------------------------------------------------
DROP SCHEMA IF EXISTS `r4_php_db` ;
CREATE SCHEMA IF NOT EXISTS `r4_php_db` DEFAULT CHARACTER SET utf8 ;
USE `r4_php_db` ;

-- -----------------------------------------------------
-- Table `r4_php_db`.`users`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `r4_php_db`.`users` ;
CREATE TABLE IF NOT EXISTS `r4_php_db`.`users` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` VARCHAR(200) NOT NULL,
  `email` VARCHAR(150) NOT NULL,
  `password` VARCHAR(75) NOT NULL,
  `reset_senha` TINYINT NULL,
  `remember_token` VARCHAR(100) NULL,
  `avatar` VARCHAR(45) NULL,
  `created_at` TIMESTAMP NOT NULL,
  `updated_at` TIMESTAMP NULL,
  `deleted_at` TIMESTAMP NULL,
  PRIMARY KEY (`id`))
ENGINE = InnoDB;

-- -----------------------------------------------------
-- Table `r4_php_db`.`throttles` -- per-user login throttling state
-- -----------------------------------------------------
DROP TABLE IF EXISTS `r4_php_db`.`throttles` ;
CREATE TABLE IF NOT EXISTS `r4_php_db`.`throttles` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `ip_address` VARCHAR(45) NULL,
  `is_default_password` TINYINT(4) NOT NULL,
  `last_access_at` TIMESTAMP NULL,
  `attempts` TINYINT(4) NULL,
  `suspended` TINYINT NULL,
  `last_attempt_at` TIMESTAMP NULL,
  `user_id` INT UNSIGNED NOT NULL,
  PRIMARY KEY (`id`),
  INDEX `fk_throttles_user_idx` (`user_id` ASC),
  CONSTRAINT `fk_throttles_users`
    FOREIGN KEY (`user_id`)
    REFERENCES `r4_php_db`.`users` (`id`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION)
ENGINE = InnoDB;

-- -----------------------------------------------------
-- Table `r4_php_db`.`roles`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `r4_php_db`.`roles` ;
CREATE TABLE IF NOT EXISTS `r4_php_db`.`roles` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `name` VARCHAR(45) NULL,
  `description` VARCHAR(75) NULL,
  PRIMARY KEY (`id`))
ENGINE = InnoDB;

-- -----------------------------------------------------
-- Table `r4_php_db`.`users_roles` -- user<->role junction
-- -----------------------------------------------------
DROP TABLE IF EXISTS `r4_php_db`.`users_roles` ;
CREATE TABLE IF NOT EXISTS `r4_php_db`.`users_roles` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `user_id` INT UNSIGNED NOT NULL,
  `role_id` INT UNSIGNED NOT NULL,
  PRIMARY KEY (`id`),
  INDEX `fk_users_has_roles_roles1_idx` (`role_id` ASC),
  INDEX `fk_users_has_roles_users1_idx` (`user_id` ASC),
  CONSTRAINT `fk_users_has_roles_users1`
    FOREIGN KEY (`user_id`)
    REFERENCES `r4_php_db`.`users` (`id`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION,
  CONSTRAINT `fk_users_has_roles_roles1`
    FOREIGN KEY (`role_id`)
    REFERENCES `r4_php_db`.`roles` (`id`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION)
ENGINE = InnoDB;

-- -----------------------------------------------------
-- Table `r4_php_db`.`comments`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `r4_php_db`.`comments` ;
CREATE TABLE IF NOT EXISTS `r4_php_db`.`comments` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `text` VARCHAR(400) NOT NULL,
  `created_at` TIMESTAMP NOT NULL,
  `updated_at` TIMESTAMP NULL,
  `deleted_at` TIMESTAMP NULL,
  `user_id` INT UNSIGNED NOT NULL,
  PRIMARY KEY (`id`),
  INDEX `fk_comments_users1_idx` (`user_id` ASC),
  CONSTRAINT `fk_comments_users1`
    FOREIGN KEY (`user_id`)
    REFERENCES `r4_php_db`.`users` (`id`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION)
ENGINE = InnoDB;

-- -----------------------------------------------------
-- Table `r4_php_db`.`comments_updates` -- edit history of `comments`
-- -----------------------------------------------------
DROP TABLE IF EXISTS `r4_php_db`.`comments_updates` ;
CREATE TABLE IF NOT EXISTS `r4_php_db`.`comments_updates` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `comment_id` INT UNSIGNED NOT NULL,
  `text` VARCHAR(400) NOT NULL,
  `created_at` TIMESTAMP NOT NULL,
  `updated_at` TIMESTAMP NOT NULL,
  PRIMARY KEY (`id`),
  INDEX `fk_comments_updates_comment_idx` (`comment_id` ASC),
  CONSTRAINT `fk_comments_updates_comments`
    FOREIGN KEY (`comment_id`)
    REFERENCES `r4_php_db`.`comments` (`id`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION)
ENGINE = InnoDB;

-- -----------------------------------------------------
-- Table `r4_php_db`.`loggers` -- per-user audit log
-- (header previously said `logs`; the table has always been `loggers`)
-- -----------------------------------------------------
DROP TABLE IF EXISTS `r4_php_db`.`loggers` ;
CREATE TABLE IF NOT EXISTS `r4_php_db`.`loggers` (
  `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
  `action` VARCHAR(45) NULL,
  `message` VARCHAR(200) NULL,
  `user_id` INT UNSIGNED NOT NULL,
  PRIMARY KEY (`id`),
  INDEX `fk_loggers_user_idx` (`user_id` ASC),
  CONSTRAINT `fk_loggers_users`
    FOREIGN KEY (`user_id`)
    REFERENCES `r4_php_db`.`users` (`id`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION)
ENGINE = InnoDB;

SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;

-- ------------------------- seed data -------------------------
INSERT INTO `roles` (`id`, `name`, `description`) VALUES
  (1, 'ROOT', 'Desenvolvedor'),
  (2, 'ADMIN', 'Administrador do Sistema'),
  (3, 'USUARIO', 'Usuário Comum');

INSERT INTO `users`
  (`id`, `name`, `email`, `password`, `reset_senha`, `remember_token`,
   `avatar`, `created_at`, `updated_at`, `deleted_at`)
VALUES
  (1, 'Administrador', 'mayradbueno@gmail.com',
   '$2y$10$hravkHv4Whx6oXsw1jMwsOAovVvQ29vlTwVlLox1lgq9ZZUFOG6MO', 0,
   '$2y$10$X/ri8NeCdc3oc2swndWwQ.uZSjDZFVMmaIS1/zQAMNg16cIaEbWJu',
   null, now(), null, null);

INSERT INTO `throttles`
  (`id`, `ip_address`, `is_default_password`, `last_access_at`, `attempts`,
   `suspended`, `last_attempt_at`, `user_id`)
VALUES (1, NULL, 0, NULL, 0, 0, NULL, 1);

INSERT INTO `users_roles` (`id`, `user_id`, `role_id`) VALUES (1,1,1),(2,1,2),(3,1,3);
-- MySQL Workbench-style forward-engineering script for the `regform`
-- registration database: a `user` table, a `hobby` lookup, and the
-- user<->hobby junction, followed by seed data in three transactions.
-- Unique/FK checks are disabled while the schema is (re)built and restored
-- afterwards.
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES';

CREATE SCHEMA IF NOT EXISTS `regform` DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci ;
USE `regform` ;

-- -----------------------------------------------------
-- Table `regform`.`user` -- one row per registrant.  Polish column names:
-- imie=first name, nazwisko=surname, haslo=password, ulica=street,
-- kod=postal code, miasto=city, szkola=school.
-- NOTE(review): `haslo` VARCHAR(45) suggests plaintext passwords -- the app
-- should store a hash; confirm before changing the column.
-- -----------------------------------------------------
DROP TABLE IF EXISTS `regform`.`user` ;
CREATE TABLE IF NOT EXISTS `regform`.`user` (
  `iduser` INT NOT NULL AUTO_INCREMENT,
  `imie` VARCHAR(45) NULL,
  `nazwisko` VARCHAR(45) NULL,
  `login` VARCHAR(45) NULL,
  `haslo` VARCHAR(45) NULL,
  `email` VARCHAR(45) NULL,
  `ulica` VARCHAR(45) NULL,
  `kod` VARCHAR(6) NULL,
  `miasto` VARCHAR(45) NULL,
  `szkola` VARCHAR(45) NULL,
  PRIMARY KEY (`iduser`),
  -- NOTE(review): redundant -- the primary key already enforces uniqueness.
  UNIQUE INDEX `iduser_UNIQUE` (`iduser` ASC))
ENGINE = InnoDB;

-- -----------------------------------------------------
-- Table `regform`.`hobby` -- hobby lookup (nazwa = name).
-- -----------------------------------------------------
DROP TABLE IF EXISTS `regform`.`hobby` ;
CREATE TABLE IF NOT EXISTS `regform`.`hobby` (
  `idhobby` INT NOT NULL AUTO_INCREMENT,
  `nazwa` VARCHAR(45) NULL,
  PRIMARY KEY (`idhobby`),
  -- NOTE(review): redundant -- the primary key already enforces uniqueness.
  UNIQUE INDEX `idhobby_UNIQUE` (`idhobby` ASC))
ENGINE = InnoDB;

-- -----------------------------------------------------
-- Table `regform`.`user_has_hobby` -- many-to-many junction with FKs both ways.
-- -----------------------------------------------------
DROP TABLE IF EXISTS `regform`.`user_has_hobby` ;
CREATE TABLE IF NOT EXISTS `regform`.`user_has_hobby` (
  `user_iduser` INT NOT NULL,
  `hobby_idhobby` INT NOT NULL,
  PRIMARY KEY (`user_iduser`, `hobby_idhobby`),
  INDEX `fk_user_has_hobby_hobby1_idx` (`hobby_idhobby` ASC),
  INDEX `fk_user_has_hobby_user_idx` (`user_iduser` ASC),
  CONSTRAINT `fk_user_has_hobby_user`
    FOREIGN KEY (`user_iduser`)
    REFERENCES `regform`.`user` (`iduser`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION,
  CONSTRAINT `fk_user_has_hobby_hobby1`
    FOREIGN KEY (`hobby_idhobby`)
    REFERENCES `regform`.`hobby` (`idhobby`)
    ON DELETE NO ACTION
    ON UPDATE NO ACTION)
ENGINE = InnoDB;

SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;

-- Seed: hobby lookup values (ids auto-assigned 1..7 in insertion order,
-- which the junction rows below rely on).
START TRANSACTION;
USE `regform`;
INSERT INTO `hobby` (`nazwa`) VALUES ('wino');
INSERT INTO `hobby` (`nazwa`) VALUES ('kobiety');
INSERT INTO `hobby` (`nazwa`) VALUES ('śpiew');
INSERT INTO `hobby` (`nazwa`) VALUES ('skóry');
INSERT INTO `hobby` (`nazwa`) VALUES ('fury');
INSERT INTO `hobby` (`nazwa`) VALUES ('komóry');
INSERT INTO `hobby` (`nazwa`) VALUES ('inne');
COMMIT;

-- Seed: users.  Note `haslo` is omitted, so every seed user gets a NULL
-- password -- presumably set later by the app; verify.
START TRANSACTION;
USE `regform`;
INSERT INTO `user` (`imie`, `nazwisko`,`login`,`email`,`ulica`,`kod`,`miasto`,`szkola`) VALUES ('Adam','Burczymucha','mrocznyrycerz','adam@bumucha.com','Buraczana 87','02-474','Warszawa','elementary');
INSERT INTO `user` (`imie`, `nazwisko`,`login`,`email`,`ulica`,`kod`,`miasto`,`szkola`) VALUES ('Barbara','Ciernista','bacia','babucha@serwer.com','Zakole 14/74','32-458','Klepacze','medium');
INSERT INTO `user` (`imie`, `nazwisko`,`login`,`email`,`ulica`,`kod`,`miasto`,`szkola`) VALUES ('Celina','Achcelina','stokrotka','sto@razy.pl','Krucza 54','24-954','Leszczyny','none');
INSERT INTO `user` (`imie`, `nazwisko`,`login`,`email`,`ulica`,`kod`,`miasto`,`szkola`) VALUES ('Damian','Kleszcz','kleszczu','kleszcz@jutub.com','Leśna 254','63-456','Puszczyki','high');
COMMIT;

-- Seed: user<->hobby links (ids refer to the auto-increment order above).
START TRANSACTION;
USE `regform`;
INSERT INTO `user_has_hobby` (`user_iduser`, `hobby_idhobby`) VALUES (1,1);
INSERT INTO `user_has_hobby` (`user_iduser`, `hobby_idhobby`) VALUES (1,3);
INSERT INTO `user_has_hobby` (`user_iduser`, `hobby_idhobby`) VALUES (1,6);
INSERT INTO `user_has_hobby` (`user_iduser`, `hobby_idhobby`) VALUES (2,3);
INSERT INTO `user_has_hobby` (`user_iduser`, `hobby_idhobby`) VALUES (2,4);
INSERT INTO `user_has_hobby` (`user_iduser`, `hobby_idhobby`) VALUES (3,6);
INSERT INTO `user_has_hobby` (`user_iduser`, `hobby_idhobby`) VALUES (3,1);
INSERT INTO `user_has_hobby` (`user_iduser`, `hobby_idhobby`) VALUES (4,3);
INSERT INTO `user_has_hobby` (`user_iduser`, `hobby_idhobby`) VALUES (4,5);
COMMIT;
-- Schema for the BPA website user store.
-- fixed: the original ran CREATE DATABASE and CREATE TABLE with no statement
-- terminator between them (a parse error), and never selected the new
-- database, so the table would not have been created inside BPAwebsite.
CREATE DATABASE BPAwebsite;
USE BPAwebsite;

CREATE TABLE userInfo (
    UserID    INT(11)     NOT NULL PRIMARY KEY,
    UserEmail VARCHAR(60) NOT NULL,
    -- NOTE(review): store a password hash here, never a plaintext password.
    Pwd       VARCHAR(60) NOT NULL,
    fName     VARCHAR(30) NOT NULL,
    lname     VARCHAR(30) NOT NULL
);
-- PL/SQL practice blocks (SQL*Plus).  fixed: each anonymous block is now
-- terminated with a lone '/' so SQL*Plus actually executes it; the factorial
-- block printed its result unconditionally (double output for 0/1, garbage
-- after the negative-input message); the odd/even block illegally placed
-- CREATE TABLE statements inside a DECLARE section; 'largest' mis-reported
-- ties because of strict '>' comparisons; the palindrome block used a
-- variable named after the REVERSE loop keyword.

-- Sum of two numbers.
SET SERVEROUTPUT ON
DECLARE
    x NUMBER := &x;
    y NUMBER := &y;
    z NUMBER := 0;
BEGIN
    z := x + y;
    DBMS_OUTPUT.PUT_LINE('Sum is : ' || z);
END;
/

-- Largest of three numbers ('>=' so ties still report a maximum).
DECLARE
    x NUMBER := &x;
    y NUMBER := &y;
    z NUMBER := &z;
BEGIN
    IF x >= y AND x >= z THEN
        DBMS_OUTPUT.PUT_LINE('Greatest is : ' || x);
    ELSIF y >= x AND y >= z THEN
        DBMS_OUTPUT.PUT_LINE('Greatest is : ' || y);
    ELSE
        DBMS_OUTPUT.PUT_LINE('Greatest is : ' || z);
    END IF;
END;
/

-- Factorial of a non-negative number (FOR 1..0 and 1..1 naturally yield 1,
-- so 0 and 1 need no special case; negatives get only the error message).
DECLARE
    x    NUMBER := &x;
    fact NUMBER := 1;
BEGIN
    IF x < 0 THEN
        DBMS_OUTPUT.PUT_LINE('Enter Positive num');
    ELSE
        FOR i IN 1 .. x LOOP
            fact := fact * i;
        END LOOP;
        DBMS_OUTPUT.PUT_LINE('Factorial : ' || fact);
    END IF;
END;
/

-- First n Fibonacci numbers (0, 1, 1, 2, ...).
DECLARE
    n NUMBER := &n;
    j NUMBER := 0;  -- F(i-2)
    k NUMBER := 1;  -- F(i-1)
    l NUMBER := 0;  -- F(i)
BEGIN
    IF n = 1 THEN
        DBMS_OUTPUT.PUT_LINE(' ' || j);
    ELSE
        DBMS_OUTPUT.PUT_LINE(' ' || j);
        DBMS_OUTPUT.PUT_LINE(' ' || k);
        FOR i IN 3 .. n LOOP
            l := j + k;
            DBMS_OUTPUT.PUT_LINE(' ' || l);
            j := k;
            k := l;
        END LOOP;
    END IF;
END;
/

-- Reverse the digits of a number.
DECLARE
    n NUMBER := &n;
    d NUMBER;       -- current last digit
    s NUMBER := 0;  -- reversed accumulator
BEGIN
    WHILE n > 0 LOOP
        s := s * 10;
        d := MOD(n, 10);
        s := s + d;
        n := TRUNC(n / 10);
    END LOOP;
    DBMS_OUTPUT.PUT_LINE(s);
END;
/

-- String palindrome check ('rev' avoids shadowing the REVERSE loop keyword;
-- the loop variable is implicitly declared by the FOR loop).
DECLARE
    input VARCHAR2(20) := '&input';
    rev   VARCHAR2(20);
BEGIN
    FOR i IN REVERSE 1 .. LENGTH(input) LOOP
        rev := rev || SUBSTR(input, i, 1);
    END LOOP;
    IF input = rev THEN
        DBMS_OUTPUT.PUT_LINE('the given string ' || input || ' is a palindrome');
    ELSE
        DBMS_OUTPUT.PUT_LINE('the given string ' || input || ' is not a palindrome');
    END IF;
END;
/

-- Split 1..50 into even/odd tables.  DDL cannot appear in a DECLARE section,
-- so the tables are created as plain SQL before the block runs.
CREATE TABLE even (eno NUMBER(10));
CREATE TABLE odd  (ono NUMBER(10));

BEGIN
    FOR i IN 1 .. 50 LOOP
        IF MOD(i, 2) = 0 THEN
            INSERT INTO even VALUES (i);
        ELSE
            INSERT INTO odd VALUES (i);
        END IF;
    END LOOP;
END;
/

SELECT * FROM odd;
SELECT * FROM even;
-- Distinct seasons (Temporadas) in which the player identified by document
-- number '11607507' actually took part in a match -- i.e. any participation
-- marker is set on the Socios_por_Partidos row (minutes, shirt number,
-- goals, cards, best-player flag, or penalties).
-- fixed: positional ORDER BY 1 replaced with the column name; dropped the
-- redundant `sp.camiseta = 1` test, which is already implied by
-- `sp.camiseta <> 0`; removed dead commented-out query fragments.
SELECT DISTINCT
    te.nombre,
    j.apellidos,
    j.nombres
FROM dbo.Partidos AS p
INNER JOIN dbo.Torneos AS t
    ON p.id_torneo = t.id_torneo
INNER JOIN dbo.Temporadas AS te
    ON te.id_temporada = p.id_temporada
INNER JOIN dbo.Socios_por_Partidos AS sp
    ON sp.id_temporada = te.id_temporada
   AND sp.id_torneo = t.id_torneo
INNER JOIN dbo.Socios AS ss
    ON ss.id_carnet = sp.id_carnet
INNER JOIN dbo.Jugadores AS j
    ON ss.id_jugador = j.id_jugador
WHERE j.numero_de_documento = '11607507'
  AND (
        sp.minutos_jugados IS NOT NULL
     OR sp.camiseta <> 0
     OR sp.goles_a_favor > 0
     OR sp.goles_en_contra > 0
     OR sp.amonestado_expulsado IS NOT NULL
     OR sp.mejor_jugador IS NOT NULL
     OR sp.penales_convertidos > 0
     OR sp.penales_atajados > 0
      )
ORDER BY te.nombre
-- MySQL dump 10.13  Distrib 5.7.25, for Win64 (x86_64)
--
-- Host: localhost    Database: my-app-segura
-- ------------------------------------------------------
-- Server version 5.7.25
--
-- The /*!NNNNN ... */ blocks below are MySQL conditional-execution comments:
-- they save session settings, disable checks for the bulk load, and restore
-- everything at the end.  Do not edit them by hand.

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;

--
-- Table structure for table `notes`
-- NOTE(review): `owner_id` values match `users`.`id_user` in the seed data,
-- but no FOREIGN KEY is declared in this dump -- presumably enforced by the
-- application; verify.
--

DROP TABLE IF EXISTS `notes`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `notes` (
  `note_id` bigint(20) NOT NULL AUTO_INCREMENT,
  `title` varchar(50) DEFAULT NULL,
  `description` varchar(250) DEFAULT NULL,
  `date` datetime DEFAULT NULL,
  `owner_id` bigint(11) DEFAULT NULL,
  PRIMARY KEY (`note_id`)
) ENGINE=InnoDB AUTO_INCREMENT=8 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `notes`
--

LOCK TABLES `notes` WRITE;
/*!40000 ALTER TABLE `notes` DISABLE KEYS */;
INSERT INTO `notes` VALUES (1,'Meeting RRH','Meeting to talk about social benefits','2019-10-11 00:00:00',3),(3,'House tasks important','Do laundry important','2019-09-09 00:00:00',3),(4,'House tasks 3','test hour','2019-12-13 00:00:00',3),(6,'task test','task1','2019-09-01 00:00:00',3),(7,'Advanced TEST','Test number 1234567890():_-QWERTY+;','2019-08-01 00:00:00',3);
/*!40000 ALTER TABLE `notes` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `users`
--

DROP TABLE IF EXISTS `users`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `users` (
  `id_user` bigint(11) NOT NULL AUTO_INCREMENT,
  `email` varchar(60) DEFAULT NULL,
  `role` varchar(15) DEFAULT NULL,
  PRIMARY KEY (`id_user`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `users`
--

LOCK TABLES `users` WRITE;
/*!40000 ALTER TABLE `users` DISABLE KEYS */;
INSERT INTO `users` VALUES (1,'myappsegura19@gmail.com','ROLE_ADMIN'),(3,'sergiosnow73@gmail.com','ROLE_USER');
/*!40000 ALTER TABLE `users` ENABLE KEYS */;
UNLOCK TABLES;

-- Restore the session settings saved at the top of the dump.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;

-- Dump completed on 2019-09-02 23:11:19
-- PostgreSQL dump: role / app_user / user_role tables for an auth module,
-- including sequences, seed roles, and constraints.  Session settings first
-- (standard pg_dump preamble).
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;

--
-- Name: plpgsql; Type: EXTENSION; Schema: -; Owner:
--
CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;

--
-- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner:
--
COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language';

SET default_tablespace = '';
SET default_with_oids = false;

--
-- Name: role; Type: TABLE; Schema: public; Owner: postgres
--
CREATE TABLE public.role (
    role_id bigint NOT NULL,
    name character varying NOT NULL
);
ALTER TABLE public.role OWNER TO postgres;

--
-- Name: role_role_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.role_role_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.role_role_id_seq OWNER TO postgres;
ALTER SEQUENCE public.role_role_id_seq OWNED BY public.role.role_id;

--
-- Name: app_user; Type: TABLE; Schema: public; Owner: postgres
-- NOTE(review): employee_id has no FK in this dump -- presumably references
-- an employee table elsewhere; verify.
--
CREATE TABLE public.app_user (
    user_id bigint NOT NULL,
    name character varying NOT NULL,
    password character varying NOT NULL,
    employee_id bigint
);
ALTER TABLE public.app_user OWNER TO postgres;

--
-- Name: user_role; Type: TABLE; Schema: public; Owner: postgres
-- user<->role junction.
--
CREATE TABLE public.user_role (
    user_id bigint NOT NULL,
    role_id bigint NOT NULL
);
ALTER TABLE public.user_role OWNER TO postgres;

--
-- Name: user_user_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres
--
CREATE SEQUENCE public.user_user_id_seq
    START WITH 1
    INCREMENT BY 1
    NO MINVALUE
    NO MAXVALUE
    CACHE 1;
ALTER TABLE public.user_user_id_seq OWNER TO postgres;
ALTER SEQUENCE public.user_user_id_seq OWNED BY public.app_user.user_id;

-- Defaults wire the sequences to the id columns.
ALTER TABLE ONLY public.role ALTER COLUMN role_id SET DEFAULT nextval('public.role_role_id_seq'::regclass);
ALTER TABLE ONLY public.app_user ALTER COLUMN user_id SET DEFAULT nextval('public.user_user_id_seq'::regclass);

--
-- Data for Name: role (tab-separated COPY payload; do not reformat).
--
COPY public.role (role_id, name) FROM stdin;
1	ROLE_ADMIN
2	ROLE_USER
3	ROLE_USER_CHIEF
4	ROLE_GUEST
\.

--
-- Data for Name: app_user (empty).
--
COPY public.app_user (user_id, name, password, employee_id) FROM stdin;
\.

--
-- Data for Name: user_role (empty).
--
COPY public.user_role (user_id, role_id) FROM stdin;
\.

-- Sequence positions as of the dump.
SELECT pg_catalog.setval('public.role_role_id_seq', 4, true);
SELECT pg_catalog.setval('public.user_user_id_seq', 12, false);

-- Primary keys, unique constraints, and foreign keys.
ALTER TABLE ONLY public.role
    ADD CONSTRAINT role_pk PRIMARY KEY (role_id);
ALTER TABLE ONLY public.role
    ADD CONSTRAINT role_un UNIQUE (name);
ALTER TABLE ONLY public.app_user
    ADD CONSTRAINT user_pk PRIMARY KEY (user_id);
ALTER TABLE ONLY public.user_role
    ADD CONSTRAINT user_role_pk PRIMARY KEY (user_id, role_id);
ALTER TABLE ONLY public.app_user
    ADD CONSTRAINT user_un UNIQUE (name);
ALTER TABLE ONLY public.user_role
    ADD CONSTRAINT user_role_role_fk FOREIGN KEY (role_id) REFERENCES public.role(role_id);
ALTER TABLE ONLY public.user_role
    ADD CONSTRAINT user_role_user_fk FOREIGN KEY (user_id) REFERENCES public.app_user(user_id);
-- Rebuild the listings database from scratch (drops any existing copy).
DROP DATABASE IF EXISTS listings_db;
CREATE DATABASE listings_db;
USE listings_db;

-- One row per classified listing.
-- NOTE(review): listing_rate and listing_date are stored as VARCHAR; a
-- numeric rate and a DATE/DATETIME column would be safer -- confirm with the
-- application before changing the types.
CREATE TABLE listings (
    id                  INT(10)      AUTO_INCREMENT NOT NULL,
    listing_name        VARCHAR(255) NOT NULL,
    listing_description VARCHAR(255) NOT NULL,
    listing_rate        VARCHAR(255) NOT NULL,
    listing_location    VARCHAR(255) NOT NULL,
    listing_date        VARCHAR(255) NOT NULL,
    PRIMARY KEY (id)
);
-- Line items of an inbound stock-transfer document (@DocSerial), aggregated
-- per product/batch/price: Quantity and Amount are summed across duplicate
-- detail rows; the remaining columns are the grouping keys.
-- fixed: implicit comma join replaced with an ANSI INNER JOIN (the join
-- predicate moved from WHERE to ON); deprecated "alias" = expression column
-- aliases rewritten as expression AS [alias].  Result column names and order
-- are unchanged.
CREATE PROCEDURE spr_list_TransferIn_Detail_Bunge
    (@DocSerial int)
AS
SELECT
    d.Product_Code,
    d.Product_Code   AS [Item Code],
    i.ProductName    AS [Item Name],
    SUM(d.Quantity)  AS [Quantity],
    d.Rate           AS [Rate],
    SUM(d.Amount)    AS [Amount],
    d.Batch_Number   AS [Batch],
    d.Expiry         AS [Expiry],
    d.PKD            AS [PKD],
    d.PTS            AS [PTS],
    d.PTR            AS [PTR],
    d.ECP            AS [ECP]
FROM StockTransferInDetail AS d
INNER JOIN Items AS i
    ON d.Product_Code = i.Product_Code
WHERE d.DocSerial = @DocSerial
GROUP BY
    d.Product_Code,
    i.ProductName,
    d.Batch_Number,
    d.Expiry,
    d.PKD,
    d.PTS,
    d.PTR,
    d.ECP,
    d.Rate
-- Average quantity on hand per item, rolled up through the item hierarchy
-- (product -> brand -> class -> category -> grand total) over a 12-month
-- window of date_dim month sequence numbers.
-- fixed: implicit comma joins replaced with ANSI INNER JOINs so each join
-- predicate lives next to the table it binds.
SELECT
    i_product_name,
    i_brand,
    i_class,
    i_category,
    AVG(inv_quantity_on_hand) AS qoh
FROM inventory
INNER JOIN date_dim  ON inv_date_sk = d_date_sk
INNER JOIN item      ON inv_item_sk = i_item_sk
INNER JOIN warehouse ON inv_warehouse_sk = w_warehouse_sk
WHERE d_month_seq BETWEEN 1212 AND 1212 + 11   -- 12-month window
GROUP BY ROLLUP (i_product_name, i_brand, i_class, i_category)
ORDER BY qoh, i_product_name, i_brand, i_class, i_category
LIMIT 100;
-- Roll the latest interface status (the row with the highest flag) for a TID
-- up onto its tids master row.
-- fixed: the original ran two independent SELECT INTOs, each with
-- rownum = 1 and no ORDER BY -- when several rows share the max flag, `flag`
-- and `cause` could be taken from *different* rows; fetching both columns in
-- one statement keeps them paired.  The declared defaults (flag 4, NULL
-- cause) were dead code because SELECT INTO raises NO_DATA_FOUND; they now
-- act as the fallback when no interface rows exist, as the initializers
-- appear to have intended -- confirm no caller relies on the exception.
CREATE OR REPLACE PROCEDURE status_to_tids_rollup (
    xtid_id IN tids.id%TYPE
) AS
    status tids.flag%TYPE  := 4;     -- fallback flag when no status rows exist
    xcause tids.cause%TYPE := NULL;  -- fallback cause
BEGIN
    BEGIN
        SELECT flag, cause
          INTO status, xcause
          FROM tid_interface_status
         WHERE tid_id = xtid_id
           AND flag = (SELECT MAX(flag)
                         FROM tid_interface_status
                        WHERE tid_id = xtid_id)
           AND rownum = 1;
    EXCEPTION
        WHEN NO_DATA_FOUND THEN
            NULL;  -- keep the declared defaults
    END;

    UPDATE tids
       SET flag = status,
           cause = xcause
     WHERE id = xtid_id;
END;
/
SHOW ERRORS
-- Images attached to a kitchen (KITCHEN_INFO); one row per image file name.
-- fixed: the empty-string default used double quotes (""), which MySQL
-- parses as an identifier under the ANSI_QUOTES SQL mode -- single quotes
-- are the portable string literal; named the FK so errors are greppable and
-- added the missing statement terminator.
CREATE TABLE KITCHEN_IMAGE (
    id         INT NOT NULL AUTO_INCREMENT PRIMARY KEY,
    kitchen_id INT NOT NULL,
    name       VARCHAR(255) NOT NULL DEFAULT '',
    CONSTRAINT kitchen_image_kitchen_id_fk
        FOREIGN KEY (kitchen_id) REFERENCES KITCHEN_INFO (id)
);