sql
stringlengths
6
1.05M
<filename>migrations/20191209214430_initialize_schema.down.sql<gh_stars>1-10 DROP TABLE IF EXISTS vehicles; DROP TABLE IF EXISTS revisions; DROP TYPE VEHICLE_STATUS_T;
<reponame>cincuranet/FirebirdDbComparer create table t (i int, j date); create unique asc index idx on t(i, j);
<reponame>Shuttl-Tech/antlr_psql -- file:xml.sql ln:212 expect:false IF degree_symbol <> res[1]::text THEN RAISE 'expected % (%), got % (%)', degree_symbol, convert_to(degree_symbol, 'UTF8'), res[1], convert_to(res[1]::text, 'UTF8')
<filename>coeus-db/coeus-db-sql/src/main/resources/org/kuali/coeus/coeus-sql/log/Release_3_1_sp_1_logs/KRACOEUS-4269.sql --create citizenship type table CREATE TABLE "CITIZENSHIP_TYPE_T" ( "CITIZENSHIP_TYPE_CODE" NUMBER(15,1) NOT NULL, "DESCRIPTION" VARCHAR2(40) NOT NULL, "ACTIVE_FLAG" CHAR(1) NOT NULL, "UPDATE_TIMESTAMP" DATE NOT NULL, "UPDATE_USER" VARCHAR2(60) NOT NULL, "VER_NBR" NUMBER(15,5) NOT NULL, "OBJ_ID" VARCHAR2(36) NOT NULL ) / --add primary key ALTER TABLE "CITIZENSHIP_TYPE_T" ADD ( CONSTRAINT "CITIZEN_TYPE_PK1" PRIMARY KEY ("CITIZENSHIP_TYPE_CODE") ) / --insert data INSERT INTO CITIZENSHIP_TYPE_T(CITIZENSHIP_TYPE_CODE, DESCRIPTION, ACTIVE_FLAG, UPDATE_TIMESTAMP, UPDATE_USER, VER_NBR, OBJ_ID) VALUES (1, 'US CITIZEN OR NONCITIZEN NATIONAL', 'Y', SYSDATE, 'kradev', 1, SYS_GUID()) / INSERT INTO CITIZENSHIP_TYPE_T(CITIZENSHIP_TYPE_CODE, DESCRIPTION, ACTIVE_FLAG, UPDATE_TIMESTAMP, UPDATE_USER, VER_NBR, OBJ_ID) VALUES (2, 'PERMANENT RESIDENT OF US', 'Y', SYSDATE, 'kradev', 1, SYS_GUID()) / INSERT INTO CITIZENSHIP_TYPE_T(CITIZENSHIP_TYPE_CODE, DESCRIPTION, ACTIVE_FLAG, UPDATE_TIMESTAMP, UPDATE_USER, VER_NBR, OBJ_ID) VALUES (3, 'NON US CITIZEN WITH TEMPORARY VISA', 'Y', SYSDATE, 'kradev', 1, SYS_GUID()) / COMMIT
<filename>packages/acs-events/sql/postgresql/upgrade/upgrade-0.6d6-0.6d7.sql create index acs_events_timespan_id_idx on acs_events(timespan_id);
-- phpMyAdmin SQL Dump -- version 4.4.14 -- http://www.phpmyadmin.net -- -- Host: 127.0.0.1 -- Generation Time: Jan 23, 2016 at 07:14 PM -- Server version: 5.6.26 -- PHP Version: 5.6.12 SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; SET time_zone = "+00:00"; /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; -- -- Database: `dbtest` -- -- -------------------------------------------------------- -- -- Table structure for table `tbl_users` -- CREATE TABLE IF NOT EXISTS `tbl_users` ( `userID` int(11) NOT NULL, `userName` varchar(100) NOT NULL, `num` varchar(10) NOT NULL, `userEmail` varchar(100) NOT NULL, `userPass` varchar(100) NOT NULL, `userStatus` enum('Y','N') NOT NULL DEFAULT 'N', `tokenCode` varchar(100) NOT NULL ) ENGINE=InnoDB AUTO_INCREMENT=13 DEFAULT CHARSET=latin1; -- -- Indexes for dumped tables -- -- -- Indexes for table `tbl_users` -- ALTER TABLE `tbl_users` ADD PRIMARY KEY (`userID`), ADD UNIQUE KEY `userEmail` (`userEmail`); -- -- AUTO_INCREMENT for dumped tables -- -- -- AUTO_INCREMENT for table `tbl_users` -- ALTER TABLE `tbl_users` MODIFY `userID` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=13; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
INSERT INTO `interiors` (`Name`,`Interior`,`X`,`Y`,`Z`) VALUES ('24/7 1', '17', -25.884498,-185.868988,'1003.546875'), ('24/7 2', '10', 6.091179,-29.271898,'1003.549438'), ('24/7 3', '18', -30.946699,-89.609596,'1003.546875'), ('24/7 4', '16', -25.132598,-139.066986,'1003.546875'), ('24/7 5', '4', -27.312299,-29.277599,'1003.557250'), ('24/7 6', '6', -26.691598,-55.714897,'1003.546875'), ('Airport ticket desk', '14', -1827.147338,7.207417,'1061.143554'), ('Airport baggage reclaim', '14', -1861.936889,54.908092,'1061.143554'), ('Shamal', '1', 1.808619,32.384357,'1199.593750'), ('Andromada', '9', 315.745086,984.969299,'1958.919067'), ('Ammunation 1', '1', 286.148986,-40.644397,'1001.515625'), ('Ammunation 2', '4', 286.800994,-82.547599,'1001.515625'), ('Ammunation 3', '6', 296.919982,-108.071998,'1001.515625'), ('Ammunation 4', '7', 314.820983,-141.431991,'999.601562'), ('Ammunation 5', '6', 316.524993,-167.706985,'999.593750'), ('Ammunation booths', '7', 302.292877,-143.139099,'1004.062500'), ('Ammunation range', '7', 298.507934,-141.647048,'1004.054748'), ('Blastin fools hallway', '3', 1038.531372,0.111030,'1001.284484'), ('Budget inn motel room', '12', 444.646911,508.239044,'1001.419494'), ('<NAME>', '15', 2215.454833,-1147.475585,'1025.796875'), ('Off track betting shop', '3', 833.269775,10.588416,'1004.179687'), ('Sex shop', '3', -103.559165,-24.225606,'1000.718750'), ('Meat factory', '1', 963.418762,2108.292480,'1011.030273'), ('Zeros RC shop', '6', -2240.468505,137.060440,'1035.414062'), ('Dillimore gas station', '0', 663.836242,-575.605407,'16.343263'), ('Catigulas basement', '1', 2169.461181,1618.798339,'999.976562'), ('FDC Janitors room', '10', 1889.953369,1017.438293,'31.882812'), ('Woozies office', '1', -2159.122802,641.517517,'1052.381713'), ('Binco', '15', 207.737991,-109.019996,'1005.132812'), ('<NAME>', '14', 204.332992,-166.694992,'1000.523437'), ('Prolaps', '3', 207.054992,-138.804992,'1003.507812'), ('Suburban', '1', 
203.777999,-48.492397,'1001.804687'), ('Victim', '5', 226.293991,-7.431529,'1002.210937'), ('Zip', '18', 161.391006,-93.159156,'1001.804687'), ('Club', '17', 493.390991,-22.722799,'1000.679687'), ('Bar', '11', 501.980987,-69.150199,'998.757812'), ('Lil probe inn', '18', -227.027999,1401.229980,'27.765625'), ('Jays diner', '4', 457.304748,-88.428497,'999.554687'), ('Gant bridge diner', '5', 454.973937,-110.104995,'1000.077209'), ('Secret valley diner', '6', 435.271331,-80.958938,'999.554687'), ('World of coq', '1', 452.489990,-18.179698,'1001.132812'), ('Welcome pump', '1', 681.557861,-455.680053,-'25.609874'), ('Burger shot', '10', 375.962463,-65.816848,'1001.507812'), ('Cluckin bell', '9', 369.579528,-4.487294,'1001.858886'), ('Well stacked pizza', '5', 373.825653,-117.270904,'1001.499511'), ('Rusty browns donuts', '17', 381.169189,-188.803024,'1000.632812'), ('Denise room', '1', 244.411987,305.032989,'999.148437'), ('Katie room', '2', 271.884979,306.631988,'999.148437'), ('Helena room', '3', 291.282989,310.031982,'999.148437'), ('Michelle room', '4', 302.180999,300.722991,'999.148437'), ('Barbara room', '5', 322.197998,302.497985,'999.148437'), ('Millie room', '6', 346.870025,309.259033,'999.155700'), ('<NAME>', '17', -959.564392,1848.576782,'9.000000'), ('Planning dept.', '3', 384.808624,173.804992,'1008.382812'), ('Area 51', '0', 223.431976,1872.400268,'13.734375'), ('LS gym', '5', 772.111999,-3.898649,'1000.728820'), ('SF gym', '6', 774.213989,-48.924297,'1000.585937'), ('LV gym', '7', 773.579956,-77.096694,'1000.655029'), ('B Dups house', '3', 1527.229980,-11.574499,'1002.097106'), ('B Dups crack pad', '2', 1523.509887,-47.821197,'1002.130981'), ('CJ house', '3', 2496.049804,-1695.238159,'1014.742187'), ('Madd Doggs mansion', '5', 1267.663208,-781.323242,'1091.906250'), ('Og Loc house', '3', 513.882507,-11.269994,'1001.565307'), ('Ryder house', '2', 2454.717041,-1700.871582,'1013.515197'), ('Sweet house', '1', 2527.654052,-1679.388305,'1015.498596'), ('Crack 
factory', '2', 2543.462646,-1308.379882,'1026.728393'), ('Big spread ranch', '3', 1212.019897,-28.663099,'1000.953125'), ('Fanny batters', '6', 761.412963,1440.191650,'1102.703125'), ('Strip club', '2', 1204.809936,-11.586799,'1000.921875'), ('Strip club private room', '2', 1204.809936,13.897239,'1000.921875'), ('Unnamed brothel', '3', 942.171997,-16.542755,'1000.929687'), ('Tiger skin brothel', '3', 964.106994,-53.205497,'1001.124572'), ('Pleasure domes', '3', -2640.762939,1406.682006,'906.460937'), ('Liberty city outside', '1', -729.276000,503.086944,'1371.971801'), ('Liberty city inside', '1', -794.806396,497.738037,'1376.195312'), ('Gang house', '5', 2350.339843,-1181.649902,'1027.976562'), ('<NAME>', '8', 2807.619873,-1171.899902,'1025.570312'), ('Crack den', '5', 318.564971,1118.209960,'1083.882812'), ('Warehouse 1', '1', 1412.639892,-1.787510,'1000.924377'), ('Warehouse 2', '18', 1302.519897,-1.787510,'1001.028259'), ('Sweets garage', '0', 2522.000000,-1673.383911,'14.866223'), ('Lil probe inn toilet', '18', -221.059051,1408.984008,'27.773437'), ('Unused safe house', '12', 2324.419921,-1145.568359,'1050.710083'), ('RC Battlefield', '10', -975.975708,1060.983032,'1345.671875'), ('Barber 1', '2', 411.625976,-21.433298,'1001.804687'), ('Barber 2', '3', 418.652984,-82.639793,'1001.804687'), ('Barber 3', '12', 412.021972,-52.649898,'1001.898437'), ('Tatoo parlour 1', '16', -204.439987,-26.453998,'1002.273437'), ('Tatoo parlour 2', '17', -204.439987,-8.469599,'1002.273437'), ('Tatoo parlour 3', '3', -204.439987,-43.652496,'1002.273437'), ('LS police HQ', '6', 246.783996,63.900199,'1003.640625'), ('SF police HQ', '10', 246.375991,109.245994,'1003.218750'), ('LV police HQ', '3', 288.745971,169.350997,'1007.171875'), ('Car school', '3', -2029.798339,-106.675910,'1035.171875'), ('8-Track', '7', -1398.065307,-217.028900,'1051.115844'), ('Bloodbowl', '15', -1398.103515,937.631164,'1036.479125'), ('Dirt track', '4', -1444.645507,-664.526000,'1053.572998'), ('Kickstart', 
'14', -1465.268676,1557.868286,'1052.531250'), ('Vice stadium', '1', -1401.829956,107.051300,'1032.273437'), ('SF Garage', '0',-1790.378295,1436.949829,'7.187500'), ('LS Garage', '0',1643.839843,-1514.819580,'13.566620'), ('SF Bomb shop', '0', -1685.636474,1035.476196,'45.210937'), ('Blueberry warehouse', '0', '76.632553','-301.156829','1.578125'), ('LV Warehouse 1', '0', '1059.895996','2081.685791','10.820312'), ('Catigulas hidden room', '1', '2131.507812',1600.818481,'1008.359375'), ('Bank', '0', '2315.952880','-1.618174','26.742187'), ('Bank (behind desk)', '0', '2319.714843','-14.838361','26.749565'), ('LS Atruim', '18', '1710.433715','-1669.379272','20.225049'), ('Bike School', '3', '1494.325195','1304.942871','1093.289062');
CREATE DATABASE IF NOT EXISTS ${DB}; USE ${DB}; DROP TABLE IF EXISTS kafka_temp_table_q22; SET spark.testing=${TESTING_ENABLE}; SET spark.sql.streaming.query.timeout.ms=${TESTING_TIMEOUT_MS}; SET streaming.query.name=job22; SET spark.sql.streaming.checkpointLocation.job22=${CHECKPOINT_ROOT}/job22; CREATE TABLE kafka_temp_table_q22 USING kafka OPTIONS ( kafka.bootstrap.servers = "${BOOTSTRAP_SERVERS}", subscribe = 'temp_topic_q22', output.mode = 'complete', kafka.schema.registry.url = "${SCHEMA_REGISTRY_URL}", kafka.schema.record.name = 'TempResult', kafka.schema.record.namespace = 'org.apache.spark.emr.baseline.testing', kafka.auto.register.schemas = 'true'); INSERT INTO kafka_temp_table_q22 SELECT i_product_name, i_brand, i_class, i_category, avg(inv_quantity_on_hand) qoh FROM kafka_inventory, date_dim, item, warehouse WHERE inv_date_sk = d_date_sk AND inv_item_sk = i_item_sk AND inv_warehouse_sk = w_warehouse_sk AND d_month_seq BETWEEN 1200 AND 1200 + 11 GROUP BY ROLLUP (i_product_name, i_brand, i_class, i_category) ORDER BY qoh, i_product_name, i_brand, i_class, i_category
<gh_stars>1-10 CREATE TABLE IF NOT EXISTS Record ( id UUID NOT NULL, typeId UUID NOT NULL, data bytea NOT NULL, PRIMARY KEY (typeId, id) ); CREATE INDEX k_record_id ON Record (id); CREATE OR REPLACE VIEW Record_d AS SELECT id, typeId, CONVERT_FROM(data, 'UTF-8') AS data FROM Record; CREATE EXTENSION IF NOT EXISTS postgis; CREATE TABLE IF NOT EXISTS RecordLocation3 ( id UUID NOT NULL, typeId UUID NOT NULL, symbolId INT NOT NULL, value GEOMETRY(POINT,4326) NOT NULL, PRIMARY KEY (symbolId, value, typeId, id) ); CREATE INDEX k_recordlocation_id ON RecordLocation3 (id); CREATE INDEX k_recordlocation_value_gix ON RecordLocation3 USING GIST ( value ); CREATE TABLE IF NOT EXISTS RecordNumber3 ( id UUID NOT NULL, typeId UUID NOT NULL, symbolId INT NOT NULL, value DECIMAL NOT NULL, PRIMARY KEY (symbolId, value, typeId, id) ); CREATE INDEX k_recordnumber3_id ON RecordNumber3 (id); CREATE TABLE IF NOT EXISTS RecordRegion2 ( id UUID NOT NULL, symbolId INT NOT NULL, value GEOMETRY(MULTIPOLYGON,4326) NOT NULL, PRIMARY KEY (symbolId, value, typeId, id) ); CREATE INDEX k_recordregion2_value_gix ON RecordRegion USING GIST (value); CREATE TABLE IF NOT EXISTS RecordString4 ( id UUID NOT NULL, typeId UUID NOT NULL, symbolId INT NOT NULL, value bytea NOT NULL, PRIMARY KEY (symbolId, value, typeId, id) ); CREATE INDEX k_recordstring4_id ON RecordString4 (id); CREATE OR REPLACE VIEW RecordString4_d AS SELECT id, typeId, symbolId, CONVERT_FROM(value, 'UTF-8') AS value FROM RecordString4; CREATE TABLE IF NOT EXISTS RecordUpdate ( id UUID NOT NULL, typeId UUID NOT NULL, updateDate DECIMAL NOT NULL, PRIMARY KEY (id) ); CREATE INDEX k_recordupdate_typeId_updateDate ON RecordUpdate (typeId, updateDate); CREATE INDEX k_recordupdate_updateDate ON RecordUpdate (updateDate); CREATE TABLE IF NOT EXISTS RecordUuid3 ( id UUID NOT NULL, typeId UUID NOT NULL, symbolId INT NOT NULL, value UUID NOT NULL, PRIMARY KEY (symbolId, value, typeId, id) ); CREATE INDEX k_recorduuid3_id ON RecordUuid3 (id); CREATE 
SEQUENCE symbol_seq; CREATE TABLE IF NOT EXISTS Symbol ( symbolId INT NOT NULL DEFAULT NEXTVAL('symbol_seq'), value bytea NOT NULL, PRIMARY KEY (symbolId) ); CREATE UNIQUE INDEX k_symbol_value ON Symbol (value); CREATE OR REPLACE VIEW Symbol_d AS SELECT symbolId, CONVERT_FROM(value, 'UTF-8') AS value FROM Symbol; CREATE TABLE IF NOT EXISTS Metric ( id UUID NOT NULL, typeId UUID NOT NULL, symbolId INT NOT NULL, dimensionId UUID NOT NULL, data CHAR(40) NOT NULL, PRIMARY KEY (symbolId, typeId, id, dimensionId, data) ); CREATE INDEX k_metricAllDims ON Metric (symbolId, typeId, id, data, dimensionId); CREATE UNIQUE INDEX k_metricData ON Metric (symbolId, id, dimensionId, left(data, 8)); CREATE TABLE IF NOT EXISTS MetricDimension ( dimensionId UUID NOT NULL PRIMARY KEY, value BYTEA NOT NULL ); CREATE UNIQUE INDEX k_metricDimensionValue ON MetricDimension(value); CREATE OR REPLACE VIEW Metric_n AS SELECT c.id , c.typeId , c.symbolId , c.dimensionId , ROUND(('x'||SUBSTRING(data,25,16))::bit(64)::bigint / 1000000, 6) AS amount , ROUND(('x'||SUBSTRING(data,9,16))::bit(64)::bigint / 1000000, 6) AS cumulativeAmount , ('x'||SUBSTRING(data,1,8))::bit(32)::bigint * 60000 AS eventDate , data AS data FROM Metric c; CREATE OR REPLACE VIEW Metric_d AS SELECT c.id , c.typeId , c.symbolId , c.dimensionId , ENCODE(d.value, 'ESCAPE') AS dimension , ENCODE(ls.value, 'ESCAPE') AS symbol , ROUND(('x'||SUBSTRING(data,25,16))::bit(64)::bigint / 1000000, 6) AS amount , ROUND(('x'||SUBSTRING(data,9,16))::bit(64)::bigint / 1000000, 6) AS cumulativeAmount , TO_TIMESTAMP(('x'||SUBSTRING(data,1,8))::bit(32)::bigint * 60)::TIMESTAMP AS eventDate , data , ('x'||SUBSTRING(data,1,8))::bit(32)::bigint * 60000 AS eventTimestamp FROM Metric c JOIN Symbol ls ON (c.symbolId = ls.symbolId) LEFT JOIN MetricDimension d ON (c.dimensionId = d.dimensionId);
<filename>tools/perf/core/tbmv3/metrics/webview_power_usage_metric.sql<gh_stars>0 -- WebView is embedded in the hosting app's main process, which means it shares some threads -- with the host app's work. We approximate WebView-related power usage -- by selecting user slices that belong to WebView and estimating their power use -- through the CPU time they consume at different core frequencies. -- This metric requires the power_profile table to be filled with the device power -- profile data. -- Output values are in milliampere-seconds. SELECT RUN_METRIC('webview/webview_power_usage.sql'); CREATE VIEW webview_power_usage_metric_output AS SELECT WebViewPowerUsageMetric( 'estimated_webview_app_power_usage', (SELECT RepeatedField( EstimatedWebViewAppPowerUsage( 'app_name', app_name, 'webview_power_mas', webview_power_mas, 'total_app_power_mas', total_app_power_mas ) ) FROM webview_power_summary ), 'total_device_power_mas', (SELECT power_mas FROM total_device_power) );
<filename>assets/template_web/db/ccm.sql -- phpMyAdmin SQL Dump -- version 4.5.1 -- http://www.phpmyadmin.net -- -- Host: 127.0.0.1 -- Generation Time: 25 Jan 2018 pada 01.39 -- Versi Server: 10.1.16-MariaDB -- PHP Version: 5.6.24 SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; SET time_zone = "+00:00"; /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; -- -- Database: `ccm` -- -- -------------------------------------------------------- -- -- Struktur dari tabel `admin` -- CREATE TABLE `admin` ( `id` int(11) NOT NULL, `nama` char(30) NOT NULL, `username` varchar(30) NOT NULL, `password` varchar(64) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; -- -------------------------------------------------------- -- -- Struktur dari tabel `peserta` -- CREATE TABLE `peserta` ( `id` int(11) NOT NULL, `ktp` char(20) NOT NULL, `nama` char(30) NOT NULL, `gender` enum('Laki-laki','Perempuan','','') NOT NULL, `darah` enum('A','B','AB','O') NOT NULL, `ttl` char(30) NOT NULL, `alamat` char(100) NOT NULL, `kota` char(30) NOT NULL, `provinsi` char(50) NOT NULL, `kategori` enum('5K Run','10K Run','Half Marathon 21K','Full Marathon 42K') NOT NULL, `hp` char(12) NOT NULL, `komunitas` char(30) DEFAULT NULL, `tgl` datetime NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=latin1; -- -- Indexes for dumped tables -- -- -- Indexes for table `admin` -- ALTER TABLE `admin` ADD PRIMARY KEY (`id`); -- -- Indexes for table `peserta` -- ALTER TABLE `peserta` ADD PRIMARY KEY (`id`); -- -- AUTO_INCREMENT for dumped tables -- -- -- AUTO_INCREMENT for table `admin` -- ALTER TABLE `admin` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT; -- -- AUTO_INCREMENT for table `peserta` -- ALTER TABLE `peserta` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET 
CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<filename>sql/init.sql CREATE DATABASE "my-site2"; GRANT ALL PRIVILEGES ON DATABASE "my-site2" TO "my-site";
-- phpMyAdmin SQL Dump -- version 4.8.2 -- https://www.phpmyadmin.net/ -- -- Host: localhost -- Generation Time: Aug 26, 2018 at 08:29 AM -- Server version: 10.1.34-MariaDB -- PHP Version: 7.2.7 SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; SET AUTOCOMMIT = 0; START TRANSACTION; SET time_zone = "+00:00"; /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; -- -- Database: `Expense` -- -- -------------------------------------------------------- -- -- Table structure for table `users` -- CREATE TABLE `users` ( `user_id` int(11) NOT NULL, `name` varchar(250) NOT NULL, `username` varchar(250) NOT NULL, `email` varchar(250) NOT NULL, `password` varchar(250) NOT NULL, `gender` varchar(100) NOT NULL, `RegisteredDate` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `LoggedInStatus` tinyint(1) NOT NULL DEFAULT '0' ) ENGINE=InnoDB DEFAULT CHARSET=latin1; -- -- Dumping data for table `users` -- INSERT INTO `users` (`user_id`, `name`, `username`, `email`, `password`, `gender`, `RegisteredDate`, `LoggedInStatus`) VALUES (1, 'Satyam', 'satya96', '<EMAIL>', '1234', 'Male', '2018-08-18 00:38:37', 0), (2, '<NAME>', 'znair96', '<EMAIL>', '1234', 'Male', '2018-08-18 00:40:11', 0), (3, 'Ravi', 'ravi123', '<EMAIL>', '123456', 'Male', '2018-08-18 00:42:36', 0), (4, 'Chaman', 'chn998', '<EMAIL>', 'admin', 'Female', '2018-08-25 23:30:30', 0), (5, 'Naman', 'nam23', '<EMAIL>', '123456', 'Male', '2018-08-25 23:32:47', 0), (9, 'Raman', 'ram123', '<EMAIL>', '123456', 'Male', '2018-08-25 23:42:51', 0), (10, 'Thakur', 'tah123', '<EMAIL>', '1234', 'Male', '2018-08-25 23:43:36', 0), (17, 'Rajat', 'rajjo123', '<EMAIL>', '123456', 'Male', '2018-08-25 23:55:56', 0), (19, 'Parth', 'parth43', '<EMAIL>', '123456', 'Others', '2018-08-25 23:57:40', 0), (20, 'Shyam', 'sha1', '<EMAIL>', '123456', 'Male', '2018-08-26 11:21:47', 0); -- -- 
Indexes for dumped tables -- -- -- Indexes for table `users` -- ALTER TABLE `users` ADD PRIMARY KEY (`user_id`), ADD UNIQUE KEY `username` (`username`); -- -- AUTO_INCREMENT for dumped tables -- -- -- AUTO_INCREMENT for table `users` -- ALTER TABLE `users` MODIFY `user_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=21; COMMIT; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- phpMyAdmin SQL Dump -- version 4.9.2 -- https://www.phpmyadmin.net/ -- -- Host: 127.0.0.1 -- Generation Time: Feb 03, 2020 at 10:48 AM -- Server version: 10.4.11-MariaDB -- PHP Version: 7.4.1 SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; SET AUTOCOMMIT = 0; START TRANSACTION; SET time_zone = "+00:00"; /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; -- -- Database: `admin_panel` -- -- -------------------------------------------------------- -- -- Table structure for table `failed_jobs` -- CREATE TABLE `failed_jobs` ( `id` bigint(20) UNSIGNED NOT NULL, `connection` text COLLATE utf8mb4_unicode_ci NOT NULL, `queue` text COLLATE utf8mb4_unicode_ci NOT NULL, `payload` longtext COLLATE utf8mb4_unicode_ci NOT NULL, `exception` longtext COLLATE utf8mb4_unicode_ci NOT NULL, `failed_at` timestamp NOT NULL DEFAULT current_timestamp() ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -------------------------------------------------------- -- -- Table structure for table `migrations` -- CREATE TABLE `migrations` ( `id` int(10) UNSIGNED NOT NULL, `migration` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `batch` int(11) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `migrations` -- INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES (1, '2014_10_12_000000_create_users_table', 1), (2, '2014_10_12_100000_create_password_resets_table', 1), (3, '2019_08_19_000000_create_failed_jobs_table', 1), (4, '2020_01_29_080839_create_questions_table', 1), (5, '2020_01_29_091548_create_question_responses_table', 1); -- -------------------------------------------------------- -- -- Table structure for table `password_resets` -- CREATE TABLE `password_resets` ( `email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `token` 
varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -------------------------------------------------------- -- -- Table structure for table `questions` -- CREATE TABLE `questions` ( `id` bigint(20) UNSIGNED NOT NULL, `title` longtext COLLATE utf8mb4_unicode_ci NOT NULL, `set_no` int(10) UNSIGNED NOT NULL, `answer_text` longtext COLLATE utf8mb4_unicode_ci DEFAULT NULL, `correct_answer` longtext COLLATE utf8mb4_unicode_ci NOT NULL, `year` int(10) UNSIGNED NOT NULL DEFAULT 2020, `is_answer_visible` tinyint(1) NOT NULL DEFAULT 0, `created_by` int(10) UNSIGNED DEFAULT NULL, `updated_by` int(10) UNSIGNED DEFAULT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `questions` -- INSERT INTO `questions` (`id`, `title`, `set_no`, `answer_text`, `correct_answer`, `year`, `is_answer_visible`, `created_by`, `updated_by`, `created_at`, `updated_at`) VALUES (1, 'কারও পৌষ মাস, কারও ______।', 1, '', 'সর্বনাশ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (2, 'কয়লা ধুইলে _____ যায় না।', 1, '', 'ময়লা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (3, 'মেঘের কোলে রোদ হেসেছে, _____ গেছে টুটি।', 1, '', 'বাদল', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (4, '‘হাট্টি মাটিম টিম’ এর পরের লাইন: _______________ ', 1, '', 'তারা মাঠে পাড়ে ডিম', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (5, '‘আমাদের দেশে হবে সেই ছেলে কবে’ এর পরের লাইন কী?', 1, '', 'কথায় না বড় হয়ে কাজে বড় হবে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (6, 'সঠিক বানান কোনটি? ক. মুমূর্ষ খ. মুমূর্ষু গ. 
মূমুর্ষ', 1, '', 'মুমূর্ষু', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (7, '‘শয্যা’ শব্দের অর্থ কী?', 1, '', 'বিছানা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (8, 'বাংলা অক্ষরে লেখো: ami banglay gaan gai', 1, '', 'আমি বাংলায় গান গাই', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (9, 'সঠিক বাংলায় লেখো: আই ছেলেরা আই মেয়েরা ফুল তুলিতে যায়', 1, '', 'আয় ছেলেরা আয় মেয়েরা ফুল তুলিতে যাই', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (10, 'কোনটি সঠিক? ক. সব ফুল খ. সব ফুলগুলো গ. সব ফুলসমূহ', 1, '', 'সব ফুল', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (11, 'অতি লোভে _____ নষ্ট।', 2, '', 'তাঁতি', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (12, 'ঢেঁকি ____ গেলেও ধান ভানে।', 2, '', 'স্বর্গে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (13, '‘ধন ধান্য পুষ্প ভরা’ এর পরের লাইন কী? ______________', 2, '', 'আমাদের এই বসুন্ধরা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (14, '‘একটা যদি পাই’ এর পরের লাইন: _______________ ', 2, '', 'অমনি ধরে গাপুস গুপুস খাই', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (15, 'আমরা যদি না জাগি মা __________?', 2, '', 'কেমনে সকাল হবে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (16, 'সঠিক বানান কোনটি? ক. মুহুর্ত খ. মূহুর্ত গ. মুহূর্ত', 2, '', 'মুহূর্ত', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (17, '‘প্রত্যুৎপন্নমতি’ শব্দের অর্থ কী? ক. উপস্থিত বুদ্ধিযুক্ত খ. উৎপন্নকারী গ. অহংকারী', 2, '', 'উপস্থিত বুদ্ধিযুক্ত', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (18, 'বাংলা অক্ষরে লেখো: <NAME> <NAME>', 2, '', 'আমরা তোমাদের ভুলবো না', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (19, 'সঠিক বাংলায় লেখো: বিশ্বজোড়া পাঠশালা মোড় সবার আমি ছাএ', 2, '', 'বিশ্বজোড়া পাঠশালা মোর সবার আমি ছাত্র', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (20, 'কোনটি সঠিক? ক. খেলা চলাকালীন সময়ে খ. খেলা চলাকালীন গ. 
খেলা চলাকালীণ সময়ে', 2, '', 'খেলা চলাকালীন', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (21, 'নাচতে না জানলে _____।', 3, '', 'উঠান বাঁকা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (22, 'চোর শোনে না _______।', 3, '', 'ধর্মের কাহিনী', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (23, '‘আ-মরি বাংলা ভাষা’ এর আগের লাইন কী? ______________', 3, '', 'মোদের গরব, মোদের আশা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (24, '‘মারবো চাবুক চড়বো ঘোড়া’ এর আগের লাইন: _______________ ', 3, '', 'আম পাতা জোড়া জোড়া', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (25, 'বিশ্বজোড়া পাঠশালা মোর, __________?', 3, '', 'সবার আমি ছাত্র', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (26, 'সঠিক বানান কোনটি? ক. নিড়বচ্ছিন্ন খ. নিরবচ্ছিন্ন গ. নিরবিচ্ছিন্ন', 3, '', 'নিরবচ্ছিন্ন', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (27, '‘বারি’ শব্দের অর্থ কী? ক. ঘর খ. পানি গ. আকাশ', 3, '', 'পানি', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (28, 'বাংলা অক্ষরে লেখো: ekti Bangladesh tumi jagroto jonotar', 3, '', 'একটি বাংলাদেশ তুমি জাগ্রত জনতার', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (29, 'সঠিক বাংলায় লেখো: আই আই চাদ মামা টিপ দিয়ে যা', 3, '', 'আয় আয় চাঁদ মামা টিপ দিয়ে যা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (30, 'কোনটি সঠিক? ক. সকল মানুষদের খ. সকল মানুষেরা গ. সকল মানুষ', 3, '', 'সকল মানুষ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (31, 'ইট মারলে _____ খেতে হয়।', 4, '', 'পাটকেল', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (32, '_____ ঘায়ে নুনের ছিটা।', 4, '', 'কাটা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (33, '‘মাঝে মাঝে তব দেখা পাই’ এর পরের লাইন কী? ______________', 4, '', 'চিরদিন কেন পাই না', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (34, '‘কাঠবিড়ালি! 
কাঠবিড়ালি!’ এর পরের লাইন: _______________', 4, '', 'পেয়ারা তুমি খাও?', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (35, '\"ফুলের গন্ধে ঘুম আসে না\"-এর পরের লাইন কী? _____ ', 4, '', 'একলা জেগে রই', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (36, 'সঠিক বানান কোনটি? ক. বুদ্ধিজিবি খ. বুদ্ধিজীবী গ. বুদ্ধিজিবী', 4, '', 'বুদ্ধিজীবী', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (37, '‘ললাট’ শব্দের অর্থ কী? ক. কপাল খ. নাক গ. আঙুল', 4, '', 'কপাল', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (38, 'বাংলা অক্ষরে লেখো: o amar desher mati tomar pore thekai matha', 4, '', 'ও আমার দেশের মাটি তোমার পরে ঠেকাই মাথা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (39, 'সঠিক বাংলায় লেখো: উদ্যোগ নিচ্ছে উওর ঢাকার ছাএসমাজ', 4, '', 'উদ্যোগ নিচ্ছে উত্তর ঢাকার ছাত্রসমাজ ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (40, 'কোনটি সঠিক? ক. অজস্র লোক খ. অজস্র লোকজন গ. অজস্র লোকবৃন্দ', 4, '', 'অজস্র লোক', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (41, 'আদার ব্যাপারি _____ খোঁজ।', 5, '', 'জাহাজের', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (42, 'খাল কেটে _____ আনা।', 5, '', 'কুমির', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (43, '‘আমি চিনি গো চিনি তোমারে’ এর পরের লাইন কী? ______________', 5, '', 'ওগো বিদেশিনী', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (44, '‘আয় বৃষ্টি ঝেঁপে’ এর পরের লাইন: _______________', 5, '', 'ধান দেবো মেপে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (45, '\"আমাদের ছোট নদী চলে বাঁকে বাঁকে\"-এর পরের লাইন কী?', 5, '', 'বৈশাখ মাসে তার হাঁটু জল থাকে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (46, 'সঠিক বানান কোনটি? ক. পিচাশ খ. পিশাচ গ. পিসাচ', 5, '', 'পিশাচ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (47, '‘মীন’ শব্দের অর্থ কী? ক. ব্যাঙ খ. মাছ গ. 
পাখি', 5, '', 'মাছ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (48, 'বাংলা অক্ষরে লেখো: moder gorob moder asha a-mori bangla vasha', 5, '', 'মোদের গরব মোদের আশা আ-মরি বাংলা ভাষা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (49, 'সঠিক বাংলায় লেখো: আমি যা চায় তা ভুল করে চায়', 5, '', 'আমি যা চাই তা ভুল করে চাই', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (50, 'কোনটি সঠিক? ক. তুমিই কি রাজু? খ. তুমিই কী রাজু?', 5, '', 'তুমিই কি রাজু?', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (51, 'অতি লোভে _____ নষ্ট।', 6, 'তাঁতি', '', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (52, 'ঝোপ বুঝে _____ মারা।', 6, '', 'কোপ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (53, '‘তোমার পরে ঠেকাই মাথা’ এর আগের লাইন কী? ______________', 6, '', 'ও আমার দেশের মাটি', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (54, '‘রামগরুড়ের ছানা...’ এর পরের লাইন: _______________', 6, '', 'হাসতে তাদের মানা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (55, '\"ভেঙে ফেল কর রে লোপাট\" -এর আগের লাইন কী?', 6, '', 'কারার ঐ লৌহ কপাট', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (56, 'সঠিক বানান কোনটি? ক. উর্ধ খ. ঊর্ধ্ব গ. ঊর্ধ', 6, '', 'ঊর্ধ্ব', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (57, '‘সমাচার’ শব্দের অর্থ কী? ক. খবর খ. আচার গ. টক', 6, '', 'খবর', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (58, 'বাংলা অক্ষরে লেখো: mora ekti fulke bachabo bole judhdho kori', 6, '', 'মোরা একটি ফুলকে বাঁচাবো বলে যুদ্ধ করি', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (59, 'সঠিক বাংলায় লেখো: আমি যা চায় তা পায় না', 6, '', 'আমি যা চাই তা পাই না', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (60, 'কোনটি সঠিক? ক. কী দিয়ে ভাত খেলে? খ. 
কি দিয়ে ভাত খেলে?', 6, '', 'কী দিয়ে ভাত খেলে?', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (61, 'যত গর্জে _____ বর্ষে না।', 7, '', 'তত', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (62, '_____ এসে তরী ডোবা।', 7, 'তীরে', '', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (63, '‘ঊর্ধ্ব গগণে বাজে মাদল’ এর আগের লাইন কী? ______________', 7, '', 'চল চল চল', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (64, '‘ঐখানেতে বাস করে কানা বগীর ছা’ এর আগের লাইন: _______________', 7, '', 'ঐ দেখা যায় তালগাছ ঐ আমাদের গাঁ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (65, '\"রবিঠাকুরের অজর কবিতা, অবিনাশী গান\" এর আগের লাইন কী?', 7, '', 'স্বাধীনতা তুমি', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (66, 'সঠিক বানান কোনটি? ক. স্বাস্থ খ. স্বাস্ত গ. স্বাস্থ্য', 7, '', 'স্বাস্থ্য', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (67, '‘প্রভাকর’ শব্দের অর্থ কী? ক. চাঁদ খ. জাদুকর গ. সূর্য', 7, '', 'সূর্য', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (68, 'বাংলা অক্ষরে লেখো: purbo digonte surjo utheche', 7, '', 'পূর্ব দিগন্তে সূর্য উঠেছে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (69, 'সঠিক বাংলায় লেখো: আমাদের লক্ষ বাঁধা এড়িয়ে দেশের উন্নতি', 7, '', 'আমাদের লক্ষ্য বাধা এড়িয়ে দেশের উন্নতি', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (70, 'সঠিক কোনটি? ক. কি সুন্দর পাখি! খ. কী সুন্দর পাখি!', 7, '', 'কী সুন্দর পাখি!', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (71, 'সবুরে _____ ফলে।', 8, '', 'মেওয়া', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (72, 'জলে কুমির ডাঙায় _____', 8, '', 'বাঘ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (73, '‘পাগলা হাওয়ার বাদল-দিনে’ এর পরের লাইন কী? 
______________', 8, '', 'পাগল আমার মন জেগে ওঠে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (74, '‘সবার আগে কুসুম-বাগে উঠব আমি ডাকি!’ এর আগের লাইন: _______________', 8, '', 'আমি হব সকাল বেলার পাখি ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (75, '\"জ্বলে পুড়ে মরে ছারখার, তবু মাথা নোয়াবার নয়\" এর আগের লাইন কী?', 8, '', 'সাবাস বাংলাদেশ এ পৃথিবী অবাক তাকিয়ে রয়', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (76, 'সঠিক বানান কোনটি? ক. প্রতিযোগী খ. প্রতিযোগি গ. প্রতিযগী', 8, '', 'প্রতিযোগী', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (77, '‘কমল’ শব্দের অর্থ কী? ক. পদ্ম খ. নরম গ. গোলাপ', 8, '', 'পদ্ম', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (78, 'বাংলা অক্ষরে লেখো: somy gele sadhon hobe na', 8, '', 'সময় গেলে সাধন হবে না', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (79, 'সঠিক বাংলায় লেখো: ১০০ টাকা মাএ', 8, '', '১০০ টাকা মাত্র', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (80, 'চলিত ভাষায় লেখো: তাহাকে হেথায় আসিতে দেখিলাম', 8, '', 'তাকে এখানে আসতে দেখলাম', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (81, 'নাচতে না জানলে _____।', 9, '', 'উঠান বাঁকা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (82, '_____ উপর খাড়ার ঘা।', 9, '', 'মরার', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (83, '\"সখী ভালোবাসা কারে কয়\" এর পরের লাইন কী?', 9, '', 'সে কি কেবলই যাতনাময়', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (84, '\"ভোর হলো দোর খোল\" এর পরের লাইন: _______________', 9, '', 'খুকুমণি ওঠো রে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (85, '\"লঙ্ঘিতে হবে রাত্রি নিশীথে যাত্রীরা হুঁশিয়ার\" এর আগের লাইন কী?', 9, '', 'দুর্গম গিরি কান্তার মরু', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (86, 'সঠিক বানান কোনটি? ক. লন্ডন খ. লণ্ডন গ. 
লন্ডণ', 9, '', 'লন্ডন', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (87, '‘গলদ’ শব্দের অর্থ কী? ক. ষাঁড় খ. ভুল গ. কাঁটা', 9, '', 'ভুল', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (88, 'বাংলা অক্ষরে লেখো: tomar bhoy nei ma amra protibad korte jani', 9, '', 'তোমার ভয় নেই মা আমরা প্রতিবাদ করতে জানি', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (89, 'সঠিক বাংলায় লেখো: ছাএরা স্কুলে যাই', 9, '', 'ছাত্ররা স্কুলে যায়', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (90, 'চলিত ভাষায় লেখো: বৃক্ষে বৃক্ষে পুষ্প ফুটিয়াছে।', 9, '', 'গাছে গাছে ফুল ফুটেছে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (91, 'ঘুঘু দেখেছো, _____ দেখোনি।', 10, '', 'ফাঁদ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (92, 'শাক দিয়ে _____ ঢাকা।', 10, '', 'মাছ', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (93, '\"খাঁচার ভিতর অচিন পাখি\" এর পরের লাইন কী?', 10, '', 'কেমনে আসে যায়', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (94, '\"আয় আয় চাঁদ মামা\" এর পরের লাইন: _______________', 10, '', 'টিপ দিয়ে যা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (95, 'মনে করো যেন বিদেশ ঘুরে ______ নিয়ে যাচ্ছি অনেক দূরে', 10, '', 'মাকে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (96, 'সঠিক বানান কোনটি? ক. পরিস্কার খ. পরীষ্কার গ. পরিষ্কার', 10, '', 'পরিষ্কার', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (97, '‘নৃপতি’ শব্দের অর্থ কী? ক. নাপিত খ. রাজা গ. 
প্রজা', 10, '', 'রাজা', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (98, 'বাংলা অক্ষরে লেখো: teerhara ei dhewer sagor pari dibo re', 10, '', 'তীরহারা এই ঢেউয়ের সাগর পাড়ি দেবো রে', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (99, 'সঠিক বাংলায় লেখো: সৎ পাএ মানেই ভালো পাএ', 10, '', 'সৎ পাত্র মানেই ভালো পাত্র', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (100, 'চলিত ভাষায় লেখো: পারিব না এ কথাটি বলিও না আর', 10, '', 'পারবো না এ কথাটি বোলো না আর', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (101, 'বাংলা অক্ষরে লেখো: tomar bhoy nei ma amra protibad korte jani', 60, '', '123', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (102, 'সঠিক বাংলায় লেখো: ছাএরা স্কুলে যাই', 60, '', '123', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (103, 'চলিত ভাষায় লেখো: বৃক্ষে বৃক্ষে পুষ্প ফুটিয়াছে।', 60, '', '123', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (104, 'ঘুঘু দেখেছো, _____ দেখোনি।', 60, '', '123', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (105, 'শাক দিয়ে _____ ঢাকা।', 60, '', '123', 2020, 0, 1, 1, '2020-02-01 07:35:39', '2020-02-01 07:35:39'); -- -------------------------------------------------------- -- -- Table structure for table `question_responses` -- CREATE TABLE `question_responses` ( `id` bigint(20) UNSIGNED NOT NULL, `user_id` int(10) UNSIGNED NOT NULL, `date` date NOT NULL, `time` int(10) UNSIGNED NOT NULL, `created_by` int(10) UNSIGNED DEFAULT NULL, `updated_by` int(10) UNSIGNED DEFAULT NULL, `total_play` int(10) UNSIGNED NOT NULL DEFAULT 0, `slug` varchar(20) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `question_responses` -- INSERT INTO `question_responses` (`id`, `user_id`, `date`, `time`, `created_by`, `updated_by`, `total_play`, `slug`, 
`created_at`, `updated_at`) VALUES (1, 2, '2020-02-01', 8420, 2, 2, 5, 'gdfg-5454-rer', '2020-02-01 07:37:27', '2020-02-01 07:53:43'), (2, 3, '2020-02-01', 7918, 3, 3, 5, 'bcvbv-5456-re', '2020-02-01 07:44:36', '2020-02-01 07:58:26'), (3, 4, '2020-02-01', 12202, 4, 4, 1, 'vv-bnv-r', '2020-02-01 07:55:25', '2020-02-01 07:55:25'), (4, 3, '2020-02-02', 8801, 3, 3, 6, 'bvbc-fghgfgh-eres', '2020-02-01 18:30:43', '2020-02-02 13:05:14'), (5, 2, '2020-02-02', 9201, 2, 2, 2, 'bvb-trdrd-vxgfd', '2020-02-01 18:40:09', '2020-02-01 19:56:51'), (6, 5, '2020-02-02', 7700, 5, 5, 3, 'bvbv-tdgdgd-cffgd', '2020-02-01 20:34:44', '2020-02-01 20:36:54'), (7, 9, '2020-02-02', 10701, 9, 9, 1, 'bvbvghf-ggdgf-bvcxv', '2020-02-02 09:29:00', '2020-02-02 09:29:00'), (8, 6, '2020-02-02', 10201, 6, 6, 1, 'bvcvc-qwewewe-nmbmio', '2020-02-02 10:26:46', '2020-02-02 10:26:46'), (9, 7, '2020-02-02', 13201, 7, 7, 1, 'bvgtr-bvcbdgdg-iuihj', '2020-02-02 10:27:12', '2020-02-02 10:27:12'), (10, 8, '2020-02-02', 8700, 8, 8, 1, 'bnvnghf-trgtfgf-b', '2020-02-02 10:27:33', '2020-02-02 10:27:33'), (11, 3, '2020-02-03', 11201, 3, 3, 2, 'wavdybklgv', '2020-02-03 03:27:33', '2020-02-03 08:57:11'), (12, 2, '2020-02-03', 1200, 2, 2, 1, 'mhj0jbtejo', '2020-02-03 03:35:16', '2020-02-03 03:35:16'), (13, 10, '2020-02-03', 12301, 10, 10, 3, 'stpgragasp', '2020-02-03 06:58:08', '2020-02-03 07:05:14'), (14, 8, '2020-02-03', 9500, 8, 8, 6, 'a7eqtis5tt', '2020-02-03 07:29:52', '2020-02-03 08:48:28'), (15, 12, '2020-02-03', 8001, 12, 12, 3, 'qzdbbr2glv', '2020-02-03 09:29:12', '2020-02-03 09:43:52'); -- -------------------------------------------------------- -- -- Table structure for table `users` -- CREATE TABLE `users` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `username` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `email` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `phone_no` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `password` varchar(255) 
COLLATE utf8mb4_unicode_ci NOT NULL, `fb_address` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `api_token` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `verify_token` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `email_verified_at` timestamp NULL DEFAULT NULL, `remember_token` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `status` int(11) NOT NULL DEFAULT 0, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `users` -- INSERT INTO `users` (`id`, `name`, `username`, `email`, `phone_no`, `password`, `<PASSWORD>`, `api_token`, `verify_token`, `email_verified_at`, `remember_token`, `status`, `created_at`, `updated_at`) VALUES (1, 'Admin', 'admin', '<EMAIL>', '<PASSWORD>', <PASSWORD>', NULL, NULL, NULL, NULL, NULL, 0, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (2, '<NAME>', 'shakib', '<EMAIL>', '01747867585', '$2y$10$/4/8n051t/smvknQ9Fr3LuO.a0v8IwH/t/PBZEkqdvlv5MYm8lkeK', NULL, '$2y$10$ePZ1PkU7dyH1i.EEk7V.y.7Wpe5cghfhgfdfP7zBYaGkF1Jhr2bZHGoq', NULL, NULL, NULL, 0, '2020-02-01 07:35:39', '2020-02-01 07:35:39'), (3, 'Labib', 'labib', '<EMAIL>', '01515636437', '$2y$10$ucX/8ecfrNaL.Zc2S7LgzeT09TS91tEmYf9HcU3.3eciJMl16MZfG', NULL, 'ujfhhgiyzqs0mpz90lrxzqhskrxlmafubsqb0fqghdsvlpdqd4n0fwvgnfiao20lzdpkjgbbvvlapivl', NULL, NULL, NULL, 0, '2020-02-01 07:44:11', '2020-02-01 07:44:11'), (4, 'Sabib', 'sabib', '<EMAIL>', '01716138117', '$2y$10$7/a3nEaNKqYjKRlOSQP1w.6u9gUMO65tTCKpOhjTTXE/KfvIR53aG', NULL, 'bzlcytyqopxctdmhqx5w1qyyexyhstbujwuzbhilh3nbsasqvvkrlebn0igugfrvdh75xyhwlvxt3653', NULL, NULL, NULL, 0, '2020-02-01 07:54:59', '2020-02-01 07:54:59'), (5, '<NAME>', 'abdurrashid', '<EMAIL>', '01714474924', '$2y$10$3NrNJpGRorGWxgvHTgku1uUl/413mEgfV6K1jb7IW4dLSRw80V3su', NULL, 'bzssqtymsoiaj7uwhzbp8ccdyjh6vpupd7jgwxkhpq9ezh9dwj4pygn9n9i4vcnmmdsdzyf1bajmwm2i', NULL, NULL, NULL, 0, '2020-02-01 20:32:07', '2020-02-01 
20:32:07'), (6, '<NAME>', 'rafiulanjum', '<EMAIL>', '01234567891', '$2y$10$YcLGMOiKGiOHeOVsCuCb.eO7vRSVuR.6awDtNQUuRjdYSv.soXwVm', NULL, 'lg0g1nx3oxmozu1yrxh5vtiyvgnkc1cm9yfk1399whqlwxofq1njjeqxah1o2ndoewnwdrvheubktdrt', NULL, NULL, NULL, 0, '2020-02-02 06:57:11', '2020-02-02 06:57:11'), (7, 'Robin', 'robin', '<EMAIL>', '01234567892', '$2y$10$Gz6pohxBU9jfqJPWqg9.4euwC5Bm0dXsslTexlx2PAx0k.yWAPPxC', NULL, 'h2dan4mtl2yihulg7lgfyorxq8mbup1xkbw93be5mdama4yvw4viih4edv9rejwpciqvqekdhwrdttar', NULL, NULL, NULL, 0, '2020-02-02 08:06:20', '2020-02-02 08:06:20'), (8, '<NAME>', 'amrafatrahman', '<EMAIL>', '01234567893', '$2y$10$Rg39EeCas3cpBzseFr5P3uZKkd4Sj8pnnI3yeffUA0wDq7kUBBbfu', NULL, 'oxd69tjc9edquqz0hegoiszsakg6tsitbtc1vltet6xzca22o1a2avqehbqunezroba2hbtsgyj06a7z', NULL, NULL, NULL, 0, '2020-02-02 08:07:16', '2020-02-02 08:07:16'), (9, '<NAME>', 'abirahmad', '<EMAIL>', '01234567894', '$2y$10$QCbXP5T9YbtZJ5LjRBhfJeZLDfGFWk3Psla7BxK9OMgpIaxkoto6G', NULL, '82hmvkeccexl80vlilejvjj2r3gqwddrt31xmunowcwuk3bot8hgw4kwwnl4luxdjnevos2hgdpbctdr', NULL, NULL, NULL, 0, '2020-02-02 08:12:40', '2020-02-02 08:12:40'), (10, '<NAME>', 'maniruzzamanakash', '<EMAIL>', '01234567895', '$2y$10$ruwkRortkhC7P/m/Z4lwTuTEqIRTw10WkdzChBOyYkj5X6LrUklNy', NULL, 'qr8q7voxdvcnqv8ctpro90hk2gaxqffagrn8b97sgdpmqa6e8ipg1siyoqxaf9mfpqzmobrqukh5hgce', NULL, NULL, NULL, 0, '2020-02-02 08:13:06', '2020-02-02 08:13:06'), (11, '<NAME>', 'fariduddin', '<EMAIL>', '01234567896', '$2y$10$IL/GXUQPfllVn.T3BRxAve08ZJinOedzmkDWusSZrBj5sLyt0SqPG', NULL, 'ca2pjyz4j4fvyqcz2hbog0i40evolqrzzfvgplgwlxmbbwgi9vgmcbyoriflmeioxqxk1gwaatimcdm3', NULL, NULL, NULL, 0, '2020-02-02 08:13:33', '2020-02-02 08:13:33'), (12, '<NAME>', 'rashedulislamsiraji', '<EMAIL>', '01234567897', '$2y$10$xVv4HKDzZvajlVBZTHKdoOldFv8PWN9Vbv87Qo6coxXZyAbdWO4hy', NULL, 'lixicbrrdhix4kwqthgbyqz9izvutkt2rgky0voxeisurdfo8zu5ff2ft2r7pvzbfqbl6zgjins8n15m', NULL, NULL, NULL, 0, '2020-02-02 08:14:12', '2020-02-02 08:14:12'), (13, '<NAME>', 'abrarsabib', '<EMAIL>', 
'01234567898', '$2y$10$vxqzAT7LBoMBHo6HKCEWK.9xbcVlImbv/lpPpKLj/pmOBbDw1H3Mq', NULL, 'n5ukoyf1gvlq2vuciylq3jd0v0rronilhdhls42ozkfmgoizoddkctmkpnsc8aorcxo0f4uy1ijt8ggd', NULL, NULL, NULL, 0, '2020-02-02 08:15:50', '2020-02-02 08:15:50'); -- -- Indexes for dumped tables -- -- -- Indexes for table `failed_jobs` -- ALTER TABLE `failed_jobs` ADD PRIMARY KEY (`id`); -- -- Indexes for table `migrations` -- ALTER TABLE `migrations` ADD PRIMARY KEY (`id`); -- -- Indexes for table `password_resets` -- ALTER TABLE `password_resets` ADD KEY `password_resets_email_index` (`email`); -- -- Indexes for table `questions` -- ALTER TABLE `questions` ADD PRIMARY KEY (`id`); -- -- Indexes for table `question_responses` -- ALTER TABLE `question_responses` ADD PRIMARY KEY (`id`); -- -- Indexes for table `users` -- ALTER TABLE `users` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `users_username_unique` (`username`); -- -- AUTO_INCREMENT for dumped tables -- -- -- AUTO_INCREMENT for table `failed_jobs` -- ALTER TABLE `failed_jobs` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT; -- -- AUTO_INCREMENT for table `migrations` -- ALTER TABLE `migrations` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6; -- -- AUTO_INCREMENT for table `questions` -- ALTER TABLE `questions` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=106; -- -- AUTO_INCREMENT for table `question_responses` -- ALTER TABLE `question_responses` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=16; -- -- AUTO_INCREMENT for table `users` -- ALTER TABLE `users` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=14; COMMIT; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<reponame>dzikrisinatria/tubes-webpro<gh_stars>0
-- Schema for a small pharmacy ordering application:
-- medicines (obat), medicine categories (jenis_obat), users and roles,
-- orders (pemesanan) and order line items (detail_pemesanan).

-- Medicine master data
CREATE TABLE `obat` (
  `id_obat`       int          NOT NULL AUTO_INCREMENT,
  `kode_obat`     varchar(5)   NOT NULL,
  `nama_obat`     varchar(128) NOT NULL,
  `id_jenis_obat` int          NOT NULL,
  `harga`         int(6)       NOT NULL,
  `stok`          int(6)       NOT NULL,
  `bentuk`        varchar(10)  NOT NULL,
  `fungsi`        varchar(128) NOT NULL,
  `aturan`        TEXT         NOT NULL,
  `gambar`        varchar(128) NOT NULL,
  PRIMARY KEY (`id_obat`)
);

-- Medicine category lookup
CREATE TABLE `jenis_obat` (
  `id_jenis_obat` int         NOT NULL AUTO_INCREMENT,
  `nama_jenis`    varchar(50) NOT NULL,
  PRIMARY KEY (`id_jenis_obat`)
);

-- Application accounts
CREATE TABLE `user` (
  `id_user`       int          NOT NULL AUTO_INCREMENT,
  `nama`          varchar(128) NOT NULL,
  `email`         varchar(128) NOT NULL,
  `username`      varchar(20)  NOT NULL,
  `password`      varchar(256) NOT NULL,
  `jenis_kelamin` varchar(10)  NOT NULL,
  `tgl_lahir`     DATE         NOT NULL,
  `alamat`        TEXT         NOT NULL,
  `telepon`       varchar(13)  NOT NULL,
  `foto`          varchar(128) NOT NULL,
  `role_id`       int(1)       NOT NULL,
  `date_created`  int(11)      NOT NULL,
  PRIMARY KEY (`id_user`)
);

-- Role lookup (e.g. admin / customer)
CREATE TABLE `user_role` (
  `role_id` int         NOT NULL AUTO_INCREMENT,
  `role`    varchar(30) NOT NULL,
  PRIMARY KEY (`role_id`)
);

-- Order header
CREATE TABLE `pemesanan` (
  `id_pemesanan`  int    NOT NULL AUTO_INCREMENT,
  `id_user`       int    NOT NULL,
  `tgl_pemesanan` DATE   NOT NULL,
  `total`         int    NOT NULL,
  `bayar`         int    NOT NULL,
  `status`        int(1) NOT NULL,
  PRIMARY KEY (`id_pemesanan`)
);

-- Order line items (one row per medicine per order)
CREATE TABLE `detail_pemesanan` (
  `id_pemesanan` int NOT NULL,
  `id_obat`      int NOT NULL,
  `jumlah`       int NOT NULL,
  `subtotal`     int NOT NULL
);

-- Foreign keys, added after all tables exist so creation order does not matter
ALTER TABLE `obat`
  ADD CONSTRAINT `obat_fk0` FOREIGN KEY (`id_jenis_obat`) REFERENCES `jenis_obat`(`id_jenis_obat`);

ALTER TABLE `user`
  ADD CONSTRAINT `user_fk0` FOREIGN KEY (`role_id`) REFERENCES `user_role`(`role_id`);

ALTER TABLE `pemesanan`
  ADD CONSTRAINT `pemesanan_fk0` FOREIGN KEY (`id_user`) REFERENCES `user`(`id_user`);

ALTER TABLE `detail_pemesanan`
  ADD CONSTRAINT `detail_pemesanan_fk0` FOREIGN KEY (`id_pemesanan`) REFERENCES `pemesanan`(`id_pemesanan`);

ALTER TABLE `detail_pemesanan`
  ADD CONSTRAINT `detail_pemesanan_fk1` FOREIGN KEY (`id_obat`) REFERENCES `obat`(`id_obat`);
<filename>Tp01/Ex01/drop_create_table-dbo.DimAccessory.sql<gh_stars>0
-- Idempotent rebuild of the [dbo].[DimAccessory] dimension table:
-- drop the extended properties and default constraints, drop the table,
-- then recreate everything from scratch.
USE [UdeS.Cefti.Inf735.Lab01.Ex1.DW]
GO

-- Drop the MS_Description extended properties (table-level first, then per column)
EXEC sys.sp_dropextendedproperty @name=N'MS_Description' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory'
GO
EXEC sys.sp_dropextendedproperty @name=N'MS_Description' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory', @level2type=N'COLUMN',@level2name=N'Model'
GO
EXEC sys.sp_dropextendedproperty @name=N'MS_Description' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory', @level2type=N'COLUMN',@level2name=N'Brand'
GO
EXEC sys.sp_dropextendedproperty @name=N'MS_Description' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory', @level2type=N'COLUMN',@level2name=N'Name'
GO
EXEC sys.sp_dropextendedproperty @name=N'MS_Description' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory', @level2type=N'COLUMN',@level2name=N'Id'
GO

-- Drop the default constraints before dropping the table
ALTER TABLE [dbo].[DimAccessory] DROP CONSTRAINT [DF_DimAccessory_Brand]
GO
ALTER TABLE [dbo].[DimAccessory] DROP CONSTRAINT [DF_DimAccessory_Name]
GO

/****** Object: Table [dbo].[DimAccessory] Script Date: 10/31/2018 7:36:52 PM ******/
DROP TABLE [dbo].[DimAccessory]
GO

/****** Object: Table [dbo].[DimAccessory] Script Date: 10/31/2018 7:36:52 PM ******/
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO

-- Recreate the dimension table with a clustered surrogate key
CREATE TABLE [dbo].[DimAccessory](
    [Id] [bigint] IDENTITY(1,1) NOT NULL,
    [Name] [nchar](32) NOT NULL,
    [Brand] [nchar](16) NOT NULL,
    [Model] [uniqueidentifier] NOT NULL,
    CONSTRAINT [PK_DimAccessory] PRIMARY KEY CLUSTERED
    (
        [Id] ASC
    )WITH (PAD_INDEX = OFF, STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY]
) ON [PRIMARY]
GO

-- Restore the default constraints
ALTER TABLE [dbo].[DimAccessory] ADD CONSTRAINT [DF_DimAccessory_Name] DEFAULT (N'AccessoryName') FOR [Name]
GO
ALTER TABLE [dbo].[DimAccessory] ADD CONSTRAINT [DF_DimAccessory_Brand] DEFAULT (N'AccessoryBrand') FOR [Brand]
GO

-- Restore the MS_Description extended properties (per column, then table-level)
EXEC sys.sp_addextendedproperty @name=N'MS_Description', @value=N'Accessory Id' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory', @level2type=N'COLUMN',@level2name=N'Id'
GO
EXEC sys.sp_addextendedproperty @name=N'MS_Description', @value=N'Accessory Name' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory', @level2type=N'COLUMN',@level2name=N'Name'
GO
EXEC sys.sp_addextendedproperty @name=N'MS_Description', @value=N'Accessory Brand' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory', @level2type=N'COLUMN',@level2name=N'Brand'
GO
EXEC sys.sp_addextendedproperty @name=N'MS_Description', @value=N'Accessory Model' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory', @level2type=N'COLUMN',@level2name=N'Model'
GO
EXEC sys.sp_addextendedproperty @name=N'MS_Description', @value=N'Dimension Accessory' , @level0type=N'SCHEMA',@level0name=N'dbo', @level1type=N'TABLE',@level1name=N'DimAccessory'
GO
<reponame>danishabdullah/lunatic<gh_stars>0
\echo Creating the api schema

-- Rebuild the api schema from scratch
drop schema if exists api cascade;
create schema api;
set search_path = api, public;

-- The "api" role owns the views in the api schema; it is required
-- for the definition of the RLS policies.
drop role if exists api;
create role api;
-- Workaround for RDS, where the master user lacks SUPERUSER privileges
grant api to current_user;

-- Redefine this type to control the user properties returned by auth endpoints
\ir ../libs/auth/api/types/user.sql
-- Include all auth endpoints
\ir ../libs/auth/api/all.sql

-- Application endpoints: for each entity, the view definition followed by its RPC functions.

-- uisetup
\ir uisetup/uisetups.sql
\ir rpc/uisetups.sql

-- conversation
\ir conversation/conversations.sql
\ir rpc/conversations.sql

-- message
\ir message/messages.sql
\ir rpc/messages.sql

-- match
\ir match/matches.sql
\ir rpc/matches.sql

-- hidden
\ir hidden/hiddens.sql
\ir rpc/hiddens.sql

-- country
\ir country/countries.sql
\ir rpc/countries.sql

-- city
\ir city/cities.sql
\ir rpc/cities.sql

-- userlocation
\ir userlocation/userlocations.sql
\ir rpc/userlocations.sql

-- profile
\ir profile/profiles.sql
\ir rpc/profiles.sql
<reponame>GUSAR1T0/VXDS-DEV-TOOLS
-- Application user accounts. Email is the unique login identity;
-- UserRoleId is an optional link to [authentication].[UserRole] and is
-- nulled out when the role is deleted.
CREATE TABLE [authentication].[User]
(
    [Id]          INT IDENTITY (1, 1) NOT NULL,
    [FirstName]   NVARCHAR(50)        NOT NULL,
    [LastName]    NVARCHAR(50)        NOT NULL,
    [Email]       NVARCHAR(255)       NOT NULL,
    -- FIX: widened from NVARCHAR(50). Common password hash formats exceed
    -- 50 characters (bcrypt is 60; PBKDF2/argon2 encodings are longer) and
    -- would be truncated, silently corrupting credentials.
    [Password]    NVARCHAR(255)       NOT NULL,
    [Color]       NVARCHAR(32)        NOT NULL,
    [Location]    NVARCHAR(255)       NULL,
    [Bio]         NVARCHAR(1000)      NULL,
    [UserRoleId]  INT                 NULL,
    -- New accounts are active by default
    [IsActivated] BIT DEFAULT ((1))   NOT NULL,
    CONSTRAINT [PK_User_Id] PRIMARY KEY CLUSTERED ([Id] ASC),
    CONSTRAINT [UQ_User_Email] UNIQUE ([Email]),
    CONSTRAINT [FK_User_UserRoleId] FOREIGN KEY ([UserRoleId]) REFERENCES [authentication].[UserRole] ([Id]) ON DELETE SET NULL
);
<filename>assets/sql/sql_recipe_tutorial_003.sql
/*************************************
 *
 * SQL Recipe for Analysis Tutorial #3
 *
 *************************************
 * @version 1.0.0 2020-01-28 17:37:10 dorbae Initialize
 * @author dorbae(<EMAIL>)
 */

-- DROP TABLE IF EXISTS mst_user_location;
-- Create user location table
CREATE TABLE mst_user_location (
    user_id   varchar(255)
  , pref_name varchar(255)
  , city_name varchar(255)
);

-- Insert sample data
INSERT INTO mst_user_location
VALUES
    ('U001', 'Seoul-si', 'Seodaemun-gu')
  , ('U002', 'Kyeonki-do Suwon-si', 'Paldal-gu' )
  , ('U003', 'Jeju-do', 'Seoguipo-si')
;

-- Select sample data
SELECT *
FROM mst_user_location
;

-- Concatenate pref_name and city_name
SELECT user_id
    -- PostgreSQL, Hive, SparkSQL, BigQuery
  , CONCAT(city_name, ', ', pref_name) AS pref_city
    -- PostgreSQL, Redshift
  , city_name || ', ' || pref_name AS pref_city2
FROM mst_user_location
;

-- DROP TABLE IF EXISTS quarterly_sales;
-- Create quarterly sales table
CREATE TABLE quarterly_sales (
    year integer
  , q1   integer
  , q2   integer
  , q3   integer
  , q4   integer
);

-- Insert sample data (2019 Q3/Q4 intentionally NULL: not yet reported)
INSERT INTO quarterly_sales
VALUES
    (2017, 82000, 83000, 78000, 83000)
  , (2018, 85000, 85000, 80000, 81000)
  , (2019, 92000, 81000, NULL , NULL )
;

-- Select sample data
SELECT *
FROM quarterly_sales
;

-- Find out the fluctuation of sales quarterly
SELECT year
  , q1
  , q2
  , CASE WHEN q1 < q2 THEN '+' WHEN q1 = q2 THEN ' ' ELSE '-' END AS judge_q1_q2
  , q2 - q1 AS diff_q2_q1
  , SIGN(q2 - q1) AS sign_q2_q1
FROM quarterly_sales
ORDER BY year
;

-- Find out the max/min sales of a year
SELECT year
    -- Maximum sales
  , greatest(q1, q2, q3, q4) AS greatest_sales
    -- Minimum sales
    -- FIX: the original mistakenly used greatest() here, so least_sales
    -- always duplicated greatest_sales.
  , least(q1, q2, q3, q4) AS least_sales
FROM quarterly_sales
ORDER BY year
;

-- Calculate the average of a year
SELECT year
    -- Invalid: it does not replace NULL with 0; the result of an arithmetic
    -- operation involving NULL is NULL
  , (q1 + q2 + q3 + q4) / 4 AS null_average
    -- Invalid: q3 and q4 must be skipped in 2019, but this always divides the
    -- total by 4
  , (COALESCE(q1, 0) + COALESCE(q2, 0) + COALESCE(q3, 0) + COALESCE(q4, 0)) / 4 AS invalid_denominator_average
    -- Valid: the denominator counts only the non-NULL quarters
  , (COALESCE(q1, 0) + COALESCE(q2, 0) + COALESCE(q3, 0) + COALESCE(q4, 0))
    / (SIGN(COALESCE(q1, 0)) + SIGN(COALESCE(q2, 0)) + SIGN(COALESCE(q3, 0)) + SIGN(COALESCE(q4, 0))) AS correct_average
FROM quarterly_sales
ORDER BY year
;

--DROP TABLE IF EXISTS advertising_stats;
-- Create advertising status table
CREATE TABLE advertising_stats (
    dt          varchar(255)
  , ad_id       varchar(255)
  , impressions integer
  , clicks      integer
);

-- Insert sample data (ad 001 on 2020-01-29 has zero impressions on purpose)
INSERT INTO advertising_stats
VALUES
    ('2020-01-28', '001', 100000, 3000)
  , ('2020-01-28', '002', 120000, 1200)
  , ('2020-01-28', '003', 500000, 10000)
  , ('2020-01-29', '001', 0, 0)
  , ('2020-01-29', '002', 130000, 1400)
  , ('2020-01-29', '003', 620000, 15000)
;

-- Select sample data
SELECT *
FROM advertising_stats
;

-- Calculate CTR (Click through rate)
SELECT dt
  , ad_id
  , CAST(clicks AS double precision) / impressions AS ctr
    -- 100.0 (double) * clicks (integer) -> double
  , 100.0 * clicks / impressions AS ctr_as_percent
    -- Hive, SparkSQL, Redshift, BigQuery => automatic conversion
    -- , clicks / impressions AS ctr
FROM advertising_stats
WHERE dt = '2020-01-28'
ORDER BY dt, ad_id
;

-- Calculate CTR without a division-by-zero error
SELECT dt
  , ad_id
    -- use a CASE clause
  , CASE WHEN impressions > 0 THEN 100.0 * clicks / impressions END AS ctr_percent_by_case
    -- PostgreSQL, SparkSQL, Redshift, BigQuery
    -- The result of an arithmetic operation involving NULL is NULL
  , 100.0 * clicks / NULLIF(impressions , 0) AS ctr_as_percent_by_null
    -- Hive (does not support NULLIF)
    -- , 100.0 * clicks / CASE WHEN impressions = 0 THEN NULL ELSE impressions END AS ctr_as_percent_by_null
FROM advertising_stats
ORDER BY dt, ad_id
;

-- DROP TABLE IF EXISTS location_1d;
-- Create a one-dimensional location table
CREATE TABLE location_1d (
    x1 integer
  , x2 integer
);

-- Insert sample data
INSERT INTO location_1d
VALUES
    ( 5 , 10)
  , (10 , 5)
  , (-2 , 4)
  , ( 3 , 3)
  , ( 0 , 1)
;

-- Select sample data
SELECT *
FROM location_1d
;

-- Calculate the distance between x1 and x2
SELECT abs(x1 - x2) AS abs
  , sqrt(power(x1 - x2, 2)) AS rms
FROM location_1d
;

-- DROP TABLE IF EXISTS location_2d;
-- Create a two-dimensional location table
CREATE TABLE location_2d (
    x1 integer
  , y1 integer
  , x2 integer
  , y2 integer
);

-- Insert sample data
INSERT INTO location_2d
VALUES
    (0, 0, 2, 2)
  , (3, 5, 1, 2)
  , (5, 3, 2, 1)
;

-- Select sample data
SELECT *
FROM location_2d
;

-- Calculate the Euclidean distance between (x1, y1) and (x2, y2)
-- (the <-> operator on point values is PostgreSQL-specific)
SELECT sqrt(power(x1 - x2, 2) + power(y1 - y2, 2)) AS distance
  , point(x1, y1) <-> point(x2, y2) as dist_point
FROM location_2d
;

-- DROP TABLE IF EXISTS mst_users_with_dates;
-- Create user table with user's birthday and register datetime
-- (stored as strings, as they often are in raw logs)
CREATE TABLE mst_users_with_dates (
    user_id        varchar(255)
  , register_stamp varchar(255)
  , birth_date     varchar(255)
);

-- Insert sample data
INSERT INTO mst_users_with_dates
VALUES
    ('U001', '2019-02-20 10:00:00', '2000-02-19')
  , ('U002', '2019-02-21 10:00:00', '2000-03-16')
  , ('U003', '2019-03-01 10:00:00', '2000-04-09')
;

-- Select sample data
SELECT *
FROM mst_users_with_dates
;

-- Calculate date/time
SELECT user_id
  , register_stamp::timestamp AS register_stamp
  , register_stamp::timestamp + '1 hour'::interval AS after_1_hour
  , register_stamp::timestamp - '30 minutes'::interval AS before_30_minutes
  , register_stamp::date AS register_date
  , register_stamp::date + '1 day'::interval AS after_1_day
  , register_stamp::date - '1 month'::interval AS before_1_month
  -- -- if Hive, SparkSQL (no timestamp operator support)
  -- , CAST(register_stamp AS timestamp) AS register_stamp
  -- , from_unixtime(unix_timestamp(register_stamp) + 60 * 60) AS after_1_hour
  -- , from_unixtime(unix_timestamp(register_stamp) - 30 * 60) AS before_30_minutes
  -- , to_date(register_stamp) AS register_date
  -- , date_add(to_date(register_stamp), 1) AS after_1_day
  -- , add_months(to_date(register_stamp), -1) AS before_1_month
  -- -- if Redshift (dateadd)
  -- , register_stamp::timestamp AS register_stamp
  -- , dateadd(hour, 1, register_stamp::timestamp) AS after_1_hour
  -- , dateadd(minute, -30, register_stamp::timestamp) AS before_30_minutes
  -- , register_stamp::date AS register_date
  -- , dateadd(day, 1, register_stamp::date) AS after_1_day
  -- , dateadd(month, -1, register_stamp::date) AS before_1_month
  -- -- if BigQuery (timestamp_add, timestamp_sub, date_add, date_sub)
  -- , timestamp(register_stamp) AS register_stamp
  -- , timestamp_add(timestamp(register_stamp), interval 1 hour) AS after_1_hour
  -- , timestamp_sub(timestamp(register_stamp), interval 30 minute) AS before_30_minutes
  -- , date(register_stamp) AS register_date
  -- , date_add(date(register_stamp), interval 1 day) AS after_1_day
  -- , date_sub(date(register_stamp), interval 1 month) AS before_1_month
FROM mst_users_with_dates
;

-- Calculate the difference between today and register_stamp
SELECT user_id
    -- if PostgreSQL, Redshift
  , CURRENT_DATE AS today
  , register_stamp::date AS register_date
  , CURRENT_DATE - register_stamp::date AS diff_days
  -- -- if Hive, SparkSQL (datediff)
  -- , CURRENT_DATE() AS today
  -- , to_date(register_stamp) AS register_date
  -- , datediff(CURRENT_DATE(), to_date(register_stamp)) AS diff_days
  -- -- if BigQuery (date_diff)
  -- , CURRENT_DATE AS today
  -- , date(timestamp(register_stamp)) AS register_date
  -- , date_diff(CURRENT_DATE, date(timestamp(register_stamp)), day) AS diff_days
FROM mst_users_with_dates muwd
;

-- Calculate user's age from the birthday (PostgreSQL age())
SELECT user_id
  , CURRENT_DATE AS today
  , register_stamp::date AS register_date
  , birth_date::date AS birth_date
  , EXTRACT(YEAR FROM age(birth_date::date)) AS current_age
  , EXTRACT(YEAR FROM age(register_stamp::date, birth_date::date)) AS register_age
  , age(register_stamp::date, birth_date::date) AS register_age2
FROM mst_users_with_dates muwd
;

-- Calculate year difference (Redshift syntax is active here)
SELECT user_id
    -- if Redshift
  , CURRENT_DATE AS today
  , register_stamp::date AS register_date
  , birth_date::date AS birth_date
  , datediff(year, birth_date::date, CURRENT_DATE) AS current_age
  , datediff(year, birth_date::date, register_stamp::date) AS register_age
  -- -- if BigQuery
  -- , CURRENT_DATE AS today
  -- , date(timestamp(register_stamp)) AS register_date
  -- , date(timestamp(birth_date)) AS birth_date
  -- , date_diff(CURRENT_DATE, date(timestamp(birth_date)), year) AS current_age
  -- , date_diff(date(timestamp(register_stamp)), date(timestamp(birth_date)), year) AS register_age
FROM mst_users_with_dates
;

-- How to calculate the age exactly without a special function like age():
-- encode both dates as yyyymmdd integers, subtract, then divide by 10000.
SELECT birth_date_integer
  , current_date_integer
  , floor((current_date_integer - birth_date_integer) / 10000) AS age
FROM (
    SELECT
        -- FIX: the last term must be the DAY part (the original repeated YEAR,
        -- which breaks the yyyymmdd encoding)
        EXTRACT(YEAR from birth_date::date) * 10000
          + EXTRACT(MONTH from birth_date::date) * 100
          + EXTRACT(DAY from birth_date::date) AS birth_date_integer
      , EXTRACT(YEAR from CURRENT_DATE) * 10000
          + EXTRACT(MONTH from CURRENT_DATE) * 100
          + EXTRACT(DAY from CURRENT_DATE) AS current_date_integer
    FROM mst_users_with_dates muwd
) AS t  -- FIX: PostgreSQL requires an alias on a derived table
;

-- Compare IP addresses (PostgreSQL inet type)
SELECT CAST('127.0.0.1' AS inet) < CAST('127.0.0.2' AS inet) AS lt
  , CAST('127.0.0.1' AS inet) > CAST('192.168.0.1' AS inet) AS gt
;

-- Find out whether an IP address is contained in a network or not
SELECT CAST('127.0.0.1' AS inet) << CAST('127.0.0.0/8' AS inet) AS is_contained;

-- Split an IP address stored as text into its four octets
SELECT ip
    -- if PostgreSQL, Redshift (split_part)
  , CAST(split_part(ip, '.', 1) AS integer) AS ip_part_1
  , CAST(split_part(ip, '.', 2) AS integer) AS ip_part_2
  , CAST(split_part(ip, '.', 3) AS integer) AS ip_part_3
  , CAST(split_part(ip, '.', 4) AS integer) AS ip_part_4
  -- -- if Hive, SparkSQL
  -- , CAST(split(ip, '\\.')[0] AS int) AS ip_part_1
  -- , CAST(split(ip, '\\.')[1] AS int) AS ip_part_2
  -- , CAST(split(ip, '\\.')[2] AS int) AS ip_part_3
  -- , CAST(split(ip, '\\.')[3] AS int) AS ip_part_4
  -- -- if BigQuery
  -- , CAST(split(ip, '.')[SAFE_ORDINAL(1)] AS int64) AS ip_part_1
  -- , CAST(split(ip, '.')[SAFE_ORDINAL(2)] AS int64) AS ip_part_2
  -- , CAST(split(ip, '.')[SAFE_ORDINAL(3)] AS int64) AS ip_part_3
  -- , CAST(split(ip, '.')[SAFE_ORDINAL(4)] AS int64) AS ip_part_4
FROM (SELECT CAST('192.168.0.1' AS text) AS ip) AS t
-- Other databases
-- (SELECT '192.168.0.1' AS ip) AS t
;

-- Convert an IP address into a single sortable integer
SELECT ip
    -- if PostgreSQL, Redshift (split_part)
  , CAST(split_part(ip, '.', 1) AS integer) * 2^24
    + CAST(split_part(ip, '.', 2) AS integer) * 2^16
    + CAST(split_part(ip, '.', 3) AS integer) * 2^8
    + CAST(split_part(ip, '.', 4) AS integer) * 2^0 AS ip_integer
  -- -- if Hive, SparkSQL
  -- , CAST(split(ip, '\\.')[0] AS int) * pow(2, 24)
  -- + CAST(split(ip, '\\.')[1] AS int) * pow(2, 16)
  -- + CAST(split(ip, '\\.')[2] AS int) * pow(2, 8)
  -- + CAST(split(ip, '\\.')[3] AS int) * pow(2, 0)
  -- -- if BigQuery
  -- , CAST(split(ip, '.')[SAFE_ORDINAL(1)] AS int64) * pow(2, 24)
  -- + CAST(split(ip, '.')[SAFE_ORDINAL(2)] AS int64) * pow(2, 16)
  -- + CAST(split(ip, '.')[SAFE_ORDINAL(3)] AS int64) * pow(2, 8)
  -- + CAST(split(ip, '.')[SAFE_ORDINAL(4)] AS int64) * pow(2, 0)
  -- AS ip_integer
FROM (SELECT CAST('192.168.0.1' AS text) AS ip) AS t
-- Other databases
-- (SELECT '192.168.0.1' AS ip) AS t
;

-- Convert an IP address into a fixed-width, zero-padded sortable string
SELECT ip
    -- if PostgreSQL, Redshift (lpad)
  , lpad(split_part(ip, '.', 1), 3, '0')
    || lpad(split_part(ip, '.', 2), 3, '0')
    || lpad(split_part(ip, '.', 3), 3, '0')
    || lpad(split_part(ip, '.', 4), 3, '0') AS ip_padding
  -- -- if Hive, SparkSQL
  -- , CONCAT(lpad(split(ip, '\\.')[0], 3, '0')
  --        , lpad(split(ip, '\\.')[1], 3, '0')
  --        , lpad(split(ip, '\\.')[2], 3, '0')
  --        , lpad(split(ip, '\\.')[3], 3, '0')
  -- ) AS ip_padding
  -- -- if BigQuery
  -- , CONCAT(lpad(split(ip, '.')[SAFE_ORDINAL(1)], 3, '0')
  --        , lpad(split(ip, '.')[SAFE_ORDINAL(2)], 3, '0')
  --        , lpad(split(ip, '.')[SAFE_ORDINAL(3)], 3, '0')
  --        , lpad(split(ip, '.')[SAFE_ORDINAL(4)], 3, '0')
  -- ) AS ip_padding
FROM (SELECT CAST('192.168.0.1' AS text) AS ip) AS t
-- Other databases
-- (SELECT '192.168.0.1' AS ip) AS t
;

commit;
<gh_stars>0
-- Denormalised stop-times table keyed by the ROWIDs of gtfs_trips/gtfs_stops
-- instead of their textual GTFS ids, so timetable lookups avoid repeated
-- string joins against the raw GTFS tables.
create table danp_stop_times (
    trip_rowid int,
    stop_rowid int,
    stop_sequence int NOT NULL,
    stop_headsign text,
    departure_time_seconds int,
    -- NOTE(review): presumably flags the final stop of a trip (precomputed
    -- upstream) -- confirm against the loader that populates gtfs_stop_times.
    danp_last_stop bool
);

--create index dan_dep_time_index on danp_stop_times(departure_time_seconds);
--create index dan_stop_id_index on danp_stop_times(stop_rowid);
--create index dan_trip_id_index on danp_stop_times(trip_rowid);
-- Single composite index kept in place of the three single-column ones above.
create index dan_stoptime_index on danp_stop_times(stop_rowid, trip_rowid);

-- Populate from the raw GTFS tables, resolving textual ids to ROWIDs,
-- then drop the original (now redundant) stop-times table.
INSERT INTO danp_stop_times
SELECT t.ROWID, s.ROWID, st.stop_sequence, st.stop_headsign,
       st.departure_time_seconds, st.danp_last_stop
FROM gtfs_stop_times st
JOIN gtfs_stops s ON s.stop_id = st.stop_id
JOIN gtfs_trips t ON t.trip_id = st.trip_id;

DROP TABLE gtfs_stop_times;
-- <gh_stars>1-10  (repository metadata marker, not SQL -- commented out)
-- ===============================================================================
-- Author:		<NAME>
-- Create date: 24/03/2010
-- Description:	The Multiple Use of Bank Details report in data warehousing form
-- ===============================================================================
CREATE PROCEDURE [dbo].[h3giDataWarehousing_MultipleUseOfBankDetails]
	@endDate DATETIME
AS
BEGIN
	SET NOCOUNT ON;

	-- Strip the time component from @endDate (midnight of that day).
	DECLARE @endDateMorning DATETIME
	SET @endDateMorning = DATEADD(dd, DATEDIFF(dd, 0, @endDate), 0)

	-- Reporting window: the seven days up to @endDateMorning.
	DECLARE @startDate DATETIME
	SET @startDate = DATEADD(dd,-7,@endDateMorning)

	-- Lookback window for detecting re-used bank details: last six months.
	DECLARE @CurrentDate AS DATETIME
	DECLARE @Date6m AS DATETIME
	SET @CurrentDate = GETDATE()
	SET @Date6m = DATEADD(mm,-6,GETDATE())

	IF OBJECT_ID('tempdb..#bankDetails6M') IS NOT NULL
		DROP TABLE #bankDetails6M;

	-- IBAN/BIC pairs used on more than one live contract order in the
	-- last six months.
	CREATE TABLE #bankDetails6M
	(
		iban NVARCHAR(34),
		bic NVARCHAR(11),
		timesUsed INT,
		PRIMARY KEY([iban] desc,[bic])
	);

	INSERT INTO #bankDetails6M
	SELECT DISTINCT h3gi.iban, h3gi.bic, COUNT(*)
	FROM h3giOrderHeader h3gi WITH(NOLOCK)
	INNER JOIN b4nOrderHeader b4n WITH(NOLOCK) ON h3gi.orderRef = b4n.orderRef
	WHERE b4n.orderDate BETWEEN @Date6m AND @CurrentDate
		AND h3gi.orderType = 0		-- contract orders only
		AND h3gi.iban <> ''
		AND h3gi.bic <> ''
		AND h3gi.isTestOrder = 0
	GROUP BY h3gi.iban,h3gi.bic
	HAVING COUNT(*) > 1
	ORDER BY h3gi.iban

	-- Report rows: every order in the last week whose bank details appear
	-- in the multi-use set above.
	SELECT
		h3gi.accountNumber [Account Number],
		h3gi.sortCode [Sort Code],
		b4n.orderRef [Order Ref],
		CONVERT(VARCHAR(10),b4n.orderDate,103) AS [Order Date],
		-- Late-stage statuses (500-506 range) show the long explanation,
		-- everything else the short description.
		CASE WHEN b4n.status IN (500,501,502,505,506)
			THEN statusCode.b4nClassExplain
			ELSE statusCode.b4nClassDesc
		END AS [Order Status],
		channel.channelName [Channel],
		h3gi.retailerCode [Retailer Code],
		CASE h3gi.orderType
			WHEN 0 THEN 'Contract'
			WHEN 1 THEN 'Prepay'
			WHEN 2 THEN 'Contract Upgrade'
			WHEN 3 THEN 'Prepay Upgrade'
		END AS [Order Type],
		tariff.productName [Tariff],
		handset.productName [Handset],
		b4n.billingForename + ' '
			+ (CASE WHEN(LEN(h3gi.initials)>0) THEN h3gi.initials + ' ' ELSE '' END)
			+ b4n.billingSurname [Name],
		dbo.fn_FormatAddress(h3gi.billingAptNumber, h3gi.billingHouseNumber,
			h3gi.billingHouseName, b4n.billingAddr2, b4n.billingAddr3,
			b4n.billingCity, b4n.billingCounty, b4n.billingCountry,
			b4n.billingPostCode) [Address],
		-- BUGFIX: the column aliases were swapped in the original
		-- (iban was labelled [BIC] and bic was labelled [IBAN]).
		bankDetails.iban [IBAN],
		bankDetails.bic [BIC]
	FROM #bankDetails6M bankDetails
	INNER JOIN h3giOrderHeader h3gi WITH(NOLOCK)
		ON bankDetails.iban = h3gi.iban AND bankDetails.bic = h3gi.bic
	INNER JOIN b4nOrderHeader b4n WITH(NOLOCK) ON h3gi.orderRef = b4n.orderRef
	INNER JOIN b4nClassCodes statusCode WITH(NOLOCK)
		ON b4n.status = statusCode.b4nClassCode AND b4nClassSysID = 'StatusCode'
	INNER JOIN h3giChannel channel WITH(NOLOCK) ON h3gi.channelCode = channel.channelCode
	INNER JOIN h3giProductCatalogue tariff WITH(NOLOCK)
		ON h3gi.catalogueVersionId = tariff.catalogueVersionId
		AND tariff.peopleSoftId = h3gi.tariffProductCode
		AND tariff.productType = 'TARIFF'
	INNER JOIN h3giProductCatalogue AS handset WITH(NOLOCK)
		ON h3gi.catalogueVersionID = handset.catalogueVersionID
		AND CONVERT(varchar(20), handset.productFamilyId) = h3gi.phoneProductCode
		AND handset.productType = 'HANDSET'
	WHERE b4n.orderDate BETWEEN @startDate AND @endDateMorning
	ORDER BY bankDetails.iban, b4n.orderRef

	DROP TABLE #bankDetails6M
END

GRANT EXECUTE ON h3giDataWarehousing_MultipleUseOfBankDetails TO b4nuser
GO
<filename>Complementares/BDNSQL/Mini-Projeto-01/create_tables.sql
-- Oracle DDL for a patient-monitoring schema: PATIENT, DOCTOR, SENSOR,
-- CARETEAM (doctor<->sensor junction) and DATA (sensor readings), plus a
-- trigger that assigns DATA primary keys from the DATA_SEQ sequence.

-- Patients monitored by sensors.
-- NOTE(review): AGE is stored alongside BIRTHDATE and will drift over time;
-- consider deriving age from BIRTHDATE instead.
CREATE TABLE "SENSOR"."PATIENT"
  ( "ID" NUMBER NOT NULL ENABLE,
    "NAME" VARCHAR2(45 BYTE) NOT NULL ENABLE,
    "BIRTHDATE" DATE NOT NULL ENABLE,
    "AGE" NUMBER NOT NULL ENABLE,
    CONSTRAINT "PATIENT_PK" PRIMARY KEY ("ID")
    USING INDEX PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
    STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
    PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
    BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
    TABLESPACE "SENSORS_TABLES" ENABLE
  ) SEGMENT CREATION IMMEDIATE
  PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255
  NOCOMPRESS LOGGING
  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
  BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
  TABLESPACE "SENSORS_TABLES" ;

-- Doctors (NOME = Portuguese for "name"; inconsistent with PATIENT.NAME).
CREATE TABLE "SENSOR"."DOCTOR"
  ( "ID" NUMBER NOT NULL ENABLE,
    "NOME" VARCHAR2(50 BYTE) NOT NULL ENABLE,
    CONSTRAINT "DOCTOR_PK" PRIMARY KEY ("ID")
    USING INDEX PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
    STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
    PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
    BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
    TABLESPACE "SENSORS_TABLES" ENABLE
  ) SEGMENT CREATION IMMEDIATE
  PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255
  NOCOMPRESS LOGGING
  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
  BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
  TABLESPACE "SENSORS_TABLES" ;

-- A sensor assignment: device identity plus the admission (service/bed)
-- it is attached to; one row per sensor placed on a patient.
CREATE TABLE "SENSOR"."SENSOR"
  ( "NUMBER_OF_SENSORS" NUMBER NOT NULL ENABLE,
    "ID" NUMBER NOT NULL ENABLE,
    "NUM" NUMBER NOT NULL ENABLE,
    "TYPE" VARCHAR2(20 BYTE) NOT NULL ENABLE,
    "IDPATIENT" NUMBER NOT NULL ENABLE,
    "SERVICECOD" VARCHAR2(25 BYTE) NOT NULL ENABLE,
    "SERVICEDEC" VARCHAR2(50 BYTE) NOT NULL ENABLE,
    "ADMDATE" DATE NOT NULL ENABLE,
    "BED" VARCHAR2(25 BYTE) NOT NULL ENABLE,
    CONSTRAINT "SENSOR_PK"
    PRIMARY KEY ("ID")
    USING INDEX PCTFREE 10 INITRANS 2 MAXTRANS 255
    STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
    PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
    BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
    TABLESPACE "SENSORS_TABLES" ENABLE,
    CONSTRAINT "SENSOR_FK1" FOREIGN KEY ("IDPATIENT")
    REFERENCES "SENSOR"."PATIENT" ("ID") ENABLE
  ) SEGMENT CREATION IMMEDIATE
  PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255
  NOCOMPRESS LOGGING
  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
  BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
  TABLESPACE "SENSORS_TABLES" ;

-- Junction table: which doctors care for which sensor assignment.
CREATE TABLE "SENSOR"."CARETEAM"
  ( "IDSENSOR" NUMBER NOT NULL ENABLE,
    "IDDOCTOR" NUMBER NOT NULL ENABLE,
    CONSTRAINT "CARETEAM_PK" PRIMARY KEY ("IDDOCTOR", "IDSENSOR")
    USING INDEX PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
    STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
    PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
    BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
    TABLESPACE "SENSORS_TABLES" ENABLE,
    CONSTRAINT "CARETEAM_FK2" FOREIGN KEY ("IDDOCTOR")
    REFERENCES "SENSOR"."DOCTOR" ("ID") ENABLE,
    CONSTRAINT "CARETEAM_FK1" FOREIGN KEY ("IDSENSOR")
    REFERENCES "SENSOR"."SENSOR" ("ID") ENABLE
  ) SEGMENT CREATION IMMEDIATE
  PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255
  NOCOMPRESS LOGGING
  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
  BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
  TABLESPACE "SENSORS_TABLES" ;

-- Timestamped vital-sign readings coming from a sensor.
CREATE TABLE "SENSOR"."DATA"
  ( "IDDATA" NUMBER NOT NULL ENABLE,
    "IDSENSOR" NUMBER NOT NULL ENABLE,
    "BODYTEMP" NUMBER NOT NULL ENABLE,
    "BLOODPRESS_SYSTOLIC" NUMBER NOT NULL ENABLE,
    "BLOODPRESS_DIASTOLIC" NUMBER NOT NULL ENABLE,
    "BPM" NUMBER NOT NULL ENABLE,
    "SATO2" NUMBER NOT NULL ENABLE,
    "TIMESTAMP" TIMESTAMP (6) NOT NULL ENABLE,
    CONSTRAINT "DATA_PK" PRIMARY KEY ("IDDATA")
    USING INDEX PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
    STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
    PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
    BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
    TABLESPACE "SENSORS_TABLES" ENABLE,
    CONSTRAINT "DATA_FK1" FOREIGN KEY ("IDSENSOR")
    REFERENCES "SENSOR"."SENSOR" ("ID") ENABLE
  ) SEGMENT CREATION IMMEDIATE
  PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255
  NOCOMPRESS LOGGING
  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1
  BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
  TABLESPACE "SENSORS_TABLES" ;

-- Auto-assigns DATA.IDDATA from the DATA_SEQ sequence when no key is
-- supplied by the inserting statement (pre-12c identity-column emulation).
CREATE OR REPLACE EDITIONABLE TRIGGER "SENSOR"."DATA_TRG"
BEFORE INSERT ON DATA
FOR EACH ROW
BEGIN
  <<COLUMN_SEQUENCES>>
  BEGIN
    IF INSERTING AND :NEW.IDDATA IS NULL THEN
      SELECT DATA_SEQ.NEXTVAL INTO :NEW.IDDATA FROM SYS.DUAL;
    END IF;
  END COLUMN_SEQUENCES;
END;
/
ALTER TRIGGER "SENSOR"."DATA_TRG" ENABLE;
-- <reponame>richardswinbank/sprockit  (repository metadata marker, not SQL)
/*
 * sprockit.[ReportBatch]
 * Copyright (c) 2021 <NAME> (<EMAIL>)
 * http://richardswinbank.net/sprockit
 *
 * Batch details for monitoring dashboard.
 * Shows batches started in the last 14 days; a still-running batch
 * reports the current UTC time as its end time.
 */
CREATE VIEW [sprockit].[ReportBatch] AS

SELECT
  BatchId
, ProcessGroup
  -- Display name, e.g. "2021-01-01 12:00:00 - Group 1 - Batch 42".
  -- FIX: give every NVARCHAR cast an explicit length. A bare NVARCHAR in
  -- CAST/CONVERT silently defaults to 30 characters, a classic truncation
  -- trap; lengths chosen here comfortably hold the converted values, so
  -- output is unchanged.
, CONVERT(NVARCHAR(20), batch.[CreatedDateTime], 120)  -- style 120 = "yyyy-mm-dd hh:mi:ss"
    + ' - Group ' + CAST(batch.ProcessGroup AS NVARCHAR(12))
    + ' - Batch ' + CAST(batch.BatchId AS NVARCHAR(12)) AS BatchName
, [CreatedDateTime]
, COALESCE(EndDateTime, GETUTCDATE()) AS EndDateTime
FROM sprockit.Batch
WHERE DATEADD(DAY, 14, [CreatedDateTime]) > GETUTCDATE()
-- <filename>hasura/migrations_history/1611558872027_run_sql_migration/up.sql
-- Unified search feed: artworks by title, users by username, and distinct
-- tags (tags get a synthetic uuid because they have no id of their own).
CREATE OR REPLACE VIEW "public"."search" AS
  SELECT a.id, a.title as s, 'artwork' as type FROM artworks a
  UNION
  SELECT u.id, u.username as s, 'user' as type FROM users u
  UNION
  -- BUGFIX: the original wrote
  --   SELECT DISTINCT uuid_generate_v4() as id, t.tag ...
  -- which can never collapse duplicate tags, because every row receives a
  -- fresh uuid before DISTINCT is applied. Deduplicate the tag values
  -- first, then attach a synthetic id to each distinct tag.
  SELECT uuid_generate_v4() as id, dt.s, 'tag' as type
  FROM (SELECT DISTINCT t.tag as s FROM tags t) AS dt;
<reponame>smith750/kc
-- Rebuild the PROTOCOL_ATTACHMENT_GROUP reference data (Oracle SQL*Plus
-- script; each '/' executes the preceding statement). DROP STORAGE also
-- releases the table's allocated extents on truncate.
TRUNCATE TABLE PROTOCOL_ATTACHMENT_GROUP DROP STORAGE
/
INSERT INTO PROTOCOL_ATTACHMENT_GROUP (GROUP_CD,DESCRIPTION,UPDATE_USER,UPDATE_TIMESTAMP,OBJ_ID,VER_NBR)
  VALUES ('1','Protocol Attachments','admin',SYSDATE,SYS_GUID(),1)
/
INSERT INTO PROTOCOL_ATTACHMENT_GROUP (GROUP_CD,DESCRIPTION,UPDATE_USER,UPDATE_TIMESTAMP,OBJ_ID,VER_NBR)
  VALUES ('2','Personnel Attachments','admin',SYSDATE,SYS_GUID(),1)
/
-- Refresh the seven cosmetic "wings" items (ids 9710-9716, worn on the
-- 'hair' slot): delete any existing rows, then re-insert.
-- NOTE(review): INSERT without an explicit column list depends on the exact
-- column order of custom_armor; consider spelling out the columns so schema
-- changes fail loudly instead of shifting values.
delete from custom_armor where item_id in (9710,9711,9712,9713,9714,9715,9716);
INSERT ignore INTO `custom_armor` VALUES
('9710', 'Angel Wings', 'hair', 'false', 'none', '10', 'none', '0', '-1', '0', '0', '0', '0', '0', 'true', 'true', 'true', 'true', '0', '0'),
('9711', 'Fallen Angel Wings', 'hair', 'false', 'none', '10', 'none', '0', '-1', '0', '0', '0', '0', '0', 'true', 'true', 'true', 'true', '0', '0'),
('9712', 'Gargoyle Wings', 'hair', 'false', 'none', '10', 'none', '0', '-1', '0', '0', '0', '0', '0', 'true', 'true', 'true', 'true', '0', '0'),
('9713', 'Chaos Wings', 'hair', 'false', 'none', '10', 'none', '0', '-1', '0', '0', '0', '0', '0', 'true', 'true', 'true', 'true', '0', '0'),
('9714', 'Eva Wings', 'hair', 'false', 'none', '10', 'none', '0', '-1', '0', '0', '0', '0', '0', 'true', 'true', 'true', 'true', '0', '0'),
('9715', 'Netherworld Wings', 'hair', 'false', 'none', '10', 'none', '0', '-1', '0', '0', '0', '0', '0', 'true', 'true', 'true', 'true', '0', '0'),
('9716', 'Succubus Wings', 'hair', 'false', 'none', '10', 'none', '0', '-1', '0', '0', '0', '0', '0', 'true', 'true', 'true', 'true', '0', '0');
<filename>fao-mozfis-api/src/main/resources/db/migration/V001__create_tables_for_territory_and_initial_load.sql
-------------------------------------------------------------------------------------------------------------------------
-- Creating structure for PROVINCE
-- Territory hierarchy: province -> district -> administrative_post -> locality.
-------------------------------------------------------------------------------------------------------------------------
create table province(
	id bigint(20) primary key auto_increment,
	name varchar(20) not null,
	status integer(1) not null,
	constraint uq_province_name unique(name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

create table district(
	id bigint(20) primary key auto_increment,
	name varchar(20) not null,
	status integer(1) not null,
	province_id bigint(20),
	foreign key (province_id) references province(id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

create table administrative_post(
	id bigint(20) primary key auto_increment,
	name varchar(20) not null,
	status integer(1) not null,
	district_id bigint(20),
	foreign key (district_id) references district(id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

create table locality(
	id bigint(20) primary key auto_increment,
	name varchar(20) not null,
	status integer(1) not null,
	administrative_post_id bigint(20),
	foreign key (administrative_post_id) references administrative_post(id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-------------------------------------------------------------------------------------------------------------------------
-- Inserting initial DATA
-- One row per province of Mozambique, plus one capital district /
-- administrative post / locality under each.
-- NOTE(review): province/district/post/locality id 1 is named 'Central' --
-- presumably a placeholder or head-office entry; confirm with the domain team.
-------------------------------------------------------------------------------------------------------------------------
insert into province (id, name, status) values (1, 'Central', 1);
insert into province (id, name, status) values (2, 'Cabo Delgado', 1);
insert into province (id, name, status) values (3, 'Niassa', 1);
insert into province (id, name, status) values (4, 'Nampula', 1);
insert into province (id, name, status) values (5, 'Zambézia', 1);
insert into province (id, name, status) values (6, 'Tete', 1);
insert into province (id, name, status) values (7, 'Sofala', 1);
insert into province (id, name, status) values (8, 'Manica', 1);
insert into province (id, name, status) values (9, 'Inhambane', 1);
insert into province (id, name, status) values (10, 'Gaza', 1);
insert into province (id, name, status) values (11, 'Maputo Cidade', 1);
insert into province (id, name, status) values (12,'Maputo Província', 1);

insert into district (id, province_id, name, status) values (1, 1, 'Central', 1);
insert into district (id, province_id, name, status) values (2, 2, 'Pemba', 1);
insert into district (id, province_id, name, status) values (3, 3, 'Lichinga', 1);
insert into district (id, province_id, name, status) values (4, 4, 'Nampula', 1);
insert into district (id, province_id, name, status) values (5, 5, 'Quelimane', 1);
insert into district (id, province_id, name, status) values (6, 6, 'Tete', 1);
insert into district (id, province_id, name, status) values (7, 7, 'Beira', 1);
insert into district (id, province_id, name, status) values (8, 8, 'Chimoio', 1);
insert into district (id, province_id, name, status) values (9, 9, 'Inhambane', 1);
insert into district (id, province_id, name, status) values (10, 10, 'Xai-xai', 1);
insert into district (id, province_id, name, status) values (11, 11, 'Cidade Maputo', 1);
insert into district (id, province_id, name, status) values (12, 12, 'Matola', 1);

insert into administrative_post (id, district_id, name, status) values (1, 1, 'Central', 1);
insert into administrative_post (id, district_id, name, status) values (2, 2, 'Cidade de Pemba', 1);
insert into administrative_post (id, district_id, name, status) values (3, 3, 'Cidade de Lichinga', 1);
insert into administrative_post (id, district_id, name, status) values (4, 4, 'Cidade de Nampula', 1);
insert into administrative_post (id, district_id, name, status) values (5, 5, 'Cidade de Quelimane', 1);
insert into administrative_post (id, district_id, name, status) values (6, 6, 'Cidade de Tete', 1);
insert into administrative_post (id, district_id, name, status) values (7, 7, 'Cidade da Beira', 1);
insert into administrative_post (id, district_id, name, status) values (8, 8, 'Cidade de Chimoio', 1);
insert into administrative_post (id, district_id, name, status) values (9, 9, 'Cidade de Inhambane', 1);
insert into administrative_post (id, district_id, name, status) values (10, 10, 'Cidade de Xai-xai', 1);
insert into administrative_post (id, district_id, name, status) values (11, 11, 'Cidade de Maputo', 1);
insert into administrative_post (id, district_id, name, status) values (12, 12, 'Cidade da Matola', 1);

insert into locality (id, administrative_post_id, name, status) values (1, 1, 'Central', 1);
insert into locality (id, administrative_post_id, name, status) values (2, 2, 'Cidade de Pemba', 1);
insert into locality (id, administrative_post_id, name, status) values (3, 3, 'Cidade de Lichinga', 1);
insert into locality (id, administrative_post_id, name, status) values (4, 4, 'Cidade de Nampula', 1);
insert into locality (id, administrative_post_id, name, status) values (5, 5, 'Cidade de Quelimane', 1);
insert into locality (id, administrative_post_id, name, status) values (6, 6, 'Cidade de Tete', 1);
insert into locality (id, administrative_post_id, name, status) values (7, 7, 'Cidade da Beira', 1);
insert into locality (id, administrative_post_id, name, status) values (8, 8, 'Cidade de Chimoio', 1);
insert into locality (id, administrative_post_id, name, status) values (9, 9, 'Cidade de Inhambane', 1);
insert into locality (id, administrative_post_id, name, status) values (10, 10, 'Cidade de Xai-xai', 1);
insert into locality (id, administrative_post_id, name, status) values (11, 11, 'Cidade de Maputo', 1);
insert into locality (id, administrative_post_id, name, status) values (12, 12, 'Cidade da Matola', 1);
-- Scratch queries for exploring the Oracle data dictionary (ALL_* views)
-- for schema 'COM'; commented-out predicates are alternative probes kept
-- for re-use while investigating table/column/constraint metadata.
SELECT /* select */
       A.*
  FROM ALL_OBJECTS A
 WHERE 1 = 1
   AND A.OWNER = 'SYS'
   -- AND A.OBJECT_NAME LIKE 'ALL_TA%'
   -- AND A.OBJECT_NAME LIKE 'ALL_%COMMENTS%'
   -- AND A.OBJECT_NAME LIKE 'ALL_%PK%'
   AND A.OBJECT_NAME LIKE 'ALL_%CONS%'
 ORDER BY A.OBJECT_NAME
;

-- Tables, table comments, columns and column comments owned by COM.
SELECT * FROM ALL_TABLES A WHERE A.OWNER = 'COM' ORDER BY A.OWNER, A.TABLE_NAME;
SELECT * FROM ALL_TAB_COMMENTS A WHERE A.OWNER = 'COM' ORDER BY A.OWNER, A.TABLE_NAME;
SELECT * FROM ALL_TAB_COLS A WHERE A.OWNER = 'COM' ORDER BY A.OWNER, A.TABLE_NAME, A.COLUMN_ID;
SELECT * FROM ALL_COL_COMMENTS A WHERE A.OWNER = 'COM' ORDER BY A.OWNER, A.TABLE_NAME;
--SELECT * FROM ALL_REPKEY_COLUMNS;

-- Primary-key constraint of a specific table.
SELECT * FROM ALL_CONSTRAINTS A
 WHERE A.OWNER = 'COM'
   AND A.CONSTRAINT_TYPE = 'P'
   AND A.TABLE_NAME = 'COMTCADMINISTCODERECPTNLOG';

-- Constraint columns; the commented filters narrow to one constraint/column.
SELECT * FROM ALL_CONS_COLUMNS A
 WHERE A.OWNER = 'COM'
   --AND A.CONSTRAINT_NAME = 'COMTCADMINISTCODERECPTNLOG_PK'
   --AND A.TABLE_NAME = 'COMTCADMINISTCODERECPTNLOG'
   AND A.TABLE_NAME = 'COMTCCMMNDETAILCODE'
   --AND A.COLUMN_NAME = 'OCCRRNC_DE'
   --AND A.POSITION IS NOT NULL
;
--SELECT * FROM ALL_CONS_OBJ_COLUMNS;
--SELECT * FROM ALL_STREAMS_MESSAGE_CONSUMERS;
-- <filename>misc/logging/src/apache-views.sql  (repository metadata marker)
-- Per-agent hit counts, busiest agents first.
CREATE OR REPLACE VIEW apache_agents AS
  SELECT agent,COUNT(*)
  FROM apache_log
  GROUP BY agent
  ORDER BY count(*) DESC;

-- "Countable" page views: GET requests, excluding asset/robot paths, hits
-- from known hosting/search sources, and known crawler user agents.
CREATE OR REPLACE VIEW apache_countable AS
  SELECT src_bin,A.*
  FROM apache_log A
  JOIN src_bin B on A.src=B.src
  WHERE req ~ '^GET '
    AND req !~ '/(av/|js/|robots|styles/|tmp/)'
    -- BUGFIX: the original src_bin pattern ended in '|', which adds an empty
    -- alternative that matches EVERY string; "src_bin !~ '...|'" was therefore
    -- always false and the view returned no rows.
    AND src_bin !~ 'gene\.com|host\d+\.hostmonster\.com|search\.msn\.com'
    -- Crawler/bot user-agent blocklist (kept verbatim; note 'NaverBot/' is
    -- listed twice, which is harmless).
    AND agent !~ 'AISearchBot|Baiduspider|<EMAIL>|CCBot/|CazoodleBot/|discobot/|DotBot/|Exabot/|Exabot-Thumbnails|Gaisbot/|Gigabot/|Googlebot|GurujiBot/|LijitSpider/|MJ12bot/|NaverBot/|NaverBot/|OOZBOT/|Plonebot/|Semager/|SnapPreviewBot|Sogou develop spider|Sogou web spider/|Sosoimagespider|Speedy Spider|SurveyBot/|TurnitinBot/|Twiceler|Yahoo! Slurp|Yanga WorldSearch|YebolBot|librabot|msnbot|; obot|psbot/|robotgenius|YodaoBot/|YodaoBot-Image/|YoudaoBot/';
/*
 Navicat MySQL Data Transfer

 Source Server         : localhost
 Source Server Type    : MySQL
 Source Server Version : 50725
 Source Host           : localhost:3306
 Source Schema         : meeting_rest

 Target Server Type    : MySQL
 Target Server Version : 50725
 File Encoding         : 65001

 Date: 01/08/2019 17:31:43
*/

SET NAMES utf8mb4;
SET FOREIGN_KEY_CHECKS = 0;

-- ----------------------------
-- Table structure for actor_t
-- Lookup table of actors: name + relative path of the headshot image.
-- (Column COMMENT literals are kept in the source language; they are data,
-- not SQL comments.)
-- ----------------------------
DROP TABLE IF EXISTS `actor_t`;
CREATE TABLE `actor_t` (
  `UUID` int(11) NOT NULL AUTO_INCREMENT COMMENT '主键编号',
  `actor_name` varchar(50) DEFAULT NULL COMMENT '演员名称',
  `actor_img` varchar(200) DEFAULT NULL COMMENT '演员图片位置',
  PRIMARY KEY (`UUID`)
) ENGINE=InnoDB AUTO_INCREMENT=11 DEFAULT CHARSET=utf8 ROW_FORMAT=DYNAMIC COMMENT='演员表';

-- ----------------------------
-- Records of actor_t
-- ----------------------------
BEGIN;
INSERT INTO `actor_t` VALUES (1, '徐峥', 'actors/2b98c9d2e6d23a7eff25dcac8b584b0136045.jpg');
INSERT INTO `actor_t` VALUES (2, '王传君', 'actors/b782d497577baffb5ed14de52841dcb164365.jpg');
INSERT INTO `actor_t` VALUES (3, '谭卓', 'actors/acf7db57456cb1aed1a42f7ebffedaa842002.jpg');
INSERT INTO `actor_t` VALUES (4, '黄渤', 'actors/c6594ef2705dcaf7d9df857d228b5e1645712.jpg');
INSERT INTO `actor_t` VALUES (5, '舒淇', 'actors/6b32a489467283bb739a2bac3b2b929742175.jpg');
INSERT INTO `actor_t` VALUES (6, '张艺兴', 'actors/b738d5e78a1f5c3379d9d42a9b18286f32246.jpeg');
INSERT INTO `actor_t` VALUES (7, '强森', 'actors/7e3067d066c1e285b0cc17bfd5f1b34e108474.jpg');
INSERT INTO `actor_t` VALUES (8, '杰森·斯坦森', 'actors/7ec0c90aec03c7904c1db3af1153162f77864.jpg');
INSERT INTO `actor_t` VALUES (9, '李冰冰', 'actors/d2258cd0529950cf5099206519d91d0e51803.jpg');
INSERT INTO `actor_t` VALUES (10, '汤姆·克鲁斯', 'actors/6afaea1cb4ca2b346e86e265347c78b833970.jpg');
COMMIT;

SET FOREIGN_KEY_CHECKS = 1;
-- SPDX-License-Identifier: Apache-2.0
-- Licensed to the Ed-Fi Alliance under one or more agreements.
-- The Ed-Fi Alliance licenses this file to you under the Apache License, Version 2.0.
-- See the LICENSE and NOTICES files in the project root for more information.

-- Idempotent creation of a ChangeVersion index on every tpdm table, to
-- support change-query (delta) reads. Index names embed a hash of the table
-- name to stay within identifier length limits.
CREATE INDEX IF NOT EXISTS UX_91a31b_ChangeVersion ON tpdm.AnonymizedStudent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_a5aeb2_ChangeVersion ON tpdm.AnonymizedStudentAcademicRecord(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_e4eb73_ChangeVersion ON tpdm.AnonymizedStudentAssessment(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_e6ba6c_ChangeVersion ON tpdm.AnonymizedStudentAssessmentCourseAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_64d5d3_ChangeVersion ON tpdm.AnonymizedStudentAssessmentSectionAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_2abb16_ChangeVersion ON tpdm.AnonymizedStudentCourseAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_d194a8_ChangeVersion ON tpdm.AnonymizedStudentCourseTranscript(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_7f59f4_ChangeVersion ON tpdm.AnonymizedStudentEducationOrganizationAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_562e9d_ChangeVersion ON tpdm.AnonymizedStudentSectionAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_0a1ce1_ChangeVersion ON tpdm.Applicant(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_57cdba_ChangeVersion ON tpdm.ApplicantProspectAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_e7ad52_ChangeVersion ON tpdm.Application(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_143de6_ChangeVersion ON tpdm.ApplicationEvent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_447e8f_ChangeVersion ON tpdm.CompleterAsStaffAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_055fb2_ChangeVersion ON tpdm.CourseCourseTranscriptFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_6cdc01_ChangeVersion ON tpdm.CourseStudentAcademicRecordFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_157c1d_ChangeVersion ON tpdm.CourseStudentAssessmentFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_81ddd0_ChangeVersion ON tpdm.CourseStudentFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_092fc4_ChangeVersion ON tpdm.EducationOrganizationCourseTranscriptFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_f4fab0_ChangeVersion ON tpdm.EducationOrganizationFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_347c34_ChangeVersion ON tpdm.EducationOrganizationStudentAcademicRecordFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_7c74f1_ChangeVersion ON tpdm.EducationOrganizationStudentAssessmentFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_eedec2_ChangeVersion ON tpdm.EducationOrganizationStudentFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_786774_ChangeVersion ON tpdm.EmploymentEvent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_f51cef_ChangeVersion ON tpdm.EmploymentSeparationEvent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_e809b0_ChangeVersion ON tpdm.OpenStaffPositionEvent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_97e755_ChangeVersion ON tpdm.PerformanceMeasure(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_e21917_ChangeVersion ON tpdm.PerformanceMeasureCourseAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_be96e7_ChangeVersion ON tpdm.PerformanceMeasureFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_8c4ca1_ChangeVersion ON tpdm.ProfessionalDevelopmentEvent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_f84f61_ChangeVersion ON tpdm.Prospect(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_dc8233_ChangeVersion ON tpdm.ProspectProfessionalDevelopmentEventAttendance(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_6232e8_ChangeVersion ON tpdm.RecruitmentEvent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_f13a78_ChangeVersion ON tpdm.Rubric(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_6ab97b_ChangeVersion ON tpdm.RubricLevel(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_c8ffc2_ChangeVersion ON tpdm.RubricLevelResponse(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_98f7aa_ChangeVersion ON tpdm.RubricLevelResponseFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_51c6b9_ChangeVersion ON tpdm.SectionCourseTranscriptFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_f7814e_ChangeVersion ON tpdm.SectionStudentAcademicRecordFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_aeccab_ChangeVersion ON tpdm.SectionStudentAssessmentFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_5b8d40_ChangeVersion ON tpdm.SectionStudentFacts(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_11e466_ChangeVersion ON tpdm.StaffApplicantAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_5f5473_ChangeVersion ON tpdm.StaffEvaluation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_60e7ab_ChangeVersion ON tpdm.StaffEvaluationComponent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_6b5a36_ChangeVersion ON tpdm.StaffEvaluationComponentRating(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_9e0757_ChangeVersion ON tpdm.StaffEvaluationElement(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_e94b2b_ChangeVersion ON tpdm.StaffEvaluationElementRating(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_e34e81_ChangeVersion ON tpdm.StaffEvaluationRating(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_420d2e_ChangeVersion ON tpdm.StaffFieldworkAbsenceEvent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_fe2944_ChangeVersion ON tpdm.StaffFieldworkExperience(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_aa4534_ChangeVersion ON tpdm.StaffFieldworkExperienceSectionAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_b4a1e0_ChangeVersion ON tpdm.StaffProfessionalDevelopmentEventAttendance(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_990b71_ChangeVersion ON tpdm.StaffProspectAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_609983_ChangeVersion ON tpdm.StaffStudentGrowthMeasure(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_f22014_ChangeVersion ON tpdm.StaffStudentGrowthMeasureCourseAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_120788_ChangeVersion ON tpdm.StaffStudentGrowthMeasureEducationOrganizationAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_fbfeb4_ChangeVersion ON tpdm.StaffStudentGrowthMeasureSectionAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_7bf40b_ChangeVersion ON tpdm.StaffTeacherPreparationProviderAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_5bac62_ChangeVersion ON tpdm.StaffTeacherPreparationProviderProgramAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_c4d5a9_ChangeVersion ON tpdm.TalentManagementGoal(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_835b49_ChangeVersion ON tpdm.TeacherCandidate(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_163dde_ChangeVersion ON tpdm.TeacherCandidateAcademicRecord(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_87fd83_ChangeVersion ON tpdm.TeacherCandidateCourseTranscript(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_30d301_ChangeVersion ON tpdm.TeacherCandidateFieldworkAbsenceEvent(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_fe5770_ChangeVersion ON tpdm.TeacherCandidateFieldworkExperience(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_1a948e_ChangeVersion ON tpdm.TeacherCandidateFieldworkExperienceSectionAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_8c39c1_ChangeVersion ON tpdm.TeacherCandidateProfessionalDevelopmentEventAttendance(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_3395e5_ChangeVersion ON tpdm.TeacherCandidateStaffAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_464a58_ChangeVersion ON tpdm.TeacherCandidateStudentGrowthMeasure(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_512fab_ChangeVersion ON tpdm.TeacherCandidateStudentGrowthMeasureCourseAssociation(ChangeVersion);
-- Table name below is itself hash-truncated by the schema generator.
CREATE INDEX IF NOT EXISTS UX_22b9a4_ChangeVersion ON tpdm.TeacherCandidateStudentGrowthMeasureEducationOrganizatio_22b9a4(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_b8b1b0_ChangeVersion ON tpdm.TeacherCandidateStudentGrowthMeasureSectionAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_0dff08_ChangeVersion ON tpdm.TeacherCandidateTeacherPreparationProviderAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_81475b_ChangeVersion ON tpdm.TeacherCandidateTeacherPreparationProviderProgramAssociation(ChangeVersion);
CREATE INDEX IF NOT EXISTS UX_aceeb9_ChangeVersion ON tpdm.TeacherPreparationProviderProgram(ChangeVersion);
----------------------------------------------------------------
-- [all_disks] View
--
-- plpgsql accessor functions over the all_disks view.

-- All disks, unfiltered.
Create or replace FUNCTION GetAllFromDisks() RETURNS SETOF all_disks
   AS $procedure$
BEGIN
   RETURN QUERY SELECT *
   FROM all_disks;
END; $procedure$
LANGUAGE plpgsql;

-- All images of a single disk.
-- NOTE(review): matches on image_group_id, i.e. v_disk_id is the disk's
-- image-group id, not an image id.
Create or replace FUNCTION GetDiskByDiskId(v_disk_id UUID) RETURNS SETOF all_disks
   AS $procedure$
BEGIN
   RETURN QUERY SELECT *
   FROM all_disks
   WHERE image_group_id = v_disk_id;
END; $procedure$
LANGUAGE plpgsql;

-- Disks attached to a VM; when v_is_filtered, restrict to disks the user
-- has permission to see (user_disk_permissions_view).
Create or replace FUNCTION GetDisksVmGuid(v_vm_guid UUID, v_user_id UUID, v_is_filtered BOOLEAN) RETURNS SETOF all_disks
   AS $procedure$
BEGIN
   RETURN QUERY SELECT *
   FROM all_disks
   WHERE vm_guid = v_vm_guid
   AND (NOT v_is_filtered OR EXISTS (SELECT 1
                                     FROM   user_disk_permissions_view
                                     WHERE  user_id = v_user_id AND entity_id = all_disks.disk_id));
END; $procedure$
LANGUAGE plpgsql;

-- Returns all the attachable disks in the storage pool
-- If storage pool is ommited, all the attachable disks are retrurned.
-- in case vm id is provided, returning all the disks in SP that are not attached to the vm
-- (a disk is attachable if it is unattached, or shareable)
Create or replace FUNCTION GetAllAttachableDisksByPoolId(v_storage_pool_id UUID, v_vm_id uuid, v_user_id UUID, v_is_filtered BOOLEAN) RETURNS SETOF all_disks
   AS $procedure$
BEGIN
   RETURN QUERY SELECT all_disks.*
   FROM all_disks
   WHERE (v_storage_pool_id IS NULL OR all_disks.storage_pool_id = v_storage_pool_id)
   AND (all_disks.vm_guid IS NULL OR all_disks.shareable)
   AND (v_vm_id IS NULL OR all_disks.vm_guid IS NULL OR v_vm_id != all_disks.vm_guid)
   AND (NOT v_is_filtered OR EXISTS (SELECT 1
                                     FROM   user_disk_permissions_view
                                     WHERE  user_id = v_user_id AND entity_id = disk_id));
END; $procedure$
LANGUAGE plpgsql;
-- phpMyAdmin SQL Dump
-- version 4.7.0-beta1
-- https://www.phpmyadmin.net/
--
-- Host: localhost
-- Generation Time: 2017-09-20 06:53:51
-- Server version: 5.5.53
-- PHP Version: 5.5.38

SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;

--
-- Database: `bdm260603580_db`
--

-- --------------------------------------------------------

--
-- Table structure for table `gz_bills` (bookkeeping entries)
--
-- Columns (translating the original Chinese column comments): user_id = user
-- id; type_id = entry-type id (1 = monthly revenue, 2 = wage expense);
-- money = amount. Expenses are recorded as negative amounts in the data below.
-- NOTE(review): type_id is a VARCHAR referencing gz_category.id, with no FK
-- constraint declared.

CREATE TABLE `gz_bills` (
  `id` int(11) NOT NULL,
  `user_id` int(11) NOT NULL DEFAULT '0' COMMENT '用户id',
  `type_id` varchar(10) NOT NULL DEFAULT '0' COMMENT '记账类型id 1月营业额 2支出工资',
  `money` decimal(10,2) NOT NULL DEFAULT '0.00' COMMENT '金额',
  `create_at` timestamp NULL DEFAULT NULL,
  `update_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='记账表';

--
-- Dumping data for table `gz_bills`
--

INSERT INTO `gz_bills` (`id`, `user_id`, `type_id`, `money`, `create_at`, `update_at`) VALUES
(1, 1, '1', '10000.00', NULL, NULL),
(2, 1, '2', '-8000.00', NULL, NULL),
(3, 2, '2', '-3000.00', NULL, NULL),
(4, 1, '1', '222.00', NULL, NULL),
(5, 1, '1', '222.00', NULL, NULL),
(6, 1, '1', '1.00', NULL, NULL),
(7, 1, '1', '22.00', '2017-09-19 16:00:00', NULL),
(8, 1, '3', '1000.00', '2017-09-20 03:48:22', '2017-09-20 03:48:22'),
(9, 2, '3', '100.00', '2017-09-20 05:51:08', '2017-09-20 05:51:08'),
(10, 3, '1', '100.00', '2017-09-20 05:55:51', '2017-09-20 05:55:51'),
(11, 3, '3', '400.00', '2017-09-20 05:56:04', '2017-09-20 05:56:04');

-- --------------------------------------------------------

--
-- Table structure for table `gz_category` (bookkeeping entry categories)
--

CREATE TABLE `gz_category` (
  `id` int(11) NOT NULL,
  `typename` varchar(20) NOT NULL DEFAULT '0' COMMENT '类型名称',
  `create_at` timestamp NULL DEFAULT NULL,
  `update_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='记账分类表';

--
-- Dumping data for table `gz_category`
--

INSERT INTO `gz_category`
(`id`, `typename`, `create_at`, `update_at`) VALUES
(1, '月营业额', '2017-09-19 16:00:00', NULL),
(2, '支出工资', '2017-09-19 16:00:00', NULL),
(3, '杂货钱', '2017-09-19 16:00:00', NULL);

-- --------------------------------------------------------

--
-- Table structure for table `gz_users`
--
-- NOTE(review): passwords are stored in plain text in a varchar(10) column;
-- the seed data below uses '123456' for every account. Hash passwords (and
-- widen the column) before any production use.

CREATE TABLE `gz_users` (
  `id` int(11) NOT NULL,
  `username` varchar(10) NOT NULL DEFAULT '' COMMENT '用户名',
  `password` varchar(10) NOT NULL DEFAULT '' COMMENT '密码',
  `nickname` varchar(20) NOT NULL DEFAULT '' COMMENT '昵称',
  `create_at` timestamp NULL DEFAULT NULL,
  `update_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='用户表';

--
-- Dumping data for table `gz_users`
--

INSERT INTO `gz_users` (`id`, `username`, `password`, `nickname`, `create_at`, `update_at`) VALUES
(1, 'admin', '123456', '江河', '2017-09-18 16:00:00', '2017-09-18 16:00:00'),
(2, 'yuangong1', '123456', '舒婷', '2017-09-18 16:00:00', '2017-09-18 16:00:00'),
(3, 'yuangong2', '123456', '三大姑', '2017-09-18 16:00:00', '2017-09-18 16:00:00');

--
-- Indexes for dumped tables
--

--
-- Indexes for table `gz_bills`
--
ALTER TABLE `gz_bills`
  ADD PRIMARY KEY (`id`);

--
-- Indexes for table `gz_category`
--
ALTER TABLE `gz_category`
  ADD PRIMARY KEY (`id`);

--
-- Indexes for table `gz_users`
--
ALTER TABLE `gz_users`
  ADD PRIMARY KEY (`id`);

--
-- AUTO_INCREMENT for dumped tables
--

--
-- AUTO_INCREMENT for table `gz_bills`
--
ALTER TABLE `gz_bills`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=12;

--
-- AUTO_INCREMENT for table `gz_category`
--
ALTER TABLE `gz_category`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;

--
-- AUTO_INCREMENT for table `gz_users`
--
ALTER TABLE `gz_users`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;

/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- Rebuild the quotes table from scratch and load its placeholder seed data.
DROP TABLE IF EXISTS quotes;

CREATE TABLE quotes (
    id int(11) NOT NULL AUTO_INCREMENT,
    quote VARCHAR(600),
    PRIMARY KEY (id),
    UNIQUE KEY quote (quote)
);

-- Seed the eight placeholder quotes in a single multi-row insert; the
-- resulting rows are identical to inserting them one statement at a time.
INSERT INTO quotes (id, quote) VALUES
    (1, 'quote 1'),
    (2, 'quote 2'),
    (3, 'quote 3'),
    (4, 'quote 4'),
    (5, 'quote 5'),
    (6, 'quote 6'),
    (7, 'quote 7'),
    (8, 'quote 8');
-- phpMyAdmin SQL Dump
-- version 5.0.2
-- https://www.phpmyadmin.net/
--
-- Host: localhost
-- Generation Time: Jun 12, 2020 at 08:02 AM
-- Server version: 10.4.11-MariaDB
-- PHP Version: 7.4.6

SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;

--
-- Database: `careforu`
--

-- --------------------------------------------------------

--
-- Table structure for table `buydata`
--
-- Medicine purchase batches. buy_med_id references medicines.id (no FK
-- constraint declared). NOTE(review): remain_qty appears to be the units left
-- unsold from this batch -- confirm against the application code.

CREATE TABLE `buydata` (
  `buy_id` int(11) NOT NULL,
  `buy_med_id` int(11) NOT NULL,
  `buy_date` datetime NOT NULL,
  `exp_date` datetime NOT NULL,
  `buy_qty` int(11) NOT NULL,
  `remain_qty` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `buydata`
--

INSERT INTO `buydata` (`buy_id`, `buy_med_id`, `buy_date`, `exp_date`, `buy_qty`, `remain_qty`) VALUES
(1, 1, '2019-06-07 12:39:40', '2020-06-15 00:00:00', 30, 15),
(2, 2, '2020-03-01 12:39:40', '2020-06-15 00:00:00', 20, 0),
(4, 2, '2020-06-10 05:42:10', '2020-06-15 00:00:00', 10, 5),
(5, 4, '2020-06-10 06:07:33', '2020-08-10 00:00:00', 5, 2),
(6, 4, '2020-06-11 04:38:42', '2020-10-08 00:00:00', 1, 1),
(7, 6, '2020-06-10 04:55:52', '2020-12-10 00:00:00', 5, 2),
(8, 3, '2020-06-11 04:58:48', '2021-06-11 00:00:00', 10, 10),
(9, 6, '2020-06-11 05:00:21', '2021-11-11 00:00:00', 15, 15);

-- --------------------------------------------------------

--
-- Table structure for table `medicines`
--
-- NOTE(review): or_price/sell_price are plain integers -- presumably amounts
-- in the smallest currency unit; verify units against the UI. del_status = 1
-- marks a soft-deleted row (rows 5 and 7 in the data below).

CREATE TABLE `medicines` (
  `id` int(11) NOT NULL,
  `name` varchar(255) NOT NULL,
  `medname` varchar(255) NOT NULL,
  `add_date` datetime NOT NULL,
  `sell_point` varchar(255) NOT NULL,
  `or_price` int(11) NOT NULL,
  `sell_price` int(11) NOT NULL,
  `del_status` tinyint(4) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `medicines`
--

INSERT INTO `medicines` (`id`, `name`, `medname`, `add_date`, `sell_point`, `or_price`, `sell_price`, `del_status`) VALUES
(1, 'Paracap', 'Paracetamol', '2020-06-08 12:11:54', 'For curing fever', 2800, 3000, 0),
(2, 'Biogesic', 'Paracetamol', '2020-06-08 12:14:11', 'For curing fever', 1300, 1500, 0),
(3, 'Neoplast', 'Tan Plastic Bandage', '2020-06-08 12:16:15', 'For recovering cuts and bruises', 1800, 2000, 0),
(4, 'Meditone', 'Multivitamin tablets', '2020-06-08 12:19:22', 'For revitalizing', 4700, 5000, 0),
(5, 'testbrand', 'testmad', '2020-06-07 09:31:31', 'testpoint', 8000, 8500, 1),
(6, 'Enervon-C', 'Vitamin C Tablets', '2020-06-08 12:25:00', 'For revitalizing Vitamin C', 3300, 3500, 0),
(7, 'test2brand', 'test2med', '2020-06-05 14:38:11', 'testsellpoint', 1000, 1200, 1);

-- --------------------------------------------------------

--
-- Table structure for table `patiencedatas`
--
-- Per-visit patient measurements; pt_p_id references patiences.p_id (no FK
-- constraint declared).

CREATE TABLE `patiencedatas` (
  `pt_id` int(11) NOT NULL,
  `pt_p_id` int(11) NOT NULL,
  `pt_date` datetime NOT NULL,
  `pt_bloodtest` varchar(255) NOT NULL,
  `pt_urinetest` varchar(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `patiencedatas`
--

INSERT INTO `patiencedatas` (`pt_id`, `pt_p_id`, `pt_date`, `pt_bloodtest`, `pt_urinetest`) VALUES
(1, 1, '2020-06-08 01:48:12', '10', '12'),
(2, 2, '2020-06-09 01:48:50', '13', '14'),
(3, 3, '2020-06-10 01:49:12', '16', '17'),
(4, 1, '2020-06-11 01:49:58', '9', '10');

-- --------------------------------------------------------

--
-- Table structure for table `patiences`
--
-- Patient master data; p_nrc (national registration card) is enforced unique
-- by the index added further down.

CREATE TABLE `patiences` (
  `p_id` int(11) NOT NULL,
  `p_name` varchar(255) NOT NULL,
  `p_father` varchar(255) NOT NULL,
  `p_age` int(11) NOT NULL,
  `p_nrc` varchar(255) NOT NULL,
  `p_phone` varchar(255) NOT NULL,
  `p_address` text NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `patiences`
--

INSERT INTO `patiences` (`p_id`, `p_name`, `p_father`, `p_age`, `p_nrc`, `p_phone`, `p_address`) VALUES
(1, '<NAME>', 'U Aung', 25, '12/kmd(N)12345', '091234567', 'Kyimyindine'),
(2, '<NAME>', 'U Kyaw', 18, '12/LMD(N)54321', '097654321', 'Lanmadaw'),
(3, '<NAME>', '<NAME>', 30, '12/KMY(N)32154', '094321765', 'Kamayut');

-- --------------------------------------------------------

--
-- Table structure for table `selldata`
--
-- Sales; sell_buy_id links each sale back to the purchase batch (buydata row)
-- it was drawn from.

CREATE TABLE `selldata` (
  `sell_id` int(11) NOT NULL,
  `sell_med_id` int(11) NOT NULL,
  `sell_buy_id` int(11) NOT NULL,
  `sell_date` datetime NOT NULL,
  `sell_qty` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `selldata`
--

INSERT INTO `selldata` (`sell_id`, `sell_med_id`, `sell_buy_id`, `sell_date`, `sell_qty`) VALUES
(1, 1, 1, '2020-06-08 12:56:23', 5),
(2, 1, 1, '2020-06-08 12:58:02', 10),
(4, 2, 2, '2020-06-10 12:32:24', 20),
(5, 4, 5, '2020-06-11 04:33:42', 3),
(7, 6, 7, '2020-06-11 05:10:59', 3),
(8, 2, 4, '2020-06-11 05:17:32', 5);

-- --------------------------------------------------------

--
-- Table structure for table `stocks`
--
-- Current stock snapshot per medicine and expiry date.

CREATE TABLE `stocks` (
  `stocks_id` int(11) NOT NULL,
  `stocks_med_id` int(11) NOT NULL,
  `stocks_exp_date` datetime NOT NULL,
  `qty` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `stocks`
--

INSERT INTO `stocks` (`stocks_id`, `stocks_med_id`, `stocks_exp_date`, `qty`) VALUES
(1, 1, '2020-06-15 00:00:00', 15),
(2, 2, '2020-06-15 00:00:00', 5),
(3, 4, '2020-08-10 00:00:00', 2),
(4, 4, '2020-10-08 00:00:00', 1),
(5, 6, '2020-12-10 00:00:00', 2),
(6, 3, '2021-06-11 00:00:00', 10),
(7, 6, '2021-11-11 00:00:00', 15);

-- --------------------------------------------------------

--
-- Table structure for table `user`
--
-- Application accounts; u_role selects the admin area, u_ban_status = 1 bans
-- the account.

CREATE TABLE `user` (
  `u_id` int(11) NOT NULL,
  `u_name` varchar(255) NOT NULL,
  `u_pass` varchar(255) NOT NULL,
  `u_role` varchar(255) NOT NULL,
  `u_ban_status` tinyint(4) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

--
-- Dumping data for table `user`
--
-- u_pass holds bcrypt hashes; all four seed accounts share the same hash.

INSERT INTO `user` (`u_id`, `u_name`, `u_pass`, `u_role`, `u_ban_status`) VALUES
(1, 'superadmin', '$2y$10$RGDr77EDawhhf07tteeDweypWUPbMhh2cNqWHAG7MqSspwv3Ap.r6', 'super', 0),
(2, 'buyselladmin', '$2y$10$RGDr77EDawhhf07tteeDweypWUPbMhh2cNqWHAG7MqSspwv3Ap.r6', 'buysell', 0),
(3, 'stockadmin', '$2y$10$RGDr77EDawhhf07tteeDweypWUPbMhh2cNqWHAG7MqSspwv3Ap.r6', 'stockadmin', 0),
(4, 'patienceadmin', '$2y$10$RGDr77EDawhhf07tteeDweypWUPbMhh2cNqWHAG7MqSspwv3Ap.r6', 'patience', 0);

--
-- Indexes for dumped tables
--

--
-- Indexes for table `buydata`
--
ALTER TABLE `buydata`
  ADD PRIMARY KEY (`buy_id`);

--
-- Indexes for table `medicines`
--
ALTER TABLE `medicines`
  ADD PRIMARY KEY (`id`);

--
-- Indexes for table `patiencedatas`
--
ALTER TABLE `patiencedatas`
  ADD PRIMARY KEY (`pt_id`);

--
-- Indexes for table `patiences`
--
ALTER TABLE `patiences`
  ADD PRIMARY KEY (`p_id`),
  ADD UNIQUE KEY `p_nrc` (`p_nrc`);

--
-- Indexes for table `selldata`
--
ALTER TABLE `selldata`
  ADD PRIMARY KEY (`sell_id`);

--
-- Indexes for table `stocks`
--
ALTER TABLE `stocks`
  ADD PRIMARY KEY (`stocks_id`);

--
-- Indexes for table `user`
--
ALTER TABLE `user`
  ADD PRIMARY KEY (`u_id`),
  ADD UNIQUE KEY `u_name` (`u_name`);

--
-- AUTO_INCREMENT for dumped tables
--

--
-- AUTO_INCREMENT for table `buydata`
--
ALTER TABLE `buydata`
  MODIFY `buy_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=10;

--
-- AUTO_INCREMENT for table `medicines`
--
ALTER TABLE `medicines`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;

--
-- AUTO_INCREMENT for table `patiencedatas`
--
ALTER TABLE `patiencedatas`
  MODIFY `pt_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;

--
-- AUTO_INCREMENT for table `patiences`
--
ALTER TABLE `patiences`
  MODIFY `p_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;

--
-- AUTO_INCREMENT for table `selldata`
--
ALTER TABLE `selldata`
  MODIFY `sell_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=9;

--
-- AUTO_INCREMENT for table `stocks`
--
ALTER TABLE `stocks`
  MODIFY `stocks_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;

--
-- AUTO_INCREMENT for table `user`
--
ALTER TABLE `user`
  MODIFY `u_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
COMMIT;

/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- View: public.corp_emissions_target
--
-- One row per corporate response to question C4.1 -- presumably the
-- "emissions target" question of the survey; confirm against the
-- questionnaire definition.

-- DROP VIEW public.corp_emissions_target;

CREATE OR REPLACE VIEW public.corp_emissions_target AS
SELECT
    cr.corp_id,
    cr.survey_year,
    cr.response
FROM corp_responses AS cr
INNER JOIN corp_questions AS cq
    ON cr.question_id = cq.id
-- For an inner join, filtering in WHERE is equivalent to filtering in ON.
WHERE cq.question_number::text = 'C4.1'::text;

ALTER TABLE public.corp_emissions_target OWNER TO fmccollum;
-- Declare books.bookshelf_id as a foreign key referencing bookshelves(id),
-- so PostgreSQL knows how the two tables are connected. Note: this adds a
-- constraint on the existing column; it does not change its data type.
-- (From lab 14 canvas instructions.)
ALTER TABLE books
    ADD CONSTRAINT fk_bookshelves
    FOREIGN KEY (bookshelf_id)
    REFERENCES bookshelves(id);
/****************************************************************************************************************************
	Create the examiner and test centre record if they don't already exist
****************************************************************************************************************************/
USE tarsreplica;

-- Initialise variables describing the examiner and the test centre fixture
SET @TestCentreName = 'Example Test Centre';
SET @TestCentreCostCode = 'EXTC1';
SET @Country = 'England';
SET @StaffNumber = '01234567';
SET @ExaminerFirstName = 'MobExaminer';
SET @ExaminerLastName = '1';
SET @Date = CURRENT_DATE;

-- Creates the examiner and test centre rows if absent (procedure defined elsewhere)
CALL uspGenerateExaminerData(@TestCentreName,@TestCentreCostCode,@Country,@StaffNumber,@ExaminerFirstName,@ExaminerLastName,@Date);

/****************************************************************************************************************************
	Create the programme
****************************************************************************************************************************/
-- Look up the ids created/ensured above
SET @IndividualId = (SELECT individual_id FROM EXAMINER WHERE staff_number = @StaffNumber);
SET @TestCentreId = (SELECT tc_id FROM TEST_CENTRE WHERE tc_cost_centre_code = @TestCentreCostCode);

-- PROGRAMME: one row for today's journal (state_code 1)
INSERT INTO PROGRAMME (individual_id, programme_date, tc_id, state_code)
SELECT @IndividualId, @Date, @TestCentreId, 1 FROM DUAL;

/****************************************************************************************************************************
	Create the slots, including booking, application, individual, etc.
****************************************************************************************************************************/

/****************************************************
	Test Slot 1 -- Simple Cat B, Automatic
****************************************************/
-- Set test characteristic variables
SET @SlotHr = 8; SET @SlotMinute = 40;
SET @TestCategoryRef = 'B'; SET @Minutes = 57; SET @NTACode = NULL;
SET @GearboxType = 'Automatic'; SET @LargeVehicle = 0;
-- Candidate details
SET @DriverNumber = 'PEARS015220A99HC'; SET @DateOfBirth = '1987-06-27';
SET @Title = 'Miss'; SET @FirstName = 'Florence'; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = 'Pearson';
SET @Gender = 'Female'; SET @Ethnicity = 'A';
SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = NULL; SET @MobileTelNo = NULL; SET @EmailAddress = NULL;
SET @AddressLine1 = '1234 Station Street'; SET @AddressLine2 = 'Sometown'; SET @AddressLine3 = NULL; SET @AddressLine4 = NULL; SET @AddressLine5 = NULL; SET @PostCode = 'AB12 3CD';
SET @CancReason1 = NULL; SET @CancReason2 = NULL; SET @CancReason3 = NULL;
SET @ExtendedTest = 0; SET @SpecialNeedsCode = 'NONE'; SET @EntitlementCheck = 0; SET @WelshTest = 0; SET @SpecialNeedsText = NULL; SET @ProgressiveAccess = 0; SET @BookingSeq = 1; SET @CheckDigit = 1;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************
	Test Slot 2 -- Cat B, Manual, Welsh, Secondary Tel
****************************************************/
-- Set test characteristic variables
SET @SlotHr = 9; SET @SlotMinute = 37;
SET @TestCategoryRef = 'B'; SET @Minutes = 57; SET @NTACode = NULL;
SET @GearboxType = 'Manual'; SET @LargeVehicle = 0;
SET @DriverNumber = 'DOEXX625220A99HC'; SET @DateOfBirth = '1960-09-05';
SET @Title = 'Mrs'; SET @FirstName = 'Jane'; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = 'Doe';
SET @Gender = 'Female'; SET @Ethnicity = 'B';
SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = '04321 098765'; SET @MobileTelNo = NULL; SET @EmailAddress = NULL;
SET @AddressLine1 = 'My House'; SET @AddressLine2 = 'Someplace'; SET @AddressLine3 = 'Sometown'; SET @AddressLine4 = NULL; SET @AddressLine5 = NULL; SET @PostCode = 'AB45 6CD';
SET @CancReason1 = NULL; SET @CancReason2 = NULL; SET @CancReason3 = NULL;
SET @ExtendedTest = 0; SET @SpecialNeedsCode = 'NONE'; SET @EntitlementCheck = 0; SET @WelshTest = 1; SET @SpecialNeedsText = NULL; SET @ProgressiveAccess = 0; SET @BookingSeq = 1; SET @CheckDigit = 9;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************
	Test Slot 3 -- Cat B, Manual, Entitlement Check, Mobile Tel
****************************************************/
-- Set test characteristic variables
SET @SlotHr = 10; SET @SlotMinute = 44;
SET @TestCategoryRef = 'B'; SET @Minutes = 57; SET @NTACode = NULL;
SET @GearboxType = 'Manual'; SET @LargeVehicle = 0;
SET @DriverNumber = 'SHAWX885220A99HC'; SET @DateOfBirth = '1962-03-05';
SET @Title = 'Miss'; SET @FirstName = 'Theresa'; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = 'Shaw';
SET @Gender = 'Female'; SET @Ethnicity = 'C';
SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = NULL; SET @MobileTelNo = '07654 123456'; SET @EmailAddress = NULL;
SET @AddressLine1 = '999 Letsby Avenue'; SET @AddressLine2 = 'Someplace'; SET @AddressLine3 = 'Sometown'; SET @AddressLine4 = NULL; SET @AddressLine5 = NULL; SET @PostCode = 'AB67 8CD';
SET @CancReason1 = NULL; SET @CancReason2 = NULL; SET @CancReason3 = NULL;
SET @ExtendedTest = 0; SET @SpecialNeedsCode = 'NONE'; SET @EntitlementCheck = 1; SET @WelshTest = 0; SET @SpecialNeedsText = NULL; SET @ProgressiveAccess = 0; SET @BookingSeq = 1; SET @CheckDigit = 2;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************
	Test Slot 4 -- Cat B, Automatic, Welsh Test, Entitlement Check, Secondary + Mobile Tel
****************************************************/
-- Set test characteristic variables
SET @SlotHr = 11; SET @SlotMinute = 41;
SET @TestCategoryRef = 'B'; SET @Minutes = 57; SET @NTACode = NULL;
SET @GearboxType = 'Automatic'; SET @LargeVehicle = 0;
SET @DriverNumber = 'CAMPB805220A89HC'; SET @DateOfBirth = '1974-09-15';
SET @Title = 'Mr'; SET @FirstName = 'Ali'; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = 'Campbell';
SET @Gender = 'Male'; SET @Ethnicity = 'D';
SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = '04321 098765'; SET @MobileTelNo = '07654 123456'; SET @EmailAddress = NULL;
SET @AddressLine1 = '1 Station Street'; SET @AddressLine2 = 'Someplace'; SET @AddressLine3 = 'Somearea'; SET @AddressLine4 = 'Somecity'; SET @AddressLine5 = NULL; SET @PostCode = 'UB40 1AA';
SET @CancReason1 = NULL; SET @CancReason2 = NULL; SET @CancReason3 = NULL;
SET @ExtendedTest = 0; SET @SpecialNeedsCode = 'NONE'; SET @EntitlementCheck = 1; SET @WelshTest = 1; SET @SpecialNeedsText = NULL; SET @ProgressiveAccess = 0; SET @BookingSeq = 1; SET @CheckDigit = 6;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************
	Test Slot 5 -- Cat B, Manual, Full Address, Previous Cancellation, Special Needs Text
****************************************************/
-- Set test characteristic variables
SET @SlotHr = 13; SET @SlotMinute = 03;
SET @TestCategoryRef = 'B'; SET @Minutes = 57; SET @NTACode = NULL;
SET @GearboxType = 'Manual'; SET @LargeVehicle = 0;
SET @DriverNumber = 'BROWN915220A99HC'; SET @DateOfBirth = '1983-11-11';
SET @Title = 'Mr'; SET @FirstName = 'James'; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = 'Brown';
SET @Gender = 'Male'; SET @Ethnicity = 'E';
SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = NULL; SET @MobileTelNo = NULL; SET @EmailAddress = NULL;
SET @AddressLine1 = 'The Gables Cottage'; SET @AddressLine2 = 'Home Farm'; SET @AddressLine3 = 'Farm Road'; SET @AddressLine4 = 'Farm Area'; SET @AddressLine5 = 'Farmtown'; SET @PostCode = 'FA43 9XY';
SET @CancReason1 = 'Act of nature'; SET @CancReason2 = NULL; SET @CancReason3 = NULL;
SET @ExtendedTest = 0; SET @SpecialNeedsCode = 'NONE'; SET @EntitlementCheck = 0; SET @WelshTest = 0; SET @SpecialNeedsText = 'Candidate has dyslexia'; SET @ProgressiveAccess = 0; SET @BookingSeq = 2; SET @CheckDigit = 3;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************
	Test Slot 6 -- Cat B, Manual, 3 x Previous Cancellations, Extended Test
****************************************************/
-- Set test characteristic variables (extended test: 114 minutes)
SET @SlotHr = 14; SET @SlotMinute = 00;
SET @TestCategoryRef = 'B'; SET @Minutes = 114; SET @NTACode = NULL;
SET @GearboxType = 'Manual'; SET @LargeVehicle = 0;
SET @DriverNumber = 'SMYTH375220A99HC'; SET @DateOfBirth = '1973-10-05';
SET @Title = 'Captain'; SET @FirstName = 'Montague'; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = 'Smythe';
SET @Gender = 'Male'; SET @Ethnicity = 'F';
SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = NULL; SET @MobileTelNo = NULL; SET @EmailAddress = NULL;
SET @AddressLine1 = '1 Hangar Lane'; SET @AddressLine2 = 'Someplace'; SET @AddressLine3 = 'Sometown'; SET @AddressLine4 = NULL; SET @AddressLine5 = NULL; SET @PostCode = 'AB78 9CD';
SET @CancReason1 = 'Act of nature'; SET @CancReason2 = 'Act of nature'; SET @CancReason3 = 'DSA';
SET @ExtendedTest = 1; SET @SpecialNeedsCode = 'NONE'; SET @EntitlementCheck = 0; SET @WelshTest = 0; SET @SpecialNeedsText = NULL; SET @ProgressiveAccess = 0; SET @BookingSeq = 4; SET @CheckDigit = 7;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************************************************************************************
	Create the programme (next day)
****************************************************************************************************************************/
-- Increment date variable to represent the next working day
SET @Date = (SELECT getJournalEndDate(1,@Date));

-- PROGRAMME
INSERT INTO PROGRAMME (individual_id, programme_date, tc_id, state_code)
SELECT @IndividualId, @Date, @TestCentreId, 1 FROM DUAL;

/****************************************************
	Test Slot 1 (next day) -- Cat B, Automatic, 2 x Previous Cancellations, Special Needs Test, Mobile Tel
****************************************************/
-- Set test characteristic variables
SET @SlotHr = 8; SET @SlotMinute = 40;
SET @TestCategoryRef = 'B'; SET @Minutes = 57; SET @NTACode = NULL;
SET @GearboxType = 'Automatic'; SET @LargeVehicle = 0;
SET @DriverNumber = 'POTSX015220A99HC'; SET @DateOfBirth = '1985-03-04';
SET @Title = 'Mr'; SET @FirstName = 'Bill'; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = 'Pots';
SET @Gender = 'Male'; SET @Ethnicity = 'G';
SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = NULL; SET @MobileTelNo = '07654 123456'; SET @EmailAddress = NULL;
SET @AddressLine1 = '1234 Station Street'; SET @AddressLine2 = 'Someplace'; SET @AddressLine3 = 'Sometown'; SET @AddressLine4 = NULL; SET @AddressLine5 = NULL; SET @PostCode = 'AB12 3CD';
SET @CancReason1 = 'Act of nature'; SET @CancReason2 = 'DSA'; SET @CancReason3 = NULL;
SET @ExtendedTest = 0; SET @SpecialNeedsCode = 'YES'; SET @EntitlementCheck = 0; SET @WelshTest = 0; SET @SpecialNeedsText = NULL; SET @ProgressiveAccess = 0; SET @BookingSeq = 3; SET @CheckDigit = 1;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************
	Test Slot 2 (next day) -- Cat B, Automatic, Extra Special Needs Test, Secondary + Mobile Tel
****************************************************/
-- Set test characteristic variables (extra-needs test: 86 minutes)
SET @SlotHr = 9; SET @SlotMinute = 37;
SET @TestCategoryRef = 'B'; SET @Minutes = 86; SET @NTACode = NULL;
SET @GearboxType = 'Automatic'; SET @LargeVehicle = 0;
SET @DriverNumber = 'COOPE625220A99HC'; SET @DateOfBirth = '1975-09-01';
SET @Title = 'Miss'; SET @FirstName = 'Alice'; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = 'Cooper';
SET @Gender = 'Female'; SET @Ethnicity = 'A';
SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = '04321 098765'; SET @MobileTelNo = '07654 123456'; SET @EmailAddress = NULL;
SET @AddressLine1 = 'My House'; SET @AddressLine2 = 'Someplace'; SET @AddressLine3 = 'Sometown'; SET @AddressLine4 = NULL; SET @AddressLine5 = NULL; SET @PostCode = 'AB45 6CD';
SET @CancReason1 = NULL; SET @CancReason2 = NULL; SET @CancReason3 = NULL;
SET @ExtendedTest = 0; SET @SpecialNeedsCode = 'EXTRA'; SET @EntitlementCheck = 0; SET @WelshTest = 0; SET @SpecialNeedsText = NULL; SET @ProgressiveAccess = 0; SET @BookingSeq = 1; SET @CheckDigit = 1;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************
	Test Slot 3 (next day) -- Cat B, Manual, Special Needs Test, Extended Test, Special Needs Text, Entitlement Check, Secondary Tel
****************************************************/
-- Set test characteristic variables
SET @SlotHr = 11; SET @SlotMinute = 03;
SET @TestCategoryRef = 'B'; SET @Minutes = 114; SET @NTACode = NULL;
SET @GearboxType = 'Manual'; SET @LargeVehicle = 0;
SET @DriverNumber = 'BUXTO015220A99HC'; SET @DateOfBirth = '1977-10-16';
SET @Title = 'Mr'; SET @FirstName = 'Phil'; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = 'Buxton';
SET @Gender = 'Male'; SET @Ethnicity = 'B';
SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = '04321 098765'; SET @MobileTelNo = NULL; SET @EmailAddress = NULL;
SET @AddressLine1 = '1234 Station Street'; SET @AddressLine2 = 'Someplace'; SET @AddressLine3 = 'Sometown'; SET @AddressLine4 = NULL; SET @AddressLine5 = NULL; SET @PostCode = 'AB12 3CD';
SET @CancReason1 = NULL; SET @CancReason2 = NULL; SET @CancReason3 = NULL;
SET @ExtendedTest = 1; SET @SpecialNeedsCode = 'YES'; SET @EntitlementCheck = 1; SET @WelshTest = 0; SET @SpecialNeedsText = 'Candidate has dyslexia'; SET @ProgressiveAccess = 0; SET @BookingSeq = 1; SET @CheckDigit = 1;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************
	Test Slot 4 (next day) -- Non Test Activity (Corporate Connectivity)
****************************************************/
-- Set test characteristic variables (NTA slot: only the NTA code is set;
-- every candidate-related variable is NULL)
SET @SlotHr = 13; SET @SlotMinute = 27;
SET @TestCategoryRef = NULL; SET @Minutes = 57; SET @NTACode = '198';
SET @GearboxType = NULL; SET @LargeVehicle = NULL;
SET @DriverNumber = NULL; SET @DateOfBirth = NULL;
SET @Title = NULL; SET @FirstName = NULL; SET @SecondName = NULL; SET @ThirdName = NULL; SET @Surname = NULL;
SET @Gender = NULL; SET @Ethnicity = NULL;
SET @PrimaryTelNo = NULL; SET @SecondaryTelNo = NULL; SET @MobileTelNo = NULL; SET @EmailAddress = NULL;
SET @AddressLine1 = NULL; SET @AddressLine2 = NULL; SET @AddressLine3 = NULL; SET @AddressLine4 = NULL; SET @AddressLine5 = NULL; SET @PostCode = NULL;
SET @CancReason1 = NULL; SET @CancReason2 = NULL; SET @CancReason3 = NULL;
SET @ExtendedTest = NULL; SET @SpecialNeedsCode = NULL; SET @EntitlementCheck = NULL; SET @WelshTest = NULL; SET @SpecialNeedsText = NULL; SET @ProgressiveAccess = NULL; SET @BookingSeq = NULL; SET @CheckDigit = NULL;
-- Call stored procedure
CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);

/****************************************************************************************************************************
	Change Test Centre
****************************************************************************************************************************/
-- Initialise variables (same examiner, different test centre in Wales)
SET @TestCentreName = 'Welsh Test Centre';
SET @TestCentreCostCode = 'EXTC3';
SET @Country = 'Wales';

-- Call stored procedure
CALL uspGenerateExaminerData(@TestCentreName,@TestCentreCostCode,@Country,@StaffNumber,@ExaminerFirstName,@ExaminerLastName,@Date);

-- Re-resolve the ids for the new test centre
SET @IndividualId = (SELECT individual_id FROM EXAMINER WHERE staff_number = @StaffNumber);
SET @TestCentreId = (SELECT tc_id FROM TEST_CENTRE WHERE tc_cost_centre_code = @TestCentreCostCode);

-- PROGRAMME
INSERT INTO PROGRAMME (individual_id, programme_date, tc_id, state_code)
SELECT @IndividualId, @Date, @TestCentreId, 1 FROM DUAL;

/****************************************************
	Test Slot 5 (next day) -- Cat B, Automatic, Mobile Tel, Previous Cancellation, 2 x Middle Names, Welsh Test, Special Needs Text
****************************************************/
-- Set test characteristic variables
SET @SlotHr = 14; SET @SlotMinute = 24;
SET @TestCategoryRef = 'B'; SET @Minutes = 57; SET @NTACode = NULL;
SET @GearboxType = 'Automatic'; SET @LargeVehicle = 0;
SET @DriverNumber = 'SHIRT015220A99HC'; SET @DateOfBirth = '1977-10-16';
SET @Title = 'Mr'; SET @FirstName = 'Polo'; SET @SecondName =
'Ralph'; SET @ThirdName = 'Lauren'; SET @Surname = 'Shirt'; SET @Gender = 'Female'; SET @Ethnicity = 'C'; SET @PrimaryTelNo = '01234 567890'; SET @SecondaryTelNo = NULL; SET @MobileTelNo = '07654 123456'; SET @EmailAddress = NULL; SET @AddressLine1 = '1234 Station Street'; SET @AddressLine2 = 'Someplace'; SET @AddressLine3 = 'Sometown'; SET @AddressLine4 = NULL; SET @AddressLine5 = NULL; SET @PostCode = 'AB12 3CD'; SET @CancReason1 = 'Act of nature'; SET @CancReason2 = NULL; SET @CancReason3 = NULL; SET @ExtendedTest = 0; SET @SpecialNeedsCode = 'NONE'; SET @EntitlementCheck = 0; SET @WelshTest = 1; SET @SpecialNeedsText = 'Candidate is pregnant'; SET @ProgressiveAccess = 0; SET @BookingSeq = 2; SET @CheckDigit = 5; -- Call stored procedure CALL uspGenerateJournalData(@Date,@IndividualId,@TestCentreId,@CancReason1,@CancReason2,@CancReason3,@SlotHr,@SlotMinute,@TestCategoryRef,@Minutes,@NTACode,@GearboxType,@LargeVehicle,@DriverNumber,@DateOfBirth,@Title,@FirstName,@SecondName,@ThirdName,@Surname,@Gender,@Ethnicity ,@PrimaryTelNo,@SecondaryTelNo,@MobileTelNo,@EmailAddress,@AddressLine1,@AddressLine2,@AddressLine3,@AddressLine4,@AddressLine5,@PostCode,@ExtendedTest,@SpecialNeedsCode,@EntitlementCheck,@WelshTest,@SpecialNeedsText,@ProgressiveAccess,@BookingSeq,@CheckDigit);
<reponame>cliffordcarnmo/generic-db-template<gh_stars>0
-- Enforce uniqueness of contractdetails.id with a dedicated unique index.
CREATE UNIQUE INDEX id_UNIQUE
    ON contractdetails (id ASC);
-- MySQL Administrator dump 1.4
--
-- ------------------------------------------------------
-- Server version 5.0.19-nt

-- Save client/session settings and relax checks while loading.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;

-- Rebuild the whole schema from scratch; the schema-level DROP is intentional.
DROP SCHEMA IF EXISTS ej5plan111K;
CREATE SCHEMA ej5plan111K;
USE ej5plan111K;

--
-- Table structure for table `curso` (courses)
-- (the original dump comment wrongly referenced `ejercicio3`.`afiliado`)
--
DROP TABLE IF EXISTS curso;
CREATE TABLE curso (
  nombre_curso varchar(40) NOT NULL ,
  descripcion varchar(255) NOT NULL default '',
  cantidad_maxima_alumnos integer NOT NULL default 0,
  PRIMARY KEY (nombre_curso)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

--
-- Dumping data for table `curso`
--
/*!40000 ALTER TABLE curso DISABLE KEYS */;
-- Explicit column lists keep these inserts valid if columns are added later.
INSERT INTO curso (nombre_curso, descripcion, cantidad_maxima_alumnos) VALUES
 ('matematica','Matemática general',20),
 ('Ingles','Ingles general',30),
 ('Biologia','Biología Introductoria',10),
 ('Plastica','Repaso de Plastica',15);
/*!40000 ALTER TABLE curso ENABLE KEYS */;

--
-- Table structure for table `alumno` (students)
--
DROP TABLE IF EXISTS alumno;
CREATE TABLE alumno (
  dni varchar(15) NOT NULL,
  nombre_y_apellido varchar(145) NOT NULL default '',
  direccion varchar(100) NOT NULL default '',
  PRIMARY KEY (dni)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

--
-- Dumping data for table `alumno`
--
/*!40000 ALTER TABLE alumno DISABLE KEYS */;
INSERT INTO alumno (dni, nombre_y_apellido, direccion) VALUES
 ('23545688','<NAME>','San Martin 946'),
 ('21542382','<NAME>','San Juan 146'),
 ('33543618','<NAME>','Constitución 1200');
/*!40000 ALTER TABLE alumno ENABLE KEYS */;

--
-- Table structure for table `realiza` (student-course enrolment with grade)
--
DROP TABLE IF EXISTS realiza;
CREATE TABLE realiza (
  nombre_curso varchar(40) NOT NULL default '',
  dni varchar(15) NOT NULL default '',
  -- nota: grade, numeric(4,2) so values range 0.00-99.99
  nota numeric(4,2) NOT NULL default '0.0',
  PRIMARY KEY (nombre_curso,dni)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;

--
-- Dumping data for table `realiza`
--
/*!40000 ALTER TABLE realiza DISABLE KEYS */;
INSERT INTO realiza (nombre_curso, dni, nota) VALUES
 ('matematica','23545688','4.8'),
 ('matematica','21542382','10'),
 ('Biologia','23545688','7.5'),
 ('Biologia','33543618','8.9'),
 ('Ingles','33543618','5.9'),
 ('matematica','33543618','4.8');
/*!40000 ALTER TABLE realiza ENABLE KEYS */;

-- Restore the session settings saved above.
-- (The original dump restored CHARACTER_SET_CLIENT twice; the duplicate
-- statement was removed.)
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- PostgreSQL test fixture: defines `test` (exercising serial, timestamp,
-- numeric, array, and hstore columns) and `reftest`, a join table whose two
-- columns both reference `test` with ON DELETE CASCADE, then seeds both.
-- NOTE(review): the statements are listed out of dependency order (inserts
-- appear before the matching CREATE TABLE) and carry no terminating
-- semicolons — presumably the consuming test harness executes them
-- individually / in its own order; confirm before running this file verbatim.
-- NOTE(review): the `test` inserts supply 6 of 7 columns, leaving `prop`
-- (hstore) to default to NULL — assumed intentional.
insert into test values (default, 'tomorrow', 1, 1.1, 'tomorrow', '{1,2,3}')
insert into test values (default, 'today', 0, 0, 'today', '{0,1,2}')
insert into reftest values (3,1)
create table reftest ( test_id integer not null references test on delete cascade, another_test_id integer not null references test on delete cascade )
create table test ( id serial primary key, created timestamp, counter int, number decimal, data text, list int[], prop hstore )
insert into test values (default, 'yesterday', -1, -1.1, 'yesterday', '{-1,0,1}')
insert into reftest values (1,3)
insert into reftest values (2,2)
-- ============================================================================
-- phpMyAdmin dump of database `outbreak` (epidemic/temperature tracking app).
-- Tables: admin (site admins), article (+ FK sid -> students.id),
-- comments (FKs aid -> article.id, sid -> students.id), students, teacher,
-- teachertemp (teacher temperature log, FK sid -> teacher.id),
-- temperature (student temperature log, FK sid -> students.id).
-- The dump creates tables, seeds sample rows, then adds keys, AUTO_INCREMENT
-- settings, and foreign-key constraints via ALTER TABLE.
-- NOTE(review): `admin`.`password` is varchar(12) and values look stored in
-- plain text (placeholders here) — verify hashing is handled by the app.
-- NOTE(review): this chunk's physical line breaks fall mid-statement; the
-- statements below are preserved byte-for-byte as found.
-- ============================================================================
-- phpMyAdmin SQL Dump -- version 4.4.15.10 -- https://www.phpmyadmin.net -- -- Host: localhost -- Generation Time: 2020-05-03 20:06:07 -- 服务器版本: 5.5.62-log -- PHP Version: 5.6.40 SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; SET time_zone = "+00:00"; /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; -- -- Database: `outbreak` -- CREATE DATABASE IF NOT EXISTS `outbreak` DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci; USE `outbreak`; -- -------------------------------------------------------- -- -- 表的结构 `admin` -- CREATE TABLE IF NOT EXISTS `admin` ( `id` int(11) NOT NULL, `name` varchar(200) NOT NULL COMMENT '用户名', `password` varchar(12) NOT NULL COMMENT '密码', `level` int(1) NOT NULL COMMENT '级别' ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COMMENT='utf8_general_ci'; -- -- 转存表中的数据 `admin` -- INSERT INTO `admin` (`id`, `name`, `password`, `level`) VALUES (1, 'admin', '<PASSWORD>', 0); -- -------------------------------------------------------- -- -- 表的结构 `article` -- CREATE TABLE IF NOT EXISTS `article` ( `id` int(11) NOT NULL, `title` char(250) NOT NULL COMMENT '标题', `content` text NOT NULL COMMENT '内容', `time` date NOT NULL, `tag` char(100) DEFAULT NULL COMMENT '标签', `sid` int(11) NOT NULL, `audit` int(1) NOT NULL DEFAULT '0' COMMENT '审核' ) ENGINE=InnoDB AUTO_INCREMENT=22 DEFAULT CHARSET=utf8 COMMENT='utf8_general_ci'; -- -- 转存表中的数据 `article` -- INSERT INTO `article` (`id`, `title`, `content`, `time`, `tag`, `sid`, `audit`) VALUES (19, 'aaasdas', '<p>adaaaaaada</p>', '2020-04-19', '["0"]', 362, 1), (21, 'adas', '<p>dasda</p>', '2020-04-25', '["0"]', 362, 0); -- -------------------------------------------------------- -- -- 表的结构 `comments` -- CREATE TABLE IF NOT EXISTS `comments` ( `id` int(11) NOT NULL, `time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE 
CURRENT_TIMESTAMP COMMENT '评论的时间', `sid` int(11) NOT NULL COMMENT '学生id', `content` varchar(350) NOT NULL COMMENT '评论内容', `aid` int(11) NOT NULL COMMENT '文章id' ) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8; -- -- 转存表中的数据 `comments` -- INSERT INTO `comments` (`id`, `time`, `sid`, `content`, `aid`) VALUES (1, '2020-04-19 08:58:40', 362, '1', 19); -- -------------------------------------------------------- -- -- 表的结构 `students` -- CREATE TABLE IF NOT EXISTS `students` ( `id` int(11) NOT NULL, `name` varchar(300) NOT NULL COMMENT '姓名', `sex` int(1) NOT NULL COMMENT '0:男 1:女', `department` varchar(300) NOT NULL COMMENT '所在系', `clas` varchar(250) NOT NULL COMMENT '所在班级', `studentid` varchar(12) NOT NULL COMMENT '学号', `password` varchar(300) NOT NULL, `email` char(100) DEFAULT NULL COMMENT '邮箱' ) ENGINE=InnoDB AUTO_INCREMENT=363 DEFAULT CHARSET=utf8 COMMENT='utf8_general_ci'; -- -- 转存表中的数据 `students` -- INSERT INTO `students` (`id`, `name`, `sex`, `department`, `clas`, `studentid`, `password`, `email`) VALUES (362, 'test', 1, '信息工程系', '19物流信息技术班', '<PASSWORD>', '<PASSWORD>', NULL); -- -------------------------------------------------------- -- -- 表的结构 `teacher` -- CREATE TABLE IF NOT EXISTS `teacher` ( `id` int(11) NOT NULL, `name` varchar(300) NOT NULL DEFAULT '老师' COMMENT '姓名', `sex` int(1) NOT NULL DEFAULT '1' COMMENT '0:男 1:女', `department` varchar(300) NOT NULL DEFAULT '信息工程系' COMMENT '所在系', `studentid` varchar(12) NOT NULL COMMENT '工号', `password` varchar(300) NOT NULL DEFAULT '<PASSWORD>', `email` char(100) DEFAULT NULL COMMENT '邮箱' ) ENGINE=InnoDB AUTO_INCREMENT=1522 DEFAULT CHARSET=utf8 COMMENT='utf8_general_ci'; -- -- 转存表中的数据 `teacher` -- INSERT INTO `teacher` (`id`, `name`, `sex`, `department`, `studentid`, `password`, `email`) VALUES (1521, 'test', 1, '信息工程系', '<PASSWORD>', '<PASSWORD>', '<EMAIL>'); -- -------------------------------------------------------- -- -- 表的结构 `teachertemp` -- CREATE TABLE IF NOT EXISTS `teachertemp` ( `id` int(11) NOT NULL, 
`sid` int(11) NOT NULL COMMENT '老师Id', `record` float NOT NULL COMMENT '体温', `time` date NOT NULL COMMENT '时间' ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- -------------------------------------------------------- -- -- 表的结构 `temperature` -- CREATE TABLE IF NOT EXISTS `temperature` ( `id` int(11) NOT NULL, `sid` int(11) NOT NULL COMMENT '用户Id', `record` float NOT NULL COMMENT '体温', `time` date NOT NULL COMMENT '时间' ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- -- Indexes for dumped tables -- -- -- Indexes for table `admin` -- ALTER TABLE `admin` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `id` (`id`), ADD UNIQUE KEY `name` (`name`); -- -- Indexes for table `article` -- ALTER TABLE `article` ADD PRIMARY KEY (`id`), ADD KEY `sid` (`sid`); -- -- Indexes for table `comments` -- ALTER TABLE `comments` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `id` (`id`), ADD KEY `sid` (`sid`), ADD KEY `aid` (`aid`); -- -- Indexes for table `students` -- ALTER TABLE `students` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `id` (`id`), ADD UNIQUE KEY `studentid` (`studentid`), ADD UNIQUE KEY `email` (`email`); -- -- Indexes for table `teacher` -- ALTER TABLE `teacher` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `id` (`id`), ADD UNIQUE KEY `studentid` (`studentid`), ADD UNIQUE KEY `email` (`email`); -- -- Indexes for table `teachertemp` -- ALTER TABLE `teachertemp` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `id` (`id`), ADD KEY `studentid` (`sid`); -- -- Indexes for table `temperature` -- ALTER TABLE `temperature` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `id` (`id`), ADD KEY `studentid` (`sid`); -- -- AUTO_INCREMENT for dumped tables -- -- -- AUTO_INCREMENT for table `admin` -- ALTER TABLE `admin` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=2; -- -- AUTO_INCREMENT for table `article` -- ALTER TABLE `article` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=22; -- -- AUTO_INCREMENT for table `comments` -- ALTER TABLE `comments` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=3; -- -- 
AUTO_INCREMENT for table `students` -- ALTER TABLE `students` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=363; -- -- AUTO_INCREMENT for table `teacher` -- ALTER TABLE `teacher` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=1522; -- -- AUTO_INCREMENT for table `teachertemp` -- ALTER TABLE `teachertemp` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT; -- -- AUTO_INCREMENT for table `temperature` -- ALTER TABLE `temperature` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT; -- -- 限制导出的表 -- -- -- 限制表 `article` -- ALTER TABLE `article` ADD CONSTRAINT `article_ibfk_1` FOREIGN KEY (`sid`) REFERENCES `students` (`id`); -- -- 限制表 `comments` -- ALTER TABLE `comments` ADD CONSTRAINT `comments_ibfk_1` FOREIGN KEY (`aid`) REFERENCES `article` (`id`), ADD CONSTRAINT `studendcom` FOREIGN KEY (`sid`) REFERENCES `students` (`id`); -- -- 限制表 `teachertemp` -- ALTER TABLE `teachertemp` ADD CONSTRAINT `teachertemp_ibfk_1` FOREIGN KEY (`sid`) REFERENCES `teacher` (`id`); -- -- 限制表 `temperature` -- ALTER TABLE `temperature` ADD CONSTRAINT `temperature_ibfk_1` FOREIGN KEY (`sid`) REFERENCES `students` (`id`); /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<filename>src/main/resources/test/sqls/preset_add_1_review.sql
-- requires: preset_add_1_group
-- Seed a single review row (rate 2, visibility 1) attached to group 101001,
-- authored by user 105001.
INSERT INTO "review"
    (id, group_id, visibility, rate, review, created_at, created_by)
VALUES
    (103001, 101001, 1, 2, 'long review text', '2021-01-16 01:50:01', 105001);
<reponame>KorawitRupanya/SeasonPredictor -- phpMyAdmin SQL Dump -- version 4.9.1 -- https://www.phpmyadmin.net/ -- -- Host: db:3306 -- Generation Time: Dec 20, 2019 at 12:16 PM -- Server version: 8.0.18 -- PHP Version: 7.2.23 SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; SET AUTOCOMMIT = 0; START TRANSACTION; SET time_zone = "+00:00"; /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; -- -- Database: `b6010546915` -- -- -------------------------------------------------------- -- -- Table structure for table `bangkok` -- CREATE TABLE `bangkok` ( `year` int(4) DEFAULT NULL, `month` varchar(9) DEFAULT NULL, `date` int(2) DEFAULT NULL, `tempMax` decimal(5,2) DEFAULT NULL, `tempAvg` decimal(4,2) DEFAULT NULL, `tempMin` decimal(4,2) DEFAULT NULL, `tempMonthAvg` decimal(4,2) DEFAULT NULL, `tempDiff` decimal(5,2) DEFAULT NULL, `sunrise` varchar(14) DEFAULT NULL, `sunset` varchar(14) DEFAULT NULL, `length` varchar(8) DEFAULT NULL, `difference` varchar(5) DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- -- Dumping data for table `bangkok` -- INSERT INTO `bangkok` (`year`, `month`, `date`, `tempMax`, `tempAvg`, `tempMin`, `tempMonthAvg`, `tempDiff`, `sunrise`, `sunset`, `length`, `difference`) VALUES (2017, 'January', 1, '86.00', '80.10', '73.00', '73.00', '0.00', '06:41 ↑ (113°)', '18:01 ↑ (247°)', '11:20:06', '+0:09'), (2017, 'January', 2, '90.00', '81.30', '73.00', '73.00', '0.00', '06:41 ↑ (113°)', '18:02 ↑ (247°)', '11:20:18', '+0:12'), (2017, 'January', 3, '86.00', '81.20', '77.00', '74.33', '2.67', '06:42 ↑ (113°)', '18:02 ↑ (247°)', '11:20:30', '+0:12'), (2017, 'January', 4, '86.00', '81.20', '75.00', '74.50', '0.50', '06:42 ↑ (113°)', '18:03 ↑ (247°)', '11:20:44', '+0:13'), (2017, 'January', 5, '88.00', '82.60', '79.00', '75.40', '3.60', '06:42 ↑ (113°)', '18:03 ↑ (247°)', '11:20:58', 
'+0:14'), (2017, 'January', 6, '86.00', '79.70', '75.00', '75.33', '-0.33', '06:43 ↑ (113°)', '18:04 ↑ (247°)', '11:21:14', '+0:15'), (2017, 'January', 7, '90.00', '82.20', '77.00', '75.57', '1.43', '06:43 ↑ (113°)', '18:05 ↑ (247°)', '11:21:30', '+0:16'), (2017, 'January', 8, '82.00', '79.90', '75.00', '75.50', '-0.50', '06:43 ↑ (113°)', '18:05 ↑ (247°)', '11:21:47', '+0:17'), (2017, 'January', 9, '82.00', '78.30', '75.00', '75.44', '-0.44', '06:44 ↑ (113°)', '18:06 ↑ (248°)', '11:22:06', '+0:18'), (2017, 'January', 10, '81.00', '78.40', '75.00', '75.40', '-0.40', '06:44 ↑ (112°)', '18:06 ↑ (248°)', '11:22:25', '+0:19'), (2017, 'January', 11, '88.00', '80.30', '75.00', '75.36', '-0.36', '06:44 ↑ (112°)', '18:07 ↑ (248°)', '11:22:45', '+0:20'), (2017, 'January', 12, '90.00', '81.50', '75.00', '75.33', '-0.33', '06:44 ↑ (112°)', '18:07 ↑ (248°)', '11:23:06', '+0:21'), (2017, 'January', 13, '90.00', '82.90', '77.00', '75.46', '1.54', '06:44 ↑ (112°)', '18:08 ↑ (248°)', '11:23:28', '+0:22'), (2017, 'January', 14, '91.00', '83.50', '73.00', '75.29', '-2.29', '06:45 ↑ (112°)', '18:09 ↑ (248°)', '11:23:50', '+0:22'), (2017, 'January', 15, '93.00', '85.30', '77.00', '75.40', '1.60', '06:45 ↑ (112°)', '18:09 ↑ (249°)', '11:24:14', '+0:23'), (2017, 'January', 16, '91.00', '84.20', '75.00', '75.38', '-0.38', '06:45 ↑ (111°)', '18:10 ↑ (249°)', '11:24:38', '+0:24'), (2017, 'January', 17, '88.00', '82.80', '79.00', '75.59', '3.41', '06:45 ↑ (111°)', '18:10 ↑ (249°)', '11:25:03', '+0:25'), (2017, 'January', 18, '90.00', '84.30', '79.00', '75.78', '3.22', '06:45 ↑ (111°)', '18:11 ↑ (249°)', '11:25:29', '+0:26'), (2017, 'January', 19, '93.00', '85.20', '77.00', '75.84', '1.16', '06:45 ↑ (111°)', '18:11 ↑ (249°)', '11:25:56', '+0:26'), (2017, 'January', 20, '93.00', '84.40', '75.00', '75.80', '-0.80', '06:45 ↑ (111°)', '18:12 ↑ (250°)', '11:26:24', '+0:27'), (2017, 'January', 21, '90.00', '83.70', '75.00', '75.76', '-0.76', '06:45 ↑ (110°)', '18:12 ↑ (250°)', '11:26:52', '+0:28'), 
(2017, 'January', 22, '88.00', '82.20', '75.00', '75.73', '-0.73', '06:45 ↑ (110°)', '18:13 ↑ (250°)', '11:27:21', '+0:29'), (2017, 'January', 23, '86.00', '80.10', '73.00', '75.61', '-2.61', '06:45 ↑ (110°)', '18:13 ↑ (250°)', '11:27:50', '+0:29'), (2017, 'January', 24, '88.00', '80.40', '72.00', '75.46', '-3.46', '06:45 ↑ (110°)', '18:14 ↑ (251°)', '11:28:21', '+0:30'), (2017, 'January', 25, '90.00', '81.80', '73.00', '75.36', '-2.36', '06:45 ↑ (109°)', '18:14 ↑ (251°)', '11:28:52', '+0:31'), (2017, 'January', 26, '88.00', '81.50', '75.00', '75.35', '-0.35', '06:45 ↑ (109°)', '18:15 ↑ (251°)', '11:29:23', '+0:31'), (2017, 'January', 27, '90.00', '82.10', '73.00', '75.26', '-2.26', '06:45 ↑ (109°)', '18:15 ↑ (251°)', '11:29:56', '+0:32'), (2017, 'January', 28, '88.00', '80.20', '72.00', '75.14', '-3.14', '06:45 ↑ (109°)', '18:16 ↑ (252°)', '11:30:29', '+0:33'), (2017, 'January', 29, '91.00', '80.60', '72.00', '75.03', '-3.03', '06:45 ↑ (108°)', '18:16 ↑ (252°)', '11:31:02', '+0:33'), (2017, 'January', 30, '91.00', '81.60', '72.00', '74.93', '-2.93', '06:45 ↑ (108°)', '18:17 ↑ (252°)', '11:31:36', '+0:34'), (2017, 'January', 31, '90.00', '81.70', '72.00', '74.84', '-2.84', '06:45 ↑ (108°)', '18:17 ↑ (252°)', '11:32:11', '+0:34'), (2017, 'February', 1, '91.00', '82.40', '72.00', '74.75', '-2.75', '06:45 ↑ (107°)', '18:18 ↑ (253°)', '11:32:46', '+0:35'), (2017, 'February', 2, '91.00', '81.50', '75.00', '74.76', '0.24', '06:45 ↑ (107°)', '18:18 ↑ (253°)', '11:33:22', '+0:35'), (2017, 'February', 3, '84.00', '79.10', '75.00', '74.76', '0.24', '06:44 ↑ (107°)', '18:18 ↑ (253°)', '11:33:58', '+0:36'), (2017, 'February', 4, '93.00', '83.80', '75.00', '74.77', '0.23', '06:44 ↑ (107°)', '18:19 ↑ (254°)', '11:34:35', '+0:36'), (2017, 'February', 5, '95.00', '86.00', '77.00', '74.83', '2.17', '06:44 ↑ (106°)', '18:19 ↑ (254°)', '11:35:12', '+0:37'), (2017, 'February', 6, '95.00', '85.20', '73.00', '74.78', '-1.78', '06:44 ↑ (106°)', '18:20 ↑ (254°)', '11:35:50', '+0:37'), 
(2017, 'February', 7, '93.00', '84.20', '73.00', '74.74', '-1.74', '06:43 ↑ (106°)', '18:20 ↑ (255°)', '11:36:28', '+0:38'), (2017, 'February', 8, '93.00', '83.50', '75.00', '74.74', '0.26', '06:43 ↑ (105°)', '18:20 ↑ (255°)', '11:37:07', '+0:38'), (2017, 'February', 9, '95.00', '83.60', '73.00', '74.70', '-1.70', '06:43 ↑ (105°)', '18:21 ↑ (255°)', '11:37:46', '+0:39'), (2017, 'February', 10, '91.00', '84.20', '79.00', '74.80', '4.20', '06:43 ↑ (105°)', '18:21 ↑ (256°)', '11:38:25', '+0:39'), (2017, 'February', 11, '90.00', '81.40', '73.00', '74.76', '-1.76', '06:42 ↑ (104°)', '18:21 ↑ (256°)', '11:39:05', '+0:39'), (2017, 'February', 12, '88.00', '78.30', '70.00', '74.65', '-4.65', '06:42 ↑ (104°)', '18:22 ↑ (256°)', '11:39:45', '+0:40'), (2017, 'February', 13, '86.00', '77.40', '68.00', '74.50', '-6.50', '06:41 ↑ (104°)', '18:22 ↑ (257°)', '11:40:25', '+0:40'), (2017, 'February', 14, '88.00', '78.50', '68.00', '74.36', '-6.36', '06:41 ↑ (103°)', '18:22 ↑ (257°)', '11:41:06', '+0:40'), (2017, 'February', 15, '90.00', '80.00', '70.00', '74.26', '-4.26', '06:41 ↑ (103°)', '18:23 ↑ (257°)', '11:41:48', '+0:41'), (2017, 'February', 16, '90.00', '81.40', '72.00', '74.21', '-2.21', '06:40 ↑ (103°)', '18:23 ↑ (258°)', '11:42:29', '+0:41'), (2017, 'February', 17, '93.00', '82.70', '70.00', '74.13', '-4.13', '06:40 ↑ (102°)', '18:23 ↑ (258°)', '11:43:11', '+0:41'), (2017, 'February', 18, '93.00', '83.20', '72.00', '74.08', '-2.08', '06:39 ↑ (102°)', '18:24 ↑ (258°)', '11:43:53', '+0:42'), (2017, 'February', 19, '93.00', '83.80', '75.00', '74.10', '0.90', '06:39 ↑ (101°)', '18:24 ↑ (259°)', '11:44:36', '+0:42'), (2017, 'February', 20, '95.00', '84.30', '73.00', '74.08', '-1.08', '06:39 ↑ (101°)', '18:24 ↑ (259°)', '11:45:18', '+0:42'), (2017, 'February', 21, '95.00', '85.70', '77.00', '74.13', '2.87', '06:38 ↑ (101°)', '18:24 ↑ (260°)', '11:46:01', '+0:43'), (2017, 'February', 22, '95.00', '85.20', '77.00', '74.19', '2.81', '06:38 ↑ (100°)', '18:25 ↑ (260°)', '11:46:44', 
'+0:43'), (2017, 'February', 23, '95.00', '86.90', '81.00', '74.31', '6.69', '06:37 ↑ (100°)', '18:25 ↑ (260°)', '11:47:28', '+0:43'), (2017, 'February', 24, '95.00', '85.80', '81.00', '74.44', '6.56', '06:37 ↑ (100°)', '18:25 ↑ (261°)', '11:48:11', '+0:43'), (2017, 'February', 25, '95.00', '84.70', '77.00', '74.48', '2.52', '06:36 ↑ (99°)', '18:25 ↑ (261°)', '11:48:55', '+0:43'), (2017, 'February', 26, '95.00', '85.40', '77.00', '74.53', '2.47', '06:36 ↑ (99°)', '18:25 ↑ (261°)', '11:49:39', '+0:44'), (2017, 'February', 27, '91.00', '85.40', '79.00', '74.60', '4.40', '06:35 ↑ (98°)', '18:26 ↑ (262°)', '11:50:23', '+0:44'), (2017, 'February', 28, '95.00', '85.80', '77.00', '74.64', '2.36', '06:34 ↑ (98°)', '18:26 ↑ (262°)', '11:51:08', '+0:44'), (2017, 'March', 1, '95.00', '87.10', '77.00', '74.68', '2.32', '06:34 ↑ (98°)', '18:26 ↑ (263°)', '11:51:52', '+0:44'), (2017, 'March', 2, '91.00', '84.40', '77.00', '74.72', '2.28', '06:33 ↑ (97°)', '18:26 ↑ (263°)', '11:52:37', '+0:44'), (2017, 'March', 3, '95.00', '85.10', '75.00', '74.73', '0.27', '06:33 ↑ (97°)', '18:26 ↑ (263°)', '11:53:22', '+0:44'), (2017, 'March', 4, '95.00', '85.10', '75.00', '74.73', '0.27', '06:32 ↑ (96°)', '18:26 ↑ (264°)', '11:54:07', '+0:45'), (2017, 'March', 5, '95.00', '85.70', '79.00', '74.80', '4.20', '06:32 ↑ (96°)', '18:27 ↑ (264°)', '11:54:52', '+0:45'), (2017, 'March', 6, '99.00', '86.70', '79.00', '74.86', '4.14', '06:31 ↑ (96°)', '18:27 ↑ (265°)', '11:55:37', '+0:45'), (2017, 'March', 7, '93.00', '86.30', '81.00', '74.95', '6.05', '06:30 ↑ (95°)', '18:27 ↑ (265°)', '11:56:23', '+0:45'), (2017, 'March', 8, '95.00', '85.90', '82.00', '75.06', '6.94', '06:30 ↑ (95°)', '18:27 ↑ (265°)', '11:57:08', '+0:45'), (2017, 'March', 9, '93.00', '85.40', '81.00', '75.15', '5.85', '06:29 ↑ (94°)', '18:27 ↑ (266°)', '11:57:54', '+0:45'), (2017, 'March', 10, '93.00', '85.70', '81.00', '75.23', '5.77', '06:28 ↑ (94°)', '18:27 ↑ (266°)', '11:58:40', '+0:45'), (2017, 'March', 11, '95.00', '86.00', 
'81.00', '75.31', '5.69', '06:28 ↑ (94°)', '18:27 ↑ (267°)', '11:59:26', '+0:45'), (2017, 'March', 12, '97.00', '87.20', '81.00', '75.39', '5.61', '06:27 ↑ (93°)', '18:28 ↑ (267°)', '12:00:12', '+0:45'), (2017, 'March', 13, '100.00', '88.20', '81.00', '75.47', '5.53', '06:27 ↑ (93°)', '18:28 ↑ (267°)', '12:00:57', '+0:45'), (2017, 'March', 14, '99.00', '88.20', '81.00', '75.55', '5.45', '06:26 ↑ (92°)', '18:28 ↑ (268°)', '12:01:43', '+0:46'), (2017, 'March', 15, '97.00', '87.70', '81.00', '75.62', '5.38', '06:25 ↑ (92°)', '18:28 ↑ (268°)', '12:02:30', '+0:46'), (2017, 'March', 16, '95.00', '86.40', '79.00', '75.67', '3.33', '06:25 ↑ (92°)', '18:28 ↑ (269°)', '12:03:16', '+0:46'), (2017, 'March', 17, '95.00', '83.80', '77.00', '75.68', '1.32', '06:24 ↑ (91°)', '18:28 ↑ (269°)', '12:04:02', '+0:46'), (2017, 'March', 18, '95.00', '85.80', '79.00', '75.73', '3.27', '06:23 ↑ (91°)', '18:28 ↑ (269°)', '12:04:48', '+0:46'), (2017, 'March', 19, '97.00', '86.90', '79.00', '75.77', '3.23', '06:23 ↑ (90°)', '18:28 ↑ (270°)', '12:05:34', '+0:46'), (2017, 'March', 20, '95.00', '86.70', '79.00', '75.81', '3.19', '06:22 ↑ (90°)', '18:28 ↑ (270°)', '12:06:20', '+0:46'), (2017, 'March', 21, '99.00', '88.40', '82.00', '75.89', '6.11', '06:21 ↑ (90°)', '18:28 ↑ (271°)', '12:07:07', '+0:46'), (2017, 'March', 22, '99.00', '88.20', '81.00', '75.95', '5.05', '06:20 ↑ (89°)', '18:29 ↑ (271°)', '12:07:53', '+0:46'), (2017, 'March', 23, '99.00', '88.20', '81.00', '76.01', '4.99', '06:20 ↑ (89°)', '18:29 ↑ (271°)', '12:08:39', '+0:46'), (2017, 'March', 24, '95.00', '88.20', '84.00', '76.11', '7.89', '06:19 ↑ (88°)', '18:29 ↑ (272°)', '12:09:25', '+0:46'), (2017, 'March', 25, '97.00', '87.60', '79.00', '76.14', '2.86', '06:18 ↑ (88°)', '18:29 ↑ (272°)', '12:10:11', '+0:46'), (2017, 'March', 26, '99.00', '85.60', '79.00', '76.18', '2.82', '06:18 ↑ (88°)', '18:29 ↑ (273°)', '12:10:58', '+0:46'), (2017, 'March', 27, '84.00', '79.60', '77.00', '76.19', '0.81', '06:17 ↑ (87°)', '18:29 ↑ (273°)', 
'12:11:44', '+0:46'), (2017, 'March', 28, '93.00', '84.30', '75.00', '76.17', '-1.17', '06:16 ↑ (87°)', '18:29 ↑ (273°)', '12:12:30', '+0:46'), (2017, 'March', 29, '93.00', '83.30', '75.00', '76.16', '-1.16', '06:16 ↑ (86°)', '18:29 ↑ (274°)', '12:13:16', '+0:46'), (2017, 'March', 30, '93.00', '83.80', '77.00', '76.17', '0.83', '06:15 ↑ (86°)', '18:29 ↑ (274°)', '12:14:02', '+0:45'), (2017, 'March', 31, '95.00', '87.00', '79.00', '76.20', '2.80', '06:14 ↑ (86°)', '18:29 ↑ (275°)', '12:14:48', '+0:45'), (2017, 'April', 1, '90.00', '83.10', '75.00', '76.19', '-1.19', '06:14 ↑ (85°)', '18:29 ↑ (275°)', '12:15:33', '+0:45'), (2017, 'April', 2, '91.00', '84.20', '77.00', '76.20', '0.80', '06:13 ↑ (85°)', '18:29 ↑ (275°)', '12:16:19', '+0:45'), (2017, 'April', 3, '93.00', '85.10', '79.00', '76.23', '2.77', '06:12 ↑ (84°)', '18:30 ↑ (276°)', '12:17:05', '+0:45'), (2017, 'April', 4, '93.00', '83.00', '77.00', '76.23', '0.77', '06:12 ↑ (84°)', '18:30 ↑ (276°)', '12:17:50', '+0:45'), (2017, 'April', 5, '95.00', '84.00', '77.00', '76.24', '0.76', '06:11 ↑ (84°)', '18:30 ↑ (277°)', '12:18:36', '+0:45'), (2017, 'April', 6, '90.00', '84.40', '79.00', '76.27', '2.73', '06:10 ↑ (83°)', '18:30 ↑ (277°)', '12:19:21', '+0:45'), (2017, 'April', 7, '91.00', '83.90', '79.00', '76.30', '2.70', '06:10 ↑ (83°)', '18:30 ↑ (277°)', '12:20:06', '+0:45'), (2017, 'April', 8, '95.00', '85.50', '79.00', '76.33', '2.67', '06:09 ↑ (82°)', '18:30 ↑ (278°)', '12:20:51', '+0:45'), (2017, 'April', 9, '97.00', '87.50', '81.00', '76.37', '4.63', '06:08 ↑ (82°)', '18:30 ↑ (278°)', '12:21:36', '+0:44'), (2017, 'April', 10, '97.00', '88.50', '82.00', '76.43', '5.57', '06:08 ↑ (82°)', '18:30 ↑ (279°)', '12:22:21', '+0:44'), (2017, 'April', 11, '97.00', '89.00', '82.00', '76.49', '5.51', '06:07 ↑ (81°)', '18:30 ↑ (279°)', '12:23:06', '+0:44'), (2017, 'April', 12, '97.00', '88.80', '82.00', '76.54', '5.46', '06:06 ↑ (81°)', '18:30 ↑ (279°)', '12:23:50', '+0:44'), (2017, 'April', 13, '97.00', '87.70', '81.00', 
'76.58', '4.42', '06:06 ↑ (81°)', '18:31 ↑ (280°)', '12:24:35', '+0:44'), (2017, 'April', 14, '95.00', '87.20', '79.00', '76.61', '2.39', '06:05 ↑ (80°)', '18:31 ↑ (280°)', '12:25:19', '+0:44'), (2017, 'April', 15, '95.00', '87.30', '81.00', '76.65', '4.35', '06:05 ↑ (80°)', '18:31 ↑ (280°)', '12:26:03', '+0:43'), (2017, 'April', 16, '95.00', '86.80', '81.00', '76.69', '4.31', '06:04 ↑ (79°)', '18:31 ↑ (281°)', '12:26:46', '+0:43'), (2017, 'April', 17, '93.00', '86.20', '79.00', '76.71', '2.29', '06:03 ↑ (79°)', '18:31 ↑ (281°)', '12:27:30', '+0:43'), (2017, 'April', 18, '97.00', '87.80', '81.00', '76.75', '4.25', '06:03 ↑ (79°)', '18:31 ↑ (282°)', '12:28:13', '+0:43'), (2017, 'April', 19, '99.00', '88.80', '84.00', '76.82', '7.18', '06:02 ↑ (78°)', '18:31 ↑ (282°)', '12:28:56', '+0:42'), (2017, 'April', 20, '99.00', '89.40', '84.00', '76.88', '7.12', '06:02 ↑ (78°)', '18:31 ↑ (282°)', '12:29:39', '+0:42'), (2017, 'April', 21, '100.00', '90.60', '82.00', '76.93', '5.07', '06:01 ↑ (78°)', '18:32 ↑ (283°)', '12:30:21', '+0:42'), (2017, 'April', 22, '100.00', '89.80', '82.00', '76.97', '5.03', '06:01 ↑ (77°)', '18:32 ↑ (283°)', '12:31:04', '+0:42'), (2017, 'April', 23, '100.00', '89.90', '81.00', '77.01', '3.99', '06:00 ↑ (77°)', '18:32 ↑ (283°)', '12:31:46', '+0:41'), (2017, 'April', 24, '100.00', '90.10', '82.00', '77.05', '4.95', '05:59 ↑ (77°)', '18:32 ↑ (284°)', '12:32:27', '+0:41'), (2017, 'April', 25, '100.00', '90.40', '82.00', '77.10', '4.90', '05:59 ↑ (76°)', '18:32 ↑ (284°)', '12:33:09', '+0:41'), (2017, 'April', 26, '99.00', '89.60', '82.00', '77.14', '4.86', '05:58 ↑ (76°)', '18:32 ↑ (284°)', '12:33:50', '+0:40'), (2017, 'April', 27, '99.00', '90.10', '79.00', '77.15', '1.85', '05:58 ↑ (76°)', '18:33 ↑ (285°)', '12:34:30', '+0:40'), (2017, 'April', 28, '95.00', '85.90', '81.00', '77.19', '3.81', '05:57 ↑ (75°)', '18:33 ↑ (285°)', '12:35:11', '+0:40'), (2017, 'April', 29, '95.00', '84.70', '75.00', '77.17', '-2.17', '05:57 ↑ (75°)', '18:33 ↑ (285°)', 
'12:35:51', '+0:39'), (2017, 'April', 30, '95.00', '85.70', '77.00', '77.17', '-0.17', '05:56 ↑ (75°)', '18:33 ↑ (286°)', '12:36:30', '+0:39'), (2017, 'May', 1, '99.00', '88.90', '82.00', '77.21', '4.79', '05:56 ↑ (74°)', '18:33 ↑ (286°)', '12:37:09', '+0:39'), (2017, 'May', 2, '95.00', '87.00', '82.00', '77.25', '4.75', '05:56 ↑ (74°)', '18:34 ↑ (286°)', '12:37:48', '+0:38'), (2017, 'May', 3, '97.00', '89.50', '82.00', '77.28', '4.72', '05:55 ↑ (74°)', '18:34 ↑ (286°)', '12:38:27', '+0:38'), (2017, 'May', 4, '97.00', '89.20', '86.00', '77.35', '8.65', '05:55 ↑ (73°)', '18:34 ↑ (287°)', '12:39:05', '+0:37'), (2017, 'May', 5, '91.00', '84.00', '75.00', '77.34', '-2.34', '05:54 ↑ (73°)', '18:34 ↑ (287°)', '12:39:42', '+0:37'), (2017, 'May', 6, '95.00', '86.80', '81.00', '77.37', '3.63', '05:54 ↑ (73°)', '18:34 ↑ (287°)', '12:40:19', '+0:36'), (2017, 'May', 7, '93.00', '84.20', '75.00', '77.35', '-2.35', '05:54 ↑ (72°)', '18:35 ↑ (288°)', '12:40:56', '+0:36'), (2017, 'May', 8, '93.00', '85.80', '81.00', '77.38', '3.63', '05:53 ↑ (72°)', '18:35 ↑ (288°)', '12:41:32', '+0:35'), (2017, 'May', 9, '95.00', '86.10', '79.00', '77.39', '1.61', '05:53 ↑ (72°)', '18:35 ↑ (288°)', '12:42:08', '+0:35'), (2017, 'May', 10, '93.00', '83.30', '77.00', '77.38', '-0.38', '05:53 ↑ (72°)', '18:35 ↑ (288°)', '12:42:43', '+0:34'), (2017, 'May', 11, '93.00', '84.60', '79.00', '77.40', '1.60', '05:52 ↑ (71°)', '18:36 ↑ (289°)', '12:43:17', '+0:34'), (2017, 'May', 12, '88.00', '82.30', '79.00', '77.41', '1.59', '05:52 ↑ (71°)', '18:36 ↑ (289°)', '12:43:51', '+0:33'), (2017, 'May', 13, '91.00', '82.10', '79.00', '77.42', '1.58', '05:52 ↑ (71°)', '18:36 ↑ (289°)', '12:44:25', '+0:33'), (2017, 'May', 14, '93.00', '84.50', '77.00', '77.42', '-0.42', '05:51 ↑ (71°)', '18:36 ↑ (290°)', '12:44:58', '+0:32'), (2017, 'May', 15, '93.00', '85.70', '79.00', '77.43', '1.57', '05:51 ↑ (70°)', '18:37 ↑ (290°)', '12:45:30', '+0:32'), (2017, 'May', 16, '91.00', '82.60', '77.00', '77.43', '-0.43', '05:51 ↑ 
(70°)', '18:37 ↑ (290°)', '12:46:02', '+0:31'), (2017, 'May', 17, '84.00', '81.30', '79.00', '77.44', '1.56', '05:51 ↑ (70°)', '18:37 ↑ (290°)', '12:46:33', '+0:30'), (2017, 'May', 18, '95.00', '86.80', '79.00', '77.45', '1.55', '05:50 ↑ (70°)', '18:38 ↑ (290°)', '12:47:03', '+0:30'), (2017, 'May', 19, '91.00', '84.50', '77.00', '77.45', '-0.45', '05:50 ↑ (69°)', '18:38 ↑ (291°)', '12:47:33', '+0:29'), (2017, 'May', 20, '95.00', '86.50', '79.00', '77.46', '1.54', '05:50 ↑ (69°)', '18:38 ↑ (291°)', '12:48:02', '+0:28'), (2017, 'May', 21, '97.00', '89.80', '84.00', '77.50', '6.50', '05:50 ↑ (69°)', '18:38 ↑ (291°)', '12:48:30', '+0:28'), (2017, 'May', 22, '97.00', '90.00', '84.00', '77.55', '6.45', '05:50 ↑ (69°)', '18:39 ↑ (291°)', '12:48:58', '+0:27'), (2017, 'May', 23, '95.00', '88.20', '81.00', '77.57', '3.43', '05:50 ↑ (69°)', '18:39 ↑ (292°)', '12:49:25', '+0:26'), (2017, 'May', 24, '95.00', '85.90', '75.00', '77.56', '-2.56', '05:49 ↑ (68°)', '18:39 ↑ (292°)', '12:49:51', '+0:26'), (2017, 'May', 25, '91.00', '83.50', '75.00', '77.54', '-2.54', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:50:17', '+0:25'), (2017, 'May', 26, '90.00', '83.00', '77.00', '77.53', '-0.53', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:50:41', '+0:24'), (2017, 'May', 27, '90.00', '81.50', '79.00', '77.54', '1.46', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:51:05', '+0:23'), (2017, 'May', 28, '91.00', '82.60', '79.00', '77.55', '1.45', '05:49 ↑ (68°)', '18:41 ↑ (292°)', '12:51:28', '+0:22'), (2017, 'May', 29, '90.00', '83.80', '79.00', '77.56', '1.44', '05:49 ↑ (68°)', '18:41 ↑ (293°)', '12:51:50', '+0:22'), (2017, 'May', 30, '88.00', '82.70', '75.00', '77.55', '-2.55', '05:49 ↑ (67°)', '18:41 ↑ (293°)', '12:52:12', '+0:21'), (2017, 'May', 31, '91.00', '84.80', '79.00', '77.56', '1.44', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:52:32', '+0:20'), (2017, 'June', 1, '93.00', '86.20', '81.00', '77.58', '3.42', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:52:52', '+0:19'), (2017, 'June', 2, '95.00', '85.90', 
'79.00', '77.59', '1.41', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:53:11', '+0:18'), (2017, 'June', 3, '95.00', '86.20', '75.00', '77.57', '-2.57', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:53:29', '+0:17'), (2017, 'June', 4, '91.00', '83.50', '75.00', '77.55', '-2.55', '05:49 ↑ (67°)', '18:43 ↑ (293°)', '12:53:46', '+0:16'), (2017, 'June', 5, '95.00', '84.10', '79.00', '77.56', '1.44', '05:49 ↑ (67°)', '18:43 ↑ (294°)', '12:54:02', '+0:16'), (2017, 'June', 6, '91.00', '83.20', '77.00', '77.56', '-0.56', '05:49 ↑ (66°)', '18:43 ↑ (294°)', '12:54:18', '+0:15'), (2017, 'June', 7, '91.00', '83.50', '75.00', '77.54', '-2.54', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:54:32', '+0:14'), (2017, 'June', 8, '91.00', '83.20', '77.00', '77.54', '-0.54', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:54:45', '+0:13'), (2017, 'June', 9, '91.00', '84.60', '75.00', '77.53', '-2.53', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:54:58', '+0:12'), (2017, 'June', 10, '93.00', '83.80', '73.00', '77.50', '-4.50', '05:49 ↑ (66°)', '18:45 ↑ (294°)', '12:55:10', '+0:11'), (2017, 'June', 11, '91.00', '84.90', '79.00', '77.51', '1.49', '05:49 ↑ (66°)', '18:45 ↑ (294°)', '12:55:20', '+0:10'), (2017, 'June', 12, '93.00', '86.80', '81.00', '77.53', '3.47', '05:50 ↑ (66°)', '18:45 ↑ (294°)', '12:55:30', '+0:09'), (2017, 'June', 13, '93.00', '87.30', '79.00', '77.54', '1.46', '05:50 ↑ (66°)', '18:45 ↑ (294°)', '12:55:38', '+0:08'), (2017, 'June', 14, '95.00', '86.80', '81.00', '77.56', '3.44', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:55:46', '+0:07'), (2017, 'June', 15, '91.00', '86.70', '82.00', '77.58', '4.42', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:55:53', '+0:06'), (2017, 'June', 16, '93.00', '87.80', '82.00', '77.61', '4.39', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:55:58', '+0:05'), (2017, 'June', 17, '95.00', '88.70', '82.00', '77.64', '4.36', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:56:03', '+0:04'), (2017, 'June', 18, '93.00', '86.40', '77.00', '77.63', '-0.63', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:07', 
'+0:03'), (2017, 'June', 19, '95.00', '84.40', '77.00', '77.63', '-0.63', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:09', '+0:02'), (2017, 'June', 20, '93.00', '83.40', '77.00', '77.63', '-0.63', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:11', '+0:01'), (2017, 'June', 21, '93.00', '85.10', '77.00', '77.62', '-0.62', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:12', '< 1s'), (2017, 'June', 22, '93.00', '83.40', '77.00', '77.62', '-0.62', '05:51 ↑ (66°)', '18:48 ↑ (294°)', '12:56:11', '< 1s'), (2017, 'June', 23, '93.00', '85.10', '79.00', '77.63', '1.37', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:10', '−0:01'), (2017, 'June', 24, '93.00', '84.90', '81.00', '77.65', '3.35', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:08', '−0:02'), (2017, 'June', 25, '93.00', '86.10', '81.00', '77.66', '3.34', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:04', '−0:03'), (2017, 'June', 26, '91.00', '86.90', '81.00', '77.68', '3.32', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:00', '−0:04'), (2017, 'June', 27, '91.00', '86.70', '82.00', '77.71', '4.29', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:55', '−0:05'), (2017, 'June', 28, '93.00', '85.50', '77.00', '77.70', '-0.70', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:48', '−0:06'), (2017, 'June', 29, '91.00', '83.80', '77.00', '77.70', '-0.70', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:41', '−0:07'), (2017, 'June', 30, '90.00', '83.10', '75.00', '77.69', '-2.69', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:33', '−0:08'), (2017, 'July', 1, '91.00', '83.00', '77.00', '77.68', '-0.68', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:55:24', '−0:09'), (2017, 'July', 2, '93.00', '84.10', '79.00', '77.69', '1.31', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:55:13', '−0:10'), (2017, 'July', 3, '93.00', '83.60', '77.00', '77.68', '-0.68', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:55:02', '−0:11'), (2017, 'July', 4, '91.00', '81.60', '75.00', '77.67', '-2.67', '05:55 ↑ (66°)', '18:49 ↑ (294°)', '12:54:50', '−0:12'), (2017, 'July', 5, '91.00', '83.50', '77.00', '77.67', '-0.67', 
'05:55 ↑ (66°)', '18:49 ↑ (294°)', '12:54:37', '−0:13'), (2017, 'July', 6, '93.00', '84.80', '77.00', '77.66', '-0.66', '05:55 ↑ (66°)', '18:49 ↑ (294°)', '12:54:23', '−0:14'), (2017, 'July', 7, '91.00', '83.30', '77.00', '77.66', '-0.66', '05:55 ↑ (66°)', '18:49 ↑ (293°)', '12:54:08', '−0:15'), (2017, 'July', 8, '90.00', '82.80', '77.00', '77.66', '-0.66', '05:56 ↑ (67°)', '18:49 ↑ (293°)', '12:53:53', '−0:16'), (2017, 'July', 9, '93.00', '85.40', '77.00', '77.65', '-0.65', '05:56 ↑ (67°)', '18:49 ↑ (293°)', '12:53:36', '−0:16'), (2017, 'July', 10, '90.00', '82.90', '75.00', '77.64', '-2.64', '05:56 ↑ (67°)', '18:49 ↑ (293°)', '12:53:18', '−0:17'), (2017, 'July', 11, '91.00', '85.40', '77.00', '77.64', '-0.64', '05:57 ↑ (67°)', '18:49 ↑ (293°)', '12:53:00', '−0:18'), (2017, 'July', 12, '88.00', '82.20', '77.00', '77.63', '-0.63', '05:57 ↑ (67°)', '18:49 ↑ (293°)', '12:52:41', '−0:19'), (2017, 'July', 13, '90.00', '82.90', '77.00', '77.63', '-0.63', '05:57 ↑ (67°)', '18:49 ↑ (293°)', '12:52:20', '−0:20'), (2017, 'July', 14, '90.00', '85.10', '82.00', '77.65', '4.35', '05:57 ↑ (67°)', '18:49 ↑ (292°)', '12:51:59', '−0:21'), (2017, 'July', 15, '91.00', '85.90', '81.00', '77.67', '3.33', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:51:38', '−0:21'), (2017, 'July', 16, '91.00', '86.30', '82.00', '77.69', '4.31', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:51:15', '−0:22'), (2017, 'July', 17, '90.00', '84.10', '79.00', '77.70', '1.30', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:50:52', '−0:23'), (2017, 'July', 18, '91.00', '85.20', '81.00', '77.71', '3.29', '05:59 ↑ (68°)', '18:49 ↑ (292°)', '12:50:27', '−0:24'), (2017, 'July', 19, '90.00', '82.70', '75.00', '77.70', '-2.70', '05:59 ↑ (68°)', '18:49 ↑ (292°)', '12:50:03', '−0:25'), (2017, 'July', 20, '86.00', '81.10', '75.00', '77.69', '-2.69', '05:59 ↑ (68°)', '18:49 ↑ (291°)', '12:49:37', '−0:25'), (2017, 'July', 21, '90.00', '84.20', '79.00', '77.69', '1.31', '05:59 ↑ (69°)', '18:48 ↑ (291°)', '12:49:10', '−0:26'), (2017, 'July', 
22, '91.00', '84.90', '79.00', '77.70', '1.30', '06:00 ↑ (69°)', '18:48 ↑ (291°)', '12:48:43', '−0:27'), (2017, 'July', 23, '93.00', '86.10', '81.00', '77.72', '3.28', '06:00 ↑ (69°)', '18:48 ↑ (291°)', '12:48:16', '−0:27'), (2017, 'July', 24, '91.00', '84.60', '81.00', '77.73', '3.27', '06:00 ↑ (69°)', '18:48 ↑ (291°)', '12:47:47', '−0:28'), (2017, 'July', 25, '91.00', '85.30', '81.00', '77.75', '3.25', '06:00 ↑ (70°)', '18:48 ↑ (290°)', '12:47:18', '−0:29'), (2017, 'July', 26, '86.00', '82.90', '81.00', '77.76', '3.24', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:46:48', '−0:29'), (2017, 'July', 27, '91.00', '84.90', '79.00', '77.77', '1.23', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:46:18', '−0:30'), (2017, 'July', 28, '91.00', '83.70', '77.00', '77.77', '-0.77', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:45:47', '−0:31'), (2017, 'July', 29, '91.00', '83.00', '77.00', '77.76', '-0.76', '06:01 ↑ (70°)', '18:46 ↑ (289°)', '12:45:15', '−0:31'), (2017, 'July', 30, '95.00', '86.50', '81.00', '77.78', '3.22', '06:02 ↑ (71°)', '18:46 ↑ (289°)', '12:44:43', '−0:32'), (2017, 'July', 31, '95.00', '87.40', '82.00', '77.80', '4.20', '06:02 ↑ (71°)', '18:46 ↑ (289°)', '12:44:10', '−0:32'), (2017, 'August', 1, '95.00', '87.50', '81.00', '77.81', '3.19', '06:02 ↑ (71°)', '18:46 ↑ (289°)', '12:43:36', '−0:33'), (2017, 'August', 2, '97.00', '85.80', '73.00', '77.79', '-4.79', '06:02 ↑ (71°)', '18:45 ↑ (288°)', '12:43:03', '−0:34'), (2017, 'August', 3, '90.00', '83.90', '79.00', '77.80', '1.20', '06:02 ↑ (72°)', '18:45 ↑ (288°)', '12:42:28', '−0:34'), (2017, 'August', 4, '91.00', '84.50', '81.00', '77.81', '3.19', '06:03 ↑ (72°)', '18:44 ↑ (288°)', '12:41:53', '−0:35'), (2017, 'August', 5, '91.00', '84.80', '81.00', '77.82', '3.18', '06:03 ↑ (72°)', '18:44 ↑ (288°)', '12:41:18', '−0:35'), (2017, 'August', 6, '90.00', '83.70', '79.00', '77.83', '1.17', '06:03 ↑ (73°)', '18:44 ↑ (287°)', '12:40:42', '−0:36'), (2017, 'August', 7, '91.00', '83.10', '77.00', '77.83', '-0.83', '06:03 ↑ (73°)', 
'18:43 ↑ (287°)', '12:40:05', '−0:36'), (2017, 'August', 8, '91.00', '84.70', '79.00', '77.83', '1.17', '06:03 ↑ (73°)', '18:43 ↑ (287°)', '12:39:28', '−0:36'), (2017, 'August', 9, '93.00', '86.30', '82.00', '77.85', '4.15', '06:04 ↑ (73°)', '18:42 ↑ (286°)', '12:38:51', '−0:37'), (2017, 'August', 10, '95.00', '87.90', '81.00', '77.86', '3.14', '06:04 ↑ (74°)', '18:42 ↑ (286°)', '12:38:13', '−0:37'), (2017, 'August', 11, '95.00', '87.20', '79.00', '77.87', '1.13', '06:04 ↑ (74°)', '18:41 ↑ (286°)', '12:37:35', '−0:38'), (2017, 'August', 12, '95.00', '87.30', '81.00', '77.88', '3.12', '06:04 ↑ (74°)', '18:41 ↑ (285°)', '12:36:57', '−0:38'), (2017, 'August', 13, '95.00', '87.60', '81.00', '77.90', '3.10', '06:04 ↑ (75°)', '18:40 ↑ (285°)', '12:36:18', '−0:38'), (2017, 'August', 14, '95.00', '86.40', '73.00', '77.88', '-4.88', '06:04 ↑ (75°)', '18:40 ↑ (285°)', '12:35:39', '−0:39'), (2017, 'August', 15, '91.00', '85.70', '81.00', '77.89', '3.11', '06:05 ↑ (75°)', '18:39 ↑ (285°)', '12:34:59', '−0:39'), (2017, 'August', 16, '90.00', '82.70', '79.00', '77.89', '1.11', '06:05 ↑ (76°)', '18:39 ↑ (284°)', '12:34:19', '−0:40'), (2017, 'August', 17, '86.00', '80.00', '77.00', '77.89', '-0.89', '06:05 ↑ (76°)', '18:38 ↑ (284°)', '12:33:39', '−0:40'), (2017, 'August', 18, '90.00', '83.80', '77.00', '77.89', '-0.89', '06:05 ↑ (76°)', '18:38 ↑ (284°)', '12:32:58', '−0:40'), (2017, 'August', 19, '91.00', '84.50', '79.00', '77.89', '1.11', '06:05 ↑ (77°)', '18:37 ↑ (283°)', '12:32:17', '−0:40'), (2017, 'August', 20, '91.00', '84.10', '79.00', '77.90', '1.10', '06:05 ↑ (77°)', '18:37 ↑ (283°)', '12:31:36', '−0:41'), (2017, 'August', 21, '91.00', '85.80', '81.00', '77.91', '3.09', '06:05 ↑ (77°)', '18:36 ↑ (283°)', '12:30:54', '−0:41'), (2017, 'August', 22, '93.00', '86.70', '81.00', '77.92', '3.08', '06:05 ↑ (78°)', '18:35 ↑ (282°)', '12:30:13', '−0:41'), (2017, 'August', 23, '95.00', '85.90', '81.00', '77.94', '3.06', '06:05 ↑ (78°)', '18:35 ↑ (282°)', '12:29:31', '−0:42'), (2017, 
'August', 24, '91.00', '84.70', '79.00', '77.94', '1.06', '06:05 ↑ (78°)', '18:34 ↑ (281°)', '12:28:48', '−0:42'), (2017, 'August', 25, '91.00', '85.80', '79.00', '77.95', '1.05', '06:06 ↑ (79°)', '18:34 ↑ (281°)', '12:28:06', '−0:42'), (2017, 'August', 26, '91.00', '84.10', '79.00', '77.95', '1.05', '06:06 ↑ (79°)', '18:33 ↑ (281°)', '12:27:23', '−0:42'), (2017, 'August', 27, '90.00', '81.90', '77.00', '77.95', '-0.95', '06:06 ↑ (79°)', '18:32 ↑ (280°)', '12:26:40', '−0:42'), (2017, 'August', 28, '91.00', '82.80', '77.00', '77.94', '-0.94', '06:06 ↑ (80°)', '18:32 ↑ (280°)', '12:25:57', '−0:43'), (2017, 'August', 29, '86.00', '80.60', '77.00', '77.94', '-0.94', '06:06 ↑ (80°)', '18:31 ↑ (280°)', '12:25:14', '−0:43'), (2017, 'August', 30, '93.00', '84.60', '77.00', '77.93', '-0.93', '06:06 ↑ (81°)', '18:30 ↑ (279°)', '12:24:30', '−0:43'), (2017, 'August', 31, '91.00', '85.30', '77.00', '77.93', '-0.93', '06:06 ↑ (81°)', '18:30 ↑ (279°)', '12:23:47', '−0:43'), (2017, 'September', 1, '95.00', '87.10', '79.00', '77.93', '1.07', '06:06 ↑ (81°)', '18:29 ↑ (279°)', '12:23:03', '−0:43'), (2017, 'September', 2, '93.00', '85.20', '81.00', '77.95', '3.05', '06:06 ↑ (82°)', '18:28 ↑ (278°)', '12:22:19', '−0:44'), (2017, 'September', 3, '93.00', '86.20', '81.00', '77.96', '3.04', '06:06 ↑ (82°)', '18:28 ↑ (278°)', '12:21:35', '−0:44'), (2017, 'September', 4, '95.00', '87.20', '81.00', '77.97', '3.03', '06:06 ↑ (82°)', '18:27 ↑ (277°)', '12:20:51', '−0:44'), (2017, 'September', 5, '93.00', '85.80', '77.00', '77.97', '-0.97', '06:06 ↑ (83°)', '18:26 ↑ (277°)', '12:20:06', '−0:44'), (2017, 'September', 6, '91.00', '83.80', '79.00', '77.97', '1.03', '06:06 ↑ (83°)', '18:25 ↑ (277°)', '12:19:22', '−0:44'), (2017, 'September', 7, '90.00', '84.70', '77.00', '77.97', '-0.97', '06:06 ↑ (84°)', '18:25 ↑ (276°)', '12:18:37', '−0:44'), (2017, 'September', 8, '95.00', '86.20', '82.00', '77.98', '4.02', '06:06 ↑ (84°)', '18:24 ↑ (276°)', '12:17:52', '−0:44'), (2017, 'September', 9, '97.00', 
'87.30', '81.00', '78.00', '3.00', '06:06 ↑ (84°)', '18:23 ↑ (275°)', '12:17:07', '−0:44'), (2017, 'September', 10, '95.00', '82.80', '77.00', '77.99', '-0.99', '06:06 ↑ (85°)', '18:23 ↑ (275°)', '12:16:22', '−0:44'), (2017, 'September', 11, '91.00', '83.50', '79.00', '78.00', '1.00', '06:06 ↑ (85°)', '18:22 ↑ (275°)', '12:15:37', '−0:45'), (2017, 'September', 12, '93.00', '85.00', '79.00', '78.00', '1.00', '06:06 ↑ (85°)', '18:21 ↑ (274°)', '12:14:52', '−0:45'), (2017, 'September', 13, '95.00', '86.50', '79.00', '78.00', '1.00', '06:06 ↑ (86°)', '18:20 ↑ (274°)', '12:14:07', '−0:45'), (2017, 'September', 14, '95.00', '88.30', '81.00', '78.02', '2.98', '06:06 ↑ (86°)', '18:20 ↑ (274°)', '12:13:21', '−0:45'), (2017, 'September', 15, '93.00', '83.70', '77.00', '78.01', '-1.01', '06:06 ↑ (87°)', '18:19 ↑ (273°)', '12:12:36', '−0:45'), (2017, 'September', 16, '91.00', '83.70', '79.00', '78.02', '0.98', '06:06 ↑ (87°)', '18:18 ↑ (273°)', '12:11:51', '−0:45'), (2017, 'September', 17, '86.00', '82.30', '79.00', '78.02', '0.98', '06:06 ↑ (87°)', '18:17 ↑ (272°)', '12:11:05', '−0:45'), (2017, 'September', 18, '91.00', '85.10', '79.00', '78.02', '0.98', '06:07 ↑ (88°)', '18:17 ↑ (272°)', '12:10:20', '−0:45'), (2017, 'September', 19, '93.00', '86.80', '75.00', '78.01', '-3.01', '06:07 ↑ (88°)', '18:16 ↑ (272°)', '12:09:34', '−0:45'), (2017, 'September', 20, '90.00', '83.80', '79.00', '78.02', '0.98', '06:07 ↑ (89°)', '18:15 ↑ (271°)', '12:08:49', '−0:45'), (2017, 'September', 21, '95.00', '86.90', '81.00', '78.03', '2.97', '06:07 ↑ (89°)', '18:14 ↑ (271°)', '12:08:03', '−0:45'), (2017, 'September', 22, '90.00', '82.00', '77.00', '78.02', '-1.02', '06:07 ↑ (89°)', '18:14 ↑ (270°)', '12:07:18', '−0:45'), (2017, 'September', 23, '91.00', '80.50', '75.00', '78.01', '-3.01', '06:07 ↑ (90°)', '18:13 ↑ (270°)', '12:06:32', '−0:45'), (2017, 'September', 24, '90.00', '81.80', '77.00', '78.01', '-1.01', '06:07 ↑ (90°)', '18:12 ↑ (270°)', '12:05:47', '−0:45'), (2017, 'September', 25, 
'90.00', '84.50', '79.00', '78.01', '0.99', '06:07 ↑ (91°)', '18:12 ↑ (269°)', '12:05:01', '−0:45'), (2017, 'September', 26, '91.00', '84.50', '75.00', '78.00', '-3.00', '06:07 ↑ (91°)', '18:11 ↑ (269°)', '12:04:16', '−0:45'), (2017, 'September', 27, '93.00', '85.00', '77.00', '78.00', '-1.00', '06:07 ↑ (91°)', '18:10 ↑ (268°)', '12:03:31', '−0:45'), (2017, 'September', 28, '93.00', '85.70', '75.00', '77.99', '-2.99', '06:07 ↑ (92°)', '18:09 ↑ (268°)', '12:02:45', '−0:45'), (2017, 'September', 29, '90.00', '82.70', '77.00', '77.98', '-0.98', '06:07 ↑ (92°)', '18:09 ↑ (268°)', '12:02:00', '−0:45'), (2017, 'September', 30, '90.00', '82.50', '77.00', '77.98', '-0.98', '06:07 ↑ (93°)', '18:08 ↑ (267°)', '12:01:14', '−0:45'), (2017, 'October', 1, '88.00', '82.60', '77.00', '77.97', '-0.97', '06:07 ↑ (93°)', '18:07 ↑ (267°)', '12:00:29', '−0:45'), (2017, 'October', 2, '88.00', '80.40', '77.00', '77.97', '-0.97', '06:07 ↑ (93°)', '18:07 ↑ (266°)', '11:59:44', '−0:45'), (2017, 'October', 3, '84.00', '78.30', '75.00', '77.96', '-2.96', '06:07 ↑ (94°)', '18:06 ↑ (266°)', '11:58:59', '−0:45'), (2017, 'October', 4, '84.00', '79.10', '75.00', '77.95', '-2.95', '06:07 ↑ (94°)', '18:05 ↑ (266°)', '11:58:14', '−0:45'), (2017, 'October', 5, '88.00', '80.30', '75.00', '77.94', '-2.94', '06:07 ↑ (95°)', '18:05 ↑ (265°)', '11:57:29', '−0:44'), (2017, 'October', 6, '90.00', '83.70', '79.00', '77.94', '1.06', '06:07 ↑ (95°)', '18:04 ↑ (265°)', '11:56:44', '−0:44'), (2017, 'October', 7, '91.00', '81.80', '77.00', '77.94', '-0.94', '06:07 ↑ (95°)', '18:03 ↑ (264°)', '11:55:59', '−0:44'), (2017, 'October', 8, '91.00', '82.90', '79.00', '77.94', '1.06', '06:07 ↑ (96°)', '18:03 ↑ (264°)', '11:55:14', '−0:44'), (2017, 'October', 9, '91.00', '84.10', '77.00', '77.94', '-0.94', '06:08 ↑ (96°)', '18:02 ↑ (264°)', '11:54:30', '−0:44'), (2017, 'October', 10, '91.00', '83.70', '79.00', '77.94', '1.06', '06:08 ↑ (97°)', '18:01 ↑ (263°)', '11:53:45', '−0:44'), (2017, 'October', 11, '91.00', '84.00', 
'77.00', '77.94', '-0.94', '06:08 ↑ (97°)', '18:01 ↑ (263°)', '11:53:01', '−0:44'), (2017, 'October', 12, '91.00', '84.40', '79.00', '77.94', '1.06', '06:08 ↑ (97°)', '18:00 ↑ (262°)', '11:52:17', '−0:44'), (2017, 'October', 13, '90.00', '83.40', '81.00', '77.95', '3.05', '06:08 ↑ (98°)', '17:59 ↑ (262°)', '11:51:33', '−0:43'), (2017, 'October', 14, '91.00', '81.60', '75.00', '77.94', '-2.94', '06:08 ↑ (98°)', '17:59 ↑ (262°)', '11:50:49', '−0:43'), (2017, 'October', 15, '93.00', '84.20', '79.00', '77.95', '1.05', '06:08 ↑ (99°)', '17:58 ↑ (261°)', '11:50:05', '−0:43'), (2017, 'October', 16, '91.00', '84.50', '77.00', '77.94', '-0.94', '06:08 ↑ (99°)', '17:58 ↑ (261°)', '11:49:21', '−0:43'), (2017, 'October', 17, '91.00', '83.50', '79.00', '77.95', '1.05', '06:09 ↑ (99°)', '17:57 ↑ (261°)', '11:48:38', '−0:43'), (2017, 'October', 18, '86.00', '81.70', '79.00', '77.95', '1.05', '06:09 ↑ (100°)', '17:56 ↑ (260°)', '11:47:55', '−0:43'), (2017, 'October', 19, '90.00', '84.60', '79.00', '77.96', '1.04', '06:09 ↑ (100°)', '17:56 ↑ (260°)', '11:47:12', '−0:42'), (2017, 'October', 20, '93.00', '85.80', '81.00', '77.97', '3.03', '06:09 ↑ (100°)', '17:55 ↑ (259°)', '11:46:29', '−0:42'), (2017, 'October', 21, '93.00', '86.50', '82.00', '77.98', '4.02', '06:09 ↑ (101°)', '17:55 ↑ (259°)', '11:45:47', '−0:42'), (2017, 'October', 22, '93.00', '84.20', '75.00', '77.97', '-2.97', '06:09 ↑ (101°)', '17:54 ↑ (259°)', '11:45:04', '−0:42'), (2017, 'October', 23, '93.00', '84.60', '77.00', '77.97', '-0.97', '06:10 ↑ (102°)', '17:54 ↑ (258°)', '11:44:22', '−0:41'), (2017, 'October', 24, '88.00', '82.10', '75.00', '77.96', '-2.96', '06:10 ↑ (102°)', '17:53 ↑ (258°)', '11:43:41', '−0:41'), (2017, 'October', 25, '88.00', '81.30', '77.00', '77.95', '-0.95', '06:10 ↑ (102°)', '17:53 ↑ (258°)', '11:42:59', '−0:41'), (2017, 'October', 26, '91.00', '83.70', '79.00', '77.96', '1.04', '06:10 ↑ (103°)', '17:53 ↑ (257°)', '11:42:18', '−0:41'), (2017, 'October', 27, '91.00', '84.70', '79.00', 
'77.96', '1.04', '06:11 ↑ (103°)', '17:52 ↑ (257°)', '11:41:37', '−0:40'), (2017, 'October', 28, '91.00', '84.50', '77.00', '77.96', '-0.96', '06:11 ↑ (103°)', '17:52 ↑ (257°)', '11:40:56', '−0:40'), (2017, 'October', 29, '91.00', '84.30', '79.00', '77.96', '1.04', '06:11 ↑ (104°)', '17:51 ↑ (256°)', '11:40:16', '−0:40'), (2017, 'October', 30, '88.00', '82.10', '75.00', '77.95', '-2.95', '06:11 ↑ (104°)', '17:51 ↑ (256°)', '11:39:36', '−0:39'), (2017, 'October', 31, '88.00', '80.50', '73.00', '77.93', '-4.93', '06:12 ↑ (104°)', '17:50 ↑ (256°)', '11:38:57', '−0:39'), (2017, 'November', 1, '86.00', '80.20', '77.00', '77.93', '-0.93', '06:12 ↑ (105°)', '17:50 ↑ (255°)', '11:38:17', '−0:39'), (2017, 'November', 2, '90.00', '82.30', '77.00', '77.93', '-0.93', '06:12 ↑ (105°)', '17:50 ↑ (255°)', '11:37:39', '−0:38'), (2017, 'November', 3, '88.00', '81.80', '75.00', '77.92', '-2.92', '06:13 ↑ (105°)', '17:49 ↑ (255°)', '11:37:00', '−0:38'), (2017, 'November', 4, '88.00', '81.20', '73.00', '77.90', '-4.90', '06:13 ↑ (106°)', '17:49 ↑ (254°)', '11:36:22', '−0:37'), (2017, 'November', 5, '82.00', '77.30', '73.00', '77.89', '-4.89', '06:13 ↑ (106°)', '17:49 ↑ (254°)', '11:35:44', '−0:37'), (2017, 'November', 6, '82.00', '77.20', '73.00', '77.87', '-4.87', '06:14 ↑ (106°)', '17:49 ↑ (254°)', '11:35:07', '−0:37'), (2017, 'November', 7, '88.00', '81.00', '75.00', '77.86', '-2.86', '06:14 ↑ (107°)', '17:48 ↑ (253°)', '11:34:30', '−0:36'), (2017, 'November', 8, '84.00', '80.80', '77.00', '77.86', '-0.86', '06:14 ↑ (107°)', '17:48 ↑ (253°)', '11:33:54', '−0:36'), (2017, 'November', 9, '88.00', '82.60', '77.00', '77.86', '-0.86', '06:15 ↑ (107°)', '17:48 ↑ (253°)', '11:33:18', '−0:35'), (2017, 'November', 10, '90.00', '83.70', '79.00', '77.86', '1.14', '06:15 ↑ (107°)', '17:48 ↑ (252°)', '11:32:43', '−0:35'), (2017, 'November', 11, '86.00', '80.70', '77.00', '77.86', '-0.86', '06:15 ↑ (108°)', '17:47 ↑ (252°)', '11:32:08', '−0:34'), (2017, 'November', 12, '93.00', '83.90', '77.00', 
'77.85', '-0.85', '06:16 ↑ (108°)', '17:47 ↑ (252°)', '11:31:34', '−0:34'), (2017, 'November', 13, '88.00', '83.20', '79.00', '77.86', '1.14', '06:16 ↑ (108°)', '17:47 ↑ (252°)', '11:31:00', '−0:33'), (2017, 'November', 14, '93.00', '85.00', '79.00', '77.86', '1.14', '06:17 ↑ (109°)', '17:47 ↑ (251°)', '11:30:27', '−0:33'), (2017, 'November', 15, '93.00', '84.00', '79.00', '77.87', '1.13', '06:17 ↑ (109°)', '17:47 ↑ (251°)', '11:29:54', '−0:32'), (2017, 'November', 16, '90.00', '82.80', '79.00', '77.87', '1.13', '06:18 ↑ (109°)', '17:47 ↑ (251°)', '11:29:22', '−0:31'), (2017, 'November', 17, '93.00', '85.10', '81.00', '77.88', '3.12', '06:18 ↑ (109°)', '17:47 ↑ (251°)', '11:28:51', '−0:31'), (2017, 'November', 18, '93.00', '86.60', '81.00', '77.89', '3.11', '06:19 ↑ (110°)', '17:47 ↑ (250°)', '11:28:20', '−0:30'), (2017, 'November', 19, '91.00', '85.40', '81.00', '77.90', '3.10', '06:19 ↑ (110°)', '17:47 ↑ (250°)', '11:27:50', '−0:29'), (2017, 'November', 20, '86.00', '81.50', '77.00', '77.90', '-0.90', '06:19 ↑ (110°)', '17:47 ↑ (250°)', '11:27:21', '−0:29'), (2017, 'November', 21, '90.00', '83.60', '79.00', '77.90', '1.10', '06:20 ↑ (110°)', '17:47 ↑ (250°)', '11:26:52', '−0:28'), (2017, 'November', 22, '90.00', '83.20', '79.00', '77.90', '1.10', '06:20 ↑ (111°)', '17:47 ↑ (249°)', '11:26:24', '−0:27'), (2017, 'November', 23, '90.00', '83.40', '79.00', '77.91', '1.09', '06:21 ↑ (111°)', '17:47 ↑ (249°)', '11:25:57', '−0:27'), (2017, 'November', 24, '88.00', '81.40', '75.00', '77.90', '-2.90', '06:21 ↑ (111°)', '17:47 ↑ (249°)', '11:25:30', '−0:26'), (2017, 'November', 25, '86.00', '79.90', '73.00', '77.88', '-4.88', '06:22 ↑ (111°)', '17:47 ↑ (249°)', '11:25:04', '−0:25'), (2017, 'November', 26, '88.00', '80.60', '73.00', '77.87', '-4.87', '06:22 ↑ (111°)', '17:47 ↑ (249°)', '11:24:39', '−0:24'), (2017, 'November', 27, '88.00', '81.20', '75.00', '77.86', '-2.86', '06:23 ↑ (112°)', '17:47 ↑ (248°)', '11:24:15', '−0:24'), (2017, 'November', 28, '88.00', '81.50', 
'75.00', '77.85', '-2.85', '06:24 ↑ (112°)', '17:47 ↑ (248°)', '11:23:52', '−0:23'), (2017, 'November', 29, '93.00', '84.30', '77.00', '77.85', '-0.85', '06:24 ↑ (112°)', '17:47 ↑ (248°)', '11:23:29', '−0:22'), (2017, 'November', 30, '90.00', '84.60', '81.00', '77.86', '3.14', '06:25 ↑ (112°)', '17:48 ↑ (248°)', '11:23:07', '−0:21'), (2017, 'December', 1, '88.00', '82.90', '77.00', '77.85', '-0.85', '06:25 ↑ (112°)', '17:48 ↑ (248°)', '11:22:46', '−0:20'), (2017, 'December', 2, '90.00', '82.80', '75.00', '77.85', '-2.85', '06:26 ↑ (112°)', '17:48 ↑ (248°)', '11:22:26', '−0:19'), (2017, 'December', 3, '91.00', '83.50', '75.00', '77.84', '-2.84', '06:26 ↑ (113°)', '17:48 ↑ (247°)', '11:22:07', '−0:18'), (2017, 'December', 4, '91.00', '84.50', '79.00', '77.84', '1.16', '06:27 ↑ (113°)', '17:49 ↑ (247°)', '11:21:49', '−0:18'), (2017, 'December', 5, '86.00', '81.10', '77.00', '77.84', '-0.84', '06:27 ↑ (113°)', '17:49 ↑ (247°)', '11:21:32', '−0:17'), (2017, 'December', 6, '88.00', '81.90', '75.00', '77.83', '-2.83', '06:28 ↑ (113°)', '17:49 ↑ (247°)', '11:21:15', '−0:16'), (2017, 'December', 7, '91.00', '84.20', '77.00', '77.83', '-0.83', '06:28 ↑ (113°)', '17:49 ↑ (247°)', '11:21:00', '−0:15'), (2017, 'December', 8, '91.00', '84.70', '77.00', '77.82', '-0.82', '06:29 ↑ (113°)', '17:50 ↑ (247°)', '11:20:45', '−0:14'), (2017, 'December', 9, '88.00', '81.40', '75.00', '77.82', '-2.82', '06:30 ↑ (113°)', '17:50 ↑ (247°)', '11:20:32', '−0:13'), (2017, 'December', 10, '90.00', '81.30', '73.00', '77.80', '-4.80', '06:30 ↑ (113°)', '17:50 ↑ (247°)', '11:20:19', '−0:12'), (2017, 'December', 11, '91.00', '83.10', '75.00', '77.79', '-2.79', '06:31 ↑ (113°)', '17:51 ↑ (246°)', '11:20:08', '−0:11'), (2017, 'December', 12, '90.00', '83.60', '77.00', '77.79', '-0.79', '06:31 ↑ (114°)', '17:51 ↑ (246°)', '11:19:57', '−0:10'), (2017, 'December', 13, '93.00', '83.80', '77.00', '77.79', '-0.79', '06:32 ↑ (114°)', '17:52 ↑ (246°)', '11:19:47', '−0:09'), (2017, 'December', 14, '91.00', 
'83.60', '79.00', '77.79', '1.21', '06:32 ↑ (114°)', '17:52 ↑ (246°)', '11:19:39', '−0:08'), (2017, 'December', 15, '91.00', '84.50', '77.00', '77.79', '-0.79', '06:33 ↑ (114°)', '17:52 ↑ (246°)', '11:19:31', '−0:07'), (2017, 'December', 16, '91.00', '84.20', '79.00', '77.79', '1.21', '06:33 ↑ (114°)', '17:53 ↑ (246°)', '11:19:25', '−0:06'), (2017, 'December', 17, '84.00', '78.40', '73.00', '77.78', '-4.78', '06:34 ↑ (114°)', '17:53 ↑ (246°)', '11:19:19', '−0:05'), (2017, 'December', 18, '81.00', '72.90', '66.00', '77.75', '-11.75', '06:34 ↑ (114°)', '17:54 ↑ (246°)', '11:19:15', '−0:04'), (2017, 'December', 19, '77.00', '69.20', '61.00', '77.70', '-16.70', '06:35 ↑ (114°)', '17:54 ↑ (246°)', '11:19:12', '−0:03'), (2017, 'December', 20, '77.00', '68.60', '57.00', '77.64', '-20.64', '06:35 ↑ (114°)', '17:55 ↑ (246°)', '11:19:09', '−0:02'), (2017, 'December', 21, '79.00', '71.00', '63.00', '77.60', '-14.60', '06:36 ↑ (114°)', '17:55 ↑ (246°)', '11:19:08', '−0:01'), (2017, 'December', 22, '84.00', '74.50', '66.00', '77.57', '-11.57', '06:36 ↑ (114°)', '17:56 ↑ (246°)', '11:19:08', '< 1s'), (2017, 'December', 23, '90.00', '79.70', '70.00', '77.55', '-7.55', '06:37 ↑ (114°)', '17:56 ↑ (246°)', '11:19:09', '+0:01'), (2017, 'December', 24, '91.00', '82.20', '73.00', '77.53', '-4.53', '06:37 ↑ (114°)', '17:57 ↑ (246°)', '11:19:11', '+0:02'), (2017, 'December', 25, '84.00', '78.40', '72.00', '77.52', '-5.52', '06:38 ↑ (114°)', '17:57 ↑ (246°)', '11:19:14', '+0:03'), (2017, 'December', 26, '77.00', '72.70', '68.00', '77.49', '-9.49', '06:38 ↑ (114°)', '17:58 ↑ (246°)', '11:19:18', '+0:04'), (2017, 'December', 27, '70.00', '68.60', '66.00', '77.46', '-11.46', '06:39 ↑ (114°)', '17:58 ↑ (246°)', '11:19:23', '+0:05'), (2017, 'December', 28, '81.00', '74.20', '70.00', '77.44', '-7.44', '06:39 ↑ (114°)', '17:59 ↑ (246°)', '11:19:29', '+0:06'), (2017, 'December', 29, '86.00', '78.10', '72.00', '77.42', '-5.42', '06:40 ↑ (114°)', '17:59 ↑ (246°)', '11:19:36', '+0:07'), (2017, 
'December', 30, '90.00', '81.90', '73.00', '77.41', '-4.41', '06:40 ↑ (114°)', '18:00 ↑ (246°)', '11:19:44', '+0:08'), (2017, 'December', 31, '88.00', '81.10', '73.00', '77.40', '-4.40', '06:41 ↑ (114°)', '18:00 ↑ (246°)', '11:19:53', '+0:09'), (2018, 'January', 1, '86.00', '78.80', '72.00', '72.00', '0.00', '06:41 ↑ (114°)', '18:01 ↑ (247°)', '11:20:06', '+0:10'), (2018, 'January', 2, '88.00', '79.80', '70.00', '71.00', '-1.00', '06:41 ↑ (113°)', '18:02 ↑ (247°)', '11:20:18', '+0:11'), (2018, 'January', 3, '88.00', '81.20', '75.00', '72.33', '2.67', '06:42 ↑ (113°)', '18:02 ↑ (247°)', '11:20:30', '+0:12'), (2018, 'January', 4, '91.00', '83.10', '75.00', '73.00', '2.00', '06:42 ↑ (113°)', '18:03 ↑ (247°)', '11:20:44', '+0:13'), (2018, 'January', 5, '93.00', '85.30', '77.00', '73.80', '3.20', '06:42 ↑ (113°)', '18:03 ↑ (247°)', '11:20:58', '+0:14'), (2018, 'January', 6, '91.00', '84.10', '77.00', '74.33', '2.67', '06:43 ↑ (113°)', '18:04 ↑ (247°)', '11:21:14', '+0:15'), (2018, 'January', 7, '91.00', '83.70', '79.00', '75.00', '4.00', '06:43 ↑ (113°)', '18:04 ↑ (247°)', '11:21:30', '+0:16'), (2018, 'January', 8, '91.00', '83.40', '77.00', '75.25', '1.75', '06:43 ↑ (113°)', '18:05 ↑ (247°)', '11:21:47', '+0:17'), (2018, 'January', 9, '93.00', '84.20', '77.00', '75.44', '1.56', '06:44 ↑ (113°)', '18:06 ↑ (247°)', '11:22:06', '+0:18'), (2018, 'January', 10, '82.00', '77.20', '75.00', '75.40', '-0.40', '06:44 ↑ (112°)', '18:06 ↑ (248°)', '11:22:25', '+0:19'), (2018, 'January', 11, '81.00', '75.10', '70.00', '74.91', '-4.91', '06:44 ↑ (112°)', '18:07 ↑ (248°)', '11:22:45', '+0:20'), (2018, 'January', 12, '79.00', '73.80', '68.00', '74.33', '-6.33', '06:44 ↑ (112°)', '18:07 ↑ (248°)', '11:23:06', '+0:20'), (2018, 'January', 13, '79.00', '71.80', '66.00', '73.69', '-7.69', '06:44 ↑ (112°)', '18:08 ↑ (248°)', '11:23:28', '+0:21'), (2018, 'January', 14, '82.00', '74.30', '64.00', '73.00', '-9.00', '06:45 ↑ (112°)', '18:08 ↑ (248°)', '11:23:50', '+0:22'), (2018, 'January', 15, 
'86.00', '76.80', '68.00', '72.67', '-4.67', '06:45 ↑ (112°)', '18:09 ↑ (248°)', '11:24:14', '+0:23'), (2018, 'January', 16, '88.00', '80.00', '72.00', '72.63', '-0.63', '06:45 ↑ (111°)', '18:10 ↑ (249°)', '11:24:38', '+0:24'), (2018, 'January', 17, '90.00', '81.70', '73.00', '72.65', '0.35', '06:45 ↑ (111°)', '18:10 ↑ (249°)', '11:25:03', '+0:25'), (2018, 'January', 18, '91.00', '83.00', '75.00', '72.78', '2.22', '06:45 ↑ (111°)', '18:11 ↑ (249°)', '11:25:29', '+0:25'), (2018, 'January', 19, '93.00', '84.00', '75.00', '72.89', '2.11', '06:45 ↑ (111°)', '18:11 ↑ (249°)', '11:25:56', '+0:26'), (2018, 'January', 20, '90.00', '83.10', '77.00', '73.10', '3.90', '06:45 ↑ (111°)', '18:12 ↑ (250°)', '11:26:24', '+0:27'), (2018, 'January', 21, '91.00', '84.70', '77.00', '73.29', '3.71', '06:45 ↑ (110°)', '18:12 ↑ (250°)', '11:26:52', '+0:28'), (2018, 'January', 22, '93.00', '84.70', '77.00', '73.45', '3.55', '06:45 ↑ (110°)', '18:13 ↑ (250°)', '11:27:21', '+0:28'), (2018, 'January', 23, '91.00', '85.20', '79.00', '73.70', '5.30', '06:45 ↑ (110°)', '18:13 ↑ (250°)', '11:27:50', '+0:29'), (2018, 'January', 24, '90.00', '83.00', '81.00', '74.00', '7.00', '06:45 ↑ (110°)', '18:14 ↑ (250°)', '11:28:21', '+0:30'), (2018, 'January', 25, '91.00', '82.70', '77.00', '74.12', '2.88', '06:45 ↑ (109°)', '18:14 ↑ (251°)', '11:28:52', '+0:31'), (2018, 'January', 26, '90.00', '83.70', '79.00', '74.31', '4.69', '06:45 ↑ (109°)', '18:15 ↑ (251°)', '11:29:23', '+0:31'), (2018, 'January', 27, '90.00', '84.00', '79.00', '74.48', '4.52', '06:45 ↑ (109°)', '18:15 ↑ (251°)', '11:29:56', '+0:32'), (2018, 'January', 28, '91.00', '84.40', '79.00', '74.64', '4.36', '06:45 ↑ (109°)', '18:16 ↑ (252°)', '11:30:29', '+0:32'), (2018, 'January', 29, '93.00', '84.70', '79.00', '74.79', '4.21', '06:45 ↑ (108°)', '18:16 ↑ (252°)', '11:31:02', '+0:33'), (2018, 'January', 30, '91.00', '84.70', '79.00', '74.93', '4.07', '06:45 ↑ (108°)', '18:17 ↑ (252°)', '11:31:36', '+0:34'), (2018, 'January', 31, '90.00', 
'81.80', '77.00', '75.00', '2.00', '06:45 ↑ (108°)', '18:17 ↑ (252°)', '11:32:11', '+0:34'), (2018, 'February', 1, '86.00', '81.40', '75.00', '75.00', '0.00', '06:45 ↑ (107°)', '18:18 ↑ (253°)', '11:32:46', '+0:35'), (2018, 'February', 2, '84.00', '79.00', '73.00', '74.94', '-1.94', '06:45 ↑ (107°)', '18:18 ↑ (253°)', '11:33:22', '+0:35'), (2018, 'February', 3, '81.00', '77.50', '73.00', '74.88', '-1.88', '06:44 ↑ (107°)', '18:18 ↑ (253°)', '11:33:58', '+0:36'), (2018, 'February', 4, '84.00', '76.50', '70.00', '74.74', '-4.74', '06:44 ↑ (107°)', '18:19 ↑ (254°)', '11:34:35', '+0:36'), (2018, 'February', 5, '84.00', '76.60', '70.00', '74.61', '-4.61', '06:44 ↑ (106°)', '18:19 ↑ (254°)', '11:35:12', '+0:37'), (2018, 'February', 6, '82.00', '74.90', '68.00', '74.43', '-6.43', '06:44 ↑ (106°)', '18:20 ↑ (254°)', '11:35:50', '+0:37'), (2018, 'February', 7, '88.00', '76.90', '68.00', '74.26', '-6.26', '06:43 ↑ (106°)', '18:20 ↑ (255°)', '11:36:28', '+0:38'), (2018, 'February', 8, '88.00', '79.00', '72.00', '74.21', '-2.21', '06:43 ↑ (105°)', '18:20 ↑ (255°)', '11:37:07', '+0:38'), (2018, 'February', 9, '88.00', '79.60', '72.00', '74.15', '-2.15', '06:43 ↑ (105°)', '18:21 ↑ (255°)', '11:37:46', '+0:39'), (2018, 'February', 10, '90.00', '81.50', '73.00', '74.12', '-1.12', '06:43 ↑ (105°)', '18:21 ↑ (255°)', '11:38:25', '+0:39'), (2018, 'February', 11, '91.00', '82.10', '75.00', '74.14', '0.86', '06:42 ↑ (104°)', '18:21 ↑ (256°)', '11:39:05', '+0:39'), (2018, 'February', 12, '91.00', '82.80', '75.00', '74.16', '0.84', '06:42 ↑ (104°)', '18:22 ↑ (256°)', '11:39:45', '+0:40'), (2018, 'February', 13, '88.00', '80.10', '72.00', '74.11', '-2.11', '06:42 ↑ (104°)', '18:22 ↑ (257°)', '11:40:25', '+0:40'), (2018, 'February', 14, '93.00', '82.10', '72.00', '74.07', '-2.07', '06:41 ↑ (103°)', '18:22 ↑ (257°)', '11:41:06', '+0:40'), (2018, 'February', 15, '93.00', '83.50', '73.00', '74.04', '-1.04', '06:41 ↑ (103°)', '18:23 ↑ (257°)', '11:41:48', '+0:41'), (2018, 'February', 16, 
'95.00', '84.20', '75.00', '74.06', '0.94', '06:40 ↑ (103°)', '18:23 ↑ (258°)', '11:42:29', '+0:41'); INSERT INTO `bangkok` (`year`, `month`, `date`, `tempMax`, `tempAvg`, `tempMin`, `tempMonthAvg`, `tempDiff`, `sunrise`, `sunset`, `length`, `difference`) VALUES (2018, 'February', 17, '95.00', '84.70', '77.00', '74.13', '2.88', '06:40 ↑ (102°)', '18:23 ↑ (258°)', '11:43:11', '+0:41'), (2018, 'February', 18, '93.00', '84.80', '79.00', '74.22', '4.78', '06:40 ↑ (102°)', '18:23 ↑ (258°)', '11:43:53', '+0:42'), (2018, 'February', 19, '88.00', '82.70', '77.00', '74.28', '2.72', '06:39 ↑ (102°)', '18:24 ↑ (259°)', '11:44:36', '+0:42'), (2018, 'February', 20, '91.00', '83.80', '77.00', '74.33', '2.67', '06:39 ↑ (101°)', '18:24 ↑ (259°)', '11:45:18', '+0:42'), (2018, 'February', 21, '93.00', '85.30', '79.00', '74.42', '4.58', '06:38 ↑ (101°)', '18:24 ↑ (259°)', '11:46:01', '+0:42'), (2018, 'February', 22, '93.00', '84.80', '79.00', '74.51', '4.49', '06:38 ↑ (100°)', '18:24 ↑ (260°)', '11:46:44', '+0:43'), (2018, 'February', 23, '90.00', '81.00', '75.00', '74.52', '0.48', '06:37 ↑ (100°)', '18:25 ↑ (260°)', '11:47:28', '+0:43'), (2018, 'February', 24, '91.00', '83.80', '79.00', '74.60', '4.40', '06:37 ↑ (100°)', '18:25 ↑ (261°)', '11:48:11', '+0:43'), (2018, 'February', 25, '93.00', '84.20', '77.00', '74.64', '2.36', '06:36 ↑ (99°)', '18:25 ↑ (261°)', '11:48:55', '+0:43'), (2018, 'February', 26, '91.00', '84.10', '73.00', '74.61', '-1.61', '06:36 ↑ (99°)', '18:25 ↑ (261°)', '11:49:39', '+0:44'), (2018, 'February', 27, '90.00', '79.00', '73.00', '74.59', '-1.59', '06:35 ↑ (98°)', '18:26 ↑ (262°)', '11:50:23', '+0:44'), (2018, 'February', 28, '91.00', '80.90', '73.00', '74.56', '-1.56', '06:35 ↑ (98°)', '18:26 ↑ (262°)', '11:51:08', '+0:44'), (2018, 'March', 1, '93.00', '84.50', '77.00', '74.60', '2.40', '06:34 ↑ (98°)', '18:26 ↑ (262°)', '11:51:52', '+0:44'), (2018, 'March', 2, '93.00', '85.60', '79.00', '74.67', '4.33', '06:33 ↑ (97°)', '18:26 ↑ (263°)', '11:52:37', 
'+0:44'), (2018, 'March', 3, '93.00', '85.90', '81.00', '74.77', '6.23', '06:33 ↑ (97°)', '18:26 ↑ (263°)', '11:53:22', '+0:44'), (2018, 'March', 4, '93.00', '86.20', '81.00', '74.87', '6.13', '06:32 ↑ (97°)', '18:26 ↑ (264°)', '11:54:07', '+0:45'), (2018, 'March', 5, '97.00', '86.40', '79.00', '74.94', '4.06', '06:32 ↑ (96°)', '18:27 ↑ (264°)', '11:54:52', '+0:45'), (2018, 'March', 6, '93.00', '86.00', '81.00', '75.03', '5.97', '06:31 ↑ (96°)', '18:27 ↑ (264°)', '11:55:37', '+0:45'), (2018, 'March', 7, '93.00', '83.00', '79.00', '75.09', '3.91', '06:30 ↑ (95°)', '18:27 ↑ (265°)', '11:56:23', '+0:45'), (2018, 'March', 8, '91.00', '83.30', '77.00', '75.12', '1.88', '06:30 ↑ (95°)', '18:27 ↑ (265°)', '11:57:08', '+0:45'), (2018, 'March', 9, '82.00', '77.50', '75.00', '75.12', '-0.12', '06:29 ↑ (95°)', '18:27 ↑ (266°)', '11:57:54', '+0:45'), (2018, 'March', 10, '86.00', '78.50', '73.00', '75.09', '-2.09', '06:29 ↑ (94°)', '18:27 ↑ (266°)', '11:58:40', '+0:45'), (2018, 'March', 11, '90.00', '82.40', '73.00', '75.06', '-2.06', '06:28 ↑ (94°)', '18:27 ↑ (266°)', '11:59:26', '+0:45'), (2018, 'March', 12, '91.00', '83.80', '77.00', '75.08', '1.92', '06:27 ↑ (93°)', '18:28 ↑ (267°)', '12:00:12', '+0:45'), (2018, 'March', 13, '93.00', '85.30', '79.00', '75.14', '3.86', '06:27 ↑ (93°)', '18:28 ↑ (267°)', '12:00:57', '+0:45'), (2018, 'March', 14, '93.00', '85.80', '81.00', '75.22', '5.78', '06:26 ↑ (93°)', '18:28 ↑ (268°)', '12:01:43', '+0:46'), (2018, 'March', 15, '93.00', '86.20', '82.00', '75.31', '6.69', '06:25 ↑ (92°)', '18:28 ↑ (268°)', '12:02:30', '+0:46'), (2018, 'March', 16, '93.00', '83.90', '77.00', '75.33', '1.67', '06:25 ↑ (92°)', '18:28 ↑ (268°)', '12:03:16', '+0:46'), (2018, 'March', 17, '95.00', '85.30', '77.00', '75.36', '1.64', '06:24 ↑ (91°)', '18:28 ↑ (269°)', '12:04:02', '+0:46'), (2018, 'March', 18, '93.00', '86.00', '79.00', '75.40', '3.60', '06:23 ↑ (91°)', '18:28 ↑ (269°)', '12:04:48', '+0:46'), (2018, 'March', 19, '95.00', '87.10', '81.00', '75.47', 
'5.53', '06:23 ↑ (90°)', '18:28 ↑ (270°)', '12:05:34', '+0:46'), (2018, 'March', 20, '97.00', '87.50', '82.00', '75.56', '6.44', '06:22 ↑ (90°)', '18:28 ↑ (270°)', '12:06:20', '+0:46'), (2018, 'March', 21, '90.00', '84.70', '81.00', '75.63', '5.38', '06:21 ↑ (90°)', '18:28 ↑ (271°)', '12:07:07', '+0:46'), (2018, 'March', 22, '91.00', '83.70', '77.00', '75.64', '1.36', '06:21 ↑ (89°)', '18:29 ↑ (271°)', '12:07:53', '+0:46'), (2018, 'March', 23, '91.00', '85.80', '81.00', '75.71', '5.29', '06:20 ↑ (89°)', '18:29 ↑ (271°)', '12:08:39', '+0:46'), (2018, 'March', 24, '93.00', '86.40', '79.00', '75.75', '3.25', '06:19 ↑ (88°)', '18:29 ↑ (272°)', '12:09:25', '+0:46'), (2018, 'March', 25, '93.00', '86.00', '81.00', '75.81', '5.19', '06:19 ↑ (88°)', '18:29 ↑ (272°)', '12:10:11', '+0:46'), (2018, 'March', 26, '93.00', '86.80', '82.00', '75.88', '6.12', '06:18 ↑ (88°)', '18:29 ↑ (273°)', '12:10:58', '+0:46'), (2018, 'March', 27, '90.00', '83.40', '75.00', '75.87', '-0.87', '06:17 ↑ (87°)', '18:29 ↑ (273°)', '12:11:44', '+0:46'), (2018, 'March', 28, '90.00', '84.50', '81.00', '75.93', '5.07', '06:17 ↑ (87°)', '18:29 ↑ (273°)', '12:12:30', '+0:46'), (2018, 'March', 29, '93.00', '85.80', '79.00', '75.97', '3.03', '06:16 ↑ (86°)', '18:29 ↑ (274°)', '12:13:16', '+0:46'), (2018, 'March', 30, '93.00', '84.70', '75.00', '75.96', '-0.96', '06:15 ↑ (86°)', '18:29 ↑ (274°)', '12:14:02', '+0:45'), (2018, 'March', 31, '90.00', '83.60', '79.00', '75.99', '3.01', '06:14 ↑ (86°)', '18:29 ↑ (275°)', '12:14:48', '+0:45'), (2018, 'April', 1, '88.00', '82.10', '77.00', '76.00', '1.00', '06:14 ↑ (85°)', '18:29 ↑ (275°)', '12:15:33', '+0:45'), (2018, 'April', 2, '91.00', '82.80', '72.00', '75.96', '-3.96', '06:13 ↑ (85°)', '18:29 ↑ (275°)', '12:16:19', '+0:45'), (2018, 'April', 3, '93.00', '82.90', '75.00', '75.95', '-0.95', '06:12 ↑ (84°)', '18:30 ↑ (276°)', '12:17:05', '+0:45'), (2018, 'April', 4, '93.00', '85.70', '81.00', '76.00', '5.00', '06:12 ↑ (84°)', '18:30 ↑ (276°)', '12:17:50', 
'+0:45'), (2018, 'April', 5, '91.00', '84.50', '79.00', '76.03', '2.97', '06:11 ↑ (84°)', '18:30 ↑ (277°)', '12:18:36', '+0:45'), (2018, 'April', 6, '93.00', '84.90', '77.00', '76.04', '0.96', '06:10 ↑ (83°)', '18:30 ↑ (277°)', '12:19:21', '+0:45'), (2018, 'April', 7, '84.00', '76.70', '73.00', '76.01', '-3.01', '06:10 ↑ (83°)', '18:30 ↑ (277°)', '12:20:06', '+0:45'), (2018, 'April', 8, '84.00', '76.80', '70.00', '75.95', '-5.95', '06:09 ↑ (82°)', '18:30 ↑ (278°)', '12:20:51', '+0:45'), (2018, 'April', 9, '93.00', '82.70', '73.00', '75.92', '-2.92', '06:08 ↑ (82°)', '18:30 ↑ (278°)', '12:21:36', '+0:44'), (2018, 'April', 10, '97.00', '86.70', '75.00', '75.91', '-0.91', '06:08 ↑ (82°)', '18:30 ↑ (278°)', '12:22:21', '+0:44'), (2018, 'April', 11, '95.00', '86.70', '79.00', '75.94', '3.06', '06:07 ↑ (81°)', '18:30 ↑ (279°)', '12:23:06', '+0:44'), (2018, 'April', 12, '95.00', '87.80', '82.00', '76.00', '6.00', '06:07 ↑ (81°)', '18:30 ↑ (279°)', '12:23:50', '+0:44'), (2018, 'April', 13, '97.00', '88.50', '82.00', '76.06', '5.94', '06:06 ↑ (81°)', '18:31 ↑ (280°)', '12:24:35', '+0:44'), (2018, 'April', 14, '97.00', '88.50', '82.00', '76.12', '5.88', '06:05 ↑ (80°)', '18:31 ↑ (280°)', '12:25:19', '+0:44'), (2018, 'April', 15, '97.00', '89.00', '84.00', '76.19', '7.81', '06:05 ↑ (80°)', '18:31 ↑ (280°)', '12:26:03', '+0:43'), (2018, 'April', 16, '97.00', '87.80', '81.00', '76.24', '4.76', '06:04 ↑ (79°)', '18:31 ↑ (281°)', '12:26:46', '+0:43'), (2018, 'April', 17, '90.00', '79.70', '75.00', '76.22', '-1.22', '06:03 ↑ (79°)', '18:31 ↑ (281°)', '12:27:30', '+0:43'), (2018, 'April', 18, '91.00', '82.50', '73.00', '76.19', '-3.19', '06:03 ↑ (79°)', '18:31 ↑ (281°)', '12:28:13', '+0:43'), (2018, 'April', 19, '93.00', '85.50', '79.00', '76.22', '2.78', '06:02 ↑ (78°)', '18:31 ↑ (282°)', '12:28:56', '+0:43'), (2018, 'April', 20, '95.00', '86.90', '81.00', '76.26', '4.74', '06:02 ↑ (78°)', '18:31 ↑ (282°)', '12:29:39', '+0:42'), (2018, 'April', 21, '97.00', '87.90', '81.00', 
'76.31', '4.69', '06:01 ↑ (78°)', '18:32 ↑ (282°)', '12:30:21', '+0:42'), (2018, 'April', 22, '97.00', '88.80', '84.00', '76.38', '7.63', '06:01 ↑ (77°)', '18:32 ↑ (283°)', '12:31:04', '+0:42'), (2018, 'April', 23, '99.00', '89.60', '82.00', '76.42', '5.58', '06:00 ↑ (77°)', '18:32 ↑ (283°)', '12:31:46', '+0:41'), (2018, 'April', 24, '97.00', '89.40', '84.00', '76.49', '7.51', '06:00 ↑ (77°)', '18:32 ↑ (284°)', '12:32:27', '+0:41'), (2018, 'April', 25, '95.00', '88.10', '84.00', '76.56', '7.44', '05:59 ↑ (76°)', '18:32 ↑ (284°)', '12:33:09', '+0:41'), (2018, 'April', 26, '91.00', '84.00', '79.00', '76.58', '2.42', '05:59 ↑ (76°)', '18:32 ↑ (284°)', '12:33:50', '+0:41'), (2018, 'April', 27, '93.00', '82.90', '75.00', '76.56', '-1.56', '05:58 ↑ (76°)', '18:33 ↑ (285°)', '12:34:30', '+0:40'), (2018, 'April', 28, '79.00', '77.20', '73.00', '76.53', '-3.53', '05:58 ↑ (75°)', '18:33 ↑ (285°)', '12:35:11', '+0:40'), (2018, 'April', 29, '88.00', '80.90', '75.00', '76.52', '-1.52', '05:57 ↑ (75°)', '18:33 ↑ (285°)', '12:35:51', '+0:39'), (2018, 'April', 30, '84.00', '80.00', '77.00', '76.53', '0.47', '05:57 ↑ (75°)', '18:33 ↑ (285°)', '12:36:30', '+0:39'), (2018, 'May', 1, '91.00', '82.30', '75.00', '76.51', '-1.51', '05:56 ↑ (74°)', '18:33 ↑ (286°)', '12:37:09', '+0:39'), (2018, 'May', 2, '91.00', '83.30', '77.00', '76.52', '0.48', '05:56 ↑ (74°)', '18:34 ↑ (286°)', '12:37:48', '+0:38'), (2018, 'May', 3, '90.00', '83.00', '79.00', '76.54', '2.46', '05:55 ↑ (74°)', '18:34 ↑ (286°)', '12:38:27', '+0:38'), (2018, 'May', 4, '93.00', '83.20', '77.00', '76.54', '0.46', '05:55 ↑ (73°)', '18:34 ↑ (287°)', '12:39:05', '+0:37'), (2018, 'May', 5, '91.00', '84.00', '79.00', '76.56', '2.44', '05:54 ↑ (73°)', '18:34 ↑ (287°)', '12:39:42', '+0:37'), (2018, 'May', 6, '90.00', '83.70', '79.00', '76.58', '2.42', '05:54 ↑ (73°)', '18:34 ↑ (287°)', '12:40:19', '+0:37'), (2018, 'May', 7, '95.00', '85.70', '77.00', '76.58', '0.42', '05:54 ↑ (73°)', '18:35 ↑ (288°)', '12:40:56', '+0:36'), (2018, 
'May', 8, '97.00', '86.20', '75.00', '76.57', '-1.57', '05:53 ↑ (72°)', '18:35 ↑ (288°)', '12:41:32', '+0:36'), (2018, 'May', 9, '93.00', '86.80', '82.00', '76.61', '5.39', '05:53 ↑ (72°)', '18:35 ↑ (288°)', '12:42:08', '+0:35'), (2018, 'May', 10, '97.00', '87.50', '81.00', '76.65', '4.35', '05:53 ↑ (72°)', '18:35 ↑ (288°)', '12:42:43', '+0:35'), (2018, 'May', 11, '91.00', '80.80', '77.00', '76.65', '0.35', '05:52 ↑ (71°)', '18:36 ↑ (289°)', '12:43:17', '+0:34'), (2018, 'May', 12, '91.00', '82.20', '77.00', '76.65', '0.35', '05:52 ↑ (71°)', '18:36 ↑ (289°)', '12:43:51', '+0:34'), (2018, 'May', 13, '93.00', '84.00', '79.00', '76.67', '2.33', '05:52 ↑ (71°)', '18:36 ↑ (289°)', '12:44:25', '+0:33'), (2018, 'May', 14, '93.00', '82.70', '77.00', '76.67', '0.33', '05:51 ↑ (71°)', '18:36 ↑ (289°)', '12:44:58', '+0:32'), (2018, 'May', 15, '91.00', '82.80', '77.00', '76.67', '0.33', '05:51 ↑ (70°)', '18:37 ↑ (290°)', '12:45:30', '+0:32'), (2018, 'May', 16, '97.00', '87.70', '81.00', '76.71', '4.29', '05:51 ↑ (70°)', '18:37 ↑ (290°)', '12:46:02', '+0:31'), (2018, 'May', 17, '93.00', '86.70', '82.00', '76.74', '5.26', '05:51 ↑ (70°)', '18:37 ↑ (290°)', '12:46:33', '+0:31'), (2018, 'May', 18, '91.00', '84.60', '81.00', '76.78', '4.22', '05:50 ↑ (70°)', '18:38 ↑ (290°)', '12:47:03', '+0:30'), (2018, 'May', 19, '91.00', '85.10', '79.00', '76.79', '2.21', '05:50 ↑ (69°)', '18:38 ↑ (291°)', '12:47:33', '+0:29'), (2018, 'May', 20, '97.00', '85.20', '79.00', '76.81', '2.19', '05:50 ↑ (69°)', '18:38 ↑ (291°)', '12:48:02', '+0:29'), (2018, 'May', 21, '93.00', '84.20', '79.00', '76.82', '2.18', '05:50 ↑ (69°)', '18:38 ↑ (291°)', '12:48:30', '+0:28'), (2018, 'May', 22, '91.00', '83.00', '79.00', '76.84', '2.16', '05:50 ↑ (69°)', '18:39 ↑ (291°)', '12:48:58', '+0:27'), (2018, 'May', 23, '93.00', '83.20', '77.00', '76.84', '0.16', '05:50 ↑ (69°)', '18:39 ↑ (291°)', '12:49:25', '+0:26'), (2018, 'May', 24, '91.00', '83.50', '79.00', '76.85', '2.15', '05:49 ↑ (68°)', '18:39 ↑ (292°)', 
'12:49:51', '+0:26'), (2018, 'May', 25, '91.00', '84.40', '79.00', '76.87', '2.13', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:50:17', '+0:25'), (2018, 'May', 26, '88.00', '82.40', '79.00', '76.88', '2.12', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:50:41', '+0:24'), (2018, 'May', 27, '93.00', '84.10', '79.00', '76.90', '2.10', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:51:05', '+0:23'), (2018, 'May', 28, '91.00', '85.00', '79.00', '76.91', '2.09', '05:49 ↑ (68°)', '18:41 ↑ (292°)', '12:51:28', '+0:23'), (2018, 'May', 29, '91.00', '84.20', '77.00', '76.91', '0.09', '05:49 ↑ (68°)', '18:41 ↑ (293°)', '12:51:50', '+0:22'), (2018, 'May', 30, '91.00', '84.10', '79.00', '76.93', '2.07', '05:49 ↑ (67°)', '18:41 ↑ (293°)', '12:52:12', '+0:21'), (2018, 'May', 31, '91.00', '84.00', '77.00', '76.93', '0.07', '05:49 ↑ (67°)', '18:41 ↑ (293°)', '12:52:32', '+0:20'), (2018, 'June', 1, '91.00', '86.00', '81.00', '76.95', '4.05', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:52:52', '+0:19'), (2018, 'June', 2, '93.00', '85.40', '79.00', '76.97', '2.03', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:53:11', '+0:18'), (2018, 'June', 3, '95.00', '87.30', '82.00', '77.00', '5.00', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:53:29', '+0:17'), (2018, 'June', 4, '95.00', '87.40', '81.00', '77.03', '3.97', '05:49 ↑ (67°)', '18:43 ↑ (293°)', '12:53:46', '+0:17'), (2018, 'June', 5, '95.00', '87.50', '81.00', '77.05', '3.95', '05:49 ↑ (67°)', '18:43 ↑ (293°)', '12:54:02', '+0:16'), (2018, 'June', 6, '95.00', '85.90', '79.00', '77.06', '1.94', '05:49 ↑ (66°)', '18:43 ↑ (294°)', '12:54:18', '+0:15'), (2018, 'June', 7, '90.00', '85.10', '81.00', '77.09', '3.91', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:54:32', '+0:14'), (2018, 'June', 8, '88.00', '84.00', '81.00', '77.11', '3.89', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:54:45', '+0:13'), (2018, 'June', 9, '86.00', '83.20', '79.00', '77.13', '1.88', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:54:58', '+0:12'), (2018, 'June', 10, '91.00', '84.10', '79.00', '77.14', '1.86', '05:49 ↑ 
(66°)', '18:45 ↑ (294°)', '12:55:10', '+0:11'), (2018, 'June', 11, '91.00', '86.40', '82.00', '77.17', '4.83', '05:49 ↑ (66°)', '18:45 ↑ (294°)', '12:55:20', '+0:10'), (2018, 'June', 12, '93.00', '87.30', '84.00', '77.21', '6.79', '05:50 ↑ (66°)', '18:45 ↑ (294°)', '12:55:30', '+0:09'), (2018, 'June', 13, '93.00', '87.00', '82.00', '77.24', '4.76', '05:50 ↑ (66°)', '18:45 ↑ (294°)', '12:55:38', '+0:08'), (2018, 'June', 14, '93.00', '87.00', '82.00', '77.27', '4.73', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:55:46', '+0:07'), (2018, 'June', 15, '93.00', '84.90', '79.00', '77.28', '1.72', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:55:53', '+0:06'), (2018, 'June', 16, '91.00', '85.50', '81.00', '77.30', '3.70', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:55:58', '+0:05'), (2018, 'June', 17, '91.00', '85.80', '81.00', '77.32', '3.68', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:56:03', '+0:04'), (2018, 'June', 18, '93.00', '85.70', '79.00', '77.33', '1.67', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:07', '+0:03'), (2018, 'June', 19, '93.00', '86.70', '82.00', '77.36', '4.64', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:09', '+0:02'), (2018, 'June', 20, '93.00', '86.80', '82.00', '77.39', '4.61', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:11', '+0:01'), (2018, 'June', 21, '90.00', '86.00', '82.00', '77.41', '4.59', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:12', '< 1s'), (2018, 'June', 22, '95.00', '87.80', '82.00', '77.44', '4.56', '05:51 ↑ (66°)', '18:48 ↑ (294°)', '12:56:11', '< 1s'), (2018, 'June', 23, '95.00', '87.60', '81.00', '77.46', '3.54', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:10', '−0:01'), (2018, 'June', 24, '93.00', '87.10', '82.00', '77.49', '4.51', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:08', '−0:02'), (2018, 'June', 25, '90.00', '85.40', '81.00', '77.51', '3.49', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:04', '−0:03'), (2018, 'June', 26, '91.00', '83.80', '75.00', '77.49', '-2.49', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:00', '−0:04'), (2018, 'June', 27, '84.00', 
'80.10', '77.00', '77.49', '-0.49', '05:53 ↑ (66°)', '18:48 ↑ (294°)', '12:55:55', '−0:05'), (2018, 'June', 28, '90.00', '81.60', '77.00', '77.49', '-0.49', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:48', '−0:06'), (2018, 'June', 29, '93.00', '83.80', '77.00', '77.48', '-0.48', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:41', '−0:07'), (2018, 'June', 30, '95.00', '85.70', '79.00', '77.49', '1.51', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:33', '−0:08'), (2018, 'July', 1, '95.00', '84.60', '77.00', '77.49', '-0.49', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:55:24', '−0:09'), (2018, 'July', 2, '93.00', '83.60', '77.00', '77.49', '-0.49', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:55:13', '−0:10'), (2018, 'July', 3, '93.00', '84.80', '81.00', '77.51', '3.49', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:55:02', '−0:11'), (2018, 'July', 4, '91.00', '85.20', '77.00', '77.50', '-0.50', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:54:50', '−0:12'), (2018, 'July', 5, '91.00', '85.50', '79.00', '77.51', '1.49', '05:55 ↑ (66°)', '18:49 ↑ (294°)', '12:54:37', '−0:13'), (2018, 'July', 6, '93.00', '83.80', '75.00', '77.50', '-2.50', '05:55 ↑ (66°)', '18:49 ↑ (294°)', '12:54:23', '−0:13'), (2018, 'July', 7, '90.00', '84.30', '77.00', '77.49', '-0.49', '05:55 ↑ (66°)', '18:49 ↑ (293°)', '12:54:08', '−0:14'), (2018, 'July', 8, '91.00', '86.20', '81.00', '77.51', '3.49', '05:56 ↑ (67°)', '18:49 ↑ (293°)', '12:53:53', '−0:15'), (2018, 'July', 9, '93.00', '84.40', '77.00', '77.51', '-0.51', '05:56 ↑ (67°)', '18:49 ↑ (293°)', '12:53:36', '−0:16'), (2018, 'July', 10, '90.00', '84.10', '77.00', '77.51', '-0.51', '05:56 ↑ (67°)', '18:49 ↑ (293°)', '12:53:18', '−0:17'), (2018, 'July', 11, '88.00', '83.10', '79.00', '77.52', '1.48', '05:56 ↑ (67°)', '18:49 ↑ (293°)', '12:53:00', '−0:18'), (2018, 'July', 12, '90.00', '83.30', '75.00', '77.50', '-2.50', '05:57 ↑ (67°)', '18:49 ↑ (293°)', '12:52:41', '−0:19'), (2018, 'July', 13, '91.00', '83.70', '77.00', '77.50', '-0.50', '05:57 ↑ (67°)', '18:49 ↑ (293°)', 
'12:52:20', '−0:20'), (2018, 'July', 14, '91.00', '84.00', '81.00', '77.52', '3.48', '05:57 ↑ (67°)', '18:49 ↑ (293°)', '12:51:59', '−0:20'), (2018, 'July', 15, '90.00', '84.80', '81.00', '77.54', '3.46', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:51:38', '−0:21'), (2018, 'July', 16, '88.00', '83.30', '81.00', '77.55', '3.45', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:51:15', '−0:22'), (2018, 'July', 17, '82.00', '80.80', '79.00', '77.56', '1.44', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:50:52', '−0:23'), (2018, 'July', 18, '90.00', '83.50', '81.00', '77.58', '3.42', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:50:27', '−0:24'), (2018, 'July', 19, '90.00', '83.90', '79.00', '77.59', '1.42', '05:59 ↑ (68°)', '18:49 ↑ (292°)', '12:50:03', '−0:24'), (2018, 'July', 20, '91.00', '84.80', '81.00', '77.60', '3.40', '05:59 ↑ (68°)', '18:49 ↑ (291°)', '12:49:37', '−0:25'), (2018, 'July', 21, '91.00', '86.80', '82.00', '77.62', '4.38', '05:59 ↑ (69°)', '18:48 ↑ (291°)', '12:49:10', '−0:26'), (2018, 'July', 22, '91.00', '84.40', '79.00', '77.63', '1.37', '06:00 ↑ (69°)', '18:48 ↑ (291°)', '12:48:43', '−0:27'), (2018, 'July', 23, '93.00', '85.90', '82.00', '77.65', '4.35', '06:00 ↑ (69°)', '18:48 ↑ (291°)', '12:48:16', '−0:27'), (2018, 'July', 24, '91.00', '85.80', '82.00', '77.67', '4.33', '06:00 ↑ (69°)', '18:48 ↑ (291°)', '12:47:47', '−0:28'), (2018, 'July', 25, '91.00', '85.50', '81.00', '77.69', '3.31', '06:00 ↑ (69°)', '18:48 ↑ (290°)', '12:47:18', '−0:29'), (2018, 'July', 26, '91.00', '85.80', '81.00', '77.71', '3.29', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:46:48', '−0:29'), (2018, 'July', 27, '91.00', '86.30', '82.00', '77.73', '4.27', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:46:18', '−0:30'), (2018, 'July', 28, '91.00', '84.70', '79.00', '77.73', '1.27', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:45:47', '−0:31'), (2018, 'July', 29, '91.00', '83.70', '79.00', '77.74', '1.26', '06:01 ↑ (70°)', '18:47 ↑ (289°)', '12:45:15', '−0:31'), (2018, 'July', 30, '90.00', '81.50', '77.00', '77.73', 
'-0.73', '06:02 ↑ (71°)', '18:46 ↑ (289°)', '12:44:43', '−0:32'), (2018, 'July', 31, '90.00', '84.20', '77.00', '77.73', '-0.73', '06:02 ↑ (71°)', '18:46 ↑ (289°)', '12:44:10', '−0:32'), (2018, 'August', 1, '91.00', '85.30', '81.00', '77.75', '3.25', '06:02 ↑ (71°)', '18:46 ↑ (289°)', '12:43:36', '−0:33'), (2018, 'August', 2, '90.00', '85.30', '81.00', '77.76', '3.24', '06:02 ↑ (71°)', '18:45 ↑ (288°)', '12:43:03', '−0:33'), (2018, 'August', 3, '91.00', '85.20', '79.00', '77.77', '1.23', '06:02 ↑ (72°)', '18:45 ↑ (288°)', '12:42:28', '−0:34'), (2018, 'August', 4, '91.00', '85.80', '81.00', '77.78', '3.22', '06:03 ↑ (72°)', '18:45 ↑ (288°)', '12:41:53', '−0:34'), (2018, 'August', 5, '91.00', '84.80', '81.00', '77.80', '3.20', '06:03 ↑ (72°)', '18:44 ↑ (288°)', '12:41:18', '−0:35'), (2018, 'August', 6, '88.00', '82.70', '75.00', '77.78', '-2.78', '06:03 ↑ (73°)', '18:44 ↑ (287°)', '12:40:42', '−0:36'), (2018, 'August', 7, '90.00', '83.10', '79.00', '77.79', '1.21', '06:03 ↑ (73°)', '18:43 ↑ (287°)', '12:40:05', '−0:36'), (2018, 'August', 8, '91.00', '84.20', '79.00', '77.80', '1.20', '06:03 ↑ (73°)', '18:43 ↑ (287°)', '12:39:28', '−0:36'), (2018, 'August', 9, '90.00', '84.80', '81.00', '77.81', '3.19', '06:04 ↑ (73°)', '18:42 ↑ (286°)', '12:38:51', '−0:37'), (2018, 'August', 10, '91.00', '84.60', '79.00', '77.82', '1.18', '06:04 ↑ (74°)', '18:42 ↑ (286°)', '12:38:13', '−0:37'), (2018, 'August', 11, '91.00', '83.60', '79.00', '77.82', '1.18', '06:04 ↑ (74°)', '18:41 ↑ (286°)', '12:37:35', '−0:38'), (2018, 'August', 12, '90.00', '84.00', '81.00', '77.83', '3.17', '06:04 ↑ (74°)', '18:41 ↑ (286°)', '12:36:57', '−0:38'), (2018, 'August', 13, '90.00', '84.90', '82.00', '77.85', '4.15', '06:04 ↑ (75°)', '18:41 ↑ (285°)', '12:36:18', '−0:38'), (2018, 'August', 14, '90.00', '84.90', '82.00', '77.87', '4.13', '06:04 ↑ (75°)', '18:40 ↑ (285°)', '12:35:39', '−0:39'), (2018, 'August', 15, '91.00', '84.40', '77.00', '77.87', '-0.87', '06:04 ↑ (75°)', '18:39 ↑ (285°)', '12:34:59', 
'−0:39'), (2018, 'August', 16, '91.00', '84.80', '79.00', '77.87', '1.13', '06:05 ↑ (76°)', '18:39 ↑ (284°)', '12:34:19', '−0:39'), (2018, 'August', 17, '90.00', '84.90', '81.00', '77.89', '3.11', '06:05 ↑ (76°)', '18:38 ↑ (284°)', '12:33:39', '−0:40'), (2018, 'August', 18, '91.00', '84.20', '79.00', '77.89', '1.11', '06:05 ↑ (76°)', '18:38 ↑ (284°)', '12:32:58', '−0:40'), (2018, 'August', 19, '91.00', '84.00', '79.00', '77.90', '1.10', '06:05 ↑ (77°)', '18:37 ↑ (283°)', '12:32:17', '−0:40'), (2018, 'August', 20, '91.00', '84.70', '79.00', '77.90', '1.10', '06:05 ↑ (77°)', '18:37 ↑ (283°)', '12:31:36', '−0:41'), (2018, 'August', 21, '91.00', '84.50', '79.00', '77.91', '1.09', '06:05 ↑ (77°)', '18:36 ↑ (283°)', '12:30:54', '−0:41'), (2018, 'August', 22, '93.00', '85.00', '77.00', '77.90', '-0.90', '06:05 ↑ (78°)', '18:36 ↑ (282°)', '12:30:13', '−0:41'), (2018, 'August', 23, '91.00', '84.30', '75.00', '77.89', '-2.89', '06:05 ↑ (78°)', '18:35 ↑ (282°)', '12:29:31', '−0:41'), (2018, 'August', 24, '90.00', '82.60', '77.00', '77.89', '-0.89', '06:05 ↑ (78°)', '18:34 ↑ (282°)', '12:28:48', '−0:42'), (2018, 'August', 25, '88.00', '81.20', '79.00', '77.89', '1.11', '06:06 ↑ (79°)', '18:34 ↑ (281°)', '12:28:06', '−0:42'), (2018, 'August', 26, '86.00', '81.00', '77.00', '77.89', '-0.89', '06:06 ↑ (79°)', '18:33 ↑ (281°)', '12:27:23', '−0:42'), (2018, 'August', 27, '90.00', '79.90', '75.00', '77.87', '-2.87', '06:06 ↑ (79°)', '18:32 ↑ (280°)', '12:26:40', '−0:42'), (2018, 'August', 28, '91.00', '81.00', '75.00', '77.86', '-2.86', '06:06 ↑ (80°)', '18:32 ↑ (280°)', '12:25:57', '−0:43'), (2018, 'August', 29, '91.00', '84.10', '77.00', '77.86', '-0.86', '06:06 ↑ (80°)', '18:31 ↑ (280°)', '12:25:14', '−0:43'), (2018, 'August', 30, '90.00', '84.00', '79.00', '77.86', '1.14', '06:06 ↑ (80°)', '18:30 ↑ (279°)', '12:24:30', '−0:43'), (2018, 'August', 31, '90.00', '82.20', '77.00', '77.86', '-0.86', '06:06 ↑ (81°)', '18:30 ↑ (279°)', '12:23:47', '−0:43'), (2018, 'September', 1, 
'91.00', '83.20', '77.00', '77.86', '-0.86', '06:06 ↑ (81°)', '18:29 ↑ (279°)', '12:23:03', '−0:43'), (2018, 'September', 2, '93.00', '85.60', '79.00', '77.86', '1.14', '06:06 ↑ (82°)', '18:28 ↑ (278°)', '12:22:19', '−0:43'), (2018, 'September', 3, '93.00', '83.20', '73.00', '77.84', '-4.84', '06:06 ↑ (82°)', '18:28 ↑ (278°)', '12:21:35', '−0:44'), (2018, 'September', 4, '90.00', '81.50', '77.00', '77.84', '-0.84', '06:06 ↑ (82°)', '18:27 ↑ (278°)', '12:20:51', '−0:44'), (2018, 'September', 5, '90.00', '81.80', '75.00', '77.83', '-2.83', '06:06 ↑ (83°)', '18:26 ↑ (277°)', '12:20:06', '−0:44'), (2018, 'September', 6, '91.00', '83.80', '77.00', '77.82', '-0.82', '06:06 ↑ (83°)', '18:26 ↑ (277°)', '12:19:22', '−0:44'), (2018, 'September', 7, '95.00', '86.70', '79.00', '77.83', '1.17', '06:06 ↑ (83°)', '18:25 ↑ (276°)', '12:18:37', '−0:44'), (2018, 'September', 8, '88.00', '83.30', '81.00', '77.84', '3.16', '06:06 ↑ (84°)', '18:24 ↑ (276°)', '12:17:52', '−0:44'), (2018, 'September', 9, '90.00', '82.20', '79.00', '77.85', '1.15', '06:06 ↑ (84°)', '18:23 ↑ (276°)', '12:17:07', '−0:44'), (2018, 'September', 10, '91.00', '83.50', '77.00', '77.84', '-0.84', '06:06 ↑ (85°)', '18:23 ↑ (275°)', '12:16:22', '−0:44'), (2018, 'September', 11, '93.00', '85.70', '79.00', '77.85', '1.15', '06:06 ↑ (85°)', '18:22 ↑ (275°)', '12:15:37', '−0:45'), (2018, 'September', 12, '91.00', '83.30', '77.00', '77.84', '-0.84', '06:06 ↑ (85°)', '18:21 ↑ (274°)', '12:14:52', '−0:45'), (2018, 'September', 13, '91.00', '82.40', '77.00', '77.84', '-0.84', '06:06 ↑ (86°)', '18:21 ↑ (274°)', '12:14:07', '−0:45'), (2018, 'September', 14, '86.00', '79.80', '77.00', '77.84', '-0.84', '06:06 ↑ (86°)', '18:20 ↑ (274°)', '12:13:21', '−0:45'), (2018, 'September', 15, '91.00', '81.20', '75.00', '77.83', '-2.83', '06:06 ↑ (87°)', '18:19 ↑ (273°)', '12:12:36', '−0:45'), (2018, 'September', 16, '91.00', '81.30', '75.00', '77.81', '-2.81', '06:06 ↑ (87°)', '18:18 ↑ (273°)', '12:11:51', '−0:45'), (2018, 'September', 
17, '91.00', '84.00', '79.00', '77.82', '1.18', '06:06 ↑ (87°)', '18:18 ↑ (272°)', '12:11:05', '−0:45'), (2018, 'September', 18, '91.00', '84.60', '77.00', '77.82', '-0.82', '06:07 ↑ (88°)', '18:17 ↑ (272°)', '12:10:20', '−0:45'), (2018, 'September', 19, '84.00', '80.00', '77.00', '77.81', '-0.81', '06:07 ↑ (88°)', '18:16 ↑ (272°)', '12:09:34', '−0:45'), (2018, 'September', 20, '91.00', '83.10', '77.00', '77.81', '-0.81', '06:07 ↑ (89°)', '18:15 ↑ (271°)', '12:08:49', '−0:45'), (2018, 'September', 21, '97.00', '86.10', '81.00', '77.82', '3.18', '06:07 ↑ (89°)', '18:15 ↑ (271°)', '12:08:03', '−0:45'), (2018, 'September', 22, '95.00', '85.10', '79.00', '77.83', '1.17', '06:07 ↑ (89°)', '18:14 ↑ (270°)', '12:07:18', '−0:45'), (2018, 'September', 23, '91.00', '83.50', '77.00', '77.82', '-0.82', '06:07 ↑ (90°)', '18:13 ↑ (270°)', '12:06:32', '−0:45'), (2018, 'September', 24, '88.00', '81.00', '73.00', '77.81', '-4.81', '06:07 ↑ (90°)', '18:12 ↑ (270°)', '12:05:47', '−0:45'), (2018, 'September', 25, '93.00', '84.50', '79.00', '77.81', '1.19', '06:07 ↑ (91°)', '18:12 ↑ (269°)', '12:05:01', '−0:45'), (2018, 'September', 26, '93.00', '85.90', '79.00', '77.81', '1.19', '06:07 ↑ (91°)', '18:11 ↑ (269°)', '12:04:16', '−0:45'), (2018, 'September', 27, '95.00', '86.80', '82.00', '77.83', '4.17', '06:07 ↑ (91°)', '18:10 ↑ (268°)', '12:03:31', '−0:45'), (2018, 'September', 28, '95.00', '86.70', '79.00', '77.83', '1.17', '06:07 ↑ (92°)', '18:10 ↑ (268°)', '12:02:45', '−0:45'), (2018, 'September', 29, '95.00', '85.60', '77.00', '77.83', '-0.83', '06:07 ↑ (92°)', '18:09 ↑ (268°)', '12:02:00', '−0:45'), (2018, 'September', 30, '93.00', '83.00', '75.00', '77.82', '-2.82', '06:07 ↑ (93°)', '18:08 ↑ (267°)', '12:01:14', '−0:45'), (2018, 'October', 1, '91.00', '83.20', '77.00', '77.82', '-0.82', '06:07 ↑ (93°)', '18:07 ↑ (267°)', '12:00:29', '−0:45'), (2018, 'October', 2, '90.00', '81.30', '79.00', '77.82', '1.18', '06:07 ↑ (93°)', '18:07 ↑ (266°)', '11:59:44', '−0:45'), (2018, 'October', 
3, '91.00', '84.60', '79.00', '77.83', '1.17', '06:07 ↑ (94°)', '18:06 ↑ (266°)', '11:58:59', '−0:45'), (2018, 'October', 4, '91.00', '84.80', '75.00', '77.82', '-2.82', '06:07 ↑ (94°)', '18:05 ↑ (266°)', '11:58:14', '−0:45'), (2018, 'October', 5, '90.00', '81.80', '75.00', '77.81', '-2.81', '06:07 ↑ (95°)', '18:05 ↑ (265°)', '11:57:29', '−0:44'), (2018, 'October', 6, '91.00', '82.60', '79.00', '77.81', '1.19', '06:07 ↑ (95°)', '18:04 ↑ (265°)', '11:56:44', '−0:44'), (2018, 'October', 7, '91.00', '84.10', '79.00', '77.81', '1.19', '06:07 ↑ (95°)', '18:03 ↑ (264°)', '11:55:59', '−0:44'), (2018, 'October', 8, '93.00', '84.30', '77.00', '77.81', '-0.81', '06:07 ↑ (96°)', '18:03 ↑ (264°)', '11:55:14', '−0:44'), (2018, 'October', 9, '93.00', '84.10', '77.00', '77.81', '-0.81', '06:08 ↑ (96°)', '18:02 ↑ (264°)', '11:54:30', '−0:44'), (2018, 'October', 10, '93.00', '85.60', '79.00', '77.81', '1.19', '06:08 ↑ (97°)', '18:01 ↑ (263°)', '11:53:45', '−0:44'), (2018, 'October', 11, '93.00', '86.00', '79.00', '77.82', '1.18', '06:08 ↑ (97°)', '18:01 ↑ (263°)', '11:53:01', '−0:44'), (2018, 'October', 12, '91.00', '85.80', '79.00', '77.82', '1.18', '06:08 ↑ (97°)', '18:00 ↑ (263°)', '11:52:17', '−0:44'), (2018, 'October', 13, '91.00', '83.60', '79.00', '77.83', '1.17', '06:08 ↑ (98°)', '18:00 ↑ (262°)', '11:51:33', '−0:44'), (2018, 'October', 14, '95.00', '86.70', '81.00', '77.84', '3.16', '06:08 ↑ (98°)', '17:59 ↑ (262°)', '11:50:49', '−0:43'), (2018, 'October', 15, '91.00', '85.10', '79.00', '77.84', '1.16', '06:08 ↑ (98°)', '17:58 ↑ (261°)', '11:50:05', '−0:43'), (2018, 'October', 16, '91.00', '84.80', '79.00', '77.84', '1.16', '06:08 ↑ (99°)', '17:58 ↑ (261°)', '11:49:21', '−0:43'), (2018, 'October', 17, '93.00', '84.80', '81.00', '77.86', '3.14', '06:09 ↑ (99°)', '17:57 ↑ (261°)', '11:48:38', '−0:43'), (2018, 'October', 18, '93.00', '85.50', '81.00', '77.87', '3.13', '06:09 ↑ (100°)', '17:57 ↑ (260°)', '11:47:55', '−0:43'), (2018, 'October', 19, '91.00', '85.30', '79.00', 
'77.87', '1.13', '06:09 ↑ (100°)', '17:56 ↑ (260°)', '11:47:12', '−0:42'), (2018, 'October', 20, '88.00', '80.90', '77.00', '77.87', '-0.87', '06:09 ↑ (100°)', '17:56 ↑ (259°)', '11:46:29', '−0:42'), (2018, 'October', 21, '88.00', '80.50', '79.00', '77.87', '1.13', '06:09 ↑ (101°)', '17:55 ↑ (259°)', '11:45:47', '−0:42'), (2018, 'October', 22, '93.00', '82.80', '77.00', '77.87', '-0.87', '06:09 ↑ (101°)', '17:54 ↑ (259°)', '11:45:04', '−0:42'), (2018, 'October', 23, '88.00', '81.30', '73.00', '77.85', '-4.85', '06:10 ↑ (101°)', '17:54 ↑ (258°)', '11:44:22', '−0:42'), (2018, 'October', 24, '90.00', '82.80', '77.00', '77.85', '-0.85', '06:10 ↑ (102°)', '17:54 ↑ (258°)', '11:43:41', '−0:41'), (2018, 'October', 25, '91.00', '83.80', '79.00', '77.85', '1.15', '06:10 ↑ (102°)', '17:53 ↑ (258°)', '11:42:59', '−0:41'), (2018, 'October', 26, '93.00', '85.30', '77.00', '77.85', '-0.85', '06:10 ↑ (102°)', '17:53 ↑ (257°)', '11:42:18', '−0:41'), (2018, 'October', 27, '93.00', '84.80', '75.00', '77.84', '-2.84', '06:11 ↑ (103°)', '17:52 ↑ (257°)', '11:41:37', '−0:40'), (2018, 'October', 28, '91.00', '85.30', '79.00', '77.84', '1.16', '06:11 ↑ (103°)', '17:52 ↑ (257°)', '11:40:56', '−0:40'), (2018, 'October', 29, '91.00', '86.00', '81.00', '77.85', '3.15', '06:11 ↑ (104°)', '17:51 ↑ (256°)', '11:40:16', '−0:40'), (2018, 'October', 30, '90.00', '84.70', '77.00', '77.85', '-0.85', '06:11 ↑ (104°)', '17:51 ↑ (256°)', '11:39:36', '−0:39'), (2018, 'October', 31, '90.00', '81.90', '75.00', '77.84', '-2.84', '06:12 ↑ (104°)', '17:51 ↑ (256°)', '11:38:57', '−0:39'), (2018, 'November', 1, '88.00', '80.10', '73.00', '77.83', '-4.83', '06:12 ↑ (105°)', '17:50 ↑ (255°)', '11:38:17', '−0:39'), (2018, 'November', 2, '90.00', '81.20', '72.00', '77.81', '-5.81', '06:12 ↑ (105°)', '17:50 ↑ (255°)', '11:37:39', '−0:38'), (2018, 'November', 3, '90.00', '81.70', '73.00', '77.79', '-4.79', '06:12 ↑ (105°)', '17:50 ↑ (255°)', '11:37:00', '−0:38'), (2018, 'November', 4, '91.00', '84.00', '75.00', 
'77.78', '-2.78', '06:13 ↑ (106°)', '17:49 ↑ (254°)', '11:36:22', '−0:38'), (2018, 'November', 5, '93.00', '85.60', '77.00', '77.78', '-0.78', '06:13 ↑ (106°)', '17:49 ↑ (254°)', '11:35:44', '−0:37'), (2018, 'November', 6, '93.00', '85.10', '77.00', '77.78', '-0.78', '06:13 ↑ (106°)', '17:49 ↑ (254°)', '11:35:07', '−0:37'), (2018, 'November', 7, '90.00', '84.80', '79.00', '77.78', '1.22', '06:14 ↑ (106°)', '17:48 ↑ (253°)', '11:34:30', '−0:36'), (2018, 'November', 8, '90.00', '84.40', '81.00', '77.79', '3.21', '06:14 ↑ (107°)', '17:48 ↑ (253°)', '11:33:54', '−0:36'), (2018, 'November', 9, '84.00', '81.00', '79.00', '77.80', '1.20', '06:15 ↑ (107°)', '17:48 ↑ (253°)', '11:33:18', '−0:35'), (2018, 'November', 10, '90.00', '82.60', '77.00', '77.79', '-0.79', '06:15 ↑ (107°)', '17:48 ↑ (253°)', '11:32:43', '−0:35'), (2018, 'November', 11, '91.00', '84.40', '77.00', '77.79', '-0.79', '06:15 ↑ (108°)', '17:48 ↑ (252°)', '11:32:08', '−0:34'), (2018, 'November', 12, '93.00', '85.60', '79.00', '77.79', '1.21', '06:16 ↑ (108°)', '17:47 ↑ (252°)', '11:31:34', '−0:34'), (2018, 'November', 13, '90.00', '84.70', '81.00', '77.80', '3.20', '06:16 ↑ (108°)', '17:47 ↑ (252°)', '11:31:00', '−0:33'), (2018, 'November', 14, '93.00', '86.20', '81.00', '77.81', '3.19', '06:17 ↑ (108°)', '17:47 ↑ (251°)', '11:30:27', '−0:33'), (2018, 'November', 15, '95.00', '86.60', '81.00', '77.82', '3.18', '06:17 ↑ (109°)', '17:47 ↑ (251°)', '11:29:54', '−0:32'), (2018, 'November', 16, '95.00', '86.60', '82.00', '77.84', '4.16', '06:17 ↑ (109°)', '17:47 ↑ (251°)', '11:29:22', '−0:31'), (2018, 'November', 17, '90.00', '85.00', '79.00', '77.84', '1.16', '06:18 ↑ (109°)', '17:47 ↑ (251°)', '11:28:51', '−0:31'), (2018, 'November', 18, '91.00', '82.40', '77.00', '77.84', '-0.84', '06:18 ↑ (110°)', '17:47 ↑ (250°)', '11:28:20', '−0:30'), (2018, 'November', 19, '91.00', '84.40', '77.00', '77.84', '-0.84', '06:19 ↑ (110°)', '17:47 ↑ (250°)', '11:27:50', '−0:30'), (2018, 'November', 20, '93.00', '84.80', 
'77.00', '77.83', '-0.83', '06:19 ↑ (110°)', '17:47 ↑ (250°)', '11:27:21', '−0:29'), (2018, 'November', 21, '95.00', '86.70', '79.00', '77.84', '1.16', '06:20 ↑ (110°)', '17:47 ↑ (250°)', '11:26:52', '−0:28'), (2018, 'November', 22, '95.00', '86.80', '79.00', '77.84', '1.16', '06:20 ↑ (110°)', '17:47 ↑ (249°)', '11:26:24', '−0:28'), (2018, 'November', 23, '90.00', '85.00', '79.00', '77.84', '1.16', '06:21 ↑ (111°)', '17:47 ↑ (249°)', '11:25:57', '−0:27'), (2018, 'November', 24, '91.00', '84.30', '77.00', '77.84', '-0.84', '06:21 ↑ (111°)', '17:47 ↑ (249°)', '11:25:30', '−0:26'), (2018, 'November', 25, '91.00', '83.80', '77.00', '77.84', '-0.84', '06:22 ↑ (111°)', '17:47 ↑ (249°)', '11:25:04', '−0:25'), (2018, 'November', 26, '90.00', '82.60', '73.00', '77.82', '-4.82', '06:22 ↑ (111°)', '17:47 ↑ (249°)', '11:24:39', '−0:25'), (2018, 'November', 27, '86.00', '80.30', '75.00', '77.82', '-2.82', '06:23 ↑ (111°)', '17:47 ↑ (248°)', '11:24:15', '−0:24'), (2018, 'November', 28, '84.00', '78.90', '75.00', '77.81', '-2.81', '06:23 ↑ (112°)', '17:47 ↑ (248°)', '11:23:52', '−0:23'), (2018, 'November', 29, '90.00', '82.60', '73.00', '77.79', '-4.79', '06:24 ↑ (112°)', '17:47 ↑ (248°)', '11:23:29', '−0:22'), (2018, 'November', 30, '90.00', '84.20', '77.00', '77.79', '-0.79', '06:24 ↑ (112°)', '17:48 ↑ (248°)', '11:23:07', '−0:21'), (2018, 'December', 1, '93.00', '86.00', '79.00', '77.79', '1.21', '06:25 ↑ (112°)', '17:48 ↑ (248°)', '11:22:46', '−0:20'), (2018, 'December', 2, '95.00', '86.40', '79.00', '77.80', '1.20', '06:26 ↑ (112°)', '17:48 ↑ (248°)', '11:22:26', '−0:20'), (2018, 'December', 3, '95.00', '86.60', '79.00', '77.80', '1.20', '06:26 ↑ (113°)', '17:48 ↑ (247°)', '11:22:07', '−0:19'), (2018, 'December', 4, '93.00', '87.00', '81.00', '77.81', '3.19', '06:27 ↑ (113°)', '17:48 ↑ (247°)', '11:21:49', '−0:18'), (2018, 'December', 5, '93.00', '86.80', '79.00', '77.81', '1.19', '06:27 ↑ (113°)', '17:49 ↑ (247°)', '11:21:32', '−0:17'), (2018, 'December', 6, '93.00', 
'86.20', '79.00', '77.82', '1.18', '06:28 ↑ (113°)', '17:49 ↑ (247°)', '11:21:15', '−0:16'), (2018, 'December', 7, '93.00', '86.50', '81.00', '77.83', '3.17', '06:28 ↑ (113°)', '17:49 ↑ (247°)', '11:21:00', '−0:15'), (2018, 'December', 8, '86.00', '78.70', '73.00', '77.81', '-4.81', '06:29 ↑ (113°)', '17:50 ↑ (247°)', '11:20:45', '−0:14'), (2018, 'December', 9, '88.00', '80.70', '77.00', '77.81', '-0.81', '06:29 ↑ (113°)', '17:50 ↑ (247°)', '11:20:32', '−0:13'), (2018, 'December', 10, '88.00', '81.80', '75.00', '77.80', '-2.80', '06:30 ↑ (113°)', '17:50 ↑ (247°)', '11:20:19', '−0:12'), (2018, 'December', 11, '90.00', '83.50', '77.00', '77.80', '-0.80', '06:31 ↑ (113°)', '17:51 ↑ (246°)', '11:20:08', '−0:11'), (2018, 'December', 12, '88.00', '82.10', '77.00', '77.80', '-0.80', '06:31 ↑ (114°)', '17:51 ↑ (246°)', '11:19:57', '−0:10'), (2018, 'December', 13, '88.00', '80.50', '72.00', '77.78', '-5.78', '06:32 ↑ (114°)', '17:51 ↑ (246°)', '11:19:47', '−0:09'), (2018, 'December', 14, '86.00', '79.80', '73.00', '77.77', '-4.77', '06:32 ↑ (114°)', '17:52 ↑ (246°)', '11:19:39', '−0:08'), (2018, 'December', 15, '86.00', '80.50', '73.00', '77.75', '-4.75', '06:33 ↑ (114°)', '17:52 ↑ (246°)', '11:19:31', '−0:07'), (2018, 'December', 16, '90.00', '83.60', '79.00', '77.76', '1.24', '06:33 ↑ (114°)', '17:53 ↑ (246°)', '11:19:25', '−0:06'), (2018, 'December', 17, '90.00', '82.10', '75.00', '77.75', '-2.75', '06:34 ↑ (114°)', '17:53 ↑ (246°)', '11:19:19', '−0:05'), (2018, 'December', 18, '90.00', '81.70', '73.00', '77.74', '-4.74', '06:34 ↑ (114°)', '17:54 ↑ (246°)', '11:19:15', '−0:04'), (2018, 'December', 19, '91.00', '83.50', '73.00', '77.72', '-4.72', '06:35 ↑ (114°)', '17:54 ↑ (246°)', '11:19:12', '−0:03'), (2018, 'December', 20, '93.00', '84.80', '75.00', '77.71', '-2.71', '06:35 ↑ (114°)', '17:55 ↑ (246°)', '11:19:09', '−0:02'), (2018, 'December', 21, '95.00', '86.10', '79.00', '77.72', '1.28', '06:36 ↑ (114°)', '17:55 ↑ (246°)', '11:19:08', '−0:01'), (2018, 'December', 22, 
'95.00', '87.20', '79.00', '77.72', '1.28', '06:36 ↑ (114°)', '17:56 ↑ (246°)', '11:19:08', '< 1s'), (2018, 'December', 23, '93.00', '86.10', '81.00', '77.73', '3.27', '06:37 ↑ (114°)', '17:56 ↑ (246°)', '11:19:09', '< 1s'), (2018, 'December', 24, '91.00', '84.90', '79.00', '77.73', '1.27', '06:37 ↑ (114°)', '17:57 ↑ (246°)', '11:19:11', '+0:01'), (2018, 'December', 25, '93.00', '83.80', '77.00', '77.73', '-0.73', '06:38 ↑ (114°)', '17:57 ↑ (246°)', '11:19:14', '+0:03'), (2018, 'December', 26, '90.00', '83.00', '77.00', '77.73', '-0.73', '06:38 ↑ (114°)', '17:58 ↑ (246°)', '11:19:18', '+0:04'), (2018, 'December', 27, '91.00', '82.80', '75.00', '77.72', '-2.72', '06:39 ↑ (114°)', '17:58 ↑ (246°)', '11:19:23', '+0:05'), (2018, 'December', 28, '91.00', '83.70', '75.00', '77.72', '-2.72', '06:39 ↑ (114°)', '17:59 ↑ (246°)', '11:19:29', '+0:06'), (2018, 'December', 29, '90.00', '80.30', '75.00', '77.71', '-2.71', '06:40 ↑ (114°)', '17:59 ↑ (246°)', '11:19:36', '+0:07'), (2018, 'December', 30, '84.00', '78.80', '73.00', '77.70', '-4.70', '06:40 ↑ (114°)', '18:00 ↑ (246°)', '11:19:44', '+0:08'), (2018, 'December', 31, '86.00', '78.70', '73.00', '77.68', '-4.68', '06:40 ↑ (114°)', '18:00 ↑ (246°)', '11:19:53', '+0:09'), (2019, 'January', 1, '84.00', '77.00', '70.00', '70.00', '0.00', '06:41 ↑ (114°)', '18:01 ↑ (247°)', '11:20:04', '+0:10'), (2019, 'January', 2, '86.00', '78.10', '72.00', '71.00', '1.00', '06:41 ↑ (113°)', '18:01 ↑ (247°)', '11:20:15', '+0:11'), (2019, 'January', 3, '81.00', '77.50', '73.00', '71.67', '1.33', '06:42 ↑ (113°)', '18:02 ↑ (247°)', '11:20:27', '+0:12'), (2019, 'January', 4, '82.00', '78.80', '75.00', '72.50', '2.50', '06:42 ↑ (113°)', '18:03 ↑ (247°)', '11:20:40', '+0:13'), (2019, 'January', 5, '82.00', '79.00', '77.00', '73.40', '3.60', '06:42 ↑ (113°)', '18:03 ↑ (247°)', '11:20:55', '+0:14'), (2019, 'January', 6, '91.00', '81.10', '75.00', '73.67', '1.33', '06:43 ↑ (113°)', '18:04 ↑ (247°)', '11:21:10', '+0:15'), (2019, 'January', 7, '90.00', 
'83.10', '75.00', '73.86', '1.14', '06:43 ↑ (113°)', '18:04 ↑ (247°)', '11:21:26', '+0:16'), (2019, 'January', 8, '88.00', '82.10', '77.00', '74.25', '2.75', '06:43 ↑ (113°)', '18:05 ↑ (247°)', '11:21:43', '+0:17'), (2019, 'January', 9, '88.00', '80.90', '75.00', '74.33', '0.67', '06:43 ↑ (113°)', '18:05 ↑ (247°)', '11:22:01', '+0:18'), (2019, 'January', 10, '90.00', '82.80', '75.00', '74.40', '0.60', '06:44 ↑ (112°)', '18:06 ↑ (248°)', '11:22:20', '+0:18'), (2019, 'January', 11, '91.00', '84.30', '77.00', '74.64', '2.36', '06:44 ↑ (112°)', '18:07 ↑ (248°)', '11:22:40', '+0:19'), (2019, 'January', 12, '93.00', '85.60', '77.00', '74.83', '2.17', '06:44 ↑ (112°)', '18:07 ↑ (248°)', '11:23:01', '+0:20'), (2019, 'January', 13, '93.00', '86.00', '79.00', '75.15', '3.85', '06:44 ↑ (112°)', '18:08 ↑ (248°)', '11:23:22', '+0:21'), (2019, 'January', 14, '93.00', '85.50', '79.00', '75.43', '3.57', '06:45 ↑ (112°)', '18:08 ↑ (248°)', '11:23:45', '+0:22'), (2019, 'January', 15, '90.00', '84.50', '79.00', '75.67', '3.33', '06:45 ↑ (112°)', '18:09 ↑ (248°)', '11:24:08', '+0:23'), (2019, 'January', 16, '91.00', '84.60', '81.00', '76.00', '5.00', '06:45 ↑ (111°)', '18:09 ↑ (249°)', '11:24:32', '+0:24'), (2019, 'January', 17, '91.00', '83.50', '77.00', '76.06', '0.94', '06:45 ↑ (111°)', '18:10 ↑ (249°)', '11:24:57', '+0:24'), (2019, 'January', 18, '86.00', '79.60', '75.00', '76.00', '-1.00', '06:45 ↑ (111°)', '18:11 ↑ (249°)', '11:25:23', '+0:25'), (2019, 'January', 19, '90.00', '81.70', '75.00', '75.95', '-0.95', '06:45 ↑ (111°)', '18:11 ↑ (249°)', '11:25:49', '+0:26'), (2019, 'January', 20, '91.00', '84.60', '77.00', '76.00', '1.00', '06:45 ↑ (111°)', '18:12 ↑ (249°)', '11:26:17', '+0:27'), (2019, 'January', 21, '91.00', '84.70', '75.00', '75.95', '-0.95', '06:45 ↑ (110°)', '18:12 ↑ (250°)', '11:26:45', '+0:28'), (2019, 'January', 22, '90.00', '82.60', '77.00', '76.00', '1.00', '06:45 ↑ (110°)', '18:13 ↑ (250°)', '11:27:14', '+0:28'), (2019, 'January', 23, '90.00', '81.20', 
'72.00', '75.83', '-3.83', '06:45 ↑ (110°)', '18:13 ↑ (250°)', '11:27:43', '+0:29'), (2019, 'January', 24, '91.00', '81.50', '72.00', '75.67', '-3.67', '06:45 ↑ (110°)', '18:14 ↑ (250°)', '11:28:13', '+0:30'), (2019, 'January', 25, '91.00', '81.20', '70.00', '75.44', '-5.44', '06:45 ↑ (109°)', '18:14 ↑ (251°)', '11:28:44', '+0:30'), (2019, 'January', 26, '88.00', '80.50', '72.00', '75.31', '-3.31', '06:45 ↑ (109°)', '18:15 ↑ (251°)', '11:29:16', '+0:31'), (2019, 'January', 27, '88.00', '79.70', '72.00', '75.19', '-3.19', '06:45 ↑ (109°)', '18:15 ↑ (251°)', '11:29:48', '+0:32'), (2019, 'January', 28, '90.00', '80.30', '70.00', '75.00', '-5.00', '06:45 ↑ (109°)', '18:16 ↑ (251°)', '11:30:21', '+0:32'), (2019, 'January', 29, '91.00', '81.40', '72.00', '74.90', '-2.90', '06:45 ↑ (108°)', '18:16 ↑ (252°)', '11:30:54', '+0:33'), (2019, 'January', 30, '91.00', '82.80', '73.00', '74.83', '-1.83', '06:45 ↑ (108°)', '18:17 ↑ (252°)', '11:31:28', '+0:33'), (2019, 'January', 31, '93.00', '84.80', '77.00', '74.90', '2.10', '06:45 ↑ (108°)', '18:17 ↑ (252°)', '11:32:02', '+0:34'), (2019, 'February', 1, '91.00', '84.40', '79.00', '75.03', '3.97', '06:45 ↑ (108°)', '18:17 ↑ (253°)', '11:32:38', '+0:35'), (2019, 'February', 2, '91.00', '83.80', '77.00', '75.09', '1.91', '06:45 ↑ (107°)', '18:18 ↑ (253°)', '11:33:13', '+0:35'), (2019, 'February', 3, '95.00', '84.80', '77.00', '75.15', '1.85', '06:44 ↑ (107°)', '18:18 ↑ (253°)', '11:33:49', '+0:36'), (2019, 'February', 4, '95.00', '86.30', '79.00', '75.26', '3.74', '06:44 ↑ (107°)', '18:19 ↑ (253°)', '11:34:26', '+0:36'), (2019, 'February', 5, '95.00', '86.40', '81.00', '75.42', '5.58', '06:44 ↑ (106°)', '18:19 ↑ (254°)', '11:35:03', '+0:37'), (2019, 'February', 6, '93.00', '85.20', '81.00', '75.57', '5.43', '06:44 ↑ (106°)', '18:19 ↑ (254°)', '11:35:41', '+0:37'), (2019, 'February', 7, '93.00', '85.80', '81.00', '75.71', '5.29', '06:44 ↑ (106°)', '18:20 ↑ (254°)', '11:36:19', '+0:38'), (2019, 'February', 8, '95.00', '86.10', 
'79.00', '75.79', '3.21', '06:43 ↑ (105°)', '18:20 ↑ (255°)', '11:36:57', '+0:38'), (2019, 'February', 9, '95.00', '86.40', '79.00', '75.88', '3.13', '06:43 ↑ (105°)', '18:21 ↑ (255°)', '11:37:36', '+0:38'), (2019, 'February', 10, '95.00', '86.40', '81.00', '76.00', '5.00', '06:43 ↑ (105°)', '18:21 ↑ (255°)', '11:38:16', '+0:39'), (2019, 'February', 11, '93.00', '85.80', '82.00', '76.14', '5.86', '06:42 ↑ (104°)', '18:21 ↑ (256°)', '11:38:55', '+0:39'), (2019, 'February', 12, '93.00', '85.70', '79.00', '76.21', '2.79', '06:42 ↑ (104°)', '18:22 ↑ (256°)', '11:39:35', '+0:40'), (2019, 'February', 13, '95.00', '87.10', '79.00', '76.27', '2.73', '06:42 ↑ (104°)', '18:22 ↑ (256°)', '11:40:16', '+0:40'), (2019, 'February', 14, '93.00', '85.30', '81.00', '76.38', '4.62', '06:41 ↑ (103°)', '18:22 ↑ (257°)', '11:40:57', '+0:40'), (2019, 'February', 15, '93.00', '84.90', '79.00', '76.43', '2.57', '06:41 ↑ (103°)', '18:23 ↑ (257°)', '11:41:38', '+0:41'), (2019, 'February', 16, '93.00', '84.80', '79.00', '76.49', '2.51', '06:41 ↑ (103°)', '18:23 ↑ (257°)', '11:42:19', '+0:41'), (2019, 'February', 17, '91.00', '84.30', '81.00', '76.58', '4.42', '06:40 ↑ (102°)', '18:23 ↑ (258°)', '11:43:01', '+0:41'), (2019, 'February', 18, '93.00', '84.60', '79.00', '76.63', '2.37', '06:40 ↑ (102°)', '18:23 ↑ (258°)', '11:43:43', '+0:42'), (2019, 'February', 19, '93.00', '84.40', '75.00', '76.60', '-1.60', '06:39 ↑ (102°)', '18:24 ↑ (259°)', '11:44:25', '+0:42'), (2019, 'February', 20, '93.00', '85.70', '77.00', '76.61', '0.39', '06:39 ↑ (101°)', '18:24 ↑ (259°)', '11:45:08', '+0:42'), (2019, 'February', 21, '95.00', '86.00', '79.00', '76.65', '2.35', '06:38 ↑ (101°)', '18:24 ↑ (259°)', '11:45:51', '+0:42'), (2019, 'February', 22, '95.00', '85.90', '77.00', '76.66', '0.34', '06:38 ↑ (100°)', '18:24 ↑ (260°)', '11:46:34', '+0:43'), (2019, 'February', 23, '97.00', '87.00', '79.00', '76.70', '2.30', '06:37 ↑ (100°)', '18:25 ↑ (260°)', '11:47:17', '+0:43'), (2019, 'February', 24, '95.00', '86.60', 
'82.00', '76.80', '5.20', '06:37 ↑ (100°)', '18:25 ↑ (260°)', '11:48:01', '+0:43'), (2019, 'February', 25, '95.00', '86.20', '81.00', '76.88', '4.13', '06:36 ↑ (99°)', '18:25 ↑ (261°)', '11:48:45', '+0:43'), (2019, 'February', 26, '93.00', '85.40', '82.00', '76.96', '5.04', '06:36 ↑ (99°)', '18:25 ↑ (261°)', '11:49:29', '+0:44'), (2019, 'February', 27, '93.00', '84.70', '77.00', '76.97', '0.03', '06:35 ↑ (99°)', '18:26 ↑ (262°)', '11:50:13', '+0:44'), (2019, 'February', 28, '95.00', '85.60', '79.00', '77.00', '2.00', '06:35 ↑ (98°)', '18:26 ↑ (262°)', '11:50:57', '+0:44'), (2019, 'March', 1, '95.00', '86.00', '79.00', '77.03', '1.97', '06:34 ↑ (98°)', '18:26 ↑ (262°)', '11:51:42', '+0:44'), (2019, 'March', 2, '95.00', '86.90', '81.00', '77.10', '3.90', '06:34 ↑ (97°)', '18:26 ↑ (263°)', '11:52:26', '+0:44'), (2019, 'March', 3, '97.00', '87.20', '82.00', '77.18', '4.82', '06:33 ↑ (97°)', '18:26 ↑ (263°)', '11:53:11', '+0:44'), (2019, 'March', 4, '95.00', '86.70', '82.00', '77.25', '4.75', '06:32 ↑ (97°)', '18:26 ↑ (264°)', '11:53:56', '+0:44'), (2019, 'March', 5, '93.00', '85.80', '79.00', '77.28', '1.72', '06:32 ↑ (96°)', '18:27 ↑ (264°)', '11:54:41', '+0:45'), (2019, 'March', 6, '97.00', '86.50', '77.00', '77.28', '-0.28', '06:31 ↑ (96°)', '18:27 ↑ (264°)', '11:55:27', '+0:45'), (2019, 'March', 7, '95.00', '86.20', '79.00', '77.30', '1.70', '06:31 ↑ (95°)', '18:27 ↑ (265°)', '11:56:12', '+0:45'), (2019, 'March', 8, '97.00', '86.20', '77.00', '77.30', '-0.30', '06:30 ↑ (95°)', '18:27 ↑ (265°)', '11:56:58', '+0:45'), (2019, 'March', 9, '93.00', '86.00', '81.00', '77.35', '3.65', '06:29 ↑ (95°)', '18:27 ↑ (266°)', '11:57:43', '+0:45'), (2019, 'March', 10, '97.00', '86.60', '81.00', '77.41', '3.59', '06:29 ↑ (94°)', '18:27 ↑ (266°)', '11:58:29', '+0:45'), (2019, 'March', 11, '99.00', '86.80', '77.00', '77.40', '-0.40', '06:28 ↑ (94°)', '18:27 ↑ (266°)', '11:59:15', '+0:45'), (2019, 'March', 12, '100.00', '87.60', '77.00', '77.39', '-0.39', '06:28 ↑ (93°)', '18:28 ↑ 
(267°)', '12:00:01', '+0:45'), (2019, 'March', 13, '99.00', '87.10', '77.00', '77.39', '-0.39', '06:27 ↑ (93°)', '18:28 ↑ (267°)', '12:00:47', '+0:45'), (2019, 'March', 14, '95.00', '86.30', '79.00', '77.41', '1.59', '06:26 ↑ (93°)', '18:28 ↑ (268°)', '12:01:33', '+0:45'), (2019, 'March', 15, '97.00', '87.60', '81.00', '77.46', '3.54', '06:26 ↑ (92°)', '18:28 ↑ (268°)', '12:02:19', '+0:46'), (2019, 'March', 16, '86.00', '82.70', '81.00', '77.51', '3.49', '06:25 ↑ (92°)', '18:28 ↑ (268°)', '12:03:05', '+0:46'), (2019, 'March', 17, '97.00', '85.80', '77.00', '77.50', '-0.50', '06:24 ↑ (91°)', '18:28 ↑ (269°)', '12:03:51', '+0:46'), (2019, 'March', 18, '97.00', '87.40', '81.00', '77.55', '3.45', '06:24 ↑ (91°)', '18:28 ↑ (269°)', '12:04:37', '+0:46'), (2019, 'March', 19, '97.00', '87.80', '82.00', '77.60', '4.40', '06:23 ↑ (91°)', '18:28 ↑ (270°)', '12:05:23', '+0:46'), (2019, 'March', 20, '95.00', '87.00', '81.00', '77.65', '3.35', '06:22 ↑ (90°)', '18:28 ↑ (270°)', '12:06:09', '+0:46'), (2019, 'March', 21, '99.00', '87.20', '81.00', '77.69', '3.31', '06:22 ↑ (90°)', '18:28 ↑ (270°)', '12:06:56', '+0:46'), (2019, 'March', 22, '102.00', '89.30', '81.00', '77.73', '3.27', '06:21 ↑ (89°)', '18:29 ↑ (271°)', '12:07:42', '+0:46'), (2019, 'March', 23, '100.00', '89.00', '81.00', '77.77', '3.23', '06:20 ↑ (89°)', '18:29 ↑ (271°)', '12:08:28', '+0:46'), (2019, 'March', 24, '99.00', '87.90', '81.00', '77.81', '3.19', '06:19 ↑ (89°)', '18:29 ↑ (272°)', '12:09:14', '+0:46'), (2019, 'March', 25, '88.00', '83.40', '81.00', '77.85', '3.15', '06:19 ↑ (88°)', '18:29 ↑ (272°)', '12:10:00', '+0:46'), (2019, 'March', 26, '97.00', '86.60', '79.00', '77.86', '1.14', '06:18 ↑ (88°)', '18:29 ↑ (272°)', '12:10:47', '+0:46'), (2019, 'March', 27, '95.00', '86.80', '81.00', '77.90', '3.10', '06:17 ↑ (87°)', '18:29 ↑ (273°)', '12:11:33', '+0:46'), (2019, 'March', 28, '99.00', '88.40', '82.00', '77.94', '4.06', '06:17 ↑ (87°)', '18:29 ↑ (273°)', '12:12:19', '+0:46'), (2019, 'March', 29, '95.00', 
'88.70', '84.00', '78.01', '5.99', '06:16 ↑ (87°)', '18:29 ↑ (274°)', '12:13:05', '+0:46'), (2019, 'March', 30, '97.00', '88.40', '82.00', '78.06', '3.94', '06:15 ↑ (86°)', '18:29 ↑ (274°)', '12:13:51', '+0:45'), (2019, 'March', 31, '99.00', '87.60', '82.00', '78.10', '3.90', '06:15 ↑ (86°)', '18:29 ↑ (274°)', '12:14:37', '+0:45'), (2019, 'April', 1, '97.00', '86.80', '77.00', '78.09', '-1.09', '06:14 ↑ (85°)', '18:29 ↑ (275°)', '12:15:23', '+0:45'), (2019, 'April', 2, '97.00', '86.30', '77.00', '78.08', '-1.08', '06:13 ↑ (85°)', '18:29 ↑ (275°)', '12:16:08', '+0:45'), (2019, 'April', 3, '97.00', '86.00', '81.00', '78.11', '2.89', '06:13 ↑ (85°)', '18:30 ↑ (276°)', '12:16:54', '+0:45'), (2019, 'April', 4, '99.00', '87.10', '79.00', '78.12', '0.88', '06:12 ↑ (84°)', '18:30 ↑ (276°)', '12:17:40', '+0:45'), (2019, 'April', 5, '97.00', '88.80', '82.00', '78.16', '3.84', '06:11 ↑ (84°)', '18:30 ↑ (276°)', '12:18:25', '+0:45'); INSERT INTO `bangkok` (`year`, `month`, `date`, `tempMax`, `tempAvg`, `tempMin`, `tempMonthAvg`, `tempDiff`, `sunrise`, `sunset`, `length`, `difference`) VALUES (2019, 'April', 6, '97.00', '87.20', '81.00', '78.19', '2.81', '06:11 ↑ (83°)', '18:30 ↑ (277°)', '12:19:11', '+0:45'), (2019, 'April', 7, '95.00', '85.90', '77.00', '78.18', '-1.18', '06:10 ↑ (83°)', '18:30 ↑ (277°)', '12:19:56', '+0:45'), (2019, 'April', 8, '99.00', '86.10', '77.00', '78.16', '-1.16', '06:09 ↑ (83°)', '18:30 ↑ (278°)', '12:20:41', '+0:45'), (2019, 'April', 9, '99.00', '89.80', '82.00', '78.20', '3.80', '06:09 ↑ (82°)', '18:30 ↑ (278°)', '12:21:26', '+0:44'), (2019, 'April', 10, '100.00', '90.70', '84.00', '78.26', '5.74', '06:08 ↑ (82°)', '18:30 ↑ (278°)', '12:22:11', '+0:44'), (2019, 'April', 11, '97.00', '90.00', '84.00', '78.32', '5.68', '06:07 ↑ (81°)', '18:30 ↑ (279°)', '12:22:55', '+0:44'), (2019, 'April', 12, '99.00', '90.10', '84.00', '78.37', '5.63', '06:07 ↑ (81°)', '18:30 ↑ (279°)', '12:23:40', '+0:44'), (2019, 'April', 13, '97.00', '90.30', '84.00', '78.43', 
'5.57', '06:06 ↑ (81°)', '18:31 ↑ (280°)', '12:24:24', '+0:44'), (2019, 'April', 14, '99.00', '90.40', '84.00', '78.48', '5.52', '06:05 ↑ (80°)', '18:31 ↑ (280°)', '12:25:08', '+0:44'), (2019, 'April', 15, '99.00', '90.40', '86.00', '78.55', '7.45', '06:05 ↑ (80°)', '18:31 ↑ (280°)', '12:25:52', '+0:43'), (2019, 'April', 16, '99.00', '90.80', '84.00', '78.60', '5.40', '06:04 ↑ (80°)', '18:31 ↑ (281°)', '12:26:36', '+0:43'), (2019, 'April', 17, '100.00', '91.70', '86.00', '78.67', '7.33', '06:04 ↑ (79°)', '18:31 ↑ (281°)', '12:27:20', '+0:43'), (2019, 'April', 18, '99.00', '91.60', '86.00', '78.74', '7.26', '06:03 ↑ (79°)', '18:31 ↑ (281°)', '12:28:03', '+0:43'), (2019, 'April', 19, '100.00', '91.70', '86.00', '78.81', '7.19', '06:02 ↑ (78°)', '18:31 ↑ (282°)', '12:28:46', '+0:43'), (2019, 'April', 20, '104.00', '92.80', '86.00', '78.87', '7.13', '06:02 ↑ (78°)', '18:31 ↑ (282°)', '12:29:29', '+0:42'), (2019, 'April', 21, '100.00', '92.20', '86.00', '78.94', '7.06', '06:01 ↑ (78°)', '18:32 ↑ (282°)', '12:30:11', '+0:42'), (2019, 'April', 22, '100.00', '91.80', '88.00', '79.02', '8.98', '06:01 ↑ (77°)', '18:32 ↑ (283°)', '12:30:54', '+0:42'), (2019, 'April', 23, '99.00', '87.80', '77.00', '79.00', '-2.00', '06:00 ↑ (77°)', '18:32 ↑ (283°)', '12:31:36', '+0:42'), (2019, 'April', 24, '100.00', '91.90', '84.00', '79.04', '4.96', '06:00 ↑ (77°)', '18:32 ↑ (283°)', '12:32:17', '+0:41'), (2019, 'April', 25, '100.00', '89.30', '73.00', '78.99', '-5.99', '05:59 ↑ (76°)', '18:32 ↑ (284°)', '12:32:59', '+0:41'), (2019, 'April', 26, '100.00', '87.30', '82.00', '79.02', '2.98', '05:59 ↑ (76°)', '18:32 ↑ (284°)', '12:33:40', '+0:41'), (2019, 'April', 27, '95.00', '87.50', '81.00', '79.03', '1.97', '05:58 ↑ (76°)', '18:33 ↑ (284°)', '12:34:21', '+0:40'), (2019, 'April', 28, '99.00', '90.70', '84.00', '79.08', '4.92', '05:58 ↑ (75°)', '18:33 ↑ (285°)', '12:35:01', '+0:40'), (2019, 'April', 29, '99.00', '90.60', '82.00', '79.10', '2.90', '05:57 ↑ (75°)', '18:33 ↑ (285°)', 
'12:35:41', '+0:40'), (2019, 'April', 30, '99.00', '90.90', '84.00', '79.14', '4.86', '05:57 ↑ (75°)', '18:33 ↑ (285°)', '12:36:21', '+0:39'), (2019, 'May', 1, '100.00', '91.30', '84.00', '79.18', '4.82', '05:56 ↑ (74°)', '18:33 ↑ (286°)', '12:37:00', '+0:39'), (2019, 'May', 2, '100.00', '92.20', '84.00', '79.22', '4.78', '05:56 ↑ (74°)', '18:34 ↑ (286°)', '12:37:39', '+0:38'), (2019, 'May', 3, '102.00', '92.50', '86.00', '79.28', '6.72', '05:55 ↑ (74°)', '18:34 ↑ (286°)', '12:38:18', '+0:38'), (2019, 'May', 4, '100.00', '90.80', '79.00', '79.27', '-0.27', '05:55 ↑ (74°)', '18:34 ↑ (287°)', '12:38:56', '+0:38'), (2019, 'May', 5, '99.00', '89.80', '84.00', '79.31', '4.69', '05:55 ↑ (73°)', '18:34 ↑ (287°)', '12:39:34', '+0:37'), (2019, 'May', 6, '99.00', '91.60', '86.00', '79.37', '6.63', '05:54 ↑ (73°)', '18:34 ↑ (287°)', '12:40:11', '+0:37'), (2019, 'May', 7, '99.00', '90.70', '84.00', '79.40', '4.60', '05:54 ↑ (73°)', '18:35 ↑ (288°)', '12:40:48', '+0:36'), (2019, 'May', 8, '99.00', '90.40', '84.00', '79.44', '4.56', '05:53 ↑ (72°)', '18:35 ↑ (288°)', '12:41:24', '+0:36'), (2019, 'May', 9, '97.00', '89.80', '84.00', '79.47', '4.53', '05:53 ↑ (72°)', '18:35 ↑ (288°)', '12:42:00', '+0:35'), (2019, 'May', 10, '97.00', '88.40', '77.00', '79.45', '-2.45', '05:53 ↑ (72°)', '18:35 ↑ (288°)', '12:42:35', '+0:35'), (2019, 'May', 11, '95.00', '88.30', '81.00', '79.47', '1.53', '05:52 ↑ (72°)', '18:36 ↑ (289°)', '12:43:09', '+0:34'), (2019, 'May', 12, '95.00', '86.60', '79.00', '79.46', '-0.46', '05:52 ↑ (71°)', '18:36 ↑ (289°)', '12:43:44', '+0:34'), (2019, 'May', 13, '88.00', '82.30', '77.00', '79.44', '-2.44', '05:52 ↑ (71°)', '18:36 ↑ (289°)', '12:44:17', '+0:33'), (2019, 'May', 14, '90.00', '82.90', '79.00', '79.44', '-0.44', '05:52 ↑ (71°)', '18:36 ↑ (289°)', '12:44:50', '+0:33'), (2019, 'May', 15, '95.00', '86.60', '79.00', '79.44', '-0.44', '05:51 ↑ (70°)', '18:37 ↑ (290°)', '12:45:23', '+0:32'), (2019, 'May', 16, '93.00', '86.60', '81.00', '79.45', '1.55', '05:51 ↑ 
(70°)', '18:37 ↑ (290°)', '12:45:54', '+0:31'), (2019, 'May', 17, '97.00', '88.90', '81.00', '79.46', '1.54', '05:51 ↑ (70°)', '18:37 ↑ (290°)', '12:46:26', '+0:31'), (2019, 'May', 18, '97.00', '89.80', '82.00', '79.48', '2.52', '05:51 ↑ (70°)', '18:37 ↑ (290°)', '12:46:56', '+0:30'), (2019, 'May', 19, '100.00', '91.70', '84.00', '79.51', '4.49', '05:50 ↑ (70°)', '18:38 ↑ (291°)', '12:47:26', '+0:29'), (2019, 'May', 20, '99.00', '90.50', '86.00', '79.56', '6.44', '05:50 ↑ (69°)', '18:38 ↑ (291°)', '12:47:55', '+0:29'), (2019, 'May', 21, '99.00', '88.70', '82.00', '79.57', '2.43', '05:50 ↑ (69°)', '18:38 ↑ (291°)', '12:48:24', '+0:28'), (2019, 'May', 22, '93.00', '84.70', '77.00', '79.56', '-2.56', '05:50 ↑ (69°)', '18:39 ↑ (291°)', '12:48:51', '+0:27'), (2019, 'May', 23, '93.00', '83.50', '77.00', '79.54', '-2.54', '05:50 ↑ (69°)', '18:39 ↑ (291°)', '12:49:19', '+0:27'), (2019, 'May', 24, '95.00', '86.30', '79.00', '79.53', '-0.53', '05:50 ↑ (68°)', '18:39 ↑ (292°)', '12:49:45', '+0:26'), (2019, 'May', 25, '95.00', '87.70', '82.00', '79.55', '2.45', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:50:11', '+0:25'), (2019, 'May', 26, '95.00', '87.80', '81.00', '79.56', '1.44', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:50:35', '+0:24'), (2019, 'May', 27, '95.00', '87.00', '79.00', '79.56', '-0.56', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:50:59', '+0:24'), (2019, 'May', 28, '93.00', '86.20', '77.00', '79.54', '-2.54', '05:49 ↑ (68°)', '18:40 ↑ (292°)', '12:51:23', '+0:23'), (2019, 'May', 29, '95.00', '85.70', '79.00', '79.54', '-0.54', '05:49 ↑ (68°)', '18:41 ↑ (293°)', '12:51:45', '+0:22'), (2019, 'May', 30, '88.00', '83.30', '79.00', '79.53', '-0.53', '05:49 ↑ (67°)', '18:41 ↑ (293°)', '12:52:07', '+0:21'), (2019, 'May', 31, '95.00', '86.60', '79.00', '79.53', '-0.53', '05:49 ↑ (67°)', '18:41 ↑ (293°)', '12:52:28', '+0:20'), (2019, 'June', 1, '97.00', '88.70', '81.00', '79.54', '1.46', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:52:48', '+0:19'), (2019, 'June', 2, '97.00', '87.30', 
'77.00', '79.52', '-2.52', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:53:07', '+0:19'), (2019, 'June', 3, '95.00', '87.20', '81.00', '79.53', '1.47', '05:49 ↑ (67°)', '18:42 ↑ (293°)', '12:53:25', '+0:18'), (2019, 'June', 4, '97.00', '89.70', '82.00', '79.55', '2.45', '05:49 ↑ (67°)', '18:43 ↑ (293°)', '12:53:42', '+0:17'), (2019, 'June', 5, '95.00', '85.70', '81.00', '79.56', '1.44', '05:49 ↑ (67°)', '18:43 ↑ (293°)', '12:53:59', '+0:16'), (2019, 'June', 6, '95.00', '84.20', '77.00', '79.54', '-2.54', '05:49 ↑ (66°)', '18:43 ↑ (294°)', '12:54:14', '+0:15'), (2019, 'June', 7, '93.00', '83.10', '79.00', '79.54', '-0.54', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:54:29', '+0:14'), (2019, 'June', 8, '93.00', '85.40', '79.00', '79.53', '-0.53', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:54:43', '+0:13'), (2019, 'June', 9, '93.00', '84.10', '77.00', '79.52', '-2.52', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:54:55', '+0:12'), (2019, 'June', 10, '86.00', '84.70', '84.00', '79.55', '4.45', '05:49 ↑ (66°)', '18:44 ↑ (294°)', '12:55:07', '+0:11'), (2019, 'June', 11, '88.00', '82.00', '75.00', '79.52', '-4.52', '05:49 ↑ (66°)', '18:45 ↑ (294°)', '12:55:18', '+0:10'), (2019, 'June', 12, '93.00', '86.40', '81.00', '79.53', '1.47', '05:50 ↑ (66°)', '18:45 ↑ (294°)', '12:55:28', '+0:09'), (2019, 'June', 13, '93.00', '86.70', '79.00', '79.52', '-0.52', '05:50 ↑ (66°)', '18:45 ↑ (294°)', '12:55:36', '+0:08'), (2019, 'June', 14, '93.00', '87.30', '81.00', '79.53', '1.47', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:55:44', '+0:07'), (2019, 'June', 15, '95.00', '87.40', '79.00', '79.53', '-0.53', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:55:51', '+0:06'), (2019, 'June', 16, '93.00', '84.20', '79.00', '79.53', '-0.53', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:55:57', '+0:05'), (2019, 'June', 17, '88.00', '83.40', '81.00', '79.54', '1.46', '05:50 ↑ (66°)', '18:46 ↑ (294°)', '12:56:02', '+0:04'), (2019, 'June', 18, '91.00', '85.50', '82.00', '79.55', '2.45', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:06', 
'+0:03'), (2019, 'June', 19, '93.00', '86.10', '81.00', '79.56', '1.44', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:09', '+0:02'), (2019, 'June', 20, '93.00', '87.20', '79.00', '79.56', '-0.56', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:11', '+0:01'), (2019, 'June', 21, '95.00', '89.40', '82.00', '79.57', '2.43', '05:51 ↑ (66°)', '18:47 ↑ (294°)', '12:56:12', '< 1s'), (2019, 'June', 22, '97.00', '90.10', '84.00', '79.60', '4.40', '05:51 ↑ (66°)', '18:48 ↑ (294°)', '12:56:12', '< 1s'), (2019, 'June', 23, '97.00', '87.30', '81.00', '79.60', '1.40', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:10', '−0:01'), (2019, 'June', 24, '91.00', '84.60', '79.00', '79.60', '-0.60', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:08', '−0:02'), (2019, 'June', 25, '93.00', '84.30', '79.00', '79.60', '-0.60', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:05', '−0:03'), (2019, 'June', 26, '93.00', '85.80', '81.00', '79.60', '1.40', '05:52 ↑ (66°)', '18:48 ↑ (294°)', '12:56:01', '−0:04'), (2019, 'June', 27, '91.00', '84.30', '77.00', '79.59', '-2.59', '05:53 ↑ (66°)', '18:48 ↑ (294°)', '12:55:56', '−0:05'), (2019, 'June', 28, '91.00', '84.90', '81.00', '79.60', '1.40', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:50', '−0:06'), (2019, 'June', 29, '90.00', '85.00', '79.00', '79.59', '-0.59', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:43', '−0:07'), (2019, 'June', 30, '88.00', '83.10', '77.00', '79.58', '-2.58', '05:53 ↑ (66°)', '18:49 ↑ (294°)', '12:55:35', '−0:08'), (2019, 'July', 1, '90.00', '84.20', '79.00', '79.58', '-0.58', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:55:26', '−0:08'), (2019, 'July', 2, '93.00', '85.80', '81.00', '79.58', '1.42', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:55:16', '−0:09'), (2019, 'July', 3, '82.00', '80.00', '79.00', '79.58', '-0.58', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:55:05', '−0:10'), (2019, 'July', 4, '88.00', '81.30', '77.00', '79.57', '-2.57', '05:54 ↑ (66°)', '18:49 ↑ (294°)', '12:54:53', '−0:11'), (2019, 'July', 5, '82.00', '78.60', '75.00', '79.54', '-4.54', 
'05:55 ↑ (66°)', '18:49 ↑ (294°)', '12:54:40', '−0:12'), (2019, 'July', 6, '90.00', '82.60', '77.00', '79.53', '-2.53', '05:55 ↑ (66°)', '18:49 ↑ (294°)', '12:54:27', '−0:13'), (2019, 'July', 7, '91.00', '84.70', '79.00', '79.53', '-0.53', '05:55 ↑ (66°)', '18:49 ↑ (294°)', '12:54:12', '−0:14'), (2019, 'July', 8, '95.00', '87.10', '82.00', '79.54', '2.46', '05:56 ↑ (67°)', '18:49 ↑ (293°)', '12:53:56', '−0:15'), (2019, 'July', 9, '93.00', '85.40', '77.00', '79.53', '-2.53', '05:56 ↑ (67°)', '18:50 ↑ (293°)', '12:53:40', '−0:16'), (2019, 'July', 10, '93.00', '85.50', '79.00', '79.52', '-0.52', '05:56 ↑ (67°)', '18:50 ↑ (293°)', '12:53:23', '−0:17'), (2019, 'July', 11, '95.00', '87.00', '81.00', '79.53', '1.47', '05:56 ↑ (67°)', '18:49 ↑ (293°)', '12:53:04', '−0:18'), (2019, 'July', 12, '97.00', '87.70', '82.00', '79.54', '2.46', '05:57 ↑ (67°)', '18:49 ↑ (293°)', '12:52:45', '−0:19'), (2019, 'July', 13, '95.00', '87.20', '79.00', '79.54', '-0.54', '05:57 ↑ (67°)', '18:49 ↑ (293°)', '12:52:25', '−0:19'), (2019, 'July', 14, '93.00', '87.50', '82.00', '79.55', '2.45', '05:57 ↑ (67°)', '18:49 ↑ (293°)', '12:52:05', '−0:20'), (2019, 'July', 15, '93.00', '85.90', '79.00', '79.55', '-0.55', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:51:43', '−0:21'), (2019, 'July', 16, '95.00', '87.10', '81.00', '79.56', '1.44', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:51:21', '−0:22'), (2019, 'July', 17, '93.00', '87.10', '82.00', '79.57', '2.43', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:50:57', '−0:23'), (2019, 'July', 18, '95.00', '87.60', '81.00', '79.58', '1.42', '05:58 ↑ (68°)', '18:49 ↑ (292°)', '12:50:33', '−0:23'), (2019, 'July', 19, '97.00', '86.70', '75.00', '79.56', '-4.56', '05:59 ↑ (68°)', '18:49 ↑ (292°)', '12:50:09', '−0:24'), (2019, 'July', 20, '93.00', '84.30', '77.00', '79.54', '-2.54', '05:59 ↑ (68°)', '18:49 ↑ (292°)', '12:49:43', '−0:25'), (2019, 'July', 21, '93.00', '86.30', '81.00', '79.55', '1.45', '05:59 ↑ (69°)', '18:49 ↑ (291°)', '12:49:17', '−0:26'), (2019, 'July', 22, 
'95.00', '87.70', '81.00', '79.56', '1.44', '06:00 ↑ (69°)', '18:48 ↑ (291°)', '12:48:50', '−0:26'), (2019, 'July', 23, '95.00', '85.70', '73.00', '79.52', '-6.52', '06:00 ↑ (69°)', '18:48 ↑ (291°)', '12:48:22', '−0:27'), (2019, 'July', 24, '91.00', '85.60', '81.00', '79.53', '1.47', '06:00 ↑ (69°)', '18:48 ↑ (291°)', '12:47:54', '−0:28'), (2019, 'July', 25, '91.00', '84.20', '77.00', '79.52', '-2.52', '06:00 ↑ (69°)', '18:48 ↑ (290°)', '12:47:25', '−0:28'), (2019, 'July', 26, '91.00', '84.50', '79.00', '79.52', '-0.52', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:46:55', '−0:29'), (2019, 'July', 27, '91.00', '86.40', '82.00', '79.53', '2.47', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:46:25', '−0:30'), (2019, 'July', 28, '95.00', '88.10', '82.00', '79.54', '2.46', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:45:54', '−0:30'), (2019, 'July', 29, '95.00', '87.30', '82.00', '79.55', '2.45', '06:01 ↑ (70°)', '18:47 ↑ (290°)', '12:45:23', '−0:31'), (2019, 'July', 30, '93.00', '86.90', '81.00', '79.56', '1.44', '06:02 ↑ (71°)', '18:46 ↑ (289°)', '12:44:51', '−0:32'), (2019, 'July', 31, '93.00', '85.10', '79.00', '79.56', '-0.56', '06:02 ↑ (71°)', '18:46 ↑ (289°)', '12:44:18', '−0:32'), (2019, 'August', 1, '90.00', '83.90', '79.00', '79.55', '-0.55', '06:02 ↑ (71°)', '18:46 ↑ (289°)', '12:43:45', '−0:33'), (2019, 'August', 2, '91.00', '84.70', '77.00', '79.54', '-2.54', '06:02 ↑ (71°)', '18:45 ↑ (289°)', '12:43:11', '−0:33'), (2019, 'August', 3, '90.00', '85.10', '81.00', '79.55', '1.45', '06:02 ↑ (72°)', '18:45 ↑ (288°)', '12:42:36', '−0:34'), (2019, 'August', 4, '88.00', '82.90', '77.00', '79.54', '-2.54', '06:03 ↑ (72°)', '18:45 ↑ (288°)', '12:42:02', '−0:34'), (2019, 'August', 5, '91.00', '82.60', '77.00', '79.53', '-2.53', '06:03 ↑ (72°)', '18:44 ↑ (288°)', '12:41:26', '−0:35'), (2019, 'August', 6, '90.00', '83.40', '79.00', '79.52', '-0.52', '06:03 ↑ (72°)', '18:44 ↑ (287°)', '12:40:50', '−0:35'), (2019, 'August', 7, '90.00', '83.80', '79.00', '79.52', '-0.52', '06:03 ↑ (73°)', 
'18:43 ↑ (287°)', '12:40:14', '−0:36'), (2019, 'August', 8, '90.00', '83.40', '79.00', '79.52', '-0.52', '06:03 ↑ (73°)', '18:43 ↑ (287°)', '12:39:37', '−0:36'), (2019, 'August', 9, '91.00', '84.00', '79.00', '79.52', '-0.52', '06:04 ↑ (73°)', '18:43 ↑ (287°)', '12:39:00', '−0:37'), (2019, 'August', 10, '91.00', '84.50', '81.00', '79.52', '1.48', '06:04 ↑ (74°)', '18:42 ↑ (286°)', '12:38:22', '−0:37'), (2019, 'August', 11, '91.00', '85.00', '81.00', '79.53', '1.47', '06:04 ↑ (74°)', '18:42 ↑ (286°)', '12:37:44', '−0:38'), (2019, 'August', 12, '93.00', '85.50', '81.00', '79.54', '1.46', '06:04 ↑ (74°)', '18:41 ↑ (286°)', '12:37:06', '−0:38'), (2019, 'August', 13, '90.00', '85.40', '81.00', '79.54', '1.46', '06:04 ↑ (75°)', '18:41 ↑ (285°)', '12:36:27', '−0:38'), (2019, 'August', 14, '93.00', '85.90', '81.00', '79.55', '1.45', '06:04 ↑ (75°)', '18:40 ↑ (285°)', '12:35:48', '−0:39'), (2019, 'August', 15, '95.00', '87.10', '82.00', '79.56', '2.44', '06:04 ↑ (75°)', '18:40 ↑ (285°)', '12:35:09', '−0:39'), (2019, 'August', 16, '95.00', '87.00', '79.00', '79.56', '-0.56', '06:05 ↑ (75°)', '18:39 ↑ (284°)', '12:34:29', '−0:39'), (2019, 'August', 17, '91.00', '85.20', '81.00', '79.56', '1.44', '06:05 ↑ (76°)', '18:39 ↑ (284°)', '12:33:48', '−0:40'), (2019, 'August', 18, '93.00', '86.30', '81.00', '79.57', '1.43', '06:05 ↑ (76°)', '18:38 ↑ (284°)', '12:33:08', '−0:40'), (2019, 'August', 19, '93.00', '86.20', '82.00', '79.58', '2.42', '06:05 ↑ (76°)', '18:37 ↑ (283°)', '12:32:27', '−0:40'), (2019, 'August', 20, '91.00', '83.30', '79.00', '79.58', '-0.58', '06:05 ↑ (77°)', '18:37 ↑ (283°)', '12:31:46', '−0:41'), (2019, 'August', 21, '93.00', '84.50', '77.00', '79.57', '-2.57', '06:05 ↑ (77°)', '18:36 ↑ (283°)', '12:31:05', '−0:41'), (2019, 'August', 22, '91.00', '84.00', '77.00', '79.56', '-2.56', '06:05 ↑ (77°)', '18:36 ↑ (282°)', '12:30:23', '−0:41'), (2019, 'August', 23, '90.00', '84.70', '79.00', '79.55', '-0.55', '06:05 ↑ (78°)', '18:35 ↑ (282°)', '12:29:41', '−0:41'), 
(2019, 'August', 24, '91.00', '84.70', '81.00', '79.56', '1.44', '06:05 ↑ (78°)', '18:34 ↑ (282°)', '12:28:59', '−0:42'), (2019, 'August', 25, '91.00', '84.60', '79.00', '79.56', '-0.56', '06:06 ↑ (79°)', '18:34 ↑ (281°)', '12:28:16', '−0:42'), (2019, 'August', 26, '90.00', '81.60', '77.00', '79.55', '-2.55', '06:06 ↑ (79°)', '18:33 ↑ (281°)', '12:27:34', '−0:42'), (2019, 'August', 27, '90.00', '83.90', '77.00', '79.54', '-2.54', '06:06 ↑ (79°)', '18:33 ↑ (281°)', '12:26:51', '−0:42'), (2019, 'August', 28, '93.00', '85.10', '79.00', '79.53', '-0.53', '06:06 ↑ (80°)', '18:32 ↑ (280°)', '12:26:08', '−0:43'), (2019, 'August', 29, '91.00', '84.70', '79.00', '79.53', '-0.53', '06:06 ↑ (80°)', '18:31 ↑ (280°)', '12:25:25', '−0:43'), (2019, 'August', 30, '82.00', '81.60', '79.00', '79.53', '-0.53', '06:06 ↑ (80°)', '18:31 ↑ (279°)', '12:24:41', '−0:43'), (2019, 'August', 31, '88.00', '82.90', '79.00', '79.53', '-0.53', '06:06 ↑ (81°)', '18:30 ↑ (279°)', '12:23:57', '−0:43'), (2019, 'September', 1, '91.00', '83.00', '79.00', '79.52', '-0.52', '06:06 ↑ (81°)', '18:29 ↑ (279°)', '12:23:14', '−0:43'), (2019, 'September', 2, '90.00', '83.60', '79.00', '79.52', '-0.52', '06:06 ↑ (81°)', '18:29 ↑ (278°)', '12:22:30', '−0:43'), (2019, 'September', 3, '88.00', '81.40', '77.00', '79.51', '-2.51', '06:06 ↑ (82°)', '18:28 ↑ (278°)', '12:21:46', '−0:44'), (2019, 'September', 4, '86.00', '82.10', '79.00', '79.51', '-0.51', '06:06 ↑ (82°)', '18:27 ↑ (278°)', '12:21:01', '−0:44'), (2019, 'September', 5, '91.00', '82.80', '79.00', '79.51', '-0.51', '06:06 ↑ (83°)', '18:26 ↑ (277°)', '12:20:17', '−0:44'), (2019, 'September', 6, '91.00', '83.50', '79.00', '79.51', '-0.51', '06:06 ↑ (83°)', '18:26 ↑ (277°)', '12:19:32', '−0:44'), (2019, 'September', 7, '90.00', '85.40', '81.00', '79.51', '1.49', '06:06 ↑ (83°)', '18:25 ↑ (276°)', '12:18:48', '−0:44'), (2019, 'September', 8, '91.00', '84.10', '81.00', '79.52', '1.48', '06:06 ↑ (84°)', '18:24 ↑ (276°)', '12:18:03', '−0:44'), (2019, 
'September', 9, '91.00', '85.10', '81.00', '79.52', '1.48', '06:06 ↑ (84°)', '18:24 ↑ (276°)', '12:17:18', '−0:44'), (2019, 'September', 10, '91.00', '83.70', '77.00', '79.51', '-2.51', '06:06 ↑ (85°)', '18:23 ↑ (275°)', '12:16:33', '−0:44'), (2019, 'September', 11, '93.00', '84.40', '79.00', '79.51', '-0.51', '06:06 ↑ (85°)', '18:22 ↑ (275°)', '12:15:48', '−0:45'), (2019, 'September', 12, '93.00', '85.30', '77.00', '79.50', '-2.50', '06:06 ↑ (85°)', '18:21 ↑ (275°)', '12:15:03', '−0:45'), (2019, 'September', 13, '93.00', '86.30', '81.00', '79.51', '1.49', '06:06 ↑ (86°)', '18:21 ↑ (274°)', '12:14:18', '−0:45'), (2019, 'September', 14, '90.00', '82.60', '77.00', '79.50', '-2.50', '06:06 ↑ (86°)', '18:20 ↑ (274°)', '12:13:32', '−0:45'), (2019, 'September', 15, '93.00', '84.50', '77.00', '79.49', '-2.49', '06:06 ↑ (86°)', '18:19 ↑ (273°)', '12:12:47', '−0:45'), (2019, 'September', 16, '93.00', '83.00', '75.00', '79.47', '-4.47', '06:06 ↑ (87°)', '18:18 ↑ (273°)', '12:12:02', '−0:45'), (2019, 'September', 17, '95.00', '84.90', '77.00', '79.46', '-2.46', '06:06 ↑ (87°)', '18:18 ↑ (273°)', '12:11:16', '−0:45'), (2019, 'September', 18, '93.00', '83.20', '75.00', '79.44', '-4.44', '06:06 ↑ (88°)', '18:17 ↑ (272°)', '12:10:31', '−0:45'), (2019, 'September', 19, '88.00', '81.00', '77.00', '79.44', '-2.44', '06:07 ↑ (88°)', '18:16 ↑ (272°)', '12:09:46', '−0:45'), (2019, 'September', 20, '88.00', '82.00', '77.00', '79.43', '-2.43', '06:07 ↑ (88°)', '18:16 ↑ (271°)', '12:09:00', '−0:45'), (2019, 'September', 21, '86.00', '78.90', '77.00', '79.42', '-2.42', '06:07 ↑ (89°)', '18:15 ↑ (271°)', '12:08:15', '−0:45'), (2019, 'September', 22, '84.00', '79.90', '75.00', '79.40', '-4.40', '06:07 ↑ (89°)', '18:14 ↑ (271°)', '12:07:29', '−0:45'), (2019, 'September', 23, '84.00', '78.20', '75.00', '79.38', '-4.38', '06:07 ↑ (90°)', '18:13 ↑ (270°)', '12:06:44', '−0:45'), (2019, 'September', 24, '91.00', '83.40', '77.00', '79.37', '-2.37', '06:07 ↑ (90°)', '18:13 ↑ (270°)', '12:05:58', 
'−0:45'), (2019, 'September', 25, '91.00', '85.10', '81.00', '79.38', '1.62', '06:07 ↑ (90°)', '18:12 ↑ (269°)', '12:05:13', '−0:45'), (2019, 'September', 26, '95.00', '86.50', '79.00', '79.38', '-0.38', '06:07 ↑ (91°)', '18:11 ↑ (269°)', '12:04:27', '−0:45'), (2019, 'September', 27, '93.00', '85.80', '81.00', '79.39', '1.61', '06:07 ↑ (91°)', '18:10 ↑ (269°)', '12:03:42', '−0:45'), (2019, 'September', 28, '93.00', '86.40', '79.00', '79.38', '-0.38', '06:07 ↑ (92°)', '18:10 ↑ (268°)', '12:02:56', '−0:45'), (2019, 'September', 29, '95.00', '87.30', '79.00', '79.38', '-0.38', '06:07 ↑ (92°)', '18:09 ↑ (268°)', '12:02:11', '−0:45'), (2019, 'September', 30, '93.00', '86.20', '81.00', '79.39', '1.61', '06:07 ↑ (92°)', '18:08 ↑ (267°)', '12:01:26', '−0:45'), (2019, 'October', 1, '93.00', '84.10', '75.00', '79.37', '-4.37', '06:07 ↑ (93°)', '18:08 ↑ (267°)', '12:00:40', '−0:45'), (2019, 'October', 2, '93.00', '85.10', '79.00', '79.37', '-0.37', '06:07 ↑ (93°)', '18:07 ↑ (267°)', '11:59:55', '−0:45'), (2019, 'October', 3, '91.00', '84.70', '81.00', '79.38', '1.62', '06:07 ↑ (94°)', '18:06 ↑ (266°)', '11:59:10', '−0:45'), (2019, 'October', 4, '91.00', '82.00', '75.00', '79.36', '-4.36', '06:07 ↑ (94°)', '18:06 ↑ (266°)', '11:58:25', '−0:45'), (2019, 'October', 5, '90.00', '80.60', '77.00', '79.35', '-2.35', '06:07 ↑ (94°)', '18:05 ↑ (265°)', '11:57:40', '−0:45'), (2019, 'October', 6, '90.00', '81.10', '77.00', '79.34', '-2.34', '06:07 ↑ (95°)', '18:04 ↑ (265°)', '11:56:55', '−0:44'), (2019, 'October', 7, '91.00', '81.60', '77.00', '79.34', '-2.34', '06:07 ↑ (95°)', '18:04 ↑ (265°)', '11:56:10', '−0:44'), (2019, 'October', 8, '91.00', '82.60', '75.00', '79.32', '-4.32', '06:07 ↑ (96°)', '18:03 ↑ (264°)', '11:55:25', '−0:44'), (2019, 'October', 9, '91.00', '83.30', '79.00', '79.32', '-0.32', '06:08 ↑ (96°)', '18:02 ↑ (264°)', '11:54:41', '−0:44'), (2019, 'October', 10, '93.00', '84.40', '79.00', '79.32', '-0.32', '06:08 ↑ (96°)', '18:02 ↑ (263°)', '11:53:56', '−0:44'), (2019, 
'October', 11, '93.00', '85.80', '77.00', '79.31', '-2.31', '06:08 ↑ (97°)', '18:01 ↑ (263°)', '11:53:12', '−0:44'), (2019, 'October', 12, '93.00', '84.80', '79.00', '79.31', '-0.31', '06:08 ↑ (97°)', '18:00 ↑ (263°)', '11:52:27', '−0:44'), (2019, 'October', 13, '93.00', '85.40', '79.00', '79.31', '-0.31', '06:08 ↑ (98°)', '18:00 ↑ (262°)', '11:51:43', '−0:44'), (2019, 'October', 14, '93.00', '84.20', '75.00', '79.29', '-4.29', '06:08 ↑ (98°)', '17:59 ↑ (262°)', '11:50:59', '−0:43'), (2019, 'October', 15, '91.00', '84.30', '79.00', '79.29', '-0.29', '06:08 ↑ (98°)', '17:58 ↑ (261°)', '11:50:16', '−0:43'), (2019, 'October', 16, '91.00', '85.40', '79.00', '79.29', '-0.29', '06:08 ↑ (99°)', '17:58 ↑ (261°)', '11:49:32', '−0:43'), (2019, 'October', 17, '91.00', '85.40', '79.00', '79.29', '-0.29', '06:08 ↑ (99°)', '17:57 ↑ (261°)', '11:48:49', '−0:43'), (2019, 'October', 18, '93.00', '86.00', '79.00', '79.29', '-0.29', '06:09 ↑ (99°)', '17:57 ↑ (260°)', '11:48:06', '−0:43'), (2019, 'October', 19, '93.00', '86.10', '81.00', '79.29', '1.71', '06:09 ↑ (100°)', '17:56 ↑ (260°)', '11:47:23', '−0:42'), (2019, 'October', 20, '91.00', '86.00', '81.00', '79.30', '1.70', '06:09 ↑ (100°)', '17:56 ↑ (260°)', '11:46:40', '−0:42'), (2019, 'October', 21, '93.00', '87.30', '81.00', '79.31', '1.69', '06:09 ↑ (101°)', '17:55 ↑ (259°)', '11:45:57', '−0:42'), (2019, 'October', 22, '93.00', '86.40', '81.00', '79.31', '1.69', '06:09 ↑ (101°)', '17:55 ↑ (259°)', '11:45:15', '−0:42'), (2019, 'October', 23, '95.00', '88.10', '82.00', '79.32', '2.68', '06:10 ↑ (101°)', '17:54 ↑ (258°)', '11:44:33', '−0:42'), (2019, 'October', 24, '97.00', '88.30', '82.00', '79.33', '2.67', '06:10 ↑ (102°)', '17:54 ↑ (258°)', '11:43:51', '−0:41'), (2019, 'October', 25, '95.00', '87.70', '79.00', '79.33', '-0.33', '06:10 ↑ (102°)', '17:53 ↑ (258°)', '11:43:09', '−0:41'), (2019, 'October', 26, '95.00', '87.70', '81.00', '79.33', '1.67', '06:10 ↑ (102°)', '17:53 ↑ (257°)', '11:42:28', '−0:41'), (2019, 'October', 27, 
'93.00', '87.00', '82.00', '79.34', '2.66', '06:10 ↑ (103°)', '17:52 ↑ (257°)', '11:41:47', '−0:41'), (2019, 'October', 28, '95.00', '86.90', '79.00', '79.34', '-0.34', '06:11 ↑ (103°)', '17:52 ↑ (257°)', '11:41:06', '−0:40'), (2019, 'October', 29, '93.00', '85.70', '81.00', '79.35', '1.65', '06:11 ↑ (103°)', '17:51 ↑ (256°)', '11:40:26', '−0:40'), (2019, 'October', 30, '91.00', '84.80', '81.00', '79.35', '1.65', '06:11 ↑ (104°)', '17:51 ↑ (256°)', '11:39:46', '−0:40'), (2019, 'October', 31, '91.00', '82.90', '75.00', '79.34', '-4.34', '06:12 ↑ (104°)', '17:51 ↑ (256°)', '11:39:06', '−0:39'), (2019, 'November', 1, '82.00', '77.60', '75.00', '79.32', '-4.32', '06:12 ↑ (104°)', '17:50 ↑ (255°)', '11:38:27', '−0:39'), (2019, 'November', 2, '84.00', '79.00', '75.00', '79.31', '-4.31', '06:12 ↑ (105°)', '17:50 ↑ (255°)', '11:37:48', '−0:38'), (2019, 'November', 3, '93.00', '82.60', '73.00', '79.29', '-6.29', '06:12 ↑ (105°)', '17:50 ↑ (255°)', '11:37:09', '−0:38'), (2019, 'November', 4, '90.00', '82.20', '77.00', '79.28', '-2.28', '06:13 ↑ (105°)', '17:49 ↑ (254°)', '11:36:31', '−0:38'), (2019, 'November', 5, '91.00', '84.50', '77.00', '79.28', '-2.28', '06:13 ↑ (106°)', '17:49 ↑ (254°)', '11:35:53', '−0:37'), (2019, 'November', 6, '91.00', '85.00', '79.00', '79.27', '-0.27', '06:13 ↑ (106°)', '17:49 ↑ (254°)', '11:35:16', '−0:37'), (2019, 'November', 7, '91.00', '85.50', '79.00', '79.27', '-0.27', '06:14 ↑ (106°)', '17:48 ↑ (253°)', '11:34:39', '−0:36'), (2019, 'November', 8, '90.00', '83.70', '77.00', '79.27', '-2.27', '06:14 ↑ (107°)', '17:48 ↑ (253°)', '11:34:03', '−0:36'), (2019, 'November', 9, '88.00', '81.50', '77.00', '79.26', '-2.26', '06:14 ↑ (107°)', '17:48 ↑ (253°)', '11:33:27', '−0:35'), (2019, 'November', 10, '88.00', '81.00', '73.00', '79.24', '-6.24', '06:15 ↑ (107°)', '17:48 ↑ (253°)', '11:32:51', '−0:35'), (2019, 'November', 11, '90.00', '82.40', '73.00', '79.22', '-6.22', '06:15 ↑ (108°)', '17:48 ↑ (252°)', '11:32:16', '−0:34'), (2019, 'November', 12, 
'93.00', '84.80', '79.00', '79.22', '-0.22', '06:16 ↑ (108°)', '17:47 ↑ (252°)', '11:31:42', '−0:34'), (2019, 'November', 13, '90.00', '82.50', '77.00', '79.21', '-2.21', '06:16 ↑ (108°)', '17:47 ↑ (252°)', '11:31:08', '−0:33'), (2019, 'November', 14, '90.00', '82.80', '79.00', '79.21', '-0.21', '06:16 ↑ (108°)', '17:47 ↑ (251°)', '11:30:35', '−0:33'), (2019, 'November', 15, '90.00', '83.80', '77.00', '79.20', '-2.20', '06:17 ↑ (109°)', '17:47 ↑ (251°)', '11:30:02', '−0:32'), (2019, 'November', 16, '91.00', '84.00', '75.00', '79.19', '-4.19', '06:17 ↑ (109°)', '17:47 ↑ (251°)', '11:29:30', '−0:32'), (2019, 'November', 17, '93.00', '84.50', '77.00', '79.18', '-2.18', '06:18 ↑ (109°)', '17:47 ↑ (251°)', '11:28:59', '−0:31'), (2019, 'November', 18, '95.00', '85.50', '77.00', '79.18', '-2.18', '06:18 ↑ (109°)', '17:47 ↑ (250°)', '11:28:28', '−0:30'), (2019, 'November', 19, '95.00', '85.90', '79.00', '79.18', '-0.18', '06:19 ↑ (110°)', '17:47 ↑ (250°)', '11:27:57', '−0:30'), (2019, 'November', 20, '90.00', '83.30', '75.00', '79.16', '-4.16', '06:19 ↑ (110°)', '17:47 ↑ (250°)', '11:27:28', '−0:29'), (2019, 'November', 21, '90.00', '83.10', '75.00', '79.15', '-4.15', '06:20 ↑ (110°)', '17:47 ↑ (250°)', '11:26:59', '−0:28'), (2019, 'November', 22, '91.00', '82.70', '75.00', '79.14', '-4.14', '06:20 ↑ (110°)', '17:47 ↑ (249°)', '11:26:31', '−0:28'), (2019, 'November', 23, '91.00', '84.00', '75.00', '79.13', '-4.13', '06:21 ↑ (111°)', '17:47 ↑ (249°)', '11:26:03', '−0:27'), (2019, 'November', 24, '91.00', '83.30', '73.00', '79.11', '-6.11', '06:21 ↑ (111°)', '17:47 ↑ (249°)', '11:25:37', '−0:26'), (2019, 'November', 25, '93.00', '84.70', '77.00', '79.10', '-2.10', '06:22 ↑ (111°)', '17:47 ↑ (249°)', '11:25:11', '−0:25'), (2019, 'November', 26, '91.00', '83.20', '73.00', '79.08', '-6.08', '06:22 ↑ (111°)', '17:47 ↑ (249°)', '11:24:45', '−0:25'), (2019, 'November', 27, '91.00', '85.80', '77.00', '79.08', '-2.08', '06:23 ↑ (111°)', '17:47 ↑ (248°)', '11:24:21', '−0:24'), (2019, 
'November', 28, '93.00', '86.30', '79.00', '79.08', '-0.08', '06:23 ↑ (112°)', '17:47 ↑ (248°)', '11:23:57', '−0:23'), (2019, 'November', 29, '91.00', '84.20', '77.00', '79.07', '-2.07', '06:24 ↑ (112°)', '17:47 ↑ (248°)', '11:23:34', '−0:22'), (2019, 'November', 30, '91.00', '85.30', '77.00', '79.06', '-2.06', '06:24 ↑ (112°)', '17:48 ↑ (248°)', '11:23:12', '−0:21'), (2019, 'December', 1, '91.00', '85.20', '79.00', '79.06', '-0.06', '06:25 ↑ (112°)', '17:48 ↑ (248°)', '11:22:51', '−0:21'), (2019, 'December', 2, '90.00', '82.90', '77.00', '79.06', '-2.06', '06:25 ↑ (112°)', '17:48 ↑ (248°)', '11:22:34', '−0:20'); COMMIT; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<filename>06. Databases/2018/2018.11.01/12.sql
/* 12. Find all employees with a salary of more than 50000 */
USE soft_uni;

-- "more than 50000" is a strict inequality, so use > rather than >=
-- (>= would wrongly include employees earning exactly 50000).
SELECT first_name, last_name, salary
FROM employees
WHERE salary > 50000
ORDER BY salary DESC;
<reponame>NathanHowell/sqlfluff<filename>test/fixtures/parser/mysql/handler_sqlstate.sql<gh_stars>1000+
-- Parser test fixture: a MySQL DECLARE ... HANDLER statement whose condition
-- is given as a SQLSTATE value and whose handler body is a BEGIN ... END
-- compound statement (exercises the dialect's handler grammar).
DECLARE exit handler for SQLSTATE '1' begin select 1; end;
-- metasfresh application-dictionary migration log (auto-generated by the
-- System Configurator). The recorded statements are replayed verbatim on
-- target databases, so the SQL below is intentionally kept byte-for-byte
-- as logged.

-- 2019-04-19T13:25:51.855
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- New AD_Ref_List entry 541953 ('Shipment Declaration', value 'SD').
INSERT INTO AD_Ref_List (AD_Client_ID,AD_Org_ID,AD_Ref_List_ID,AD_Reference_ID,Created,CreatedBy,EntityType,IsActive,Name,Updated,UpdatedBy,Value,ValueName) VALUES (0,0,541953,183,TO_TIMESTAMP('2019-04-19 13:25:51','YYYY-MM-DD HH24:MI:SS'),100,'D','Y','Abgabemeldung',TO_TIMESTAMP('2019-04-19 13:25:51','YYYY-MM-DD HH24:MI:SS'),100,'SD','Shipment Declaration')
;

-- 2019-04-19T13:25:51.855
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Seed untranslated AD_Ref_List_Trl rows for entry 541953 in every active,
-- non-base system language that does not already have one.
INSERT INTO AD_Ref_List_Trl (AD_Language,AD_Ref_List_ID, Description,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy)  SELECT l.AD_Language, t.AD_Ref_List_ID, t.Description,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Ref_List t WHERE l.IsActive='Y'AND (l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N') AND t.AD_Ref_List_ID=541953 AND NOT EXISTS (SELECT 1 FROM AD_Ref_List_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Ref_List_ID=t.AD_Ref_List_ID)
;

-- 2019-04-19T13:30:34.447
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- New AD_Ref_List entry 541954 ('Narcotic', value 'NAR') for the pharma module.
INSERT INTO AD_Ref_List (AD_Client_ID,AD_Org_ID,AD_Ref_List_ID,AD_Reference_ID,Created,CreatedBy,EntityType,IsActive,Name,Updated,UpdatedBy,Value,ValueName) VALUES (0,0,541954,148,TO_TIMESTAMP('2019-04-19 13:30:34','YYYY-MM-DD HH24:MI:SS'),100,'de.metas.vertical.pharma','Y','Narcotic',TO_TIMESTAMP('2019-04-19 13:30:34','YYYY-MM-DD HH24:MI:SS'),100,'NAR','NAR')
;

-- 2019-04-19T13:30:34.457
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Seed untranslated AD_Ref_List_Trl rows for entry 541954.
INSERT INTO AD_Ref_List_Trl (AD_Language,AD_Ref_List_ID, Description,Name, IsTranslated,AD_Client_ID,AD_Org_ID,Created,Createdby,Updated,UpdatedBy)  SELECT l.AD_Language, t.AD_Ref_List_ID, t.Description,t.Name, 'N',t.AD_Client_ID,t.AD_Org_ID,t.Created,t.Createdby,t.Updated,t.UpdatedBy FROM AD_Language l, AD_Ref_List t WHERE l.IsActive='Y'AND (l.IsSystemLanguage='Y' AND l.IsBaseLanguage='N') AND t.AD_Ref_List_ID=541954 AND NOT EXISTS (SELECT 1 FROM AD_Ref_List_Trl tt WHERE tt.AD_Language=l.AD_Language AND tt.AD_Ref_List_ID=t.AD_Ref_List_ID)
;

-- 2019-04-19T13:59:05.858
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Extend validation rule 540219 with a branch for DocBaseType 'SD'
-- (the whole rule expression is stored as a quoted SQL fragment).
UPDATE AD_Val_Rule SET Code=' (''@DocBaseType@''=''ARI'' AND AD_Ref_List.ValueName IN (''AQ'', ''AP'', ''Healthcare_CH-EA'', ''Healthcare_CH-GM'', ''Healthcare_CH-KV'', ''Healthcare_CH-KT'')) OR (''@DocBaseType@''=''ARC'' AND AD_Ref_List.Value IN (''CQ'', ''CR'',''CS'', ''RI'', ''RC'')) OR (''@DocBaseType@'' IN(''API'', ''MOP'') AND AD_Ref_List.Value IN (''QI'', ''VI'')) OR (''@DocBaseType@'' = ''MMI'' AND AD_Ref_List.Value = ''MD'') OR (''@DocBaseType@''=''SD'' AND AD_Ref_List.Value = ''NAR'') OR (''@DocBaseType@'' NOT IN (''API'', ''ARI'', ''ARC'', ''MOP'') AND AD_Ref_List.Value NOT IN (''AQ'', ''AP'', ''CQ'', ''CR'', ''QI'')) /* fallback for the rest of the entries */ ',Updated=TO_TIMESTAMP('2019-04-19 13:59:05','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Val_Rule_ID=540219
;

-- 2019-04-19T15:37:34.119
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Rename list-entry value 'SD' to 'SDD'.
UPDATE AD_Ref_List SET Value='SDD',Updated=TO_TIMESTAMP('2019-04-19 15:37:34','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Ref_List_ID=541953
;

-- 2019-04-19T15:37:49.073
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Point document type 540969 at the renamed base type 'SDD'.
UPDATE C_DocType SET DocBaseType='SDD',Updated=TO_TIMESTAMP('2019-04-19 15:37:49','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE C_DocType_ID=540969
;

-- 2019-04-19T15:48:44.495
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Re-issue validation rule 540219 with the 'SD' branch renamed to 'SDD'.
UPDATE AD_Val_Rule SET Code=' (''@DocBaseType@''=''ARI'' AND AD_Ref_List.ValueName IN (''AQ'', ''AP'', ''Healthcare_CH-EA'', ''Healthcare_CH-GM'', ''Healthcare_CH-KV'', ''Healthcare_CH-KT'')) OR (''@DocBaseType@''=''ARC'' AND AD_Ref_List.Value IN (''CQ'', ''CR'',''CS'', ''RI'', ''RC'')) OR (''@DocBaseType@'' IN(''API'', ''MOP'') AND AD_Ref_List.Value IN (''QI'', ''VI'')) OR (''@DocBaseType@'' = ''MMI'' AND AD_Ref_List.Value = ''MD'') OR (''@DocBaseType@''=''SDD'' AND AD_Ref_List.Value = ''NAR'') OR (''@DocBaseType@'' NOT IN (''API'', ''ARI'', ''ARC'', ''MOP'') AND AD_Ref_List.Value NOT IN (''AQ'', ''AP'', ''CQ'', ''CR'', ''QI'')) /* fallback for the rest of the entries */ ',Updated=TO_TIMESTAMP('2019-04-19 15:48:44','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Val_Rule_ID=540219
;

-- 2019-04-19T16:06:43.290
-- I forgot to set the DICTIONARY_ID_COMMENTS System Configurator
-- Add 'SDD' to the display logic of field 2581.
UPDATE AD_Field SET DisplayLogic='@DocBaseType@=''SOO'' | @DocBaseType@=''POO'' | @DocBaseType@=''ARI'' | @DocBaseType@=''ARC'' | @DocBaseType@=''MOP'' | @DocBaseType@=''MMR'' | @DocBaseType@=''MMS'' | @DocBaseType@=''API'' | @DocBaseType@=''MMI'' | @DocBaseType@ = ''APC'' | @DocBaseType@=''SDD''',Updated=TO_TIMESTAMP('2019-04-19 16:06:43','YYYY-MM-DD HH24:MI:SS'),UpdatedBy=100 WHERE AD_Field_ID=2581
;
-- Builds one voucher-label row per herbarium sheet lent out: for every
-- bot-garden loan with labelrequested = 'Yes', the cursor query assembles the
-- label fields (determination/family, collection info, garden location, loan
-- metadata), and each row is then emitted numbersheets (loansout numlent)
-- times so one physical label can be printed per sheet.
CREATE OR REPLACE FUNCTION public.findvoucherlabels()
 RETURNS SETOF voucherlabeltype
 LANGUAGE plpgsql
 -- STABLE, not IMMUTABLE: the function reads database tables, so its result
 -- can change between statements; declaring it IMMUTABLE would allow the
 -- planner to pre-evaluate/cache calls incorrectly. (STRICT was dropped:
 -- it is a no-op on a zero-argument function.)
 STABLE
AS $function$
DECLARE
    sheetcount integer;          -- number of label copies to emit for the current loan
    r voucherlabeltype%rowtype;  -- current label row
    n integer;                   -- copy counter
BEGIN
    FOR r IN
        -- The regexp_replace(... '^.*\)''(.*)''$' ...) idiom strips the
        -- refname wrapper and keeps only the quoted display name.
        -- 's.n.' (sine numero) and 's.d.' (sine die) are the botanical
        -- placeholders for a missing collection number / date.
        select co1.objectnumber,
            findhybridaffinhtml(tig.id) determinationformatted,
            case when (tn.family is not null and tn.family <> '')
                then regexp_replace(tn.family, '^.*\)''(.*)''$', '\1')
            end as family,
            case
                when fc.item is not null and co1.fieldcollectionnumber is not null and sdg.datedisplaydate is not null
                    then regexp_replace(fc.item, '^.*\)''(.*)''$', '\1')||' '||co1.fieldcollectionnumber||', '||sdg.datedisplaydate
                when fc.item is not null and co1.fieldcollectionnumber is not null and sdg.datedisplaydate is null
                    then regexp_replace(fc.item, '^.*\)''(.*)''$', '\1')||' '||co1.fieldcollectionnumber||', s.d.'
                when fc.item is not null and co1.fieldcollectionnumber is null and sdg.datedisplaydate is not null
                    then regexp_replace(fc.item, '^.*\)''(.*)''$', '\1')||' s.n., '||sdg.datedisplaydate
                when fc.item is not null and co1.fieldcollectionnumber is null and sdg.datedisplaydate is null
                    then regexp_replace(fc.item, '^.*\)''(.*)''$', '\1')||' s.n., s.d.'
                when fc.item is null and co1.fieldcollectionnumber is not null and sdg.datedisplaydate is not null
                    then co1.fieldcollectionnumber||', '||sdg.datedisplaydate
                when fc.item is null and co1.fieldcollectionnumber is not null and sdg.datedisplaydate is null
                    then co1.fieldcollectionnumber||', s.d.'
                when fc.item is null and co1.fieldcollectionnumber is null and sdg.datedisplaydate is not null
                    then 's.n., '||sdg.datedisplaydate
            end as collectioninfo,
            lc.loanoutnumber vouchernumber,
            lnh.numlent numbersheets,
            lb.labelrequested,
            case when (lb.gardenlocation is not null and lb.gardenlocation <> '')
                then 'Garden No. '||co1.objectnumber||', Bed '||regexp_replace(lb.gardenlocation, '^.*\)''(.*)''$', '\1')
                else 'Garden No. '||co1.objectnumber||', Bed unknown'
            end as gardeninfo,
            case when lb.hortwild='Horticultural' then 'Horticultural voucher:'
                 when lb.hortwild='Wild' then 'Wild voucher:'
            end as vouchertype,
            lb.fieldcollectionnote,
            lb.annotation,
            -- collector(s) + loan date; the "+ interval '8 hours'" shifts the
            -- stored timestamp before taking the date (presumably a UTC to
            -- local-time correction -- confirm against how loanoutdate is stored)
            case
                when (lbc.item is not null and lbc.item <> '' and lc.loanoutdate is not null)
                    then regexp_replace(lbc.item, '^.*\)''(.*)''$', '\1')||', '||to_char(date(lc.loanoutdate + interval '8 hours'), 'MM/dd/yyyy')
                when (lbc.item is not null and lbc.item <> '' and lc.loanoutdate is null)
                    then regexp_replace(lbc.item, '^.*\)''(.*)''$', '\1')
                when (lbc.item is null and lc.loanoutdate is not null)
                    then to_char(date(lc.loanoutdate + interval '8 hours'), 'MM/dd/yyyy')
            end as vouchercollectioninfo
        from loansout_common lc
        join loansout_naturalhistory lnh on (lc.id=lnh.id)
        join loansout_botgarden lb on (lc.id=lb.id)
        left outer join loansout_botgarden_collectorlist lbc on (lbc.id = lc.id and lbc.pos=0)
        join hierarchy h1 on lc.id=h1.id
        join relations_common r1 on (h1.name=r1.subjectcsid and objectdocumenttype='CollectionObject')
        join hierarchy h2 on (r1.objectcsid=h2.name)
        join collectionobjects_common co1 on (co1.id=h2.id)
        left outer join hierarchy htig on (co1.id = htig.parentid and htig.pos = 0 and htig.name = 'collectionobjects_naturalhistory:taxonomicIdentGroupList')
        left outer join taxonomicIdentGroup tig on (tig.id = htig.id)
        join collectionspace_core core on (core.id=co1.id)
        -- exclude soft-deleted loans and objects
        join misc misc2 on (misc2.id = co1.id and misc2.lifecyclestate <> 'deleted')
        join misc misc1 on (misc1.id = lc.id and misc1.lifecyclestate <> 'deleted')
        left outer join taxon_common tc on (tig.taxon=tc.refname)
        left outer join taxon_naturalhistory tn on (tc.id=tn.id)
        left outer join hierarchy htt on (tc.id=htt.parentid and htt.name='taxon_common:taxonTermGroupList' and htt.pos=0)
        left outer join taxontermgroup tt on (tt.id=htt.id)
        left outer join collectionobjects_common_fieldCollectors fc on (co1.id = fc.id and fc.pos = 0)
        left outer join hierarchy hfcdg on (co1.id = hfcdg.parentid and hfcdg.name='collectionobjects_common:fieldCollectionDateGroup')
        left outer join structureddategroup sdg on (sdg.id = hfcdg.id)
        where lb.labelrequested = 'Yes'
        order by objectnumber
    LOOP
        -- Emit one copy of the label per sheet lent. Default to a single
        -- label when numlent is not recorded: a NULL upper bound in the
        -- FOR range would otherwise raise an error and abort the whole set.
        sheetcount := coalesce(r.numbersheets, 1);
        for n in 1..sheetcount loop
            return next r;
        END LOOP;
    END LOOP;
    RETURN;
END;
$function$
/* utils.aggRepGroupField(coid varchar, tname varchar, cname varchar, sepval varchar)
 * -- aggregates values of a repeating group of fields
 * -- requires:
 * --   coid   = collectionobjects_common.id of the parent Collection Object
 * --   tname  = table name of the repeating group
 * --   cname  = column name of the repeating group field
 * --   sepval = separator/delimiter character
 * -- returns the group's values joined by sepval in h.pos order, with NULL
 * -- entries rendered as the sentinel '%NULLVALUE%' and embedded line breaks
 * -- collapsed; returns NULL when the object has no rows in the group.
 */
CREATE SCHEMA IF NOT EXISTS utils AUTHORIZATION nuxeo_pahma;

-- DROP FUNCTION utils.aggRepGroupField(coid varchar, tname varchar, cname varchar, sepval varchar);

CREATE OR REPLACE FUNCTION utils.aggRepGroupField(
    coid VARCHAR,
    tname VARCHAR,
    cname VARCHAR,
    sepval VARCHAR)
RETURNS VARCHAR AS
$$
DECLARE
    ctype VARCHAR;   -- declared type of the target column
    cstr VARCHAR;    -- column reference, cast to text when needed
    aggstr VARCHAR;  -- aggregated result
BEGIN
    -- Look up the column's type so non-text columns can be cast to text.
    -- NOTE(review): not filtered by table_schema; assumes tname is unique
    -- across visible schemas -- confirm.
    SELECT data_type INTO ctype
    FROM information_schema.columns
    WHERE table_name = tname AND column_name = cname;

    IF ctype IN ('text', 'character varying') THEN
        cstr := quote_ident(cname);
    ELSE
        cstr := quote_ident(cname) || '::text';
    END IF;

    -- Identifiers are escaped with quote_ident() and the separator with
    -- quote_literal() so the dynamically built statement cannot be broken
    -- (or injected into) by special characters in the arguments; the data
    -- values are still bound via USING.
    EXECUTE 'SELECT STRING_AGG(CASE WHEN n.' || quote_ident(cname) || ' IS NULL THEN ''%NULLVALUE%''' ||
            ' ELSE regexp_replace(n.' || cstr || ', E''[\\n\\r]+'', ''\n'', ''g'') END, ' ||
            quote_literal(sepval) || ' ORDER BY h.pos)' ||
            ' FROM hierarchy h' ||
            ' JOIN ' || quote_ident(tname) || ' n ON (h.id = n.id AND lower(h.primarytype) = $1)' ||
            ' WHERE h.parentid = $2' ||
            ' GROUP BY h.parentid'
        INTO aggstr
        USING tname, coid;

    RETURN aggstr;
END;
$$ LANGUAGE plpgsql;

GRANT EXECUTE ON FUNCTION utils.aggRepGroupField(coid varchar, tname varchar, cname varchar, sepval varchar) to reporters_pahma;

-- SELECT proacl FROM pg_proc WHERE proname = 'aggrepgroupfield';
-- SELECT utils.aggRepGroupField('2dbcbf18-4966-4178-9880-e81258568921', 'referencegroup', 'reference', '|');
-- SELECT utils.aggRepGroupField('2dbcbf18-4966-4178-9880-e81258568921', 'nagprareportfiledgroup', 'nagprareportfiled', '|');
<reponame>Ambal/mangos
-- Advance the DB revision marker: this update supersedes revision
-- 8931_01 (spell_bonus_data) with 8932_01 (spell_chain).
ALTER TABLE db_version CHANGE COLUMN required_8931_01_mangos_spell_bonus_data required_8932_01_mangos_spell_chain bit;

-- Rebuild the rank chain for spell 50518 and its five higher-rank spells.
DELETE FROM spell_chain WHERE spell_id IN (50518,53558,53559,53560,53561,53562);
-- Each tuple chains a rank to its predecessor with 50518 as the first spell
-- of the chain; the columns appear to be
-- (spell_id, prev_spell, first_spell, rank, req_spell) -- confirm against
-- the spell_chain table definition before relying on this.
INSERT INTO spell_chain VALUES
(50518,0,50518,1,0),
(53558,50518,50518,2,0),
(53559,53558,50518,3,0),
(53560,53559,50518,4,0),
(53561,53560,50518,5,0),
(53562,53561,50518,6,0);
<gh_stars>0
-- DRB: Deleting notification requests failed if any notification were pending
-- Oracle PL/SQL package body for notification requests: creation of a request
-- object, deletion of a single request (together with its pending
-- notifications), and deletion of all requests attached to an object.
create or replace package body notification_request
as

    -- Creates the ACS object for the request and inserts the matching
    -- notification_requests row; returns the new request_id.
    function new (
        request_id in notification_requests.request_id%TYPE default null,
        object_type in acs_objects.object_type%TYPE default 'notification_request',
        type_id in notification_requests.type_id%TYPE,
        user_id in notification_requests.user_id%TYPE,
        object_id in notification_requests.object_id%TYPE,
        interval_id in notification_requests.interval_id%TYPE,
        delivery_method_id in notification_requests.delivery_method_id%TYPE,
        format in notification_requests.format%TYPE,
        creation_date in acs_objects.creation_date%TYPE default sysdate,
        creation_user in acs_objects.creation_user%TYPE,
        creation_ip in acs_objects.creation_ip%TYPE,
        context_id in acs_objects.context_id%TYPE default null
    ) return notification_requests.request_id%TYPE
    is
        v_request_id acs_objects.object_id%TYPE;
    begin
        v_request_id := acs_object.new (
            object_id => request_id,
            object_type => object_type,
            creation_date => creation_date,
            creation_user => creation_user,
            creation_ip => creation_ip,
            context_id => context_id
        );

        insert into notification_requests
        (request_id, type_id, user_id, object_id, interval_id, delivery_method_id, format)
        values
        (v_request_id, type_id, user_id, object_id, interval_id, delivery_method_id, format);

        return v_request_id;
    end new;

    -- Deletes pending notifications tied to the request's object, then the
    -- request object itself (fixes the failure described in the header).
    procedure delete (
        request_id in notification_requests.request_id%TYPE default null
    )
    is
    begin
        -- NOTE(review): inside a SQL statement, Oracle resolves unqualified
        -- names to columns before PL/SQL parameters, so the right-hand
        -- "request_id" below almost certainly binds to the nr.request_id
        -- column, making the predicate trivially true and selecting
        -- notifications for ALL requests. Compare delete_all below, which
        -- qualifies its parameter as delete_all.object_id. This likely needs
        -- an equivalent qualification -- confirm and fix.
        for v_notifications in (select notification_id
                                from notifications n, notification_requests nr
                                where n.response_id = nr.object_id
                                and nr.request_id = request_id)
        loop
            acs_object.delete(v_notifications.notification_id);
        end loop;
        acs_object.delete(request_id);
    end delete;

    -- Deletes every notification request attached to the given object.
    procedure delete_all (
        object_id in notification_requests.object_id%TYPE default null
    )
    is
        -- NOTE(review): the cursor FOR loop below implicitly declares its own
        -- loop record, so this v_request declaration is shadowed and unused.
        v_request notification_requests%ROWTYPE;
    begin
        for v_request in (select request_id
                          from notification_requests
                          where object_id= delete_all.object_id)
        LOOP
            notification_request.delete(v_request.request_id);
        END LOOP;
    end delete_all;

end notification_request;
/
show errors
--############################################################################## --############################################################################## --### 4CE Phase 1.2 and 2.2 --### Date: April 21, 2021 translated May 19, 2021 --### Database: Oracle --### Data Model: i2b2 --### Created By: <NAME> (<EMAIL>) --### Translated to Oracle: <NAME> ( <EMAIL>) --############################################################################## --############################################################################## /* INTRODUCTION: This script contains code to generate both 4CE Phase 1.2 and Phase 2.2 files. By default, it will only generate Phase 1.2 files, which contain obfuscated aggregate counts and statistics. You need to change the settings in the fource_config table so that it generates the Phase 2.2 files. Phase 2.2 files include non-obfuscated versions of all the Phase 1.1 files, as well as files containing patient-level data. PHASE 1.2 FILES: This script creates the following Phase 1.2 files with obfuscated counts. These files are uploaded to 4CE. 1) DailyCounts - Patient counts by calendar date 2) ClinicalCourse - Counts by number of days since hospital admission 3) AgeSex - Age and sex breakdowns 4) Labs - Lab values per day since admission 5) DiagProcMed - Diagnoses, procedures, and meds before and after admission 6) RaceByLocalCode - Breakdowns based on the codes used within your hospital 7) RaceBy4CECode - Breackdowns based on 4CE race categories 8) LabCodes - The laboratory code and unit mappings used (no patient data) PHASE 2.2 FILES: For Phase 2.2, the script creates a copy of Phase 1.2 tables 1-7 with "Local" added to the beginning of the file name (e.g., LocalDailyCounts, LocalLabs, etc.). These tables contain counts that are not obfuscated. They should be stored locally and not shared with 4CE. Phase 2.2 creates 5 additional files that contain patient-level data. These should also be stored locally and not shared with 4CE. 
1) LocalPatientSummary - One row per patient in each cohort 2) LocalPatientClinicalCourse - Daily summary of patient hospitalizations 3) LocalPatientObservations - Each diagnosis, lab test, etc. per day 4) LocalPatientRace - Each patient's race code(s) 5) LocalPatientMapping - Map from i2b2 patient_num to ID used in the files CONFIGURATION AND MAPPINGS: You will need to modify the configuration and mapping tables at the beginning of this script. Read the instructions carefully. You might also have to edit the logic used in the queries to identify admissions, ICU visits, and deaths, which are placed right after the code mappings. OUTPUT OPTIONS: For each Phase, the script provides three output options. The first option, "output to columns" returns the data as tables to your query tool (e.g., SSMS). The data are not saved to the database. This is useful if you want to copy the data into a program like Excel. The second option, "output to CSV", returns the data as tables with a single column that contains a CSV-style string. You can copy and paste this into a text file and save it with a .csv extension. It will then be in the correct format to upload to 4CE. The third option saves the data as tables in your database. You need to specify a prefix, like "dbo_FourCE_" that will be added to the begining of each table name. COHORTS: By default, this script only selects patients who had a positive COVID test and were admitted. It partitions these patients into cohorts based on the quarter (2020 Q1, 2020 Q2, etc.) they were admitted. The configuration options include_extra_cohorts_phase1 and include_extra_cohorts_phase2 will add extra cohorts to the Phase 1.2 and/or Phase 2.2 files. These extra cohorts include (1) patients who were admitted with a negative COVID test, (2) COVID positive patients who were not admitted, (3) COVID negative patients who were not admitted, and (4) patients with a U07.1 diagnoses (confirmed COVID), but no recorded positive COVID test. 
In other words, they capture every patient who had a COVID test. These extra cohorts are also partitioned by quarter. Note that for patients who were not admitted, their first COVID test (or U07.1 diagnosis) date is used as the "admission" date; and, "days_since_admission" is really days since the COVID test. Note that including these extra cohorts greatly increases the sizes of the patient level Phase 2.2 files and makes the script take much longer to run. (You can optionally define additional cohorts, based on custom inclusion or exclusion criteria, matching algorithms, or date range partitions.) SOURCE DATA UPDATED DATE: Use the configuration setting source_data_updated_date to indicate when the data that this script is run on was last updated. For example, if you are running this script on May 1, 2021, but the data has not been updated since April 15, 2021, then set the source_data_updated_date to April 15. This is needed to determine the date beyond which data are censored. For example, if a patient is still in the hospital on April 15, then the discharge date is unknown. (You can optionally assign a different source_data_updated_date to each cohort. This would be needed if, for example, the data on COVID positive patients are updated more frequently than COVID negative patients.) ALTERNATIVE SCHEMAS AND MULTIPLE FACT TABLES: The code assumes your fact and dimension tables are in the DBO schema. If you use a different schema, then do a search-and-replace to change "dbo_" to your schema. The code also assumes you have a single fact table called "dbo_observation_fact". If you use multiple fact tables, then search for "observation_fact" and change it as needed.
*/

--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--###
--### Configuration and code mappings (modify for your institution)
--###
--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--### Set output csv file path
--### Global replace @exportFilePath with the path where you want your output
--### files to land
--### Example replace @exportFilePath with C:\User\My4ceDir
--##############################################################################

--------------------------------------------------------------------------------
-- General settings
-- One-row configuration table that drives every later step of the script:
-- site identity, code prefixes, obfuscation rules, and output options.
--------------------------------------------------------------------------------
--drop table fource_config; -- make sure everything is clean
create table fource_config (
    siteid varchar(20),                  -- Up to 20 letters or numbers, must start with letter, no spaces or special characters.
    race_data_available int,             -- 1 if your site collects race/ethnicity data; 0 if your site does not collect this.
    icu_data_available int,              -- 1 if you have data on whether patients were in the ICU
    death_data_available int,            -- 1 if you have data on whether patients have died
    code_prefix_icd9cm varchar(50),      -- prefix (scheme) used in front of a ICD9CM diagnosis code; set to '' if not collected or no prefix used
    code_prefix_icd10cm varchar(50),     -- prefix (scheme) used in front of a ICD10CM diagnosis code; set to '' if not collected or no prefix used
    source_data_updated_date date,       -- the date your source data were last updated (e.g., '3/25/2021'); set to NULL if data go to the day you run this script
    -- Phase 1.2 (obfuscated aggregate data) options
    include_extra_cohorts_phase1 int,    -- 0 by default, 1 to include COVID negative, U07.1, and non-admitted cohorts to Phase 1 files
    obfuscation_blur int,                -- Add random number +/- blur to each count (0 = no blur)
    obfuscation_small_count_mask int,    -- Replace counts less than mask with -99 (0 = no small count masking)
    obfuscation_small_count_delete int,  -- Delete rows where all values are small counts (0 = no, 1 = yes)
    obfuscation_agesex int,              -- Replace combination of age-sex and total counts with -999 (0 = no, 1 = yes)
    output_phase1_as_columns int,        -- Return the data in tables with separate columns per field
    output_phase1_as_csv int,            -- Return the data in tables with a single column containing comma separated values
    save_phase1_as_columns int,          -- Save the data as tables with separate columns per field
    save_phase1_as_prefix varchar(50),   -- Table name prefix when saving the data as tables
    -- Phase 2.2 (non-obfuscated aggregate and patient level data) options
    include_extra_cohorts_phase2 int,    -- 0 by default, 1 to include COVID negative, U07.1, and non-admitted cohorts to Phase 2 files
    replace_patient_num int,             -- Replace the patient_num with a unique random number
    output_phase2_as_columns int,        -- Return the data in tables with separate columns per field
    output_phase2_as_csv int,            -- Return the data in tables with a single column containing comma separated values
    save_phase2_as_columns int,          -- Save the data as tables with separate columns per field
    save_phase2_as_prefix varchar(50),   -- Table name prefix when saving the data as tables
    eval_start_date date                 -- use this so that dates can be changed consistently throughout the script
    --blackout_days_before -7 blackout_days_before 14 add these later
);
commit;

--truncate table fource_config;
insert into fource_config
    select
        'UPitt',           -- siteid
        1,                 -- race_data_available
        1,                 -- icu_data_available
        1,                 -- death_data_available
        'ICD9CM:',         -- code_prefix_icd9cm
        'ICD10CM:',        -- code_prefix_icd10cm
        NULL,              -- source_data_updated_date
        -- Phase 1
        0,                 -- include_extra_cohorts_phase1 (please set to 1 if allowed by your IRB and institution)
        0,                 -- obfuscation_blur
        10,                -- obfuscation_small_count_mask
        1,                 -- obfuscation_small_count_delete
        0,                 -- obfuscation_agesex
        0,                 -- output_phase1_as_columns
        1,                 -- output_phase1_as_csv
        0,                 -- save_phase1_as_columns
        'dbo_FourCE_',     -- save_phase1_as_prefix (don't use "4CE" since it starts with a number)
        -- Phase 2
        0,                 -- include_extra_cohorts_phase2 (please set to 1 if allowed by your IRB and institution)
        1,                 -- replace_patient_num
        0,                 -- output_phase2_as_columns
        0,                 -- output_phase2_as_csv
        0,                 -- save_phase2_as_columns
        'dbo_FourCE_',     -- save_phase2_as_prefix (don't use "4CE" since it starts with a number)
        -- eval_start_date: use an explicit format mask so the value does not
        -- depend on the session's NLS_DATE_FORMAT setting.
        to_date('01-JAN-2019', 'DD-MON-YYYY')
    from dual;
commit;

-- ! If your diagnosis codes do not start with a prefix (e.g., "ICD:"),
-- ! then you will need to customize queries that use the observation_fact table
-- ! so that only diagnoses are selected.

--------------------------------------------------------------------------------
-- Code mappings (excluding labs and meds)
-- * Don't change the "code" value.
-- * Modify the "local_code" to match your database.
-- * Repeat a code multiple times if you have more than one local code.
-- * Comment out rows that are not applicable to your database.
--------------------------------------------------------------------------------
-- fource_code_map translates 4CE-standard codes (left column) to the local
-- codes used in this site's i2b2 instance (right column). Later queries join
-- against this table to recognize inpatient stays, ICU stays, sex, race, and
-- COVID test results.
--------------------------------------------------------------------------------
--drop table fource_code_map;
create table fource_code_map (
	code varchar(50) not null,       -- 4CE-standard code (do not change these values)
	local_code varchar(50) not null  -- local code as it appears in your i2b2 data
);
alter table fource_code_map add primary key (code, local_code);

-- Inpatient visit codes
-- * The SQL supports using either visit_dimension table or the observation_fact table.
-- * Change the code as needed. Comment out the versions that you do not use.
-- * You can replace this code with external mapping tables of location codes for example a list of hospital units
-- The "where 1=0" seed row contributes nothing; it only anchors the union list.
insert into fource_code_map
	select '', '' from dual where 1=0
	-- Inpatient visits (from the visit_dimension.inout_cd field)
	union all select 'inpatient_inout_cd', 'I' from dual
	union all select 'inpatient_inout_cd', 'IN' from dual
	-- Inpatient visits (from the visit_dimension.location_cd field)
	-- copy the line below for each code or location that represents an inpatient
	union all select 'inpatient_location_cd', 'Inpatient' from dual
	-- ICU visits (from the observation_fact.concept_cd field)
	-- copy the line below for each code or location that represents an inpatient
	union all select 'inpatient_concept_cd', 'UMLS:C1547137' from dual; -- from ACT ontology
commit;

-- ICU visit codes (optional)
-- * The SQL supports using either visit_dimension table or the observation_fact table.
-- * Change the code as needed. Comment out the versions that you do not use.
--truncate table fource_code_map;
insert into fource_code_map
	select '', '' from dual where 1=0
	-- ICU visits (from the visit_dimension.inout_cd field)
	union all select 'icu_inout_cd', 'ICU' from dual
	-- ICU visits (from the visit_dimension.location_cd field)
	union all select 'icu_location_cd', 'ICU' from dual
	-- ICU visits (from the observation_fact.concept_cd field)
	union all select 'icu_concept_cd', 'UMLS:C1547136' from dual -- from ACT ontology
	union all select 'icu_concept_cd', 'CPT4:99291' from dual -- from ACT ontology
	union all select 'icu_concept_cd', 'CPT4:99292' from dual -- from ACT ontology
	-- ICU visits (from the observation_fact.location_cd field)
	union all select 'icu_fact_location_cd', 'ICU' from dual
	-- ICU location_cd in observation_fact selected from external icu map
	-- NOTE(review): external_icu_map appears to be a site-specific table --
	-- comment this branch out if it does not exist at your site (TODO confirm)
	union all select 'icu_fact_location_cd', icu_unit_code from external_icu_map;
--select * from fource_code_map;
commit;

-- If you use location_cd to map ICU locations you can create a list here or load from an external mapping table
-- NOTE(review): "icus" is a site-specific source table -- verify it exists before running
drop table fource_icu_location;
create table fource_icu_location as
	select cast(department_id as varchar2(50)) location_cd from icus;
--create table fource_icu_location as
--select cast('icu1' as varchar2(50)) as location_cd from dual
--union
--select cast('icu1' as varchar2(50)) as location_cd from dual;
commit;

-- Sex codes
insert into fource_code_map
	select '', '' from dual where 1=0
	-- Sex (from the patient_dimension.sex_cd field)
	union all select 'sex_patient:male', 'M' from dual
	union all select 'sex_patient:male', 'Male' from dual
	union all select 'sex_patient:female', 'F' from dual
	union all select 'sex_patient:female', 'Female' from dual
	-- Sex (from the observation_fact.concept_cd field)
	union all select 'sex_fact:male', 'DEM|SEX:M' from dual
	union all select 'sex_fact:male', 'DEM|SEX:Male' from dual
	union all select 'sex_fact:female', 'DEM|SEX:F' from dual
	union all select 'sex_fact:female', 'DEM|SEX:Female' from dual;

-- Race codes (use the code
-- set for your country, comment out other countries)
insert into fource_code_map
	select '', '' from dual where 1=0
	-------------------------------------------------------------------
	-- Race: United States
	-------------------------------------------------------------------
	-- Race (from the patient_dimension.race_cd field)
	union all select 'race_patient:american_indian', 'NA' from dual
	union all select 'race_patient:asian', 'A' from dual
	union all select 'race_patient:asian', 'AS' from dual
	union all select 'race_patient:black', 'B' from dual
	union all select 'race_patient:hawaiian_pacific_islander', 'H' from dual
	union all select 'race_patient:hawaiian_pacific_islander', 'P' from dual
	union all select 'race_patient:white', 'W' from dual
	union all select 'race_patient:hispanic_latino', 'HL' from dual
	union all select 'race_patient:other', 'O' from dual -- include multiple if no additional information is known
	union all select 'race_patient:no_information', 'NI' from dual -- unknown, not available, missing, refused to answer, not recorded, etc.
	-- Race (from the observation_fact.concept_cd field)
	union all select 'race_fact:american_indian', 'DEM|race:NA' from dual
	union all select 'race_fact:asian', 'DEM|race:A' from dual
	union all select 'race_fact:asian', 'DEM|race:AS' from dual
	union all select 'race_fact:black', 'DEM|race:B' from dual
	union all select 'race_fact:hawaiian_pacific_islander', 'DEM|race:H' from dual
	union all select 'race_fact:hawaiian_pacific_islander', 'DEM|race:P' from dual
	union all select 'race_fact:white', 'DEM|race:W' from dual
	union all select 'race_fact:hispanic_latino', 'DEM|HISP:Y' from dual
	union all select 'race_fact:hispanic_latino', 'DEM|HISPANIC:Y' from dual
	union all select 'race_fact:other', 'DEM|race:O' from dual -- include multiple if no additional information is known
	union all select 'race_fact:no_information', 'DEM|race:NI' from dual; -- unknown, not available, missing, refused to answer, not recorded, etc.

	-------------------------------------------------------------------
	-- Race: United Kingdom (Ethnicity)
	-------------------------------------------------------------------
	-- Ethnicity (from the patient_dimension.race_cd field)
	-- union all select 'race_patient:uk_asian', 'Asian' from dual -- Asian or Asian British (Indian, Pakistani, Bangladeshi, Chinese, other Asian background)
	-- union all select 'race_patient:uk_black', 'Black' from dual -- Black, African, Carribean, or Black British (African/ Caribbean/ any other Black, African or Caribbean background)
	-- union all select 'race_patient:uk_white', 'White' from dual -- White (English/ Welsh/ Scottish/Northern Irish/ British, Irish, Gypsy or Irish Traveller, other White background)
	-- union all select 'race_patient:uk_multiple', 'Multiple' from dual -- Mixed or Multiple ethnic groups (White and Black Caribbean, White and Black African, White and Asian, Any other Mixed or Multiple ethnic background)
	-- union all select 'race_patient:uk_other', 'Other' from dual -- Other ethnic group (Arab, other ethnic group)
	-- union all select 'race_patient:uk_no_information', 'NI' from dual -- unknown, not available, missing, refused to answer, not recorded, etc.
	-------------------------------------------------------------------
	-- Race: Singapore
	-------------------------------------------------------------------
	-- Race (from the patient_dimension.race_cd field)
	-- union all select 'race_patient:singapore_chinese', 'Chinese' from dual
	-- union all select 'race_patient:singapore_malay', 'Malay' from dual
	-- union all select 'race_patient:singapore_indian', 'Indian' from dual
	-- union all select 'race_patient:singapore_other', 'Other' from dual
	-- union all select 'race_patient:singapore_no_information', 'NI' from dual -- unknown, not available, missing, refused to answer, not recorded, etc.
	-------------------------------------------------------------------
	-- Race: Brazil
	-------------------------------------------------------------------
	-- Race (from the patient_dimension.race_cd field)
	-- union all select 'race_patient:brazil_branco', 'Branco' from dual
	-- union all select 'race_patient:brazil_pardo', 'Pardo' from dual
	-- union all select 'race_patient:brazil_preto', 'Preto' from dual
	-- union all select 'race_patient:brazil_indigena', 'Indigena' from dual
	-- union all select 'race_patient:brazil_amarelo', 'Amarelo' from dual
	-- union all select 'race_patient:brazil_no_information', 'NI' from dual -- unknown, not available, missing, refused to answer, not recorded, etc.

-- Codes that indicate a COVID-19 nucleic acid test result (use option #1 and/or option #2)
-- COVID-19 Test Option #1: individual concept_cd values
insert into fource_code_map
	select 'covidpos', 'LAB|LOINC:COVID19POS' from dual
	union all
	select 'covidneg', 'LAB|LOINC:COVID19NEG' from dual;

-- COVID-19 Test Option #2: an ontology path (e.g., COVID ACT "Any Positive Test" path)
-- NOTE: @crcSchema is a placeholder; replace it with your CRC schema name.
insert into fource_code_map
	select distinct 'covidpos', concept_cd
	from @crcSchema.concept_dimension c
	where concept_path like '\ACT\UMLS_C0031437\SNOMED_3947185011\UMLS_C0022885\UMLS_C1335447\%'
		and concept_cd is not null
		-- skip codes already mapped by option #1
		and not exists (select * from fource_code_map m where m.code='covidpos' and m.local_code=c.concept_cd)
	union all
	select distinct 'covidneg', concept_cd
	from @crcSchema.concept_dimension c
	where concept_path like '\ACT\UMLS_C0031437\SNOMED_3947185011\UMLS_C0022885\UMLS_C1334932\%'
		and concept_cd is not null
		and not exists (select * from fource_code_map m where m.code='covidneg' and m.local_code=c.concept_cd);

-- Other codes that indicate confirmed COVID-19 (e.g., ICD-10 code U07.1, but not U07.2 or U07.3)
insert into fource_code_map
	select 'covidU071', code_prefix_icd10cm || 'U07.1' from fource_config
	union all
	select 'covidU071', code_prefix_icd10cm || 'U071' -- place holder (undotted form)
	-- the "from" keyword must stay on its own line below: in the original it sat
	-- inside the "--place holder" line comment, truncating this statement
	from
fource_config;
commit;

--------------------------------------------------------------------------------
-- Lab mappings
-- Maps 4CE-standard LOINC codes/units to this site's local lab codes/units.
-- * Do not change the fource_* columns.
-- * Modify the local_* columns to match how your lab data are represented.
-- * Add another row for a lab if you use multiple codes (e.g., see PaCO2).
-- * Delete a row if you don't have that lab.
-- * Change the scale_factor if you use different units.
--   * The lab value will be multiplied by the scale_factor
--   * to convert from your units to the 4CE units.
-- * Add another row if the same code can have multiple units (e.g., see PaO2).
-- * Set local_lab_units='DEFAULT' to match labs with '' or NULL units
--   * (e.g., see PaO2). Only use this if you are sure what the units are.
--   * Add what you think the true units are to the end of the local_lab_name.
--------------------------------------------------------------------------------
--DROP TABLE fource_lab_map;
create table fource_lab_map (
	fource_loinc varchar(20) not null,    -- 4CE-standard LOINC code
	fource_lab_units varchar(20) not null, -- 4CE-standard units
	fource_lab_name varchar(100) not null, -- 4CE-standard lab name
	scale_factor float not null,           -- multiplier converting local units to 4CE units
	local_lab_code varchar(50) not null,   -- local concept_cd (with prefix)
	local_lab_units varchar(20) not null,  -- local units ('DEFAULT' matches NULL/empty)
	local_lab_name varchar(500) not null   -- local display name (informational only)
);
alter table fource_lab_map add primary key (fource_loinc, local_lab_code, local_lab_units);

insert into fource_lab_map
	select fource_loinc, fource_lab_units, fource_lab_name, scale_factor,
		'LOINC:' || local_lab_code, -- Change "LOINC:" to your local LOINC code prefix (scheme)
		local_lab_units, local_lab_name
	from (
		-- seed row: contributes nothing, anchors the union list
		select null fource_loinc, null fource_lab_units, null fource_lab_name, null scale_factor,
			null local_lab_code, null local_lab_units, null local_lab_name
		from dual where 1=0
		union select '1742-6', 'U/L', 'alanine aminotransferase (ALT)', 1, '1742-6', 'U/L', 'YourLocalLabName' from dual
		union select '1751-7', 'g/dL', 'albumin', 1, '1751-7', 'g/dL', 'YourLocalLabName' from dual
		union select '1920-8', 'U/L', 'aspartate aminotransferase (AST)', 1, '1920-8', 'U/L', 'YourLocalLabName' from dual
		union select '1975-2', 'mg/dL', 'total bilirubin', 1, '1975-2', 'mg/dL', 'YourLocalLabName' from dual
		union select '1988-5', 'mg/L', 'C-reactive protein (CRP) (Normal Sensitivity)', 1, '1988-5', 'mg/L', 'YourLocalLabName' from dual
		union select '2019-8', 'mmHg', 'PaCO2', 1, '2019-8', 'mmHg', 'YourLocalLabName' from dual
		union select '2160-0', 'mg/dL', 'creatinine', 1, '2160-0', 'mg/dL', 'YourLocalLabName' from dual
		union select '2276-4', 'ng/mL', 'Ferritin', 1, '2276-4', 'ng/mL', 'YourLocalLabName' from dual
		union select '2532-0', 'U/L', 'lactate dehydrogenase (LDH)', 1, '2532-0', 'U/L', 'YourLocalLabName' from dual
		union select '2703-7', 'mmHg', 'PaO2', 1, '2703-7', 'mmHg', 'YourLocalLabName' from dual
		union select '3255-7', 'mg/dL', 'Fibrinogen', 1, '3255-7', 'mg/dL', 'YourLocalLabName' from dual
		union select '33959-8', 'ng/mL', 'procalcitonin', 1, '33959-8', 'ng/mL', 'YourLocalLabName' from dual
		union select '48065-7', 'ng/mL{FEU}', 'D-dimer (FEU)', 1, '48065-7', 'ng/mL{FEU}', 'YourLocalLabName' from dual
		union select '48066-5', 'ng/mL{DDU}', 'D-dimer (DDU)', 1, '48066-5', 'ng/mL{DDU}', 'YourLocalLabName' from dual
		union select '49563-0', 'ng/mL', 'cardiac troponin (High Sensitivity)', 1, '49563-0', 'ng/mL', 'YourLocalLabName' from dual
		union select '6598-7', 'ug/L', 'cardiac troponin (Normal Sensitivity)', 1, '6598-7', 'ug/L', 'YourLocalLabName' from dual
		union select '5902-2', 's', 'prothrombin time (PT)', 1, '5902-2', 's', 'YourLocalLabName' from dual
		union select '6690-2', '10*3/uL', 'white blood cell count (Leukocytes)', 1, '6690-2', '10*3/uL', 'YourLocalLabName' from dual
		union select '731-0', '10*3/uL', 'lymphocyte count', 1, '731-0', '10*3/uL', 'YourLocalLabName' from dual
		union select '751-8', '10*3/uL', 'neutrophil count', 1, '751-8', '10*3/uL', 'YourLocalLabName' from dual
		union select '777-3', '10*3/uL', 'platelet count', 1, '777-3', '10*3/uL', 'YourLocalLabName' from dual
		union select '34714-6', 'DEFAULT', 'INR', 1, '34714-6', 'DEFAULT', 'YourLocalLabName' from dual
		--Example of listing an additional code for the same lab
		--from dual union select '2019-8', 'mmHg', 'PaCO2', 1, 'LAB:PaCO2', 'mmHg', 'Carbon dioxide partial pressure in arterial blood'
		--Examples of listing different units for the same lab
		--from dual union select '2703-7', 'mmHg', 'PaO2', 10, '2703-7', 'cmHg', 'PaO2'
		--from dual union select '2703-7', 'mmHg', 'PaO2', 25.4, '2703-7', 'inHg', 'PaO2'
		--This will use the given scale factor (in this case 1) for any lab with NULL or empty string units
		--from dual union select '2703-7', 'mmHg', 'PaO2', 1, '2703-7', 'DEFAULT', 'PaO2 [mmHg]'
	) t;
commit;

-- Use the concept_dimension table to get an expanded list of local lab codes (optional).
-- This will find paths corresponding to concepts already in the fource_lab_map table,
-- and then find all the concepts corresponding to child paths. Make sure you update the
-- scale_factor, local_lab_units, and local_lab_name as needed.
-- WARNING: This query might take several minutes to run.
-- ****THIS IS UNTESTED*****
/*
insert into fource_lab_map
	select distinct l.fource_loinc, l.fource_lab_units, l.fource_lab_name, l.scale_factor,
		d.concept_cd, l.local_lab_units, l.local_lab_name
	from fource_lab_map l
		inner join @crcSchema.concept_dimension c
			on l.local_lab_code = c.concept_cd
		inner join @crcSchema.concept_dimension d
			on d.concept_path like c.concept_path || '%'
	where not exists (
		select *
		from fource_lab_map t
		where t.fource_loinc = l.fource_loinc and t.local_lab_code = d.concept_cd
	)
*/

-- Use the concept_dimension table to get the local names for labs (optional).
/*
update l
	set l.local_lab_name = c.name_char
	from fource_lab_map l
	inner join @crcSchema.concept_dimension c on l.local_lab_code = c.concept_cd
*/
-- NOTE(review): the commented UPDATE ... FROM above is SQL Server syntax and will
-- not run on Oracle; rewrite as a correlated UPDATE or MERGE if you enable it.

--------------------------------------------------------------------------------
-- Lab mappings report (for debugging lab mappings)
--------------------------------------------------------------------------------
-- Get a list of all the codes and units in the data for 4CE labs since 1/1/2019
create table fource_lab_units_facts (
	fact_code varchar(50) not null,  -- local lab concept_cd
	fact_units varchar(50),          -- units_cd as recorded in observation_fact (may be null)
	num_facts int,                   -- number of facts for this (code, units) pair
	mean_value numeric(18,5),
	stdev_value numeric(18,5)
);
--188s
-- NOTE(review): index name has a typo ("lap" vs "lab") but is kept unchanged in
-- case other scripts reference it by name.
create index fource_lap_map_ndx on fource_lab_map(local_lab_code);

-- Summarize each (code, units) pair observed on or after the configured eval_start_date.
insert into fource_lab_units_facts
	select * from (
		with labs_in_period as (
			select f.concept_cd, f.units_cd, f.nval_num
			from @crcSchema.observation_fact f
				join fource_lab_map m on m.local_lab_code = f.concept_cd
			-- BUG FIX: the original subquery read "select trunc(start_date) from fource_config",
			-- but fource_config has no start_date column, so Oracle resolved start_date to the
			-- outer observation_fact.start_date and the filter was always true. The intended
			-- column is eval_start_date.
			where trunc(f.start_date) >= (select trunc(eval_start_date) from fource_config where rownum = 1)
		)
		select concept_cd, units_cd, count(*) num_facts, avg(nval_num) avg_val, stddev(nval_num) stdev_val
		from labs_in_period
		group by concept_cd, units_cd
	);
commit;
--select * from fource_lab_units_facts;
/*
insert into fource_lab_units_facts
	select concept_cd, units_cd, count(*), avg(nval_num), stddev(nval_num)
	from @crcSchema.observation_fact f
		join fource_lab_map m on m.local_lab_code = f.concept_cd
	where trunc(f.start_date) >= (select trunc(eval_start_date) from fource_config where rownum = 1)
	group by concept_cd, units_cd;
*/

-- Create a table that stores a report about local lab units
--drop table fource_lab_map_report;
create table fource_lab_map_report (
	fource_loinc varchar(20) not null,
	fource_lab_units varchar(20),
	fource_lab_name varchar(100),
	scale_factor float,
	local_lab_code varchar(50) not null,
	local_lab_units varchar(20) not null,
	local_lab_name varchar(500),
	num_facts int,
	mean_value numeric(18,5),
	stdev_value numeric(18,5),
	notes varchar(1000)
);
alter table fource_lab_map_report add primary key (fource_loinc, local_lab_code, local_lab_units);

-- Compare the fource_lab_map table to the codes and units in the data
insert into fource_lab_map_report
	select
		nvl(m.fource_loinc,a.fource_loinc) fource_loinc,
		nvl(m.fource_lab_units,a.fource_lab_units) fource_lab_units,
		nvl(m.fource_lab_name,a.fource_lab_name) fource_lab_name,
		nvl(m.scale_factor,0) scale_factor,
		nvl(m.local_lab_code,f.fact_code) local_lab_code,
		coalesce(m.local_lab_units,f.fact_units,'((null))') local_lab_units,
		nvl(m.local_lab_name,'((missing))') local_lab_name,
		nvl(f.num_facts,0) num_facts,
		nvl(f.mean_value,-999) mean_value,
		nvl(f.stdev_value,-999) stdev_value,
		(case
			when scale_factor is not null and num_facts is not null
				then 'GOOD: Code and units found in the data'
			when m.fource_loinc is not null and c.fact_code is null
				then 'WARNING: This code from the lab mappings table could not be found in the data -- double check if you use another loinc or local code'
			when scale_factor is not null
				then 'WARNING: These local_lab_units in the lab mappings table could not be found in the data '
			else 'WARNING: These local_lab_units exist in the data but are missing from the lab mappings table -- map to the 4CE units using scale factor'
		end) notes
	from fource_lab_map m
		-- NVL(NULLIF(...)) maps NULL/empty units in the data to the 'DEFAULT' sentinel
		full outer join fource_lab_units_facts f
			on f.fact_code=m.local_lab_code and nvl(nullif(f.fact_units,''),'DEFAULT')=m.local_lab_units
		left outer join (
			select distinct fource_loinc, fource_lab_units, fource_lab_name, local_lab_code
			from fource_lab_map
		) a on a.local_lab_code=f.fact_code
		left outer join (
			select distinct fact_code from fource_lab_units_facts
		) c on m.local_lab_code=c.fact_code;
commit;
--select * from fource_lab_map_report;
-- View the results, including counts, to help you check your mappings (optional)
/*
select * from fource_lab_map_report order by fource_loinc, num_facts desc
*/

--------------------------------------------------------------------------------
-- Medication mappings
-- * Do not change the med_class or add
additional medications. -- * The ATC and RxNorm codes represent the same list of medications. -- * Use ATC and/or RxNorm, depending on what your institution uses. -------------------------------------------------------------------------------- --drop table fource_med_map; create table fource_med_map ( med_class varchar(50) not null, code_type varchar(10) not null, local_med_code varchar(50) not null ) ; alter table fource_med_map add primary key (med_class, code_type, local_med_code); -- ATC codes (optional) insert into fource_med_map select m, 'ATC' t, 'ATC:' || c -- Change "ATC:" to your local ATC code prefix (scheme) from ( -- Don't add or remove drugs select 'ACEI' m, c from (select 'C09AA01' c from dual union select 'C09AA02' from dual union select 'C09AA03' from dual union select 'C09AA04' from dual union select 'C09AA05' from dual union select 'C09AA06' from dual union select 'C09AA07' from dual union select 'C09AA08' from dual union select 'C09AA09' from dual union select 'C09AA10' from dual union select 'C09AA11' from dual union select 'C09AA13' from dual union select 'C09AA15' from dual union select 'C09AA16' from dual) t union select 'ARB', c from (select 'C09CA01' c from dual union select 'C09CA02' from dual union select 'C09CA03' from dual union select 'C09CA04' from dual union select 'C09CA06' from dual union select 'C09CA07' from dual union select 'C09CA08' from dual) t union select 'COAGA', c from (select 'B01AC04' c from dual union select 'B01AC05' from dual union select 'B01AC07' from dual union select 'B01AC10' from dual union select 'B01AC13' from dual union select 'B01AC16' from dual union select 'B01AC17' from dual union select 'B01AC22' from dual union select 'B01AC24' from dual union select 'B01AC25' from dual union select 'B01AC26' from dual) t union select 'COAGB', c from (select 'B01AA01' c from dual union select 'B01AA03' from dual union select 'B01AA04' from dual union select 'B01AA07' from dual union select 'B01AA11' from dual union 
select 'B01AB01' from dual union select 'B01AB04' from dual union select 'B01AB05' from dual union select 'B01AB06' from dual union select 'B01AB07' from dual union select 'B01AB08' from dual union select 'B01AB10' from dual union select 'B01AB12' from dual union select 'B01AE01' from dual union select 'B01AE02' from dual union select 'B01AE03' from dual union select 'B01AE06' from dual union select 'B01AE07' from dual union select 'B01AF01' from dual union select 'B01AF02' from dual union select 'B01AF03' from dual union select 'B01AF04' from dual union select 'B01AX05' from dual union select 'B01AX07' from dual) t union select 'COVIDVIRAL', c from (select 'J05AE10' c from dual union select 'J05AP01' from dual union select 'J05AR10' from dual) t union select 'DIURETIC', c from (select 'C03CA01' c from dual union select 'C03CA02' from dual union select 'C03CA03' from dual union select 'C03CA04' from dual union select 'C03CB01' from dual union select 'C03CB02' from dual union select 'C03CC01' from dual) t union select 'HCQ', c from (select 'P01BA01' c from dual union select 'P01BA02' from dual) t union select 'ILI', c from (select 'L04AC03' c from dual union select 'L04AC07' from dual union select 'L04AC11' from dual union select 'L04AC14' from dual) t union select 'INTERFERON', c from (select 'L03AB08' c from dual union select 'L03AB11' from dual) t union select 'SIANES', c from (select 'M03AC03' c from dual union select 'M03AC09' from dual union select 'M03AC11' from dual union select 'N01AX03' from dual union select 'N01AX10' from dual union select 'N05CD08' from dual union select 'N05CM18' from dual) t union select 'SICARDIAC', c from (select 'B01AC09' c from dual union select 'C01CA03' from dual union select 'C01CA04' from dual union select 'C01CA06' from dual union select 'C01CA07' from dual union select 'C01CA24' from dual union select 'C01CE02' from dual union select 'C01CX09' from dual union select 'H01BA01' from dual union select 'R07AX01' from dual) t ) 
t; commit; -- RxNorm codes (optional) insert into fource_med_map select m, 'RxNorm' t, 'RXNORM:' || c -- Change "RxNorm:" to your local RxNorm code prefix (scheme) from ( -- Don't add or remove drugs select 'ACEI' m, c from (select '36908' c from dual union select '39990' from dual union select '104375' from dual union select '104376' from dual union select '104377' from dual union select '104378' from dual union select '104383' from dual union select '104384' from dual union select '104385' from dual union select '1299896' from dual union select '1299897' from dual union select '1299963' from dual union select '1299965' from dual union select '1435623' from dual union select '1435624' from dual union select '1435630' from dual union select '1806883' from dual union select '1806884' from dual union select '1806890' from dual union select '18867' from dual union select '197884' from dual union select '198187' from dual union select '198188' from dual union select '198189' from dual union select '199351' from dual union select '199352' from dual union select '199353' from dual union select '199622' from dual union select '199707' from dual union select '199708' from dual union select '199709' from dual union select '1998' from dual union select '199816' from dual union select '199817' from dual union select '199931' from dual union select '199937' from dual union select '205326' from dual union select '205707' from dual union select '205778' from dual union select '205779' from dual union select '205780' from dual union select '205781' from dual union select '206277' from dual union select '206313' from dual union select '206764' from dual union select '206765' from dual union select '206766' from dual union select '206771' from dual union select '207780' from dual union select '207792' from dual union select '207800' from dual union select '207820' from dual union select '207891' from dual union select '207892' from dual union select '207893' from dual union select 
'207895' from dual union select '210671' from dual union select '210672' from dual union select '210673' from dual union select '21102' from dual union select '211535' from dual union select '213482' from dual union select '247516' from dual union select '251856' from dual union select '251857' from dual union select '260333' from dual union select '261257' from dual union select '261258' from dual union select '261962' from dual union select '262076' from dual union select '29046' from dual union select '30131' from dual union select '308607' from dual union select '308609' from dual union select '308612' from dual union select '308613' from dual union select '308962' from dual union select '308963' from dual union select '308964' from dual union select '310063' from dual union select '310065' from dual union select '310066' from dual union select '310067' from dual union select '310422' from dual union select '311353' from dual union select '311354' from dual union select '311734' from dual union select '311735' from dual union select '312311' from dual union select '312312' from dual union select '312313' from dual union select '312748' from dual union select '312749' from dual union select '312750' from dual union select '313982' from dual union select '313987' from dual union select '314076' from dual union select '314077' from dual union select '314203' from dual union select '317173' from dual union select '346568' from dual union select '347739' from dual union select '347972' from dual union select '348000' from dual union select '35208' from dual union select '35296' from dual union select '371001' from dual union select '371254' from dual union select '371506' from dual union select '372007' from dual union select '372274' from dual union select '372614' from dual union select '372945' from dual union select '373293' from dual union select '373731' from dual union select '373748' from dual union select '373749' from dual union select '374176' from dual 
union select '374177' from dual union select '374938' from dual union select '378288' from dual union select '3827' from dual union select '38454' from dual union select '389182' from dual union select '389183' from dual union select '389184' from dual union select '393442' from dual union select '401965' from dual union select '401968' from dual union select '411434' from dual union select '50166' from dual union select '542702' from dual union select '542704' from dual union select '54552' from dual union select '60245' from dual union select '629055' from dual union select '656757' from dual union select '807349' from dual union select '845488' from dual union select '845489' from dual union select '854925' from dual union select '854927' from dual union select '854984' from dual union select '854986' from dual union select '854988' from dual union select '854990' from dual union select '857169' from dual union select '857171' from dual union select '857183' from dual union select '857187' from dual union select '857189' from dual union select '858804' from dual union select '858806' from dual union select '858810' from dual union select '858812' from dual union select '858813' from dual union select '858815' from dual union select '858817' from dual union select '858819' from dual union select '858821' from dual union select '898687' from dual union select '898689' from dual union select '898690' from dual union select '898692' from dual union select '898719' from dual union select '898721' from dual union select '898723' from dual union select '898725' from dual) t union select 'ARB' m, c from (select '118463' c from dual union select '108725' from dual union select '153077' from dual union select '153665' from dual union select '153666' from dual union select '153667' from dual union select '153821' from dual union select '153822' from dual union select '153823' from dual union select '153824' from dual union select '1996253' from dual union select '1996254' 
from dual union select '199850' from dual union select '199919' from dual union select '200094' from dual union select '200095' from dual union select '200096' from dual union select '205279' from dual union select '205304' from dual union select '205305' from dual union select '2057151' from dual union select '2057152' from dual union select '2057158' from dual union select '206256' from dual union select '213431' from dual union select '213432' from dual union select '214354' from dual union select '261209' from dual union select '261301' from dual union select '282755' from dual union select '284531' from dual union select '310139' from dual union select '310140' from dual union select '311379' from dual union select '311380' from dual union select '314073' from dual union select '349199' from dual union select '349200' from dual union select '349201' from dual union select '349483' from dual union select '351761' from dual union select '351762' from dual union select '352001' from dual union select '352274' from dual union select '370704' from dual union select '371247' from dual union select '372651' from dual union select '374024' from dual union select '374279' from dual union select '374612' from dual union select '378276' from dual union select '389185' from dual union select '484824' from dual union select '484828' from dual union select '484855' from dual union select '52175' from dual union select '577776' from dual union select '577785' from dual union select '577787' from dual union select '598024' from dual union select '615856' from dual union select '639536' from dual union select '639537' from dual union select '639539' from dual union select '639543' from dual union select '69749' from dual union select '73494' from dual union select '83515' from dual union select '83818' from dual union select '979480' from dual union select '979482' from dual union select '979485' from dual union select '979487' from dual union select '979492' from dual union 
select '979494' from dual) t union select 'COAGA' m, c from (select '27518' c from dual union select '10594' from dual union select '108911' from dual union select '1116632' from dual union select '1116634' from dual union select '1116635' from dual union select '1116639' from dual union select '1537034' from dual union select '1537038' from dual union select '1537039' from dual union select '1537045' from dual union select '1656052' from dual union select '1656055' from dual union select '1656056' from dual union select '1656061' from dual union select '1656683' from dual union select '1666332' from dual union select '1666334' from dual union select '1736469' from dual union select '1736470' from dual union select '1736472' from dual union select '1736477' from dual union select '1736478' from dual union select '1737465' from dual union select '1737466' from dual union select '1737468' from dual union select '1737471' from dual union select '1737472' from dual union select '1812189' from dual union select '1813035' from dual union select '1813037' from dual union select '197622' from dual union select '199314' from dual union select '200348' from dual union select '200349' from dual union select '205253' from dual union select '206714' from dual union select '207569' from dual union select '208316' from dual union select '208558' from dual union select '213169' from dual union select '213299' from dual union select '241162' from dual union select '261096' from dual union select '261097' from dual union select '309362' from dual union select '309952' from dual union select '309953' from dual union select '309955' from dual union select '313406' from dual union select '32968' from dual union select '333833' from dual union select '3521' from dual union select '371917' from dual union select '374131' from dual union select '374583' from dual union select '375035' from dual union select '392451' from dual union select '393522' from dual union select '613391' from dual 
union select '73137' from dual union select '749196' from dual union select '749198' from dual union select '75635' from dual union select '83929' from dual union select '855811' from dual union select '855812' from dual union select '855816' from dual union select '855818' from dual union select '855820' from dual) t union select 'COAGB' m, c from (select '2110605' c from dual union select '237057' from dual union select '69528' from dual union select '8150' from dual union select '163426' from dual union select '1037042' from dual union select '1037044' from dual union select '1037045' from dual union select '1037049' from dual union select '1037179' from dual union select '1037181' from dual union select '1110708' from dual union select '1114195' from dual union select '1114197' from dual union select '1114198' from dual union select '1114202' from dual union select '11289' from dual union select '114934' from dual union select '1232082' from dual union select '1232084' from dual union select '1232086' from dual union select '1232088' from dual union select '1241815' from dual union select '1241823' from dual union select '1245458' from dual union select '1245688' from dual union select '1313142' from dual union select '1359733' from dual union select '1359900' from dual union select '1359967' from dual union select '1360012' from dual union select '1360432' from dual union select '1361029' from dual union select '1361038' from dual union select '1361048' from dual union select '1361226' from dual union select '1361568' from dual union select '1361574' from dual union select '1361577' from dual union select '1361607' from dual union select '1361613' from dual union select '1361615' from dual union select '1361853' from dual union select '1362024' from dual union select '1362026' from dual union select '1362027' from dual union select '1362029' from dual union select '1362030' from dual union select '1362048' from dual union select '1362052' from dual union 
select '1362054' from dual union select '1362055' from dual union select '1362057' from dual union select '1362059' from dual union select '1362060' from dual union select '1362061' from dual union select '1362062' from dual union select '1362063' from dual union select '1362065' from dual union select '1362067' from dual union select '1362824' from dual union select '1362831' from dual union select '1362837' from dual union select '1362935' from dual union select '1362962' from dual union select '1364430' from dual union select '1364434' from dual union select '1364435' from dual union select '1364441' from dual union select '1364445' from dual union select '1364447' from dual union select '1490491' from dual union select '1490493' from dual union select '15202' from dual union select '152604' from dual union select '154' from dual union select '1549682' from dual union select '1549683' from dual union select '1598' from dual union select '1599538' from dual union select '1599542' from dual union select '1599543' from dual union select '1599549' from dual union select '1599551' from dual union select '1599553' from dual union select '1599555' from dual union select '1599557' from dual union select '1656595' from dual union select '1656599' from dual union select '1656760' from dual union select '1657991' from dual union select '1658634' from dual union select '1658637' from dual union select '1658647' from dual union select '1658659' from dual union select '1658690' from dual union select '1658692' from dual union select '1658707' from dual union select '1658717' from dual union select '1658719' from dual union select '1658720' from dual union select '1659195' from dual union select '1659197' from dual union select '1659260' from dual union select '1659263' from dual union select '1723476' from dual union select '1723478' from dual union select '1798389' from dual union select '1804730' from dual union select '1804735' from dual union select '1804737' from dual 
union select '1804738' from dual union select '1807809' from dual union select '1856275' from dual union select '1856278' from dual union select '1857598' from dual union select '1857949' from dual union select '1927851' from dual union select '1927855' from dual union select '1927856' from dual union select '1927862' from dual union select '1927864' from dual union select '1927866' from dual union select '197597' from dual union select '198349' from dual union select '1992427' from dual union select '1992428' from dual union select '1997015' from dual union select '1997017' from dual union select '204429' from dual union select '204431' from dual union select '205791' from dual union select '2059015' from dual union select '2059017' from dual union select '209081' from dual union select '209082' from dual union select '209083' from dual union select '209084' from dual union select '209086' from dual union select '209087' from dual union select '209088' from dual union select '211763' from dual union select '212123' from dual union select '212124' from dual union select '212155' from dual union select '238722' from dual union select '238727' from dual union select '238729' from dual union select '238730' from dual union select '241112' from dual union select '241113' from dual union select '242501' from dual union select '244230' from dual union select '244231' from dual union select '244239' from dual union select '244240' from dual union select '246018' from dual union select '246019' from dual union select '248140' from dual union select '248141' from dual union select '251272' from dual union select '280611' from dual union select '282479' from dual union select '283855' from dual union select '284458' from dual union select '284534' from dual union select '308351' from dual union select '308769' from dual union select '310710' from dual union select '310713' from dual union select '310723' from dual union select '310732' from dual union select '310733' from 
dual union select '310734' from dual union select '310739' from dual union select '310741' from dual union select '313410' from dual union select '313732' from dual union select '313733' from dual union select '313734' from dual union select '313735' from dual union select '313737' from dual union select '313738' from dual union select '313739' from dual union select '314013' from dual union select '314279' from dual union select '314280' from dual union select '321208' from dual union select '349308' from dual union select '351111' from dual union select '352081' from dual union select '352102' from dual union select '370743' from dual union select '371679' from dual union select '371810' from dual union select '372012' from dual union select '374319' from dual union select '374320' from dual union select '374638' from dual union select '376834' from dual union select '381158' from dual union select '389189' from dual union select '402248' from dual union select '402249' from dual union select '404141' from dual union select '404142' from dual union select '404143' from dual union select '404144' from dual union select '404146' from dual union select '404147' from dual union select '404148' from dual union select '404259' from dual union select '404260' from dual union select '415379' from dual union select '5224' from dual union select '540217' from dual union select '542824' from dual union select '545076' from dual union select '562130' from dual union select '562550' from dual union select '581236' from dual union select '60819' from dual union select '616862' from dual union select '616912' from dual union select '645887' from dual union select '67031' from dual union select '67108' from dual union select '67109' from dual union select '69646' from dual union select '727382' from dual union select '727383' from dual union select '727384' from dual union select '727559' from dual union select '727560' from dual union select '727562' from dual union select 
'727563' from dual union select '727564' from dual union select '727565' from dual union select '727566' from dual union select '727567' from dual union select '727568' from dual union select '727718' from dual union select '727719' from dual union select '727722' from dual union select '727723' from dual union select '727724' from dual union select '727725' from dual union select '727726' from dual union select '727727' from dual union select '727728' from dual union select '727729' from dual union select '727730' from dual union select '727778' from dual union select '727831' from dual union select '727832' from dual union select '727834' from dual union select '727838' from dual union select '727851' from dual union select '727859' from dual union select '727860' from dual union select '727861' from dual union select '727878' from dual union select '727880' from dual union select '727881' from dual union select '727882' from dual union select '727883' from dual union select '727884' from dual union select '727888' from dual union select '727892' from dual union select '727920' from dual union select '727922' from dual union select '727926' from dual union select '729968' from dual union select '729969' from dual union select '729970' from dual union select '729971' from dual union select '729972' from dual union select '729973' from dual union select '729974' from dual union select '729976' from dual union select '730002' from dual union select '746573' from dual union select '746574' from dual union select '753111' from dual union select '753112' from dual union select '753113' from dual union select '759595' from dual union select '759596' from dual union select '759597' from dual union select '759598' from dual union select '759599' from dual union select '75960' from dual union select '759600' from dual union select '759601' from dual union select '792060' from dual union select '795798' from dual union select '827000' from dual union select '827001' from 
dual union select '827003' from dual union select '827069' from dual union select '827099' from dual union select '829884' from dual union select '829885' from dual union select '829886' from dual union select '829888' from dual union select '830698' from dual union select '848335' from dual union select '848339' from dual union select '849297' from dual union select '849298' from dual union select '849299' from dual union select '849300' from dual union select '849301' from dual union select '849312' from dual union select '849313' from dual union select '849317' from dual union select '849333' from dual union select '849337' from dual union select '849338' from dual union select '849339' from dual union select '849340' from dual union select '849341' from dual union select '849342' from dual union select '849344' from dual union select '849699' from dual union select '849702' from dual union select '849710' from dual union select '849712' from dual union select '849715' from dual union select '849718' from dual union select '849722' from dual union select '849726' from dual union select '849764' from dual union select '849770' from dual union select '849776' from dual union select '849814' from dual union select '854228' from dual union select '854232' from dual union select '854235' from dual union select '854236' from dual union select '854238' from dual union select '854239' from dual union select '854241' from dual union select '854242' from dual union select '854245' from dual union select '854247' from dual union select '854248' from dual union select '854249' from dual union select '854252' from dual union select '854253' from dual union select '854255' from dual union select '854256' from dual union select '855288' from dual union select '855290' from dual union select '855292' from dual union select '855296' from dual union select '855298' from dual union select '855300' from dual union select '855302' from dual union select '855304' from dual union 
select '855306' from dual union select '855308' from dual union select '855312' from dual union select '855314' from dual union select '855316' from dual union select '855318' from dual union select '855320' from dual union select '855322' from dual union select '855324' from dual union select '855326' from dual union select '855328' from dual union select '855332' from dual union select '855334' from dual union select '855336' from dual union select '855338' from dual union select '855340' from dual union select '855342' from dual union select '855344' from dual union select '855346' from dual union select '855348' from dual union select '855350' from dual union select '857253' from dual union select '857255' from dual union select '857257' from dual union select '857259' from dual union select '857261' from dual union select '857645' from dual union select '861356' from dual union select '861358' from dual union select '861360' from dual union select '861362' from dual union select '861363' from dual union select '861364' from dual union select '861365' from dual union select '861366' from dual union select '978713' from dual union select '978715' from dual union select '978717' from dual union select '978718' from dual union select '978719' from dual union select '978720' from dual union select '978721' from dual union select '978722' from dual union select '978723' from dual union select '978725' from dual union select '978727' from dual union select '978733' from dual union select '978735' from dual union select '978736' from dual union select '978737' from dual union select '978738' from dual union select '978740' from dual union select '978741' from dual union select '978744' from dual union select '978745' from dual union select '978746' from dual union select '978747' from dual union select '978755' from dual union select '978757' from dual union select '978759' from dual union select '978761' from dual union select '978777' from dual union select '978778' 
from dual) t union select 'COVIDVIRAL' m, c from (select '108766' c from dual union select '1236627' from dual union select '1236628' from dual union select '1236632' from dual union select '1298334' from dual union select '1359269' from dual union select '1359271' from dual union select '1486197' from dual union select '1486198' from dual union select '1486200' from dual union select '1486202' from dual union select '1486203' from dual union select '1487498' from dual union select '1487500' from dual union select '1863148' from dual union select '1992160' from dual union select '207406' from dual union select '248109' from dual union select '248110' from dual union select '248112' from dual union select '284477' from dual union select '284640' from dual union select '311368' from dual union select '311369' from dual union select '312817' from dual union select '312818' from dual union select '352007' from dual union select '352337' from dual union select '373772' from dual union select '373773' from dual union select '373774' from dual union select '374642' from dual union select '374643' from dual union select '376293' from dual union select '378671' from dual union select '460132' from dual union select '539485' from dual union select '544400' from dual union select '597718' from dual union select '597722' from dual union select '597729' from dual union select '597730' from dual union select '602770' from dual union select '616129' from dual union select '616131' from dual union select '616133' from dual union select '643073' from dual union select '643074' from dual union select '670026' from dual union select '701411' from dual union select '701413' from dual union select '746645' from dual union select '746647' from dual union select '754738' from dual union select '757597' from dual union select '757598' from dual union select '757599' from dual union select '757600' from dual union select '790286' from dual union select '794610' from dual union select 
'795742' from dual union select '795743' from dual union select '824338' from dual union select '824876' from dual union select '831868' from dual union select '831870' from dual union select '847330' from dual union select '847741' from dual union select '847745' from dual union select '847749' from dual union select '850455' from dual union select '850457' from dual union select '896790' from dual union select '902312' from dual union select '902313' from dual union select '9344' from dual) t union select 'DIURETIC' m, c from (select '392534' c from dual union select '4109' from dual union select '392464' from dual union select '33770' from dual union select '104220' from dual union select '104222' from dual union select '1112201' from dual union select '132604' from dual union select '1488537' from dual union select '1546054' from dual union select '1546056' from dual union select '1719285' from dual union select '1719286' from dual union select '1719290' from dual union select '1719291' from dual union select '1727568' from dual union select '1727569' from dual union select '1727572' from dual union select '1729520' from dual union select '1729521' from dual union select '1729523' from dual union select '1729527' from dual union select '1729528' from dual union select '1808' from dual union select '197417' from dual union select '197418' from dual union select '197419' from dual union select '197730' from dual union select '197731' from dual union select '197732' from dual union select '198369' from dual union select '198370' from dual union select '198371' from dual union select '198372' from dual union select '199610' from dual union select '200801' from dual union select '200809' from dual union select '204154' from dual union select '205488' from dual union select '205489' from dual union select '205490' from dual union select '205732' from dual union select '208076' from dual union select '208078' from dual union select '208080' from dual union select 
'208081' from dual union select '208082' from dual union select '248657' from dual union select '250044' from dual union select '250660' from dual union select '251308' from dual union select '252484' from dual union select '282452' from dual union select '282486' from dual union select '310429' from dual union select '313988' from dual union select '371157' from dual union select '371158' from dual union select '372280' from dual union select '372281' from dual union select '374168' from dual union select '374368' from dual union select '38413' from dual union select '404018' from dual union select '4603' from dual union select '545041' from dual union select '561969' from dual union select '630032' from dual union select '630035' from dual union select '645036' from dual union select '727573' from dual union select '727574' from dual union select '727575' from dual union select '727845' from dual union select '876422' from dual union select '95600' from dual) t union select 'HCQ' m, c from (select '1116758' c from dual union select '1116760' from dual union select '1117346' from dual union select '1117351' from dual union select '1117353' from dual union select '1117531' from dual union select '197474' from dual union select '197796' from dual union select '202317' from dual union select '213378' from dual union select '226388' from dual union select '2393' from dual union select '249663' from dual union select '250175' from dual union select '261104' from dual union select '370656' from dual union select '371407' from dual union select '5521' from dual union select '755624' from dual union select '755625' from dual union select '756408' from dual union select '979092' from dual union select '979094' from dual) t union select 'ILI' m, c from (select '1441526' c from dual union select '1441527' from dual union select '1441530' from dual union select '1535218' from dual union select '1535242' from dual union select '1535247' from dual union select '1657973' from 
dual union select '1657974' from dual union select '1657976' from dual union select '1657979' from dual union select '1657980' from dual union select '1657981' from dual union select '1657982' from dual union select '1658131' from dual union select '1658132' from dual union select '1658135' from dual union select '1658139' from dual union select '1658141' from dual union select '1923319' from dual union select '1923332' from dual union select '1923333' from dual union select '1923338' from dual union select '1923345' from dual union select '1923347' from dual union select '2003754' from dual union select '2003755' from dual union select '2003757' from dual union select '2003766' from dual union select '2003767' from dual union select '351141' from dual union select '352056' from dual union select '612865' from dual union select '72435' from dual union select '727708' from dual union select '727711' from dual union select '727714' from dual union select '727715' from dual union select '895760' from dual union select '895764' from dual) t union select 'INTERFERON' m, c from (select '120608' c from dual union select '1650893' from dual union select '1650894' from dual union select '1650896' from dual union select '1650922' from dual union select '1650940' from dual union select '1651307' from dual union select '1721323' from dual union select '198360' from dual union select '207059' from dual union select '351270' from dual union select '352297' from dual union select '378926' from dual union select '403986' from dual union select '72257' from dual union select '731325' from dual union select '731326' from dual union select '731328' from dual union select '731330' from dual union select '860244' from dual) t union select 'SIANES' m, c from (select '106517' c from dual union select '1087926' from dual union select '1188478' from dual union select '1234995' from dual union select '1242617' from dual union select '1249681' from dual union select '1301259' from dual union 
select '1313988' from dual union select '1373737' from dual union select '1486837' from dual union select '1535224' from dual union select '1535226' from dual union select '1535228' from dual union select '1535230' from dual union select '1551393' from dual union select '1551395' from dual union select '1605773' from dual union select '1666776' from dual union select '1666777' from dual union select '1666797' from dual union select '1666798' from dual union select '1666800' from dual union select '1666814' from dual union select '1666821' from dual union select '1666823' from dual union select '1718899' from dual union select '1718900' from dual union select '1718902' from dual union select '1718906' from dual union select '1718907' from dual union select '1718909' from dual union select '1718910' from dual union select '1730193' from dual union select '1730194' from dual union select '1730196' from dual union select '1732667' from dual union select '1732668' from dual union select '1732674' from dual union select '1788947' from dual union select '1808216' from dual union select '1808217' from dual union select '1808219' from dual union select '1808222' from dual union select '1808223' from dual union select '1808224' from dual union select '1808225' from dual union select '1808234' from dual union select '1808235' from dual union select '1862110' from dual union select '198383' from dual union select '199211' from dual union select '199212' from dual union select '199775' from dual union select '2050125' from dual union select '2057964' from dual union select '206967' from dual union select '206970' from dual union select '206972' from dual union select '207793' from dual union select '207901' from dual union select '210676' from dual union select '210677' from dual union select '238082' from dual union select '238083' from dual union select '238084' from dual union select '240606' from dual union select '259859' from dual union select '284397' from dual union 
select '309710' from dual union select '311700' from dual union select '311701' from dual union select '311702' from dual union select '312674' from dual union select '319864' from dual union select '372528' from dual union select '372922' from dual union select '375623' from dual union select '376856' from dual union select '377135' from dual union select '377219' from dual union select '377483' from dual union select '379133' from dual union select '404091' from dual union select '404092' from dual union select '404136' from dual union select '422410' from dual union select '446503' from dual union select '48937' from dual union select '584528' from dual union select '584530' from dual union select '6130' from dual union select '631205' from dual union select '68139' from dual union select '6960' from dual union select '71535' from dual union select '828589' from dual union select '828591' from dual union select '830752' from dual union select '859437' from dual union select '8782' from dual union select '884675' from dual union select '897073' from dual union select '897077' from dual union select '998210' from dual union select '998211' from dual) t union select 'SICARDIAC' m, c from (select '7442' c from dual union select '1009216' from dual union select '1045470' from dual union select '1049182' from dual union select '1049184' from dual union select '1052767' from dual union select '106686' from dual union select '106779' from dual union select '106780' from dual union select '1087043' from dual union select '1087047' from dual union select '1090087' from dual union select '1114874' from dual union select '1114880' from dual union select '1114888' from dual union select '11149' from dual union select '1117374' from dual union select '1232651' from dual union select '1232653' from dual union select '1234563' from dual union select '1234569' from dual union select '1234571' from dual union select '1234576' from dual union select '1234578' from dual union 
select '1234579' from dual union select '1234581' from dual union select '1234584' from dual union select '1234585' from dual union select '1234586' from dual union select '1251018' from dual union select '1251022' from dual union select '1292716' from dual union select '1292731' from dual union select '1292740' from dual union select '1292751' from dual union select '1292887' from dual union select '1299137' from dual union select '1299141' from dual union select '1299145' from dual union select '1299879' from dual union select '1300092' from dual union select '1302755' from dual union select '1305268' from dual union select '1305269' from dual union select '1307224' from dual union select '1358843' from dual union select '1363777' from dual union select '1363785' from dual union select '1363786' from dual union select '1363787' from dual union select '1366958' from dual union select '141848' from dual union select '1490057' from dual union select '1542385' from dual union select '1546216' from dual union select '1546217' from dual union select '1547926' from dual union select '1548673' from dual union select '1549386' from dual union select '1549388' from dual union select '1593738' from dual union select '1658178' from dual union select '1660013' from dual union select '1660014' from dual union select '1660016' from dual union select '1661387' from dual union select '1666371' from dual union select '1666372' from dual union select '1666374' from dual union select '1721536' from dual union select '1743862' from dual union select '1743869' from dual union select '1743871' from dual union select '1743877' from dual union select '1743879' from dual union select '1743938' from dual union select '1743941' from dual union select '1743950' from dual union select '1743953' from dual union select '1745276' from dual union select '1789858' from dual union select '1791839' from dual union select '1791840' from dual union select '1791842' from dual union select '1791854' 
from dual union select '1791859' from dual union select '1791861' from dual union select '1812167' from dual union select '1812168' from dual union select '1812170' from dual union select '1870205' from dual union select '1870207' from dual union select '1870225' from dual union select '1870230' from dual union select '1870232' from dual union select '1939322' from dual union select '198620' from dual union select '198621' from dual union select '198786' from dual union select '198787' from dual union select '198788' from dual union select '1989112' from dual union select '1989117' from dual union select '1991328' from dual union select '1991329' from dual union select '1999003' from dual union select '1999006' from dual union select '1999007' from dual union select '1999012' from dual union select '204395' from dual union select '204843' from dual union select '209217' from dual union select '2103181' from dual union select '2103182' from dual union select '2103184' from dual union select '211199' from dual union select '211200' from dual union select '211704' from dual union select '211709' from dual union select '211712' from dual union select '211714' from dual union select '211715' from dual union select '212343' from dual union select '212770' from dual union select '212771' from dual union select '212772' from dual union select '212773' from dual union select '238217' from dual union select '238218' from dual union select '238219' from dual union select '238230' from dual union select '238996' from dual union select '238997' from dual union select '238999' from dual union select '239000' from dual union select '239001' from dual union select '241033' from dual union select '242969' from dual union select '244284' from dual union select '245317' from dual union select '247596' from dual union select '247940' from dual union select '260687' from dual union select '309985' from dual union select '309986' from dual union select '309987' from dual union select 
'310011' from dual union select '310012' from dual union select '310013' from dual union select '310116' from dual union select '310117' from dual union select '310127' from dual union select '310132' from dual union select '311705' from dual union select '312395' from dual union select '312398' from dual union select '313578' from dual union select '313967' from dual union select '314175' from dual union select '347930' from dual union select '351701' from dual union select '351702' from dual union select '351982' from dual union select '359907' from dual union select '3616' from dual union select '3628' from dual union select '372029' from dual union select '372030' from dual union select '372031' from dual union select '373368' from dual union select '373369' from dual union select '373370' from dual union select '373372' from dual union select '373375' from dual union select '374283' from dual union select '374570' from dual union select '376521' from dual union select '377281' from dual union select '379042' from dual union select '387789' from dual union select '392099' from dual union select '393309' from dual union select '3992' from dual union select '404093' from dual union select '477358' from dual union select '477359' from dual union select '52769' from dual union select '542391' from dual union select '542655' from dual union select '542674' from dual union select '562501' from dual union select '562502' from dual union select '562592' from dual union select '584580' from dual union select '584582' from dual union select '584584' from dual union select '584588' from dual union select '602511' from dual union select '603259' from dual union select '603276' from dual union select '603915' from dual union select '617785' from dual union select '669267' from dual union select '672683' from dual union select '672685' from dual union select '672891' from dual union select '692479' from dual union select '700414' from dual union select '704955' from dual 
-- (continuation: tail of the SICARDIAC RxNorm code list in the
--  "insert into fource_med_map" statement that begins earlier in this script)
-- NOTE(review): as rendered, "commit;" and the REMDESIVIR insert below share a
-- physical line with a "--" comment; presumably the original source breaks the
-- line before "insert into fource_med_map" -- verify the line wrapping.
union select '705163' from dual union select '705164' from dual union select '705170' from dual union select '727310' from dual union select '727316' from dual union select '727345' from dual union select '727347' from dual union select '727373' from dual union select '727386' from dual union select '727410' from dual union select '727842' from dual union select '727843' from dual union select '727844' from dual union select '746206' from dual union select '746207' from dual union select '7512' from dual union select '8163' from dual union select '827706' from dual union select '864089' from dual union select '880658' from dual union select '8814' from dual union select '883806' from dual union select '891437' from dual union select '891438' from dual) t ) t; commit; -- Remdesivir defined separately since many sites will have custom codes (optional) insert into fource_med_map select 'REMDESIVIR', 'RxNorm', 'RxNorm:2284718' from dual union select 'REMDESIVIR', 'RxNorm', 'RxNorm:2284960' from dual union select 'REMDESIVIR', 'Custom', 'ACT|LOCAL:REMDESIVIR' from dual; commit; -- Use the concept_dimension to get an expanded list of medication codes (optional) -- This will find paths corresponding to concepts already in the fource_med_map table, -- and then find all the concepts corresponding to child paths. -- WARNING: This query might take several minutes to run.
-- ===========================================================================
-- fource_proc_map: maps the fixed 4CE procedure groups (EmergencyGeneralSurgery,
-- EmergencyOrthopedics, RenalReplacement, SupplementalOxygenSevere, ECMO, CPR,
-- ArterialCatheter, CTChest, Bronchoscopy, etc.) to local procedure codes, with
-- one insert per coding system: CPT4 (US), CCAM (France), OPCS4 (UK),
-- OPS (Germany), TOSP (Singapore), ICD10AM (Singapore/Australia),
-- CBHPM (Brazil-TUSS), ICD-9-Proc, ICD-10-PCS, and SNOMED.
-- Each insert starts with "select '' p, '' c from dual where 1=0", a zero-row
-- anchor that fixes the column aliases for the UNION ALL list that follows.
-- Within each inner code list, plain UNION (not UNION ALL) collapses the
-- duplicate codes that appear (e.g. 'X58.1' is listed several times), which
-- keeps the (proc_group, code_type, local_proc_code) primary key satisfied.
-- NOTE(review): the per-system "Change 'X:' to your local RxNorm code prefix"
-- comments look copy/pasted from the medication section; they mean the local
-- *procedure* code prefix (scheme).
-- ===========================================================================
-- ****THIS IS UNTESTED ****** /* select concept_path, concept_cd into #med_paths from @crcSchema.concept_dimension where concept_path like '\ACT\Medications\%' and concept_cd in (select concept_cd from @crcSchema.observation_fact --with (nolock)) ; alter table #med_paths add primary key (concept_path) ; insert into fource_med_map select distinct m.med_class, 'Expand', d.concept_cd from fource_med_map m inner join @crcSchema.concept_dimension c on m.local_med_code = c.concept_cd inner join #med_paths d on d.concept_path like c.concept_path || '%' where not exists ( select * from fource_med_map t where t.med_class = m.med_class and t.local_med_code = d.concept_cd ) */ -------------------------------------------------------------------------------- -- Procedure mappings -- * Do not change the proc_group or add additional procedures. -------------------------------------------------------------------------------- create table fource_proc_map ( proc_group varchar(50) not null, code_type varchar(10) not null, local_proc_code varchar(50) not null ); alter table fource_proc_map add primary key (proc_group, code_type, local_proc_code); -- CPT4 (United States) insert into fource_proc_map select p, 'CPT4', 'CPT4:' || c -- Change "CPT4:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'EmergencyGeneralSurgery', c from (select '44970' c from dual union select '47562' from dual union select '47563' from dual union select '44950' from dual union select '49320' from dual union select '44180' from dual union select '49585' from dual union select '44120' from dual) t union all select 'EmergencyOrthopedics', c from (select '27245' c from dual union select '27236' from dual union select '27759' from dual union select '24538' from dual union select '11044' from dual union select '27506' from dual union select '22614' from dual union select '27814' from dual union select '63030' from dual) t union all select 
-- (CPT4 United States list continues; CCAM France mappings begin further on)
'EmergencyVascularSurgery', c from (select '36247' c from dual) t union all select 'EmergencyOBGYN', c from (select '59151' c from dual) t union all select 'RenalReplacement', c from (select '90935' c from dual union select '90937' from dual union select '90945' from dual) t union all select 'SupplementalOxygenSevere', c from (select '94002' c from dual union select '94003' from dual union select '94660' from dual union select '31500' from dual) t union all select 'ECMO', c from (select '33946' c from dual union select '33947' from dual union select '33951' from dual union select '33952' from dual) t union all select 'CPR', c from (select '92950' c from dual) t union all select 'ArterialCatheter', c from (select '36620' c from dual) t union all select 'CTChest', c from (select '71250' c from dual union select '71260' from dual union select '71270' from dual) t union all select 'Bronchoscopy', c from (select '31645' c from dual) t union all select 'CovidVaccine', c from (select '0001A' c from dual union select '0002A' from dual union select '0011A' from dual union select '0012A' from dual union select '0021A' from dual union select '0022A' from dual union select '0031A' from dual union select '91300' from dual union select '91301' from dual union select '91302' from dual union select '91303' from dual) t ) t; commit; -- CCAM (France) insert into fource_proc_map select p, 'CCAM', 'CCAM:' || c -- Change "CCAM:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'EmergencyGeneralSurgery', c from (select 'HHFA016' c from dual union select 'HMFC004' from dual union select 'HHFA011' from dual union select 'ZCQC002' from dual union select 'HGPC015' from dual union select 'LMMA006' from dual union select 'LMMA009' from dual union select 'HGFA007' from dual union select 'HGFC021' from dual) t union all select 'EmergencyOrthopedics', c from (select 'NBCA006' c from dual union select 'NBCA005' from dual union select 
-- (CCAM list continues; OPCS4 United Kingdom mappings begin further on)
'NCCB006' from dual union select 'MBCB001' from dual union select 'NBCA007' from dual union select 'LHDA001' from dual union select 'LHDA002' from dual union select 'NCCA017' from dual union select 'LFFA001' from dual union select 'LDFA003' from dual) t union all select 'EmergencyOBGYN', c from (select 'JJFC001' c from dual) t union all select 'RenalReplacement', c from (select 'JVJF004' c from dual union select 'JVJF005' from dual union select 'JVJF004' from dual union select 'JVJB001' from dual union select 'JVJB002' from dual union select 'JVJF003' from dual union select 'JVJF008' from dual) t union all select 'SupplementalOxygenSevere', c from (select 'GLMF001' c from dual union select 'GLLD003' from dual union select 'GLLD012' from dual union select 'GLLD019' from dual union select 'GLMP001' from dual union select 'GLLD008' from dual union select 'GLLD015' from dual union select 'GLLD004' from dual union select 'GELD004' from dual) t union all select 'SupplementalOxygenOther', c from (select 'GLLD017' c from dual) t union all select 'ECMO', c from (select 'EQLA002' c from dual union select 'EQQP004' from dual union select 'GLJF010' from dual) t union all select 'CPR', c from (select 'DKMD001' c from dual union select 'DKMD002' from dual) t union all select 'ArterialCatheter', c from (select 'ENLF001' c from dual) t union all select 'CTChest', c from (select 'ZBQK001' c from dual union select 'ZBQH001' from dual) t union all select 'Bronchoscopy', c from (select 'GEJE001' c from dual union select 'GEJE003' from dual) t ) t; commit; -- OPCS4 (United Kingdom) insert into fource_proc_map select p, 'OPCS4', 'OPCS4:' || c -- Change "OPCS4:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'EmergencyGeneralSurgery', c from (select 'H01' c from dual union select 'Y75.2' from dual union select 'J18' from dual union select 'Y75.2' from dual union select 'J18.2' from dual union select 'Y75.2' from dual union select 
-- (OPCS4 list continues; note the OPCS4 insert below ends with ") t;" and no
--  commit of its own -- the later commits in this section cover it)
'H01' from dual union select 'T43' from dual union select 'T43.8' from dual union select 'T41.3' from dual union select 'T24' from dual union select 'G58.4' from dual union select 'G69.3' from dual) t union all select 'EmergencyOrthopedics', c from (select 'W24.1' c from dual union select 'W19.1' from dual union select 'W33.6' from dual union select 'W19.2' from dual union select 'V38' from dual union select 'V55.1' from dual union select 'W20.5' from dual union select 'V25.2' from dual union select 'V67.2' from dual union select 'V55.1' from dual) t union all select 'RenalReplacement', c from (select 'X40.3' c from dual union select 'X40.3' from dual union select 'X40.2' from dual union select 'X40.4' from dual union select 'X40.5' from dual union select 'X40.6' from dual union select 'X40.7' from dual union select 'X40.8' from dual union select 'X40.9' from dual) t union all select 'SupplementalOxygenSevere', c from (select 'E85.2' c from dual union select 'E85.4' from dual union select 'E85.6' from dual union select 'X56.2' from dual) t union all select 'SupplementalOxygenOther', c from (select 'X52' c from dual) t union all select 'ECMO', c from (select 'X58.1' c from dual union select 'X58.1' from dual union select 'X58.1' from dual union select 'X58.1' from dual) t union all select 'CPR', c from (select 'X50.3' c from dual) t union all select 'CTChest', c from (select 'U07.1' c from dual union select 'Y97.2' from dual union select 'U07.1' from dual union select 'Y97.3' from dual union select 'U07.1' from dual union select 'Y97.1' from dual) t union all select 'Bronchoscopy', c from (select 'E48.4' c from dual union select 'E50.4' from dual) t ) t; -- OPS (Germany) insert into fource_proc_map select p, 'OPS', 'OPS:' || c -- Change "OPS:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'EmergencyGeneralSurgery', c from (select '5-470.1' c from dual union select '5-511.1' from dual union select '5-511.12' 
-- (OPS Germany list continues; TOSP Singapore mappings begin further on)
from dual union select '5-470' from dual union select '1-694' from dual union select '5-534' from dual union select '5-459.0' from dual) t union all select 'EmergencyOrthopedics', c from (select '5-790.2f' c from dual union select '5-793.1e' from dual union select '5-790.2m' from dual union select '5-791.6m' from dual union select '5-790.13' from dual union select '5-780.6' from dual union select '5-791.6g' from dual union select '5-836.30' from dual union select '5-032.30' from dual) t union all select 'RenalReplacement', c from (select '8-854' c from dual union select '8-854' from dual union select '8-857' from dual union select '8-853' from dual union select '8-855' from dual union select '8-856' from dual) t union all select 'SupplementalOxygenSevere', c from (select '8-716.00' c from dual union select '8-711.0' from dual union select '8-712.0' from dual union select '8-701' from dual) t union all select 'SupplementalOxygenOther', c from (select '8-72' c from dual) t union all select 'ECMO', c from (select '8-852.0' c from dual union select '8-852.30' from dual union select '8-852.31' from dual) t union all select 'CPR', c from (select '8-771' c from dual) t union all select 'CTChest', c from (select '3-202' c from dual union select '3-221' from dual) t ) t; -- TOSP (Singapore) insert into fource_proc_map select p, 'TOSP', 'TOSP:' || c -- Change "TOSP:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'EmergencyGeneralSurgery', c from (select 'SF849A' c from dual union select 'SF801G' from dual union select 'SF704G' from dual union select 'SF849A' from dual union select 'SF808A' from dual union select 'SF800A' from dual union select 'SF801A' from dual union select 'SF814A' from dual union select 'SF707I' from dual) t union all select 'EmergencyOrthopedics', c from (select 'SB811F' c from dual union select 'SB703F' from dual union select 'SB705T' from dual union select 'SB810F' from dual union select 
-- (TOSP list continues; ICD10AM and CBHPM mappings begin further on)
'SB700A' from dual union select 'SB812S' from dual) t union all select 'EmergencyOBGYN', c from (select 'SI805F' c from dual) t union all select 'SupplementalOxygenSevere', c from (select 'SC719T' c from dual union select 'SC720T' from dual) t union all select 'ECMO', c from (select 'SD721H' c from dual union select 'SD721H' from dual union select 'SD721H' from dual union select 'SD721H' from dual) t union all select 'ArterialCatheter', c from (select 'SD718A' c from dual) t union all select 'Bronchoscopy', c from (select 'SC703B' c from dual union select 'SC704B' from dual) t ) t; -- ICD10AM (Singapore, Australia) insert into fource_proc_map select p, 'ICD10AM', 'ICD10AM:' || c -- Change "ICD10AM:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'RenalReplacement', c from (select '13100-00' c from dual union select '13100-00' from dual) t union all select 'SupplementalOxygenSevere', c from (select '92039-00' c from dual union select '13882-00' from dual union select '13882-01' from dual union select '92038-00' from dual) t union all select 'SupplementalOxygenOther', c from (select '92044-00' c from dual) t union all select 'CPR', c from (select '92052-00' c from dual) t ) t; -- CBHPM (Brazil-TUSS) insert into fource_proc_map select p, 'CBHPM', 'CBHPM:' || c -- Change "CBHPM:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'EmergencyGeneralSurgery', c from (select '31003079' c from dual union select '31005497' from dual union select '31005470' from dual union select '31003079' from dual union select '31009166' from dual) t union all select 'EmergencyOrthopedics', c from (select '30725119' c from dual union select '30725160' from dual union select '30727138' from dual union select '40803104' from dual union select '30715016' from dual union select '30715199' from dual) t union all select 'EmergencyOBGYN', c from (select '31309186' c from dual) t union all 
-- (CBHPM list continues; ICD-9-Proc mappings begin further on)
select 'RenalReplacement', c from (select '30909023' c from dual union select '30909031' from dual union select '31008011' from dual) t union all select 'SupplementalOxygenSevere', c from (select '20203012' c from dual union select '20203012' from dual union select '40202445' from dual) t union all select 'Bronchoscopy', c from (select '40201058' c from dual) t ) t; -- ICD9Proc insert into fource_proc_map select p, 'ICD9', 'ICD9PROC:' || c -- Change "ICD9:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'EmergencyGeneralSurgery', c from (select '47.01' c from dual union select '51.23' from dual union select '47.0' from dual union select '54.51' from dual union select '53.4' from dual) t union all select 'EmergencyOrthopedics', c from (select '79.11' c from dual union select '79.6' from dual union select '79.35' from dual union select '81.03' from dual union select '81.05' from dual union select '81.07' from dual union select '79.36' from dual) t union all select 'EmergencyOBGYN', c from (select '66.62' c from dual) t union all select 'RenalReplacement', c from (select '39.95' c from dual union select '39.95' from dual) t union all select 'SupplementalOxygenSevere', c from (select '93.90' c from dual union select '96.70' from dual union select '96.71' from dual union select '96.72' from dual union select '96.04' from dual) t union all select 'SupplementalOxygenOther', c from (select '93.96' c from dual) t union all select 'ECMO', c from (select '39.65' c from dual union select '39.65' from dual union select '39.65' from dual union select '39.65' from dual) t union all select 'CPR', c from (select '99.60' c from dual) t union all select 'ArterialCatheter', c from (select '38.91' c from dual) t union all select 'CTChest', c from (select '87.41' c from dual union select '87.41' from dual union select '87.41' from dual) t union all select 'Bronchoscopy', c from (select '33.22' c from dual union select '33.23' from 
-- (ICD-9-Proc list ends; ICD-10-PCS and SNOMED mappings begin further on)
dual) t ) t; commit; -- ICD10-PCS insert into fource_proc_map select p, 'ICD10', 'ICD10PCS:' || c -- Change "ICD10:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'EmergencyGeneralSurgery', c from (select '0DBJ4ZZ' c from dual union select '0DTJ4ZZ' from dual union select '0FB44ZZ' from dual union select '0FB44ZX' from dual union select '0DBJ0ZZ' from dual union select '0DTJ0ZZ' from dual union select '0DJU4ZZ' from dual union select '0DN84ZZ' from dual union select '0DNE4ZZ' from dual) t union all select 'EmergencyOrthopedics', c from (select '0QQ60ZZ' c from dual union select '0QQ70ZZ' from dual union select '0QH806Z' from dual union select '0QH906Z' from dual) t union all select 'SupplementalOxygenSevere', c from (select '5A19054' c from dual union select '5A0935Z' from dual union select '5A0945Z' from dual union select '5A0955Z' from dual union select '5A09357' from dual union select '0BH17EZ' from dual) t union all select 'ECMO', c from (select '5A1522H' c from dual union select '5A1522G' from dual) t union all select 'CTChest', c from (select 'BW24' c from dual union select 'BW24Y0Z' from dual union select 'BW24YZZ' from dual) t ) t; commit; -- SNOMED insert into fource_proc_map select p, 'SNOMED', 'SNOMED:' || c -- Change "SNOMED:" to your local RxNorm code prefix (scheme) from ( select '' p, '' c from dual where 1=0 union all select 'EmergencyGeneralSurgery', c from (select '174041007' c from dual union select '45595009' from dual union select '20630000' from dual union select '80146002' from dual union select '450435004' from dual union select '18433007' from dual union select '5789003' from dual union select '44946007' from dual union select '359572002' from dual) t union all select 'EmergencyOrthopedics', c from (select '179097007' c from dual union select '179018001' from dual union select '73156009' from dual union select '2480009' from dual union select '36939002' from dual union select 
-- (SNOMED list continues, then this section ends with its commit)
'55705006' from dual union select '439756000' from dual union select '302129007' from dual union select '231045009' from dual union select '3968003' from dual union select '260648008' from dual union select '178619000' from dual) t union all select 'EmergencyVascularSurgery', c from (select '392247006' c from dual) t union all select 'EmergencyOBGYN', c from (select '63596003' c from dual union select '61893009' from dual) t union all select 'RenalReplacement', c from (select '302497006' c from dual union select '302497006' from dual) t union all select 'SupplementalOxygenSevere', c from (select '428311008' c from dual union select '410210009' from dual union select '409025002' from dual union select '47545007' from dual union select '16883004' from dual) t union all select 'SupplementalOxygenOther', c from (select '57485005' c from dual) t union all select 'ECMO', c from (select '786453001' c from dual union select '786451004' from dual) t union all select 'CPR', c from (select '150819003' c from dual) t union all select 'ArterialCatheter', c from (select '392248001' c from dual) t union all select 'CTChest', c from (select '395081000119108' c from dual union select '75385009' from dual union select '169069000' from dual) t union all select 'Bronchoscopy', c from (select '10847001' c from dual union select '68187007' from dual) t ) t; commit; -- Use the concept_dimension to get an expanded list of medication codes (optional) -- This will find paths corresponding to concepts already in the fource_med_map table, -- and then find all the concepts corresponding to child paths. -- WARNING: This query might take several minutes to run.
-- ===========================================================================
-- fource_misc: Multisystem Inflammatory Syndrome in Children (optional).
-- Sites populate (patient_num, first MIS-C diagnosis date); the seed insert
-- below uses "where 1=0" so the table is created empty, with commented-out
-- example rows showing the expected format.
-- NOTE(review): "drop table fource_misc" is unguarded and will raise
-- ORA-00942 on a first run where the table does not yet exist -- confirm this
-- script is only re-run against a schema where the table already exists.
-- NOTE(review): '01-JAN-1900' relies on implicit VARCHAR-to-DATE conversion,
-- which depends on the session NLS_DATE_FORMAT/NLS_DATE_LANGUAGE -- verify
-- the expected session settings.
-- ===========================================================================
-- ***** THIS IS UNTESTED ***** /* select concept_path, concept_cd into #med_paths from @crcSchema.concept_dimension where concept_path like '\ACT\Medications\%' and concept_cd in (select concept_cd from @crcSchema.observation_fact --with (nolock)) ; alter table #med_paths add primary key (concept_path) ; insert into fource_med_map select distinct m.med_class, 'Expand', d.concept_cd from fource_med_map m inner join @crcSchema.concept_dimension c on m.local_med_code = c.concept_cd inner join #med_paths d on d.concept_path like c.concept_path || '%' where not exists ( select * from fource_med_map t where t.med_class = m.med_class and t.local_med_code = d.concept_cd ) */ -------------------------------------------------------------------------------- -- Multisystem Inflammatory Syndrome in Children (MIS-C) (optional) -- * Write a custom query to populate this table with the patient_num's of -- * children who develop MIS-C and their first MIS-C diagnosis date. -------------------------------------------------------------------------------- drop table fource_misc; create table fource_misc ( patient_num int not null, misc_date date not null ); alter table fource_misc add primary key (patient_num); insert into fource_misc select -1, '01-JAN-1900' from dual where 1=0; --Replace with a list of patients and MIS-C diagnosis dates --union all select 1, '3/1/2020' from dual --union all select 2, '4/1/2020' from dual; commit; -------------------------------------------------------------------------------- -- Cohorts -- * In general, use the default values that select patients who were admitted -- * with a positive COVID test result, broken out in three-month blocks. -- * Modify this table only if you are working on a specific project that -- * has defined custom patient cohorts to analyze. 
--------------------------------------------------------------------------------
-- fource_cohort_config: defines the patient cohorts to analyze. By default,
-- one cohort per calendar quarter of positive-test admissions ("PosAdmYYYYQn").
-- source_data_updated_date is left NULL here and back-filled from
-- fource_config (or sysdate) by the update below.
create table fource_cohort_config (
	cohort varchar(50) not null,
	include_in_phase1 int, -- 1 = include the cohort in the phase 1 output, otherwise 0
	include_in_phase2 int, -- 1 = include the cohort in the phase 2 output and saved files, otherwise 0
	source_data_updated_date date, -- the date your source data were last updated; set to NULL to use the value in the fource_config table
	earliest_adm_date date, -- the earliest possible admission date allowed in this cohort (NULL if no minimum date)
	latest_adm_date date -- the latest possible admission date allowed this cohort (NULL if no maximum date)
);
alter table fource_cohort_config add primary key (cohort);
-- Use ANSI date literals (date 'YYYY-MM-DD') instead of '01-JAN-2020' strings:
-- implicit VARCHAR-to-DATE conversion depends on the session
-- NLS_DATE_FORMAT/NLS_DATE_LANGUAGE and fails on non-default sessions.
-- The explicit column list keeps the insert robust to column reordering.
insert into fource_cohort_config
	(cohort, include_in_phase1, include_in_phase2, source_data_updated_date, earliest_adm_date, latest_adm_date)
select 'PosAdm2020Q1', 1, 1, NULL, date '2020-01-01', date '2020-03-31' from dual
union all select 'PosAdm2020Q2', 1, 1, NULL, date '2020-04-01', date '2020-06-30' from dual
union all select 'PosAdm2020Q3', 1, 1, NULL, date '2020-07-01', date '2020-09-30' from dual
union all select 'PosAdm2020Q4', 1, 1, NULL, date '2020-10-01', date '2020-12-31' from dual
union all select 'PosAdm2021Q1', 1, 1, NULL, date '2021-01-01', date '2021-03-31' from dual
union all select 'PosAdm2021Q2', 1, 1, NULL, date '2021-04-01', date '2021-06-30' from dual
union all select 'PosAdm2021Q3', 1, 1, NULL, date '2021-07-01', date '2021-09-30' from dual
union all select 'PosAdm2021Q4', 1, 1, NULL, date '2021-10-01', date '2021-12-31' from dual;
commit;
-- Assume the data were updated on the date this script is run if source_data_updated_date is null
update fource_cohort_config
	set source_data_updated_date = nvl((select source_data_updated_date from fource_config), sysdate)
	where source_data_updated_date is null;
commit;
--##############################################################################
--##############################################################################
--##############################################################################
-- ===========================================================================
-- fource_covid_tests: one row per distinct (patient, normalized result code,
-- test day). observation_fact concept codes are mapped through
-- fource_code_map to 'covidpos', 'covidneg', and 'covidU071'; start_date is
-- truncated to the day.
-- NOTE(review): the "--with (nolock)" fragment is a leftover MSSQL table hint;
-- as rendered here it sits on the same physical line as the following
-- "inner join" -- confirm the original line break so the join clause is not
-- swallowed by the comment.
-- ===========================================================================
--############################################################################## --### --### Get COVID test results, admission, ICU dates, and death dates. --### Many sites will not have to modify this code. --### Only make changes if you require special logic for these variables. --### --############################################################################## --############################################################################## --############################################################################## --############################################################################## -------------------------------------------------------------------------------- -- Create a list of all COVID-19 test results. -------------------------------------------------------------------------------- --drop table fource_covid_tests; create table fource_covid_tests ( patient_num int not null, test_result varchar(10) not null, test_date date not null ); alter table fource_covid_tests add primary key (patient_num, test_result, test_date); insert into fource_covid_tests select distinct f.patient_num, m.code, trunc(start_date) from @crcSchema.observation_fact f --with (nolock) inner join fource_code_map m on f.concept_cd = m.local_code and m.code in ('covidpos','covidneg','covidU071'); commit; --select * from fource_covid_tests; -------------------------------------------------------------------------------- -- Create a list of patient admission dates. 
-- ===========================================================================
-- fource_admissions / fource_icu: inpatient and ICU intervals for patients
-- who have a COVID test, gathered three ways and UNION ALL'd together:
--   1. visit_dimension rows matched by inout_cd          (code_map: *_inout_cd)
--   2. visit_dimension rows matched by location_cd       (code_map: *_location_cd)
--   3. observation_fact rows matched by concept_cd,      (code_map: *_concept_cd)
--      joined to visit_dimension to borrow a missing end_date
-- (fource_icu adds a 4th branch joining observation_fact.location_cd to the
-- site-supplied fource_icu_location table.)
-- Missing discharge/end dates become '01-JAN-2199' as a far-future sentinel,
-- and rows whose end precedes their start are deleted afterwards. The ICU
-- insert is gated on fource_config.icu_data_available = 1, and fource_death
-- (created at the end of this span) is populated in the next statement.
-- NOTE(review): the '01-JAN-2199' / ORDER BY inside the ICU insert subquery
-- and the implicit string-to-date conversions depend on session NLS settings
-- -- verify session setup before running.
-- NOTE(review): the commented-out diagnostic "--select 'Number of admissions
-- with discharge before admission: '..." appears split across physical lines
-- here; confirm it remains a single commented line in the source file.
-- ===========================================================================
-------------------------------------------------------------------------------- --drop table fource_admissions; create table fource_admissions ( patient_num int not null, admission_date date not null, discharge_date date not null ); alter table fource_admissions add primary key (patient_num, admission_date, discharge_date); insert into fource_admissions select distinct patient_num, cast(start_date as date), nvl(cast(end_date as date),'01-JAN-2199') -- a very future date for missing discharge dates from ( -- Select by inout_cd select patient_num, trunc(start_date) start_date, trunc(end_date) end_date from @crcSchema.visit_dimension where trunc(start_date) >= (select trunc(eval_start_date) from fource_config where rownum = 1) and patient_num in (select patient_num from fource_covid_tests) and inout_cd in (select local_code from fource_code_map where code = 'inpatient_inout_cd') union all -- Select by location_cd select patient_num, trunc(start_date), trunc(end_date) from @crcSchema.visit_dimension v where trunc(start_date) >= (select trunc(eval_start_date) from fource_config where rownum = 1) and patient_num in (select patient_num from fource_covid_tests) and location_cd in (select local_code from fource_code_map where code = 'inpatient_location_cd') union all -- Select by concept_cd select f.patient_num, trunc(f.start_date), nvl(trunc(f.end_date),trunc(v.end_date)) from @crcSchema.observation_fact f inner join @crcSchema.visit_dimension v on v.encounter_num=f.encounter_num and v.patient_num=f.patient_num where trunc(f.start_date) >= (select trunc(eval_start_date) from fource_config where rownum = 1) and f.patient_num in (select patient_num from fource_covid_tests) and f.concept_cd in (select local_code from fource_code_map where code = 'inpatient_concept_cd') ) t; commit; --select * from FOURCE_ADMISSIONS; --select * from fource_admissions; -- remove vists that end before they start --select 'Number of admissions with discharge before admission: ' || count(*) from 
fource_admissions where discharge_date < admission_date; delete from fource_admissions where discharge_date < admission_date; -------------------------------------------------------------------------------- -- Create a list of dates where patients were in the ICU. -------------------------------------------------------------------------------- create table fource_icu ( patient_num int not null, start_date date not null, end_date date not null ); alter table fource_icu add primary key (patient_num, start_date, end_date); --truncate table fource_icu; insert into fource_icu select distinct patient_num, cast(start_date as date), nvl(cast(end_date as date), '01-JAN-2199') -- a very future date for missing end dates from ( -- Select by patient_dimension inout_cd select patient_num, trunc(start_date) start_date, trunc(end_date) end_date from @crcSchema.visit_dimension where trunc(start_date) >= (select trunc(eval_start_date) from fource_config where rownum = 1) and patient_num in (select patient_num from fource_covid_tests) and inout_cd in (select local_code from fource_code_map where code = 'icu_inout_cd') union all -- Select by location_cd select patient_num, trunc(start_date) start_date, trunc(end_date) end_date --***** SOMEONE PLEASE AUDIT THIS SECTION****** from @crcSchema.visit_dimension v where trunc(start_date) >= (select trunc(eval_start_date) from fource_config where rownum = 1) and patient_num in (select patient_num from fource_covid_tests) and location_cd in (select local_code from fource_code_map where code = 'icu_location_cd') union all -- Select by concept_cd select f.patient_num, trunc(f.start_date) start_date, nvl(trunc(f.end_date),trunc(v.end_date)) end_date from @crcSchema.observation_fact f inner join @crcSchema.visit_dimension v on v.encounter_num=f.encounter_num and v.patient_num=f.patient_num where trunc(f.start_date) >= (select trunc(eval_start_date) from fource_config where rownum = 1) and f.patient_num in (select patient_num from 
-- (the ICU insert's fourth branch below matches observation_fact.location_cd
--  against the site-supplied fource_icu_location table)
fource_covid_tests) and f.concept_cd in (select local_code from fource_code_map where code = 'icu_concept_cd') union all -- Select by location_cd in observation_fact table -- If you have an external table that has the local location_cd for ICU units use this block -- Need to be able to clean up epic encounters vs visits - is that in the analysis or later in the script select distinct f.patient_num, trunc(f.start_date) start_date, case when trunc(f.end_date)> trunc(v.end_date) then trunc(f.end_date) else nvl(trunc(v.end_date), '01-JAN-2199') end end_date from @crcSchema.observation_fact f inner join @crcSchema.visit_dimension v on v.encounter_num=f.encounter_num and v.patient_num=f.patient_num inner join fource_icu_location l on l.location_cd = f.location_cd where trunc(f.start_date) >= (select trunc(eval_start_date) from fource_config where rownum = 1) and f.patient_num in (select patient_num from fource_covid_tests) order by patient_num, start_date --and f.concept_cd in (select local_code from fource_code_map where code = 'icu_fact_location_cd') --**** TODO: CHECK SHOULD THIS BE CONDITIONAL MICHELE FIX THIS ) t where (select icu_data_available from fource_config where rownum = 1) = 1; commit; delete from fource_icu where trunc(end_date) < trunc(start_date); -------------------------------------------------------------------------------- -- Create a list of dates when patients died. -------------------------------------------------------------------------------- --drop table fource_death; create table fource_death ( patient_num int not null, death_date date not null ); alter table fource_death add primary key (patient_num); -- The death_date is estimated later in the SQL if it is null here. 
-- fource_death: best-known death date per tested patient; populated only when
-- fource_config.death_data_available = 1. The placeholder '01-JAN-1900' marks
-- patients flagged deceased (vital_status_cd) without a recorded death date.
insert into fource_death
	select patient_num, death_date from (
		select patient_num, nvl(death_date,'01-JAN-1900') death_date
		from @crcSchema.patient_dimension
		where (death_date is not null or vital_status_cd in ('Y', 'DEM|VITAL STATUS:D'))
			and patient_num in (select patient_num from fource_covid_tests)
	)t where (select death_data_available from fource_config where rownum = 1) = 1 ;
commit;
--TODO: Check this logic again
--select patient_num, nvl(death_date,'01-JAN-1900')
--	from patient_dimension
--	where (death_date is not null or vital_status_cd in ('Y', 'DEM|VITAL STATUS:D'))
--	and patient_num in (select patient_num from fource_covid_tests);
--select patient_num, nvl(death_date,'01-JAN-1900')
--	from patient_dimension
--	where 1 in (select 1 from dual);
--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--###
--### Setup the cohorts and retrieve the clinical data for the patients
--### (Most sites will not have to modify any SQL beyond this point)
--###
--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--******************************************************************************
--******************************************************************************
--*** Setup the cohorts
--******************************************************************************
--******************************************************************************
--------------------------------------------------------------------------------
-- Get the earliest positive and earliest negative COVID-19 test results.
--------------------------------------------------------------------------------
--drop table fource_first_covid_tests;
-- fource_first_covid_tests: per patient, first positive test date, first
-- negative test date, and first U07.1 diagnosis date (each null if absent).
create table fource_first_covid_tests (
	patient_num int not null,
	first_pos_date date,
	first_neg_date date,
	first_U071_date date
);
alter table fource_first_covid_tests add primary key (patient_num);
insert into fource_first_covid_tests
	select patient_num,
			min(case when test_result='covidpos' then test_date else null end),
			min(case when test_result='covidneg' then test_date else null end),
			min(case when test_result='covidU071' then test_date else null end)
		from fource_covid_tests
		group by patient_num;
commit;
--------------------------------------------------------------------------------
-- Get the list of patients who will be in the cohorts.
-- By default, these will be patients who had an admission between 7 days before
-- and 14 days after their first covid positive test date.
--------------------------------------------------------------------------------
--drop table fource_cohort_patients;
create table fource_cohort_patients (
	cohort varchar(50) not null,
	patient_num int not null,
	admission_date date not null,
	source_data_updated_date date not null,
	severe int not null,
	severe_date date,
	death_date date
);
alter table fource_cohort_patients add primary key (patient_num, cohort);
--select * from fource_cohort_config;
-- PosAdm cohorts: earliest admission within [-7,+14] days of the first
-- positive test, clamped to the cohort's configured admission-date window.
insert into fource_cohort_patients (cohort, patient_num, admission_date, source_data_updated_date, severe)
	select c.cohort, t.patient_num, t.admission_date, c.source_data_updated_date, 0
	from fource_cohort_config c,
		(
			select t.patient_num, min(a.admission_date) admission_date
			from fource_first_covid_tests t
				inner join fource_admissions a
					on t.patient_num=a.patient_num
						--and datediff(dd,t.first_pos_date,a.admission_date) between @blackout_days_before and @blackout_days_after
						and trunc(a.admission_date) - trunc(t.first_pos_date) between -7 and 14
			where
t.first_pos_date is not null
			group by t.patient_num
		) t
	where c.cohort like 'PosAdm%'
		and trunc(t.admission_date) >= trunc(nvl(c.earliest_adm_date,t.admission_date))
		and trunc(t.admission_date) <= trunc(nvl(c.latest_adm_date,t.admission_date))
		and trunc(t.admission_date) <= trunc(nvl(c.source_data_updated_date,t.admission_date));
commit;
--select * from fource_cohort_patients;
--------------------------------------------------------------------------------
-- Add optional cohorts that contain all patients tested for COVID-19
--------------------------------------------------------------------------------
-- Create cohorts for patients who were admitted
-- (U071Adm / NegAdm variants of every PosAdm cohort; only inserted when either
-- include_extra_cohorts_phase1 or include_extra_cohorts_phase2 is enabled).
insert into fource_cohort_config
	select * from (
		-- Patients with a U07.1 code, no recorded positive test result, and were admitted
		select replace(c.cohort,'PosAdm','U071Adm'),
				g.include_extra_cohorts_phase1, g.include_extra_cohorts_phase2,
				c.source_data_updated_date, c.earliest_adm_date, c.latest_adm_date
			from fource_cohort_config c cross apply fource_config g
			where c.cohort like 'PosAdm%'
		-- Patients who have no U07.1 code, no recorded positive test result, a negative test result, and were admitted
		union all
		select replace(c.cohort,'PosAdm','NegAdm'),
				g.include_extra_cohorts_phase1, g.include_extra_cohorts_phase2,
				c.source_data_updated_date, c.earliest_adm_date, c.latest_adm_date
			from fource_cohort_config c cross apply fource_config g
			where c.cohort like 'PosAdm%')
	where (select include_extra_cohorts_phase1 from fource_config where rownum = 1) = 1
		or (select include_extra_cohorts_phase2 from fource_config where rownum = 1) = 1;
commit;
--select * from fource_cohort_config;
-- Add the patients for those cohorts
insert into fource_cohort_patients (cohort, patient_num, admission_date, source_data_updated_date, severe)
	select * from (
		select c.cohort, t.patient_num, t.admission_date, c.source_data_updated_date, 0
		from fource_cohort_config c,
			(
				-- Earliest admission within [-7,+14] days of the first U07.1 diagnosis
				select t.patient_num, 'U071Adm' cohort, min(a.admission_date) admission_date
				from fource_first_covid_tests t
					inner join fource_admissions a
						on t.patient_num=a.patient_num
							--and datediff(dd,t.first_U071_date,a.admission_date) between @blackout_days_before and @blackout_days_after
							and trunc(a.admission_date) - trunc(t.first_U071_date) between -7 and 14
				where t.first_U071_date is not null and t.first_pos_date is null
				group by t.patient_num
				union all
				-- Earliest admission within [-7,+14] days of the first negative test
				select t.patient_num, 'NegAdm' cohort, min(a.admission_date) admission_date
				from fource_first_covid_tests t
					inner join fource_admissions a
						on t.patient_num=a.patient_num
							--and datediff(dd,t.first_neg_date,a.admission_date) between @blackout_days_before and @blackout_days_after
							and trunc(a.admission_date) - trunc(t.first_neg_date) between -7 and 14
				where t.first_neg_date is not null and t.first_U071_date is null and t.first_pos_date is null
				group by t.patient_num
			) t
		where c.cohort like t.cohort || '%'
			and trunc(t.admission_date) >= trunc(nvl(c.earliest_adm_date,t.admission_date))
			and trunc(t.admission_date) <= trunc(nvl(c.latest_adm_date,t.admission_date))
			and trunc(t.admission_date) <= trunc(nvl(c.source_data_updated_date,t.admission_date))
	) t
	where (select include_extra_cohorts_phase1 from fource_config where rownum = 1) = 1
		or (select include_extra_cohorts_phase2 from fource_config where rownum = 1) = 1;
commit;
-- Create cohorts for patients who were not admitted
insert into fource_cohort_config
	select replace(c.cohort,'Adm','NotAdm'),
			g.include_extra_cohorts_phase1, g.include_extra_cohorts_phase2,
			c.source_data_updated_date, c.earliest_adm_date, c.latest_adm_date
	from fource_cohort_config c cross apply fource_config g
	where c.cohort like 'PosAdm%' or c.cohort like 'NegAdm%' or c.cohort like 'U071Adm%';
commit;
-- Add the patients for those cohorts using the test or diagnosis date as the "admission" (index) date.
-- A patient lands in at most one NotAdm cohort (Pos takes precedence over U071,
-- U071 over Neg) and patients already in an admitted cohort are excluded by the
-- NOT IN checks (safe here: fource_cohort_patients.patient_num is NOT NULL).
insert into fource_cohort_patients (cohort, patient_num, admission_date, source_data_updated_date, severe)
	select c.cohort, t.patient_num, t.first_pos_date, c.source_data_updated_date, 0
	from fource_cohort_config c
		cross join fource_first_covid_tests t
	where c.cohort like 'PosNotAdm%'
		and t.first_pos_date is not null
		and trunc(t.first_pos_date) >= trunc(nvl(c.earliest_adm_date,t.first_pos_date))
		and trunc(t.first_pos_date) <= trunc(nvl(c.latest_adm_date,t.first_pos_date))
		and trunc(t.first_pos_date) <= trunc(nvl(c.source_data_updated_date,t.first_pos_date))
		and t.patient_num not in (select patient_num from fource_cohort_patients)
	union all
	select c.cohort, t.patient_num, t.first_U071_date, c.source_data_updated_date, 0
	from fource_cohort_config c
		cross join fource_first_covid_tests t
	where c.cohort like 'U071NotAdm%'
		and t.first_pos_date is null
		and t.first_U071_date is not null
		and trunc(t.first_U071_date) >= trunc(nvl(c.earliest_adm_date,t.first_U071_date))
		and trunc(t.first_U071_date) <= trunc(nvl(c.latest_adm_date,t.first_U071_date))
		and trunc(t.first_U071_date) <= trunc(nvl(c.source_data_updated_date,t.first_U071_date))
		and t.patient_num not in (select patient_num from fource_cohort_patients)
	union all
	select c.cohort, t.patient_num, t.first_neg_date, c.source_data_updated_date, 0
	from fource_cohort_config c
		cross join fource_first_covid_tests t
	where c.cohort like 'NegNotAdm%'
		and t.first_pos_date is null
		and t.first_U071_date is null
		and t.first_neg_date is not null
		and trunc(t.first_neg_date) >= trunc(nvl(c.earliest_adm_date,t.first_neg_date))
		and trunc(t.first_neg_date) <= trunc(nvl(c.latest_adm_date,t.first_neg_date))
		and trunc(t.first_neg_date) <= trunc(nvl(c.source_data_updated_date,t.first_neg_date))
		and t.patient_num not in (select patient_num from fource_cohort_patients);
--------------------------------------------------------------------------------
-- Add additional custom cohorts here
--------------------------------------------------------------------------------
-- My custom cohorts
commit;
--******************************************************************************
--******************************************************************************
--*** Create a table of patient observations
--******************************************************************************
--******************************************************************************
-- Get a distinct list of patients
create table fource_patients (
	patient_num int not null,
	first_admission_date date not null
);
alter table fource_patients add primary key (patient_num);
insert into fource_patients
	select patient_num, min(admission_date)
		from fource_cohort_patients
		group by patient_num;
commit;
-- Create the table to store the observations.
-- value/logvalue are -999 for non-numeric facts; for labs, value is the daily
-- average and logvalue is ln(value + 0.5).
create table fource_observations (
	cohort varchar(50) not null,
	patient_num int not null,
	severe int not null,
	concept_type varchar(50) not null,
	concept_code varchar(50) not null,
	calendar_date date not null,
	days_since_admission int not null,
	value numeric(18,5) not null,
	logvalue numeric(18,10) not null
);
alter table fource_observations add primary key (cohort, patient_num, concept_type, concept_code, days_since_admission);
--------------------------------------------------------------------------------
-- Add covid tests
--------------------------------------------------------------------------------
insert into fource_observations (cohort, patient_num, severe, concept_type, concept_code, calendar_date, days_since_admission, value, logvalue)
	select distinct
		p.cohort, p.patient_num, p.severe, 'COVID-TEST', t.test_result,
		t.test_date,
		trunc(t.test_date) - trunc(p.admission_date),
		-999,
		-999
	from fource_cohort_patients p
		inner join fource_covid_tests t
			on p.patient_num=t.patient_num;
commit;
--------------------------------------------------------------------------------
-- Add children who develop MIS-C
--------------------------------------------------------------------------------
insert into fource_observations (cohort, patient_num, severe, concept_type, concept_code, calendar_date, days_since_admission, value, logvalue)
	select distinct
		p.cohort, p.patient_num, p.severe, 'COVID-MISC', 'misc',
		cast(f.misc_date as date),
		trunc(cast(f.misc_date as date)) - trunc(p.admission_date),
		-999,
		-999
	from fource_cohort_patients p
		inner join fource_misc f --with (nolock)
			on p.patient_num=f.patient_num;
-- Speed up the repeated patient_num joins below.
create index fource_cohort_patients_ndx on fource_cohort_patients(patient_num);
--------------------------------------------------------------------------------
-- Add diagnoses (ICD9) going back 365 days from admission
--------------------------------------------------------------------------------
insert into fource_observations (cohort, patient_num, severe, concept_type, concept_code, calendar_date, days_since_admission, value, logvalue)
	select distinct
		p.cohort, p.patient_num, p.severe, 'DIAG-ICD9',
		substr(f.concept_cd, instr(f.concept_cd,':')+1),
		trunc(f.start_date),
		trunc(f.start_date) - trunc(p.admission_date),
		-999,
		-999
	from @crcSchema.observation_fact f --with (nolock)
		inner join fource_cohort_patients p
			on f.patient_num=p.patient_num
				--and cast(trunc(f.start_date) as date) between dateadd(dd,@lookback_days,p.admission_date) and p.source_data_updated_date
				and trunc(f.start_date) between trunc(p.admission_date)-365 and trunc(p.source_data_updated_date)
	where f.concept_cd like (select code_prefix_icd9cm || '%' from fource_config where rownum = 1);-- and code_prefix_icd9cm <>'';
commit;
--------------------------------------------------------------------------------
-- Add diagnoses (ICD10) going back 365 days
--------------------------------------------------------------------------------
insert into fource_observations (cohort, patient_num, severe, concept_type, concept_code, calendar_date, days_since_admission, value, logvalue)
	select distinct
		p.cohort, p.patient_num, nvl(p.severe,0), 'DIAG-ICD10',
		substr(f.concept_cd, instr(f.concept_cd,':')+1),
		trunc(f.start_date),
		trunc(f.start_date) - trunc(p.admission_date),
		-999,
		-999
	from @crcSchema.observation_fact f --with (nolock)
		inner join fource_cohort_patients p
			on f.patient_num=p.patient_num
				--and cast(trunc(f.start_date) as date) between dateadd(dd,@lookback_days,p.admission_date) and p.source_data_updated_date
				and trunc(f.start_date) between trunc(p.admission_date)-365 and trunc(p.source_data_updated_date)
	where f.concept_cd like (select code_prefix_icd10cm || '%' from fource_config where rownum = 1);-- and code_prefix_icd10cm <>'';
commit;
--select count(distinct patient_num) from fource_observations;
--------------------------------------------------------------------------------
-- Add medications (Med Class) going back 365 days
--------------------------------------------------------------------------------
insert into fource_observations (cohort, patient_num, severe, concept_type, concept_code, calendar_date, days_since_admission, value, logvalue)
	select distinct
		p.cohort, p.patient_num, p.severe, 'MED-CLASS', m.med_class,
		trunc(f.start_date),
		trunc(f.start_date) - trunc(p.admission_date),
		-999,
		-999
	from fource_med_map m
		inner join @crcSchema.observation_fact f --with (nolock)
			on f.concept_cd = m.local_med_code
		inner join fource_cohort_patients p
			on f.patient_num=p.patient_num
				and trunc(f.start_date) between trunc(p.admission_date)-365 and trunc(p.source_data_updated_date);
commit;
--and cast(trunc(f.start_date) as date) between dateadd(dd,@lookback_days,p.admission_date) and p.source_data_updated_date
--------------------------------------------------------------------------------
-- Add labs (LOINC) going back 60 days (two months)
--------------------------------------------------------------------------------
-- One row per patient per lab per day (daily average); units are normalized
-- through fource_lab_map.scale_factor.
insert into fource_observations (cohort, patient_num, severe, concept_type, concept_code, calendar_date, days_since_admission, value, logvalue)
	select p.cohort, p.patient_num, p.severe, 'LAB-LOINC', l.fource_loinc,
		trunc(f.start_date),
		trunc(f.start_date) - trunc(p.admission_date),
		avg(f.nval_num*l.scale_factor),
		ln(avg(f.nval_num*l.scale_factor) + 0.5) -- natural log (ln), not log base 10; using log(avg()) rather than avg(log()) on purpose
	from fource_lab_map l
		inner join @crcSchema.observation_fact f --with (nolock)
			on f.concept_cd=l.local_lab_code and nvl(nullif(f.units_cd,''),'DEFAULT')=l.local_lab_units
		inner join fource_cohort_patients p
			on f.patient_num=p.patient_num
	where l.local_lab_code is not null
		and f.nval_num is not null
		and f.nval_num >= 0
		and trunc(f.start_date) between trunc(p.admission_date)-60 and trunc(p.source_data_updated_date) --@lab lookback days
	group by p.cohort, p.patient_num, p.severe, p.admission_date, trunc(f.start_date), l.fource_loinc;
commit;
--------------------------------------------------------------------------------
-- Add procedures (Proc Groups) going back 365 days
--------------------------------------------------------------------------------
--select * from fource_proc_map;
insert into fource_observations (cohort, patient_num, severe, concept_type, concept_code, calendar_date, days_since_admission, value, logvalue)
	select distinct
		p.cohort, p.patient_num, p.severe, 'PROC-GROUP', x.proc_group,
		trunc(f.start_date),
		trunc(f.start_date) - trunc(p.admission_date),
		-999,
		-999
	from fource_proc_map x
		inner join @crcSchema.observation_fact f --with (nolock)
			on f.concept_cd = x.local_proc_code
		inner join fource_cohort_patients p
			on f.patient_num=p.patient_num
	where x.local_proc_code is not null
		and trunc(f.start_date) between trunc(p.admission_date)-365 and trunc(p.source_data_updated_date);
commit;
--------------------------------------------------------------------------------
-- Flag observations that contribute to the disease severity definition
--------------------------------------------------------------------------------
--test select * from fource_observations where concept_code = 'ARDS';
insert into fource_observations (cohort, patient_num, severe, concept_type, concept_code, calendar_date, days_since_admission, value, logvalue)
	-- Any PaCO2 or PaO2 lab test
	select cohort, patient_num, severe, 'SEVERE-LAB' concept_type, 'BloodGas' concept_code, calendar_date, days_since_admission, avg(value), avg(logvalue)
		from fource_observations
		where concept_type='LAB-LOINC' and concept_code in ('2019-8','2703-7')
		group by cohort, patient_num, severe, calendar_date, days_since_admission
	-- Acute respiratory distress syndrome (diagnosis)
	union all
	select distinct cohort, patient_num, severe, 'SEVERE-DIAG' concept_type, 'ARDS' concept_code, calendar_date, days_since_admission, value, logvalue
		from fource_observations
		where (concept_type='DIAG-ICD9' and concept_code in ('518.82','51882'))
			or (concept_type='DIAG-ICD10' and concept_code in ('J80'))
	-- Ventilator associated pneumonia (diagnosis)
	union all
	select distinct cohort, patient_num, severe, 'SEVERE-DIAG' concept_type, 'VAP' concept_code, calendar_date, days_since_admission, value, logvalue
		from fource_observations
		where (concept_type='DIAG-ICD9' and concept_code in ('997.31','99731'))
			or (concept_type='DIAG-ICD10' and concept_code in ('J95.851','J95851'));
commit;
--******************************************************************************
--******************************************************************************
--*** Determine which patients had severe disease or died
--******************************************************************************
--******************************************************************************
--------------------------------------------------------------------------------
-- Flag the patients who had severe disease within 30 days of admission.
--------------------------------------------------------------------------------
--test select * from fource_cohort_patients where severe = 0;
-- Mark a cohort patient severe, with the earliest qualifying date, when any
-- severity marker (SEVERE-LAB/SEVERE-DIAG flag, SIANES/SICARDIAC medication,
-- severe supplemental oxygen or ECMO procedure) occurs within 30 days of
-- admission. The EXISTS mirrors the subquery so only qualifying rows update.
update fource_cohort_patients p
	set severe = 1, severe_date=(
		select min(f.calendar_date)
		from fource_observations f
		where f.days_since_admission between 0 and 30
			and f.cohort=p.cohort and f.patient_num=p.patient_num
			and (
				-- Any severe lab or diagnosis
				(f.concept_type in ('SEVERE-LAB','SEVERE-DIAG'))
				-- Any severe medication
				or (f.concept_type='MED-CLASS' and f.concept_code in ('SIANES','SICARDIAC'))
				-- Any severe procedure
				or (f.concept_type='PROC-GROUP' and f.concept_code in ('SupplementalOxygenSevere','ECMO'))
			)
		group by f.cohort, f.patient_num
	)
	where exists (
		select min(f.calendar_date)
		from fource_observations f
		where f.cohort=p.cohort and f.patient_num=p.patient_num
			and f.days_since_admission between 0 and 30
			and (
				-- Any severe lab or diagnosis
				(f.concept_type in ('SEVERE-LAB','SEVERE-DIAG'))
				-- Any severe medication
				or (f.concept_type='MED-CLASS' and f.concept_code in ('SIANES','SICARDIAC'))
				-- Any severe procedure
				or (f.concept_type='PROC-GROUP' and f.concept_code in ('SupplementalOxygenSevere','ECMO'))
			)
		group by f.cohort, f.patient_num
	);
commit;
-- Flag the severe patients in the observations table
update fource_observations f set f.severe=1 where exists(
	select patient_num,cohort from fource_cohort_patients c
	where c.severe=1 and f.patient_num = c.patient_num and f.cohort = c.cohort);
--------------------------------------------------------------------------------
-- Add death dates to patients who have died.
--------------------------------------------------------------------------------
--if exists (select * from fource_config where death_data_available = 1)
--begin;
-- Add the original death date, floored to the severe/admission date so death
-- never precedes them. No-op when death_data_available = 0 (ON never matches).
merge into fource_cohort_patients c
using (
	select p.patient_num,
		min(case when p.death_date > nvl(c.severe_date,c.admission_date) then cast(p.death_date as date) else nvl(c.severe_date,c.admission_date) end) as death_date
	from fource_cohort_patients c
		inner join fource_death p
			on p.patient_num = c.patient_num
	group by p.patient_num) d
on (c.patient_num = d.patient_num and (select death_data_available from fource_config where rownum = 1)= 1)
WHEN MATCHED THEN UPDATE SET c.death_date = d.death_date;
commit;
-- Check that there aren't more recent facts for the deceased patients.
-- ****Be careful with this - if you trust your death data omit this -
-- ****Be sure that the future facts aren't orders, future appointments or late arriving lab results
merge into fource_cohort_patients c
using (
	select p.patient_num, cast(max(f.calendar_date) as date) death_date
	from fource_cohort_patients p
		inner join fource_observations f
			on f.cohort=p.cohort and f.patient_num=p.patient_num
	where p.death_date is not null and f.calendar_date > p.death_date
	group by p.cohort, p.patient_num
) d
on (c.patient_num = d.patient_num and (select death_data_available from fource_config where rownum = 1)= 1)
WHEN MATCHED THEN UPDATE SET c.death_date = d.death_date;
-- Make sure the death date is not after the source data updated date
update fource_cohort_patients
	set death_date = null
	where death_date > source_data_updated_date
		and (select death_data_available from fource_config where rownum = 1)= 1;
commit;
--******************************************************************************
--******************************************************************************
--*** For each cohort, create a list of dates since the first case.
--******************************************************************************
--******************************************************************************
-- fource_date_list: for each cohort, every calendar day from the cohort's
-- first admission up to at most 999 days later, capped at the cohort's
-- source_data_updated_date.
create table fource_date_list (
	cohort varchar(50) not null,
	d date not null
);
alter table fource_date_list add primary key (cohort, d);
insert into fource_date_list
	select * from (
		-- Digits 0-9; crossed three times below to generate day offsets 0..999.
		with n as (
			select 0 n from dual union all select 1 from dual union all select 2 from dual
			union all select 3 from dual union all select 4 from dual union all select 5 from dual
			union all select 6 from dual union all select 7 from dual union all select 8 from dual
			union all select 9 from dual
		)
		select l.cohort, d
		from (
			--select cohort, nvl(cast(dateadd(dd,a.n+10*b.n+100*c.n,p.s) as date),'01-JAN-2020') d
			-- FIX: this previously read "p.min_admit_date-a.n+10*b.n+100*c.n",
			-- which SUBTRACTED the ones digit -- producing offsets -9..990 (dates
			-- before the first admission, and none of days 991-999) instead of
			-- 0..999. Adding all three digits matches the commented-out
			-- dateadd(dd,a.n+10*b.n+100*c.n,...) logic above.
			-- NOTE(review): '01-JAN-2020' relies on NLS_DATE_FORMAT = DD-MON-YYYY.
			select cohort, nvl((p.min_admit_date+a.n+10*b.n+100*c.n),'01-JAN-2020') d
			from (
				select cohort, min(admission_date) min_admit_date
				from fource_cohort_patients
				group by cohort
			) p cross join n a cross join n b cross join n c
		) l
			inner join fource_cohort_config f
				on l.cohort=f.cohort
		where d <= f.source_data_updated_date
	);
commit;
--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--###
--### Assemble data for Phase 2 local PATIENT-LEVEL tables
--###
--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--------------------------------------------------------------------------------
-- LocalPatientClinicalCourse: Status by number of days since admission
--------------------------------------------------------------------------------
--select * from fource_LocalPatientClinicalCourse;
--drop table fource_LocalPatientClinicalCourse;
-- One row per cohort patient per day from admission through the data cutoff,
-- with daily flags: in_hospital, severe, in_icu, dead (-999 = data unavailable).
create table fource_LocalPatientClinicalCourse (
	siteid varchar(50) not null,
	cohort varchar(50) not null,
	patient_num int not null,
	days_since_admission int not null,
	calendar_date date not null,
	in_hospital int not null,
	severe int not null,
	in_icu int not null,
	dead int not null
);
alter table fource_LocalPatientClinicalCourse add primary key (cohort, patient_num, days_since_admission, siteid);
-- Get the list of dates and flag the ones where the patients were severe or deceased
insert into fource_LocalPatientClinicalCourse (siteid, cohort, patient_num, days_since_admission, calendar_date, in_hospital, severe, in_icu, dead)
	select (select siteid from fource_config where rownum = 1) siteid,
		p.cohort, p.patient_num,
		trunc(d.d)-trunc(p.admission_date) days_since_admission,
		d.d calendar_date,
		0 in_hospital,
		max(case when p.severe=1 and trunc(d.d)>=trunc(p.severe_date) then 1 else 0 end) severe,
		max(case when (select icu_data_available from fource_config where rownum = 1)=0 then -999 else 0 end) in_icu,
		max(case when (select death_data_available from fource_config where rownum = 1)=0 then -999
			when p.death_date is not null and trunc(d.d) >= trunc(p.death_date) then 1
			else 0 end) dead
	from fource_cohort_patients p
		inner join fource_date_list d
			on p.cohort=d.cohort and trunc(d.d)>=trunc(p.admission_date)
	group by p.cohort, p.patient_num, p.admission_date, d.d;
commit;
-- Flag the days when the patient was in the hospital.
-- trunc(p.calendar_date)-days_since_admission reconstructs the index admission
-- date, so only admissions on/after it count.
--TODO: Check the logic again - MICHELE IS THE SUBTRACTION CORRECT
merge into fource_LocalPatientClinicalCourse p
using (
	select distinct p.patient_num, p.calendar_date
	from fource_LocalPatientClinicalCourse p
		inner join fource_admissions a
			on a.patient_num = p.patient_num
				and trunc(a.admission_date)>= trunc(p.calendar_date)-days_since_admission
				and trunc(a.admission_date)<=trunc(p.calendar_date)
				and a.discharge_date>=trunc(p.calendar_date)
) d
on (d.patient_num=p.patient_num and d.calendar_date=p.calendar_date)
when matched then update set p.in_hospital=1;
commit;
-- Flag the days when the patient was in the ICU, making sure the patient was
-- also in the hospital on those days (in_icu is set to in_hospital, so an ICU
-- day outside a hospital day stays 0). No-op when icu_data_available = 0.
-- FIX: the USING clause previously contained an extra ")d" left over from
-- commented-out debugging code, leaving the parentheses unbalanced; the
-- subquery is now closed and aliased exactly once.
merge into fource_LocalPatientClinicalCourse p
using (
	select patient_num, calendar_date, in_hospital, in_icu
	from (
		select distinct p.patient_num, p.calendar_date, p.in_hospital, p.in_icu
		from fource_LocalPatientClinicalCourse p
			inner join fource_icu i
				on i.patient_num=p.patient_num
					and trunc(i.start_date)>=trunc(p.calendar_date)-days_since_admission
					and trunc(i.start_date)<=trunc(p.calendar_date)
					and trunc(i.end_date)>=trunc(p.calendar_date)
					and (select icu_data_available from fource_config where rownum=1)=1
	)
) d
on (d.patient_num=p.patient_num and d.calendar_date=p.calendar_date)
when matched then update set p.in_icu=p.in_hospital;
commit;
--select count(distinct patient_num) from fource_LocalPatientClinicalCourse where in_icu = 1;
--------------------------------------------------------------------------------
-- LocalPatientSummary: Dates, outcomes, age, and sex
--------------------------------------------------------------------------------
--drop table fource_LocalPatientSummary;
-- One row per cohort patient; '01-JAN-1900' is the placeholder for "not applicable".
create table fource_LocalPatientSummary (
	siteid varchar(50) not null,
	cohort varchar(50) not null,
	patient_num int not null,
	admission_date date not null,
	source_data_updated_date date not null,
	days_since_admission int not null,
	last_discharge_date date not null,
	still_in_hospital int not null,
	severe_date date not null,
	severe int not null,
	icu_date date not null,
	icu int not null,
	death_date date not null,
	dead int not null,
	age_group
-- Tail of the "create table fource_LocalPatientSummary" statement (column list begins above this chunk).
varchar(50) not null,
age int not null,
sex varchar(50) not null
);
alter table fource_LocalPatientSummary add primary key (cohort, patient_num, siteid);
-- Get the admission, severe, and death dates; and age and sex.
-- Conventions used throughout: '01-JAN-1900' is the "no date" placeholder; -999 means
-- "this site does not capture that data element".
-- NOTE(review): the bare literal '01-JAN-1900' relies on the session NLS_DATE_FORMAT
-- being DD-MON-YYYY; other statements in this script use an explicit to_date() -- confirm.
insert into fource_LocalPatientSummary
select (select siteid from fource_config where rownum=1), -- fource_config is a one-row config table; rownum=1 selects its single row
c.cohort, c.patient_num, c.admission_date, c.source_data_updated_date,
-- whole days from admission to the data-refresh date (times truncated)
trunc(c.source_data_updated_date)-trunc(c.admission_date) days_since_admission,
'01-JAN-1900' last_discharge_date, -- filled in by the last-discharge merge below
0 still_in_hospital, -- filled in by the last-discharge merge below
nvl(c.severe_date,'01-JAN-1900') severe_date,
c.severe,
'01-JAN-1900' icu_date, -- filled in later from fource_LocalPatientClinicalCourse
(case when (select icu_data_available from fource_config where rownum=1)=0 then -999 else 0 end) in_icu,
nvl(c.death_date,'01-JAN-1900') death_date,
(case when (select death_data_available from fource_config where rownum=1)=0 then -999 when c.death_date is not null then 1 else 0 end) dead,
-- bucket age into the standard 4CE age groups
(case when p.age_in_years_num between 0 and 2 then '00to02' when p.age_in_years_num between 3 and 5 then '03to05' when p.age_in_years_num between 6 and 11 then '06to11' when p.age_in_years_num between 12 and 17 then '12to17' when p.age_in_years_num between 18 and 20 then '18to20' when p.age_in_years_num between 21 and 25 then '21to25' when p.age_in_years_num between 26 and 49 then '26to49' when p.age_in_years_num between 50 and 69 then '50to69' when p.age_in_years_num between 70 and 79 then '70to79' when p.age_in_years_num >= 80 then '80plus' else 'other' end) age_group,
(case when p.age_in_years_num is null then -999 when p.age_in_years_num<0 then -999 else age_in_years_num end) age,
-- strip the 12-character 'sex_patient:' prefix from the mapped code; default to 'other'
nvl(substr(m.code,13,99),'other')
from fource_cohort_patients c
left outer join @crcSchema.patient_dimension p on p.patient_num=c.patient_num
left outer join fource_code_map m on p.sex_cd = m.local_code and m.code in ('sex_patient:male','sex_patient:female');
commit;
--select * from fource_LocalPatientSummary;
-- Update sex if sex stored in observation_fact table
-- (if the fact-based sex conflicts with the stored sex, fall back to 'other')
merge into fource_LocalPatientSummary s using
(select p.sex, p.patient_num
from fource_LocalPatientSummary s
inner join (
select patient_num, (case when male=1 then 'male' else 'female' end) sex
from (
select patient_num,
max(case when m.code='sex_fact:male' then 1 else 0 end) male,
max(case when m.code='sex_fact:female' then 1 else 0 end) female
from @crcSchema.observation_fact f --with (nolock)
inner join fource_code_map m
on f.concept_cd=m.local_code and m.code in ('sex_fact:male','sex_fact:female')
group by patient_num
) t
-- keep only patients with exactly one recorded sex fact
where male+female=1
) p on s.patient_num = p.patient_num
)x on (s.patient_num = x.patient_num )
when matched then update
set s.sex = (case when s.sex='other' then x.sex when s.sex<>x.sex then 'other' else s.sex end);
commit;
-- Get the last discharge date and whether the patient is still in the hospital as of the source_data_updated_date.
merge into fource_LocalPatientSummary s using (
select p.cohort, p.patient_num, max(a.discharge_date) last_discharge_date
from fource_LocalPatientSummary p
inner join fource_admissions a
on a.patient_num=p.patient_num and trunc(a.admission_date)>=trunc(p.admission_date)
group by p.cohort, p.patient_num
) x on (s.cohort=x.cohort and s.patient_num=x.patient_num)
when matched then update
-- a discharge after the data-refresh date means the stay is still open as of the refresh
set s.last_discharge_date = (case when x.last_discharge_date>s.source_data_updated_date then to_date('01-JAN-1900','DD-MON-YYYY') else x.last_discharge_date end),
s.still_in_hospital = (case when x.last_discharge_date>s.source_data_updated_date then 1 else 0 end);
commit;
--select * from fource_LocalPatientClinicalCourse where in_icu = 1;
-- Get earliest ICU date for patients who were in the ICU.
-- Populate the earliest ICU date for patients who appear in the ICU in the clinical course table.
-- BUG FIX: this merge previously set "s.icu = 1", but fource_LocalPatientSummary has no "icu"
-- column -- the insert above populates a column aliased "in_icu" -- so the statement failed
-- with ORA-00904 (invalid identifier). Corrected to set s.in_icu = 1.
-- The "(select icu_data_available ...)=1" predicate in the ON clause makes the whole merge a
-- no-op when the site has no ICU data (in_icu then keeps its -999 placeholder).
merge into fource_LocalPatientSummary s
using (
    select cohort, patient_num, min(calendar_date) icu_date
    from fource_LocalPatientClinicalCourse
    where in_icu=1
    group by cohort, patient_num
) x
on (s.cohort=x.cohort and s.patient_num=x.patient_num
    and (select icu_data_available from fource_config where rownum = 1)=1)
when matched then update
    set s.icu_date = x.icu_date, s.in_icu = 1;
commit;

--------------------------------------------------------------------------------
-- LocalPatientObservations: Diagnoses, procedures, medications, and labs
--------------------------------------------------------------------------------
--drop table fource_LocalPatientObservations;
create table fource_LocalPatientObservations (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    patient_num int not null,
    days_since_admission int not null,
    concept_type varchar(50) not null,
    concept_code varchar(50) not null,
    value numeric(18,5) not null
);
alter table fource_LocalPatientObservations add primary key (cohort, patient_num, days_since_admission, concept_type, concept_code, siteid);
-- Copy the pre-assembled observations, tagging each row with this site's id.
insert into fource_LocalPatientObservations
    select (select siteid from fource_config where rownum = 1),
        cohort, patient_num, days_since_admission, concept_type, concept_code, value
    from fource_observations;
commit;

--------------------------------------------------------------------------------
-- LocalPatientRace: local and 4CE race code(s) for each patient
--------------------------------------------------------------------------------
--drop table fource_LocalPatientRace;
create table fource_LocalPatientRace (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    patient_num int not null,
    race_local_code varchar(500) not null,
    race_4ce varchar(100) not null
);
alter table fource_LocalPatientRace add primary key (cohort, patient_num, race_local_code, siteid);
-- A patient can have multiple race rows (one per distinct mapped local code).
-- The insert is skipped entirely when the site does not capture race (race_data_available=0).
insert into fource_LocalPatientRace
    select distinct
        (select siteid from fource_config where rownum = 1) siteid,
        cohort, patient_num, race_local_code, race_4ce
    from (
        -- Race from the patient_dimension table
        -- (substr offset 14 strips the 13-character 'race_patient:' prefix)
        select c.cohort, c.patient_num, m.local_code race_local_code, substr(m.code,14,999) race_4ce
            from fource_cohort_patients c
            inner join @crcSchema.patient_dimension p
                on p.patient_num=c.patient_num
            inner join fource_code_map m
                on p.race_cd = m.local_code and m.code like 'race_patient:%'
        union all
        -- Race from the observation_fact table
        -- (substr offset 11 strips the 10-character 'race_fact:' prefix)
        select c.cohort, c.patient_num, m.local_code race_local_code, substr(m.code,11,999) race_4ce
            from fource_cohort_patients c
            inner join @crcSchema.observation_fact p --with (nolock)
                on p.patient_num=c.patient_num
            inner join fource_code_map m
                on p.concept_cd = m.local_code and m.code like 'race_fact:%'
    ) t
    where (select race_data_available from fource_config where rownum =1)=1;
commit;

--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--###
--### Assemble data for Phase 2 local AGGREGATE COUNT tables.
--### These are the local versions without obfuscation.
--###
--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################

--------------------------------------------------------------------------------
-- LocalCohorts
--------------------------------------------------------------------------------
--drop table fource_LocalCohorts;
create table fource_LocalCohorts (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    include_in_phase1 int not null,
    include_in_phase2 int not null,
    source_data_updated_date date not null,
    earliest_adm_date date not null,
    latest_adm_date date not null
);
alter table fource_LocalCohorts add primary key (cohort, siteid);
-- Copy the cohort configuration, tagging each row with this site's id.
insert into fource_LocalCohorts
    select (select siteid from fource_config where rownum = 1) siteid,
        cohort, include_in_phase1, include_in_phase2, source_data_updated_date, earliest_adm_date, latest_adm_date
    from fource_cohort_config;
commit;

--------------------------------------------------------------------------------
-- LocalDailyCounts
--------------------------------------------------------------------------------
--drop table fource_LocalDailyCounts;
create table fource_LocalDailyCounts (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    calendar_date date not null,
    cumulative_pts_all int not null,
    cumulative_pts_icu int not null,
    cumulative_pts_dead int not null,
    cumulative_pts_severe int not null,
    cumulative_pts_severe_icu int not null,
    cumulative_pts_severe_dead int not null,
    pts_in_hosp_on_this_date int not null,
    pts_in_icu_on_this_date int not null,
    pts_severe_in_hosp_on_date int not null,
    pts_severe_in_icu_on_date int not null
);
alter table fource_LocalDailyCounts add primary key (cohort, calendar_date, siteid);
-- Get daily counts, except for ICU
-- (-999 marks measures the site cannot report; the cross join to the one-row
-- fource_config table just makes icu_data_available/death_data_available visible)
insert into fource_LocalDailyCounts
    select (select siteid from fource_config where rownum = 1) siteid,
        cohort, calendar_date,
        -- Cumulative counts
        count(*),
        (case when x.icu_data_available=0 then -999 else 0 end),
        (case when x.death_data_available=0 then -999 else sum(dead) end),
        sum(severe),
        (case when x.icu_data_available=0 then -999 else 0 end),
        (case when x.death_data_available=0 then -999 else sum(severe*dead) end),
        -- Counts on the calendar_date
        sum(in_hospital),
        (case when x.icu_data_available=0 then -999 else sum(in_icu) end),
        sum(in_hospital*severe),
        (case when x.icu_data_available=0 then -999 else sum(in_icu*severe) end)
    from fource_config x
        cross join fource_LocalPatientClinicalCourse c
    group by cohort, calendar_date, icu_data_available, death_data_available;
commit;
-- Update daily counts based on the first time patients were in the ICU
--TODO: MICHELE Make sure Griffin or someone else reviews this update
--select * from fource_LocalDailyCounts;
--select * from fource_LocalPatientSummary where icu_date >to_date('01-JAN-1900', 'DD-MON-YYYY') order by cohort, admission_date;
--**************TODO: MICHELE ERROR Unable to get stable rows in source*****************
-- NOTE(review): the merge below is commented out, so cumulative_pts_icu /
-- cumulative_pts_severe_icu keep their inserted placeholder values -- confirm intentional.
/*merge into fource_LocalDailyCounts c using
( select calendar_date, cohort, icu_date, cumm_daily_count from (
select distinct d.calendar_date, d.cohort, a.icu_date, count(*) cumm_daily_count
from fource_LocalDailyCounts d join fource_LocalPatientSummary a
on a.cohort=d.cohort and a.icu_date>to_date('01-JAN-1900', 'DD-MON-YYYY') and a.icu_date<=d.calendar_date
group by d.cohort,d.calendar_date, a.icu_date ) ) x
on (x.cohort=c.cohort and x.calendar_date=c.calendar_date)
when matched then update set c.cumulative_pts_icu = x.cumm_daily_count;
select * from fource_LocalPatientSummary a;
select * from visit_dimension where patient_num = 348143;
cumulative_pts_severe_icu = (
select count(*)
from fource_LocalPatientSummary a
where a.cohort=c.cohort
and a.icu_date<=c.calendar_date
and a.icu_date>'01-JAN-1900'
and a.severe=1 )
(select icu_data_available from fource_config where rownum=1)=1);
*/

--------------------------------------------------------------------------------
-- LocalClinicalCourse
--------------------------------------------------------------------------------
create table fource_LocalClinicalCourse (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    days_since_admission int not null,
    pts_all_in_hosp int not null,
    pts_all_in_icu int not null,
    pts_all_dead int not null,
    pts_severe_by_this_day int not null,
    pts_ever_severe_in_hosp int not null,
    pts_ever_severe_in_icu int not null,
    pts_ever_severe_dead int not null
);
alter table fource_LocalClinicalCourse add primary key (cohort, days_since_admission, siteid);
-- Counts by day since admission; "ever severe" columns multiply by the patient-level severe flag.
insert into fource_LocalClinicalCourse
    select (select siteid from fource_config where rownum = 1) siteid,
        c.cohort, c.days_since_admission,
        sum(c.in_hospital),
        (case when x.icu_data_available=0 then -999 else sum(c.in_icu) end),
        (case when x.death_data_available=0 then -999 else sum(c.dead) end),
        sum(c.severe),
        sum(c.in_hospital*p.severe),
        (case when x.icu_data_available=0 then -999 else sum(c.in_icu*p.severe) end),
        (case when x.death_data_available=0 then -999 else sum(c.dead*p.severe) end)
    from fource_config x
        cross join fource_LocalPatientClinicalCourse c
        inner join fource_cohort_patients p
            on c.cohort=p.cohort and c.patient_num=p.patient_num
    group by c.cohort, c.days_since_admission, icu_data_available, death_data_available;
commit;

--------------------------------------------------------------------------------
-- LocalAgeSex
--------------------------------------------------------------------------------
--drop table fource_LocalAgeSex;
create table fource_LocalAgeSex (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    age_group varchar(20) not null,
    mean_age numeric(18,10) not null,
    sex varchar(10) not null,
    pts_all int not null,
    pts_ever_severe int not null
);
alter table fource_LocalAgeSex add primary key (cohort, age_group, sex, siteid);
-- Four unioned rollups: (age_group, sex), ('all', sex), (age_group, 'all'), ('all', 'all').
-- nullif(age,-999) keeps the "age unavailable" sentinel out of the mean; a group with only
-- sentinel ages reports a mean of -999.
insert into fource_LocalAgeSex
    select (select siteid from fource_config where rownum = 1) siteid,
        cohort, age_group, nvl(avg(cast(nullif(age,-999) as numeric(18,10))),-999), sex, count(*), sum(severe)
        from fource_LocalPatientSummary
        group by cohort, age_group, sex
    union all
    select (select siteid from fource_config where rownum = 1) siteid,
        cohort, 'all', nvl(avg(cast(nullif(age,-999) as numeric(18,10))),-999), sex, count(*), sum(severe)
        from fource_LocalPatientSummary
        group by cohort, sex
    union all
    select (select siteid from fource_config where rownum = 1) siteid,
        cohort, age_group, nvl(avg(cast(nullif(age,-999) as numeric(18,10))),-999), 'all', count(*), sum(severe)
        from fource_LocalPatientSummary
        group by cohort, age_group
    union all
    select (select siteid from fource_config where rownum = 1) siteid,
        cohort, 'all', nvl(avg(cast(nullif(age,-999) as numeric(18,10))),-999), 'all', count(*), sum(severe)
        from fource_LocalPatientSummary
        group by cohort;
commit;

--------------------------------------------------------------------------------
-- LocalLabs
--------------------------------------------------------------------------------
--drop table fource_LocalLabs;
create table fource_LocalLabs (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    loinc varchar(20) not null,
    days_since_admission int not null,
    pts_all int,
    mean_value_all numeric(18,5),
    stdev_value_all numeric(18,5),
    mean_log_value_all numeric(18,10),
    stdev_log_value_all numeric(18,10),
    pts_ever_severe int,
    mean_value_ever_severe numeric(18,5),
    stdev_value_ever_severe numeric(18,5),
    mean_log_value_ever_severe numeric(18,10),
    stdev_log_value_ever_severe numeric(18,10),
    pts_never_severe int,
    mean_value_never_severe numeric(18,5),
    stdev_value_never_severe numeric(18,5),
    mean_log_value_never_severe numeric(18,10),
    stdev_log_value_never_severe numeric(18,10)
);
alter table fource_LocalLabs add primary key (cohort, loinc, days_since_admission, siteid);
-- Per-lab per-day statistics; -999 marks statistics whose severity stratum has no patients.
insert into fource_LocalLabs
    select (select siteid from fource_config where rownum = 1) siteid,
        cohort, concept_code, days_since_admission,
        count(*), avg(value), nvl(stddev(value),0), avg(logvalue), nvl(stddev(logvalue),0),
        sum(severe),
        (case when sum(severe)=0 then -999 else avg(case when severe=1 then value else null end) end),
        (case when sum(severe)=0 then -999 else nvl(stddev(case when severe=1 then value else null end),0) end),
        (case when sum(severe)=0 then -999 else avg(case when severe=1 then logvalue else null end) end),
        (case when sum(severe)=0 then -999 else nvl(stddev(case when severe=1 then logvalue else null end),0) end),
        sum(1-severe),
        (case when sum(1-severe)=0 then -999 else avg(case when severe=0 then value else null end) end),
        (case when sum(1-severe)=0 then -999 else nvl(stddev(case when severe=0 then value else null end),0) end),
        (case when sum(1-severe)=0 then -999 else avg(case when severe=0 then logvalue else null end) end),
        (case when sum(1-severe)=0 then -999 else nvl(stddev(case when severe=0 then logvalue else null end),0) end)
    from fource_observations
    where concept_type='LAB-LOINC' and days_since_admission>=0
    group by cohort, concept_code, days_since_admission;
commit;

--------------------------------------------------------------------------------
-- LocalDiagProcMed
--------------------------------------------------------------------------------
--drop table fource_LocalDiagProcMed;
create table fource_LocalDiagProcMed (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    concept_type varchar(50) not null,
    concept_code varchar(50) not null,
    pts_all_before_adm int, -- an observation occurred from day @lookback_days to -15 relative to the admission date
    pts_all_since_adm int, -- an observation occurred on day >=0
    pts_all_dayN14toN1 int, -- an observation occurred from day -14 to -1
    pts_all_day0to29 int, -- an observation occurred from day 0 to 29
    pts_all_day30to89 int, -- an observation occurred from day 30 to 89
    pts_all_day30plus int, -- an observation occurred on day >=30
    pts_all_day90plus int, -- an observation occurred on day >=90
    pts_all_1st_day0to29 int, -- the first observation is day 0 to 29 (no observations from day @lookback_days to -1)
    pts_all_1st_day30plus int, -- the first observation is day >=30 (no observations from day @lookback_days to 29)
    pts_all_1st_day90plus int, -- the first observation is day >=90 (no observations from day @lookback_days to 89)
    pts_ever_severe_before_adm int,
    pts_ever_severe_since_adm int,
    pts_ever_severe_dayN14toN1 int,
    pts_ever_severe_day0to29 int,
    pts_ever_severe_day30to89 int,
    pts_ever_severe_day30plus int,
    pts_ever_severe_day90plus int,
    pts_ever_severe_1st_day0to29 int,
    pts_ever_severe_1st_day30plus int,
    pts_ever_severe_1st_day90plus int
);
alter table fource_LocalDiagProcMed add primary key (cohort, concept_type, concept_code, siteid);
-- Inner query builds one row per patient/concept with 0/1 time-window flags; outer query sums them.
-- ICD codes are truncated to their first 3 characters before counting.
insert into fource_LocalDiagProcMed
    select (select siteid from fource_config where rownum = 1) siteid,
        cohort, concept_type, concept_code,
        sum(before_adm), sum(since_adm), sum(dayN14toN1), sum(day0to29), sum(day30to89), sum(day30plus), sum(day90plus),
        sum(case when first_day between 0 and 29 then 1 else 0 end),
        sum(case when first_day >= 30 then 1 else 0 end),
        sum(case when first_day >= 90 then 1 else 0 end),
        sum(severe*before_adm), sum(severe*since_adm), sum(severe*dayN14toN1), sum(severe*day0to29), sum(severe*day30to89), sum(severe*day30plus), sum(severe*day90plus),
        sum(severe*(case when first_day between 0 and 29 then 1 else 0 end)),
        sum(severe*(case when first_day >= 30 then 1 else 0 end)),
        sum(severe*(case when first_day >= 90 then 1 else 0 end))
    from (
        select cohort, patient_num, severe, concept_type,
            (case when concept_type in ('DIAG-ICD9','DIAG-ICD10') then substr(concept_code,1,3) else concept_code end) concept_code,
            --max(case when days_since_admission between @lookback_days and -15 then 1 else 0 end) before_adm,
            -- NOTE(review): the lookback window is hard-coded to -365 days rather than the
            -- @lookback_days parameter used in the commented-out line above -- confirm intended.
            max(case when days_since_admission between -365 and -15 then 1 else 0 end) before_adm,
            max(case when days_since_admission between -14 and -1 then 1 else 0 end) dayN14toN1,
            max(case when days_since_admission >= 0 then 1 else 0 end) since_adm,
            max(case when days_since_admission between 0 and 29 then 1 else 0 end) day0to29,
            max(case when days_since_admission between 30 and 89 then 1 else 0 end) day30to89,
            max(case when days_since_admission >= 30 then 1 else 0 end) day30plus,
            max(case when days_since_admission >= 90 then 1 else 0 end) day90plus,
            min(case when days_since_admission >= 0 then days_since_admission else null end) first_day_since_adm,
            min(days_since_admission) first_day
        from fource_observations
        where concept_type in ('DIAG-ICD9','DIAG-ICD10','MED-CLASS','PROC-GROUP','COVID-TEST','SEVERE-LAB','SEVERE-DIAG')
        group by cohort, patient_num, severe, concept_type,
            (case when concept_type in ('DIAG-ICD9','DIAG-ICD10') then substr(concept_code,1,3) else concept_code end)
    ) t
    group by cohort, concept_type, concept_code;
commit;

--------------------------------------------------------------------------------
-- LocalRaceByLocalCode
--------------------------------------------------------------------------------
--drop table fource_LocalRaceByLocalCode;
create table fource_LocalRaceByLocalCode (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    race_local_code varchar(500) not null,
    race_4ce varchar(100) not null,
    pts_all int not null,
    pts_ever_severe int not null
);
alter table fource_LocalRaceByLocalCode add primary key (cohort, race_local_code, siteid);
insert into fource_LocalRaceByLocalCode
    select (select siteid from fource_config where rownum = 1),
        r.cohort, r.race_local_code, r.race_4ce, count(*), sum(p.severe)
    from fource_LocalPatientRace r
        inner join fource_cohort_patients p
            on r.cohort=p.cohort and r.patient_num=p.patient_num
    group by r.cohort, r.race_local_code, r.race_4ce;
commit;

--------------------------------------------------------------------------------
-- LocalRaceBy4CECode
--------------------------------------------------------------------------------
create table fource_LocalRaceBy4CECode (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    race_4ce varchar(100) not null,
    pts_all int not null,
    pts_ever_severe int not null
);
alter table fource_LocalRaceBy4CECode add primary key (cohort, race_4ce, siteid);
insert into fource_LocalRaceBy4CECode
    select (select siteid from fource_config where rownum = 1),
        r.cohort, r.race_4ce, count(*), sum(p.severe)
    from fource_LocalPatientRace r
        inner join fource_cohort_patients p
            on r.cohort=p.cohort and r.patient_num=p.patient_num
    group by r.cohort, r.race_4ce;
commit;

--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--###
--### Assemble data for Phase 1 shared AGGREGATE COUNT tables.
--### These are the shared versions which may include obfuscation.
--###
--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################

--------------------------------------------------------------------------------
-- Cohorts
-------------------------------------------------------------------------------
create table fource_Cohorts (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    source_data_updated_date date not null,
    earliest_adm_date date not null,
    latest_adm_date date not null
);
alter table fource_Cohorts add primary key (cohort, siteid);
--truncate table fource_Cohorts;
-- Only cohorts flagged include_in_phase1=1 are copied into the shared Phase 1 tables.
insert into fource_Cohorts
    select (select siteid from fource_config where rownum = 1),
        cohort, source_data_updated_date, earliest_adm_date, latest_adm_date
    from fource_cohort_config
    where include_in_phase1=1;
commit;

--------------------------------------------------------------------------------
-- DailyCounts
--------------------------------------------------------------------------------
--drop table fource_DailyCounts;
create table fource_DailyCounts (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    calendar_date date not null,
    cumulative_pts_all int not null,
    cumulative_pts_icu int not null,
    cumulative_pts_dead int not null,
    cumulative_pts_severe int not null,
    cumulative_pts_severe_icu int not null,
    cumulative_pts_severe_dead int not null,
    pts_in_hosp_on_this_date int not null,
    pts_in_icu_on_this_date int not null,
    pts_severe_in_hosp_on_date int not null,
    pts_severe_in_icu_on_date int not null
);
alter table fource_DailyCounts add primary key (cohort, calendar_date, siteid);
-- NOTE(review): "select *" relies on fource_LocalDailyCounts having the same column order as this table.
insert into fource_DailyCounts
    select * from fource_LocalDailyCounts
    where cohort in (select cohort from fource_cohort_config where include_in_phase1=1);
COMMIT;
--------------------------------------------------------------------------------
-- ClinicalCourse
--------------------------------------------------------------------------------
--drop table fource_CLinicalCourse;
create table fource_ClinicalCourse (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    days_since_admission int not null,
    pts_all_in_hosp int not null,
    pts_all_in_icu int not null,
    pts_all_dead int not null,
    pts_severe_by_this_day int not null,
    pts_ever_severe_in_hosp int not null,
    pts_ever_severe_in_icu int not null,
    pts_ever_severe_dead int not null
);
-- NOTE(review): unlike the other shared Phase 1 tables, this primary key omits siteid -- confirm intentional.
alter table fource_ClinicalCourse add primary key (cohort, days_since_admission);
insert into fource_ClinicalCourse
    select * from fource_LocalClinicalCourse
    where cohort in (select cohort from fource_cohort_config where include_in_phase1=1);
commit;

--------------------------------------------------------------------------------
-- AgeSex
--------------------------------------------------------------------------------
--drop table fource_AgeSex;
create table fource_AgeSex (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    age_group varchar(20) not null,
    mean_age numeric(18,10) not null,
    sex varchar(10) not null,
    pts_all int not null,
    pts_ever_severe int not null
);
alter table fource_AgeSex add primary key (cohort, age_group, sex, siteid);
insert into fource_AgeSex
    select * from fource_LocalAgeSex
    where cohort in (select cohort from fource_cohort_config where include_in_phase1=1);
commit;

--------------------------------------------------------------------------------
-- Labs
--------------------------------------------------------------------------------
--drop table fource_Labs;
create table fource_Labs (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    loinc varchar(20) not null,
    days_since_admission int not null,
    pts_all int,
    mean_value_all numeric(18,5),
    stdev_value_all numeric(18,5),
    mean_log_value_all numeric(18,10),
    stdev_log_value_all numeric(18,10),
    pts_ever_severe int,
    mean_value_ever_severe numeric(18,5),
    stdev_value_ever_severe numeric(18,5),
    mean_log_value_ever_severe numeric(18,10),
    stdev_log_value_ever_severe numeric(18,10),
    pts_never_severe int,
    mean_value_never_severe numeric(18,5),
    stdev_value_never_severe numeric(18,5),
    mean_log_value_never_severe numeric(18,10),
    stdev_log_value_never_severe numeric(18,10)
);
alter table fource_Labs add primary key (cohort, loinc, days_since_admission, siteid);
insert into fource_Labs
    select * from fource_LocalLabs
    where cohort in (select cohort from fource_cohort_config where include_in_phase1=1);
commit;

--------------------------------------------------------------------------------
-- DiagProcMed
--------------------------------------------------------------------------------
--drop table fource_DiagProcMed;
create table fource_DiagProcMed (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    concept_type varchar(50) not null,
    concept_code varchar(50) not null,
    pts_all_before_adm int,
    pts_all_since_adm int,
    pts_all_dayN14toN1 int,
    pts_all_day0to29 int,
    pts_all_day30to89 int,
    pts_all_day30plus int,
    pts_all_day90plus int,
    pts_all_1st_day0to29 int,
    pts_all_1st_day30plus int,
    pts_all_1st_day90plus int,
    pts_ever_severe_before_adm int,
    pts_ever_severe_since_adm int,
    pts_ever_severe_dayN14toN1 int,
    pts_ever_severe_day0to29 int,
    pts_ever_severe_day30to89 int,
    pts_ever_severe_day30plus int,
    pts_ever_severe_day90plus int,
    pts_ever_severe_1st_day0to29 int,
    pts_ever_severe_1st_day30plus int,
    pts_ever_severe_1st_day90plus int
);
alter table fource_DiagProcMed add primary key (cohort, concept_type, concept_code, siteid);
insert into fource_DiagProcMed
    select * from fource_LocalDiagProcMed
    where cohort in (select cohort from fource_cohort_config where include_in_phase1=1);
commit;

--------------------------------------------------------------------------------
-- RaceByLocalCode
--------------------------------------------------------------------------------
create table fource_RaceByLocalCode (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    race_local_code varchar(500) not null,
    race_4ce varchar(100) not null,
    pts_all int not null,
    pts_ever_severe int not null
);
alter table fource_RaceByLocalCode add primary key (cohort, race_local_code, siteid);
insert into fource_RaceByLocalCode
    select * from fource_LocalRaceByLocalCode
    where cohort in (select cohort from fource_cohort_config where include_in_phase1=1);
commit;

--------------------------------------------------------------------------------
-- RaceBy4CECode
--------------------------------------------------------------------------------
--drop table fource_RaceBy4CECode;
create table fource_RaceBy4CECode (
    siteid varchar(50) not null,
    cohort varchar(50) not null,
    race_4ce varchar(100) not null,
    pts_all int not null,
    pts_ever_severe int not null
);
alter table fource_RaceBy4CECode add primary key (cohort, race_4ce, siteid);
-- NOTE(review): unlike the other inserts in this section, no commit follows this statement -- confirm intentional.
insert into fource_RaceBy4CECode
    select * from fource_LocalRaceBy4CECode
    where cohort in (select cohort from fource_cohort_config where include_in_phase1=1);

--------------------------------------------------------------------------------
-- LabCodes
--------------------------------------------------------------------------------
create table fource_LabCodes (
    siteid varchar(50) not null,
    fource_loinc varchar(20) not null,
    fource_lab_units varchar(20) not null,
    fource_lab_name varchar(100) not null,
    scale_factor float not null,
    local_lab_code varchar(50) not null,
    local_lab_units varchar(20) not null,
    local_lab_name varchar(500) not null,
    notes varchar(1000)
);
alter table fource_LabCodes add primary key (fource_loinc, local_lab_code, local_lab_units, siteid);
-- Commas are replaced with semicolons in the free-text columns (replace(...,',',';')).
insert into fource_LabCodes
    select (select siteid from fource_config where rownum = 1),
        fource_loinc, fource_lab_units, fource_lab_name, scale_factor,
        replace(local_lab_code,',',';'), replace(local_lab_units,',',';'), replace(local_lab_name,',',';'), replace(notes,',',';')
    from fource_lab_map_report;

--******************************************************************************
--******************************************************************************
--*** Obfuscate the shared Phase 1 files as needed (optional)
--******************************************************************************
--******************************************************************************

--------------------------------------------------------------------------------
-- Blur counts by adding a small random number.
--------------------------------------------------------------------------------
-- Each count is rounded to the nearest 5, then jittered by a Gaussian draw
-- (dbms_random.normal scaled by 2.8, rounded) clamped to +/- obfuscation_blur.
-- The WHERE clause makes each update a no-op when obfuscation_blur is not positive.
update fource_DailyCounts
    set cumulative_pts_all = (round(cumulative_pts_all/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , cumulative_pts_icu = (round(cumulative_pts_icu/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , cumulative_pts_dead = (round(cumulative_pts_dead/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , cumulative_pts_severe = (round(cumulative_pts_severe/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , cumulative_pts_severe_icu = (round(cumulative_pts_severe_icu/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , cumulative_pts_severe_dead = (round(cumulative_pts_severe_dead/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_in_hosp_on_this_date = (round(pts_in_hosp_on_this_date/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_in_icu_on_this_date = (round(pts_in_icu_on_this_date/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_severe_in_hosp_on_date = (round(pts_severe_in_hosp_on_date/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_severe_in_icu_on_date = (round(pts_severe_in_icu_on_date/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    where (select obfuscation_blur from fource_config where rownum = 1) > 0;
update fource_ClinicalCourse
    set pts_all_in_hosp = (round(pts_all_in_hosp/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_in_icu = (round(pts_all_in_icu/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_dead = (round(pts_all_dead/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_severe_by_this_day = (round(pts_severe_by_this_day/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe_in_hosp = (round(pts_ever_severe_in_hosp/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe_in_icu = (round(pts_ever_severe_in_icu/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe_dead = (round(pts_ever_severe_dead/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    where (select obfuscation_blur from fource_config where rownum = 1) > 0;
update fource_AgeSex
    set pts_all = (round(pts_all/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe = (round(pts_ever_severe/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    where (select obfuscation_blur from fource_config where rownum = 1) > 0;
update fource_Labs
    set pts_all = (round(pts_all/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe = (round(pts_ever_severe/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_never_severe = (round(pts_never_severe/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    where (select obfuscation_blur from fource_config where rownum = 1) > 0;
-- (this update statement continues beyond this chunk)
update fource_DiagProcMed
    set pts_all_before_adm = (round(pts_all_before_adm/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_since_adm = (round(pts_all_since_adm/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_dayN14toN1 = (round(pts_all_dayN14toN1/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_day0to29 = (round(pts_all_day0to29/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_day30to89 = (round(pts_all_day30to89/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_day30plus = (round(pts_all_day30plus/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_day90plus = (round(pts_all_day90plus/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_1st_day0to29 = (round(pts_all_1st_day0to29/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_1st_day30plus = (round(pts_all_1st_day30plus/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_all_1st_day90plus = (round(pts_all_1st_day90plus/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe_before_adm = (round(pts_ever_severe_before_adm/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe_since_adm = (round(pts_ever_severe_since_adm/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe_dayN14toN1 = (round(pts_ever_severe_dayN14toN1/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe_day0to29 = (round(pts_ever_severe_day0to29/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe_day30to89 = (round(pts_ever_severe_day30to89/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1)))
    , pts_ever_severe_day30plus =
(round(pts_ever_severe_day30plus/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1))) , pts_ever_severe_day90plus = (round(pts_ever_severe_day90plus/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1))) , pts_ever_severe_1st_day0to29 = (round(pts_ever_severe_1st_day0to29/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1))) , pts_ever_severe_1st_day30plus = (round(pts_ever_severe_1st_day30plus/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1))) , pts_ever_severe_1st_day90plus = (round(pts_ever_severe_1st_day90plus/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1))) where (select obfuscation_blur from fource_config where rownum = 1) > 0; update fource_RaceByLocalCode set pts_all = (round(pts_all/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1))) , pts_ever_severe = (round(pts_ever_severe/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1))) where (select obfuscation_blur from fource_config where rownum = 1) > 0; update fource_RaceBy4CECode set pts_all = (round(pts_all/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 
1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1))) , pts_ever_severe = (round(pts_ever_severe/5.0,0)*5)+greatest(-1*(select obfuscation_blur from fource_config where rownum = 1),least(round(dbms_random.normal*2.8,0),(select obfuscation_blur from fource_config where rownum = 1))) where (select obfuscation_blur from fource_config where rownum = 1) > 0; -------------------------------------------------------------------------------- -- Mask small counts with "-99". -------------------------------------------------------------------------------- update fource_DailyCounts set cumulative_pts_all = (case when cumulative_pts_all<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else cumulative_pts_all end), cumulative_pts_icu = (case when cumulative_pts_icu=-999 then -999 when cumulative_pts_icu<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else cumulative_pts_icu end), cumulative_pts_dead = (case when cumulative_pts_dead=-999 then -999 when cumulative_pts_dead<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else cumulative_pts_dead end), cumulative_pts_severe = (case when cumulative_pts_severe<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else cumulative_pts_severe end), cumulative_pts_severe_icu = (case when cumulative_pts_severe_icu=-999 then -999 when cumulative_pts_severe_icu<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else cumulative_pts_severe_icu end), cumulative_pts_severe_dead = (case when cumulative_pts_severe_dead=-999 then -999 when cumulative_pts_severe_dead<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else cumulative_pts_severe_dead end), pts_in_hosp_on_this_date = (case when pts_in_hosp_on_this_date<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else 
pts_in_hosp_on_this_date end), pts_in_icu_on_this_date = (case when pts_in_icu_on_this_date=-999 then -999 when pts_in_icu_on_this_date<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_in_icu_on_this_date end), pts_severe_in_hosp_on_date = (case when pts_severe_in_hosp_on_date<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_severe_in_hosp_on_date end), pts_severe_in_icu_on_date = (case when pts_severe_in_icu_on_date=-999 then -999 when pts_severe_in_icu_on_date<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_severe_in_icu_on_date end) where (select obfuscation_small_count_mask from fource_config where rownum=1) > 0; update fource_ClinicalCourse set pts_all_in_hosp = (case when pts_all_in_hosp<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_in_hosp end), pts_all_in_icu = (case when pts_all_in_icu=-999 then -999 when pts_all_in_icu<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_in_icu end), pts_all_dead = (case when pts_all_dead=-999 then -999 when pts_all_dead<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_dead end), pts_severe_by_this_day = (case when pts_severe_by_this_day<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_severe_by_this_day end), pts_ever_severe_in_hosp = (case when pts_ever_severe_in_hosp<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_in_hosp end), pts_ever_severe_in_icu = (case when pts_ever_severe_in_icu=-999 then -999 when pts_ever_severe_in_icu<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_in_icu end), pts_ever_severe_dead = (case when pts_ever_severe_dead=-999 then -999 when pts_ever_severe_dead<(select obfuscation_small_count_mask 
from fource_config where rownum=1) then -99 else pts_ever_severe_dead end) where (select obfuscation_small_count_mask from fource_config where rownum=1) > 0; update fource_AgeSex set pts_all = (case when pts_all<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all end), pts_ever_severe = (case when pts_ever_severe<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe end) where (select obfuscation_small_count_mask from fource_config where rownum=1) > 0; update fource_Labs set pts_all=-99, mean_value_all=-99, stdev_value_all=-99, mean_log_value_all=-99, stdev_log_value_all=-99 where pts_all<(select obfuscation_small_count_mask from fource_config where rownum=1) and (select obfuscation_small_count_mask from fource_config where rownum=1) > 0; update fource_Labs -- Need to mask both ever_severe and never_severe if either of them are below the small count threshold, since all=ever+never set pts_ever_severe=-99, mean_value_ever_severe=-99, stdev_value_ever_severe=-99, mean_log_value_ever_severe=-99, stdev_log_value_ever_severe=-99, pts_never_severe=-99, mean_value_never_severe=-99, stdev_value_never_severe=-99, mean_log_value_never_severe=-99, stdev_log_value_never_severe=-99 where (pts_ever_severe<(select obfuscation_small_count_mask from fource_config where rownum=1)) or (pts_never_severe<(select obfuscation_small_count_mask from fource_config where rownum=1)) and (select obfuscation_small_count_mask from fource_config where rownum=1) > 0; update fource_DiagProcMed set pts_all_before_adm = (case when pts_all_before_adm<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_before_adm end), pts_all_since_adm = (case when pts_all_since_adm<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_since_adm end), pts_all_dayN14toN1 = (case when pts_all_dayN14toN1<(select obfuscation_small_count_mask from 
-- (continuation) Small-count masking for fource_DiagProcMed and the race
-- tables, -999 protection of age/sex combinations, and small-count deletes.
-- NOTE(review): -999 appears to be a "value not reported" sentinel that the
-- CASE expressions deliberately preserve — confirm against the 4CE spec.
fource_config where rownum=1) then -99 else pts_all_dayN14toN1 end), pts_all_day0to29 = (case when pts_all_day0to29<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_day0to29 end), pts_all_day30to89 = (case when pts_all_day30to89<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_day30to89 end), pts_all_day30plus = (case when pts_all_day30plus<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_day30plus end), pts_all_day90plus = (case when pts_all_day90plus<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_day90plus end), pts_all_1st_day0to29 = (case when pts_all_1st_day0to29<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_1st_day0to29 end), pts_all_1st_day30plus = (case when pts_all_1st_day30plus<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_1st_day30plus end), pts_all_1st_day90plus = (case when pts_all_1st_day90plus<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all_1st_day90plus end), pts_ever_severe_before_adm = (case when pts_ever_severe_before_adm<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_before_adm end), pts_ever_severe_since_adm = (case when pts_ever_severe_since_adm<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_since_adm end), pts_ever_severe_dayN14toN1 = (case when pts_ever_severe_dayN14toN1<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_dayN14toN1 end), pts_ever_severe_day0to29 = (case when pts_ever_severe_day0to29<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_day0to29 end), pts_ever_severe_day30to89 = (case when 
pts_ever_severe_day30to89<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_day30to89 end), pts_ever_severe_day30plus = (case when pts_ever_severe_day30plus<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_day30plus end), pts_ever_severe_day90plus = (case when pts_ever_severe_day90plus<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_day90plus end), pts_ever_severe_1st_day0to29 = (case when pts_ever_severe_1st_day0to29<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_1st_day0to29 end), pts_ever_severe_1st_day30plus = (case when pts_ever_severe_1st_day30plus<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_1st_day30plus end), pts_ever_severe_1st_day90plus = (case when pts_ever_severe_1st_day90plus<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe_1st_day90plus end) where (select obfuscation_small_count_mask from fource_config where rownum=1) > 0; update fource_RaceByLocalCode set pts_all = (case when pts_all<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all end), pts_ever_severe = (case when pts_ever_severe<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe end) where (select obfuscation_small_count_mask from fource_config where rownum=1) > 0; update fource_RaceBy4CECode set pts_all = (case when pts_all<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_all end), pts_ever_severe = (case when pts_ever_severe<(select obfuscation_small_count_mask from fource_config where rownum=1) then -99 else pts_ever_severe end) where (select obfuscation_small_count_mask from fource_config where rownum=1) > 0; 
-- When obfuscation_agesex = 1, only the marginal "all" rows keep real values;
-- every specific age_group x sex combination is replaced with -999.
-------------------------------------------------------------------------------- -- To protect obfuscated age and sex breakdowns, set combinations and -- the total count to -999. -------------------------------------------------------------------------------- update fource_AgeSex set pts_all = -999, pts_ever_severe = -999, mean_age = -999 where (age_group<>'all') and (sex<>'all') and (select obfuscation_agesex from fource_config where rownum = 1) = 1; -------------------------------------------------------------------------------- -- Delete small counts. -------------------------------------------------------------------------------- delete from fource_DailyCounts where cumulative_pts_all<(select obfuscation_small_count_delete from fource_config where rownum = 1) and (select obfuscation_small_count_delete from fource_config where rownum = 1) > 0; delete from fource_ClinicalCourse where pts_all_in_hosp<(select obfuscation_small_count_delete from fource_config where rownum = 1) and pts_all_dead<(select obfuscation_small_count_delete from fource_config where rownum = 1) and pts_severe_by_this_day<(select obfuscation_small_count_delete from fource_config where rownum = 1) and (select obfuscation_small_count_delete from fource_config where rownum = 1) > 0; delete from fource_Labs where pts_all<(select obfuscation_small_count_delete from fource_config where rownum = 1) and (select obfuscation_small_count_delete from fource_config where rownum = 1) > 0; delete from fource_DiagProcMed where pts_all_before_adm<(select obfuscation_small_count_delete from fource_config where rownum = 1) and pts_all_since_adm<(select obfuscation_small_count_delete from fource_config where rownum = 1) and pts_all_dayN14toN1<(select obfuscation_small_count_delete from fource_config where rownum = 1) and (select obfuscation_small_count_delete from fource_config where rownum = 1) > 0; --Do not delete small count rows from Age, Sex, and Race tables. 
--We want to know the rows in the tables, even if the counts are masked. --delete from fource_AgeSex where pts_all<(select obfuscation_small_count_delete from fource_config where rownum = 1) --delete from fource_RaceByLocalCode where pts_all<@obfuscation_small_count_delete --delete from fource_RaceBy4CECode where pts_all<@obfuscation_small_count_delete --############################################################################## --############################################################################## --############################################################################## --############################################################################## --### --### Finish up --### --############################################################################## --############################################################################## --############################################################################## --############################################################################## -------------------------------------------------------------------------------- -- Delete cohorts that should not be included in Phase2 patient level files -------------------------------------------------------------------------------- --Phase 2 patient level tables delete from fource_LocalPatientClinicalCourse where cohort in (select cohort from fource_cohort_config where include_in_phase2=0); delete from fource_LocalPatientSummary where cohort in (select cohort from fource_cohort_config where include_in_phase2=0); delete from fource_LocalPatientObservations where cohort in (select cohort from fource_cohort_config where include_in_phase2=0); delete from fource_LocalPatientRace where cohort in (select cohort from fource_cohort_config where include_in_phase2=0); -------------------------------------------------------------------------------- -- Remove rows where all values are zeros to reduce the size of the files 
-- Drop all-zero patient-day rows (file size), then replace each patient_num
-- with a sequential study_num via fource_LocalPatientMapping.
-- Mapping rows are only inserted when replace_patient_num = 1, so when it is
-- 0 the MERGE statements below match nothing and patient_num is unchanged.
-- NOTE(review): study_num is assigned with "rownum" over a DISTINCT subquery,
-- so the patient -> study_num assignment is arbitrary (not reproducible
-- across runs) — presumably intentional for de-identification; confirm.
-------------------------------------------------------------------------------- delete from fource_LocalPatientClinicalCourse where in_hospital=0 and severe=0 and in_icu=0 and dead=0; -------------------------------------------------------------------------------- -- Replace the patient_num with a random study_num integer Phase2 tables -- if replace_patient_num is set to 0 this code won't do anything so you can comment out -------------------------------------------------------------------------------- create table fource_LocalPatientMapping ( siteid varchar(50) not null, patient_num int not null, study_num int not null ); alter table fource_LocalPatientMapping add primary key (patient_num, study_num, siteid); --Create new patient_nums and a mapping table insert into fource_LocalPatientMapping (siteid, patient_num, study_num) select (select siteid from fource_config where rownum = 1) siteid, patient_num, rownum from ( select distinct patient_num from fource_LocalPatientSummary ) t where (select replace_patient_num from fource_config where rownum = 1) = 1; ALTER TABLE fource_LocalPatientClinicalCourse ADD patient_num_orig int; update fource_LocalPatientClinicalCourse set patient_num_orig = patient_num; merge into fource_LocalPatientClinicalCourse t using (select patient_num, study_num from fource_LocalPatientMapping where (select replace_patient_num from fource_config where rownum = 1) = 1) m on (t.patient_num_orig = m.patient_num) when matched then update set patient_num 
= m.study_num; ALTER TABLE fource_LocalPatientSummary drop column patient_num_orig; commit; ALTER TABLE fource_LocalPatientObservations ADD patient_num_orig int; update fource_LocalPatientObservations set patient_num_orig = patient_num; merge into fource_LocalPatientObservations t using (select patient_num, study_num from fource_LocalPatientMapping where (select replace_patient_num from fource_config where rownum = 1) = 1) m on (t.patient_num_orig = m.patient_num) when matched then update set patient_num = m.study_num; ALTER TABLE fource_LocalPatientObservations drop column patient_num_orig; commit; ALTER TABLE fource_LocalPatientRace ADD patient_num_orig int; update fource_LocalPatientRace set patient_num_orig = patient_num; merge into fource_LocalPatientRace t using (select patient_num, study_num from fource_LocalPatientMapping where (select replace_patient_num from fource_config where rownum = 1) = 1) m on (t.patient_num_orig = m.patient_num) when matched then update set patient_num = m.study_num; ALTER TABLE fource_LocalPatientRace drop column patient_num_orig; commit; -- Else map existing patient_num to itself insert into fource_LocalPatientMapping (siteid, patient_num, study_num) select distinct (select siteid from fource_config where rownum = 1) siteid, patient_num, patient_num from fource_LocalPatientSummary where (select replace_patient_num from fource_config where rownum = 1) = 0; commit; /* Not necessary - already done. Oracle did not like the '' in the above statements, so the site id was set as the tables were being built. -------------------------------------------------------------------------------- -- Set the siteid to a unique value for your institution. -- * Make sure you are not using another institution's siteid. -- * The siteid must be no more than 20 letters or numbers. -- * It must start with a letter. -- * It cannot have any blank spaces or special characters. 
-------------------------------------------------------------------------------- --Phase 2 patient level tables update fource_LocalPatientClinicalCourse set siteid = (select siteid from fource_config) update fource_LocalPatientSummary set siteid = (select siteid from fource_config) update fource_LocalPatientObservations set siteid = (select siteid from fource_config) update fource_LocalPatientRace set siteid = (select siteid from fource_config) update fource_LocalPatientMapping set siteid = (select siteid from fource_config) --Phase 2 aggregate count tables update fource_LocalCohorts set siteid = (select siteid from fource_config) update fource_LocalDailyCounts set siteid = (select siteid from fource_config) update fource_LocalClinicalCourse set siteid = (select siteid from fource_config) update fource_LocalAgeSex set siteid = (select siteid from fource_config) update fource_LocalLabs set siteid = (select siteid from fource_config) update fource_LocalDiagProcMed set siteid = (select siteid from fource_config) update fource_LocalRaceByLocalCode set siteid = (select siteid from fource_config) update fource_LocalRaceBy4CECode set siteid = (select siteid from fource_config) --Phase 1 aggregate count tables update fource_Cohorts set siteid = (select siteid from fource_config) update fource_DailyCounts set siteid = (select siteid from fource_config) update fource_ClinicalCourse set siteid = (select siteid from fource_config) update fource_AgeSex set siteid = (select siteid from fource_config) update fource_Labs set siteid = (select siteid from fource_config) update fource_DiagProcMed set siteid = (select siteid from fource_config) update fource_RaceByLocalCode set siteid = (select siteid from fource_config) update fource_RaceBy4CECode set siteid = (select siteid from fource_config) update fource_LabCodes set siteid = (select siteid from fource_config) */ --############################################################################## 
-- Output option #1: return every Phase 1 (obfuscated aggregate) and Phase 2
-- (local aggregate + patient-level) table as an ordered result set.
-- Each SELECT is gated by output_phase1_as_columns / output_phase2_as_columns
-- in fource_config, so the whole section is a no-op when those flags are 0.
--############################################################################## --############################################################################## --############################################################################## --### --### Output results --### --############################################################################## --############################################################################## --############################################################################## --############################################################################## --****************************************************************************** --****************************************************************************** --*** OPTION #1: View the data as tables. --*** Make sure everything looks reasonable. --*** Copy into Excel, convert dates into YYYY-MM-DD format, save in csv format. --****************************************************************************** --****************************************************************************** --Phase 1 obfuscated aggregate files select * from fource_DailyCounts where (select output_phase1_as_columns from fource_config where rownum=1) = 1 order by cohort, calendar_date ; select * from fource_ClinicalCourse where (select output_phase1_as_columns from fource_config where rownum=1) = 1 order by cohort, days_since_admission; select * from fource_AgeSex where (select output_phase1_as_columns from fource_config where rownum=1) = 1 order by cohort, age_group, sex; select * from fource_Labs where (select output_phase1_as_columns from fource_config where rownum=1) = 1 order by cohort, loinc, days_since_admission; select * from fource_DiagProcMed where (select output_phase1_as_columns from fource_config where rownum=1) = 1 order by cohort, concept_type, concept_code; select * from fource_RaceByLocalCode where (select output_phase1_as_columns from fource_config where rownum=1) = 1 order by 
cohort, race_local_code; select * from fource_RaceBy4CECode where (select output_phase1_as_columns from fource_config where rownum=1) = 1 order by cohort, race_4ce; select * from fource_LabCodes where (select output_phase1_as_columns from fource_config where rownum=1) = 1 order by fource_loinc, local_lab_code, local_lab_units; --Phase 2 non-obfuscated local aggregate files select * from fource_LocalDailyCounts where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, calendar_date; select * from fource_LocalClinicalCourse where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, days_since_admission; select * from fource_LocalAgeSex where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, age_group, sex; select * from fource_LocalLabs where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, loinc, days_since_admission; select * from fource_LocalDiagProcMed where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, concept_type, concept_code; select * from fource_LocalRaceByLocalCode where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, race_local_code; select * from fource_LocalRaceBy4CECode where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, race_4ce; --Phase 2 patient-level files select * from fource_LocalPatientClinicalCourse where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, patient_num, days_since_admission; select * from fource_LocalPatientSummary where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, patient_num; select * from fource_LocalPatientObservations where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, patient_num, days_since_admission, concept_type, 
concept_code; select * from fource_LocalPatientRace where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by cohort, patient_num, race_local_code; select * from fource_LocalPatientMapping where (select output_phase2_as_columns from fource_config where rownum=1) = 1 order by patient_num; --****************************************************************************** --****************************************************************************** --*** OPTION #2: View the data as csv strings. --*** Replace @exportFilePath with path to the directory wher you want to dump your files --*** and then run the block it will export the csv files using the spool function. --*** It may be easier to separate into its own script --*** If using SQLDeveloper default params will display only a subset of the file but the entire data set will be spooled --*** to the appropriate file --*** If you are in sqldeveloper and run this as a run all it will work --*** however if you try to run it as a highlighted block it will not spool properly --*** Copy and paste to a text file, save it FileName.csv. --*** Make sure it is not saved as fource_FileName.csv. --*** Make sure it is not saved as FileName.csv.txt. 
--******************************************************************************
--******************************************************************************
-- OPTION #2 (continued): dump each fource_* result table to a CSV file.
--
-- SQL*Plus settings: suppress page headings, command echo, row-count feedback,
-- and terminal output so each spooled file contains only the CSV strings.
set pagesize 0
set echo off
set feedback off
set term off

-- Every export below follows the same three-part UNION ALL pattern:
--   z = 0       : the CSV header line
--   z = 1..n    : one CSV-formatted string per source row; row_number() orders
--                 the rows by the table's natural key so output is stable
--   z = 9999999 : a trailing empty line
-- Columns are cast to varchar2(2000) and joined with ',' separators; dates are
-- rendered with to_char(..., 'YYYY-MM-DD') so output is NLS-independent.
-- NOTE(review): the spool path c:\Devtools\NCATS\covid\4cescripts\ is
-- site-specific; change it to your own export directory before running.

-- Phase 1 obfuscated aggregate file: DailyCounts.csv
spool c:\Devtools\NCATS\covid\4cescripts\DailyCounts.csv
select s DailyCountsCSV from (
    select 0 z, 'siteid,cohort,calendar_date,cumulative_pts_all,cumulative_pts_icu,cumulative_pts_dead,cumulative_pts_severe,cumulative_pts_severe_icu,cumulative_pts_severe_dead,pts_in_hosp_on_this_date,pts_in_icu_on_this_date,pts_severe_in_hosp_on_date,pts_severe_in_icu_on_date' s from dual
    union all
    select row_number() over (order by cohort,calendar_date) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || to_char(calendar_date, 'YYYY-MM-DD')
        || ',' || cast(cumulative_pts_all as varchar2(2000))
        || ',' || cast(cumulative_pts_icu as varchar2(2000))
        || ',' || cast(cumulative_pts_dead as varchar2(2000))
        || ',' || cast(cumulative_pts_severe as varchar2(2000))
        || ',' || cast(cumulative_pts_severe_icu as varchar2(2000))
        || ',' || cast(cumulative_pts_severe_dead as varchar2(2000))
        || ',' || cast(pts_in_hosp_on_this_date as varchar2(2000))
        || ',' || cast(pts_in_icu_on_this_date as varchar2(2000))
        || ',' || cast(pts_severe_in_hosp_on_date as varchar2(2000))
        || ',' || cast(pts_severe_in_icu_on_date as varchar2(2000))
    from fource_DailyCounts
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 1 obfuscated aggregate file: ClinicalCourse.csv
spool c:\Devtools\NCATS\covid\4cescripts\ClinicalCourse.csv
select s ClinicalCourseCSV from (
    select 0 z, 'siteid,cohort,days_since_admission,pts_all_in_hosp,pts_all_in_icu,pts_all_dead,pts_severe_by_this_day,pts_ever_severe_in_hosp,pts_ever_severe_in_icu,pts_ever_severe_dead' s from dual
    union all
    select row_number() over (order by cohort,days_since_admission) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(days_since_admission as varchar2(2000))
        || ',' || cast(pts_all_in_hosp as varchar2(2000))
        || ',' || cast(pts_all_in_icu as varchar2(2000))
        || ',' || cast(pts_all_dead as varchar2(2000))
        || ',' || cast(pts_severe_by_this_day as varchar2(2000))
        || ',' || cast(pts_ever_severe_in_hosp as varchar2(2000))
        || ',' || cast(pts_ever_severe_in_icu as varchar2(2000))
        || ',' || cast(pts_ever_severe_dead as varchar2(2000))
    from fource_ClinicalCourse
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 1 obfuscated aggregate file: AgeSex.csv
spool c:\Devtools\NCATS\covid\4cescripts\AgeSex.csv
select s AgeSexCSV from (
    select 0 z, 'siteid,cohort,age_group,mean_age,sex,pts_all,pts_ever_severe' s from dual
    union all
    select row_number() over (order by cohort,age_group,sex) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(age_group as varchar2(2000))
        || ',' || cast(mean_age as varchar2(2000))
        || ',' || cast(sex as varchar2(2000))
        || ',' || cast(pts_all as varchar2(2000))
        || ',' || cast(pts_ever_severe as varchar2(2000))
    from fource_AgeSex
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 1 obfuscated aggregate file: Labs.csv
spool c:\Devtools\NCATS\covid\4cescripts\Labs.csv
select s LabsCSV from (
    select 0 z, 'siteid,cohort,loinc,days_since_admission,pts_all,mean_value_all,stdev_value_all,mean_log_value_all,stdev_log_value_all,pts_ever_severe,mean_value_ever_severe,stdev_value_ever_severe,mean_log_value_ever_severe,stdev_log_value_ever_severe,pts_never_severe,mean_value_never_severe,stdev_value_never_severe,mean_log_value_never_severe,stdev_log_value_never_severe' s from dual
    union all
    select row_number() over (order by cohort,loinc,days_since_admission) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(loinc as varchar2(2000))
        || ',' || cast(days_since_admission as varchar2(2000))
        || ',' || cast(pts_all as varchar2(2000))
        || ',' || cast(mean_value_all as varchar2(2000))
        || ',' || cast(stdev_value_all as varchar2(2000))
        || ',' || cast(mean_log_value_all as varchar2(2000))
        || ',' || cast(stdev_log_value_all as varchar2(2000))
        || ',' || cast(pts_ever_severe as varchar2(2000))
        || ',' || cast(mean_value_ever_severe as varchar2(2000))
        || ',' || cast(stdev_value_ever_severe as varchar2(2000))
        || ',' || cast(mean_log_value_ever_severe as varchar2(2000))
        || ',' || cast(stdev_log_value_ever_severe as varchar2(2000))
        || ',' || cast(pts_never_severe as varchar2(2000))
        || ',' || cast(mean_value_never_severe as varchar2(2000))
        || ',' || cast(stdev_value_never_severe as varchar2(2000))
        || ',' || cast(mean_log_value_never_severe as varchar2(2000))
        || ',' || cast(stdev_log_value_never_severe as varchar2(2000))
    from fource_Labs
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 1 obfuscated aggregate file: DiagProcMed.csv
spool c:\Devtools\NCATS\covid\4cescripts\DiagProcMed.csv
select s DiagProcMedCSV from (
    select 0 z, 'siteid,cohort,concept_type,concept_code,pts_all_before_adm,pts_all_since_adm,pts_all_dayN14toN1,pts_all_day0to29,pts_all_day30to89,pts_all_day30plus,pts_all_day90plus,pts_all_1st_day0to29,pts_all_1st_day30plus,pts_all_1st_day90plus,pts_ever_severe_before_adm,pts_ever_severe_since_adm,pts_ever_severe_dayN14toN1,pts_ever_severe_day0to29,pts_ever_severe_day30to89,pts_ever_severe_day30plus,pts_ever_severe_day90plus,pts_ever_severe_1st_day0to29,pts_ever_severe_1st_day30plus,pts_ever_severe_1st_day90plus' s from dual
    union all
    select row_number() over (order by cohort,concept_type,concept_code) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(concept_type as varchar2(2000))
        || ',' || cast(concept_code as varchar2(2000))
        || ',' || cast(pts_all_before_adm as varchar2(2000))
        || ',' || cast(pts_all_since_adm as varchar2(2000))
        || ',' || cast(pts_all_dayN14toN1 as varchar2(2000))
        || ',' || cast(pts_all_day0to29 as varchar2(2000))
        || ',' || cast(pts_all_day30to89 as varchar2(2000))
        || ',' || cast(pts_all_day30plus as varchar2(2000))
        || ',' || cast(pts_all_day90plus as varchar2(2000))
        || ',' || cast(pts_all_1st_day0to29 as varchar2(2000))
        || ',' || cast(pts_all_1st_day30plus as varchar2(2000))
        || ',' || cast(pts_all_1st_day90plus as varchar2(2000))
        || ',' || cast(pts_ever_severe_before_adm as varchar2(2000))
        || ',' || cast(pts_ever_severe_since_adm as varchar2(2000))
        || ',' || cast(pts_ever_severe_dayN14toN1 as varchar2(2000))
        || ',' || cast(pts_ever_severe_day0to29 as varchar2(2000))
        || ',' || cast(pts_ever_severe_day30to89 as varchar2(2000))
        || ',' || cast(pts_ever_severe_day30plus as varchar2(2000))
        || ',' || cast(pts_ever_severe_day90plus as varchar2(2000))
        || ',' || cast(pts_ever_severe_1st_day0to29 as varchar2(2000))
        || ',' || cast(pts_ever_severe_1st_day30plus as varchar2(2000))
        || ',' || cast(pts_ever_severe_1st_day90plus as varchar2(2000))
    from fource_DiagProcMed
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 1 obfuscated aggregate file: RaceByLocalCode.csv
spool c:\Devtools\NCATS\covid\4cescripts\RaceByLocalCode.csv
select s RaceByLocalCodeCSV from (
    select 0 z, 'siteid,cohort,race_local_code,race_4ce,pts_all,pts_ever_severe' s from dual
    union all
    select row_number() over (order by cohort,race_local_code) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(race_local_code as varchar2(2000))
        || ',' || cast(race_4ce as varchar2(2000))
        || ',' || cast(pts_all as varchar2(2000))
        || ',' || cast(pts_ever_severe as varchar2(2000))
    from fource_RaceByLocalCode
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 1 obfuscated aggregate file: RaceBy4CECode.csv
spool c:\Devtools\NCATS\covid\4cescripts\RaceBy4CECode.csv
select s RaceBy4CECodeCSV from (
    select 0 z, 'siteid,cohort,race_4ce,pts_all,pts_ever_severe' s from dual
    union all
    select row_number() over (order by cohort,race_4ce) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(race_4ce as varchar2(2000))
        || ',' || cast(pts_all as varchar2(2000))
        || ',' || cast(pts_ever_severe as varchar2(2000))
    from fource_RaceBy4CECode
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 1 obfuscated aggregate file: LabCodes.csv (local-to-LOINC lab mapping)
spool c:\Devtools\NCATS\covid\4cescripts\LabCodes.csv
select s LabCodesCSV from (
    select 0 z, 'siteid,fource_loinc,fource_lab_units,fource_lab_name,scale_factor,local_lab_code,local_lab_units,local_lab_name,notes' s from dual
    union all
    select row_number() over (order by fource_loinc,local_lab_code,local_lab_units) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(fource_loinc as varchar2(2000))
        || ',' || cast(fource_lab_units as varchar2(2000))
        || ',' || cast(fource_lab_name as varchar2(2000))
        || ',' || cast(scale_factor as varchar2(2000))
        || ',' || cast(local_lab_code as varchar2(2000))
        || ',' || cast(local_lab_units as varchar2(2000))
        || ',' || cast(local_lab_name as varchar2(2000))
        || ',' || cast(notes as varchar2(2000))
    from fource_LabCodes
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 non-obfuscated local aggregate file: LocalDailyCounts.csv
spool c:\Devtools\NCATS\covid\4cescripts\LocalDailyCounts.csv
select s LocalDailyCountsCSV from (
    select 0 z, 'siteid,cohort,calendar_date,cumulative_pts_all,cumulative_pts_icu,cumulative_pts_dead,cumulative_pts_severe,cumulative_pts_severe_icu,cumulative_pts_severe_dead,pts_in_hosp_on_this_date,pts_in_icu_on_this_date,pts_severe_in_hosp_on_date,pts_severe_in_icu_on_date' s from dual
    union all
    select row_number() over (order by cohort,calendar_date) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || to_char(calendar_date, 'YYYY-MM-DD')
        || ',' || cast(cumulative_pts_all as varchar2(2000))
        || ',' || cast(cumulative_pts_icu as varchar2(2000))
        || ',' || cast(cumulative_pts_dead as varchar2(2000))
        || ',' || cast(cumulative_pts_severe as varchar2(2000))
        || ',' || cast(cumulative_pts_severe_icu as varchar2(2000))
        || ',' || cast(cumulative_pts_severe_dead as varchar2(2000))
        || ',' || cast(pts_in_hosp_on_this_date as varchar2(2000))
        || ',' || cast(pts_in_icu_on_this_date as varchar2(2000))
        || ',' || cast(pts_severe_in_hosp_on_date as varchar2(2000))
        || ',' || cast(pts_severe_in_icu_on_date as varchar2(2000))
    from fource_LocalDailyCounts
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 non-obfuscated local aggregate file: LocalClinicalCourse.csv
spool c:\Devtools\NCATS\covid\4cescripts\LocalClinicalCourse.csv
select s LocalClinicalCourseCSV from (
    select 0 z, 'siteid,cohort,days_since_admission,pts_all_in_hosp,pts_all_in_icu,pts_all_dead,pts_severe_by_this_day,pts_ever_severe_in_hosp,pts_ever_severe_in_icu,pts_ever_severe_dead' s from dual
    union all
    select row_number() over (order by cohort,days_since_admission) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(days_since_admission as varchar2(2000))
        || ',' || cast(pts_all_in_hosp as varchar2(2000))
        || ',' || cast(pts_all_in_icu as varchar2(2000))
        || ',' || cast(pts_all_dead as varchar2(2000))
        || ',' || cast(pts_severe_by_this_day as varchar2(2000))
        || ',' || cast(pts_ever_severe_in_hosp as varchar2(2000))
        || ',' || cast(pts_ever_severe_in_icu as varchar2(2000))
        || ',' || cast(pts_ever_severe_dead as varchar2(2000))
    from fource_LocalClinicalCourse
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 non-obfuscated local aggregate file: LocalAgeSex.csv
spool c:\Devtools\NCATS\covid\4cescripts\LocalAgeSex.csv
select s LocalAgeSexCSV from (
    select 0 z, 'siteid,cohort,age_group,mean_age,sex,pts_all,pts_ever_severe' s from dual
    union all
    select row_number() over (order by cohort,age_group,sex) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(age_group as varchar2(2000))
        || ',' || cast(mean_age as varchar2(2000))
        || ',' || cast(sex as varchar2(2000))
        || ',' || cast(pts_all as varchar2(2000))
        || ',' || cast(pts_ever_severe as varchar2(2000))
    from fource_LocalAgeSex
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 non-obfuscated local aggregate file: LocalLabs.csv
spool c:\Devtools\NCATS\covid\4cescripts\LocalLabs.csv
select s LocalLabsCSV from (
    select 0 z, 'siteid,cohort,loinc,days_since_admission,pts_all,mean_value_all,stdev_value_all,mean_log_value_all,stdev_log_value_all,pts_ever_severe,mean_value_ever_severe,stdev_value_ever_severe,mean_log_value_ever_severe,stdev_log_value_ever_severe,pts_never_severe,mean_value_never_severe,stdev_value_never_severe,mean_log_value_never_severe,stdev_log_value_never_severe' s from dual
    union all
    select row_number() over (order by cohort,loinc,days_since_admission) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(loinc as varchar2(2000))
        || ',' || cast(days_since_admission as varchar2(2000))
        || ',' || cast(pts_all as varchar2(2000))
        || ',' || cast(mean_value_all as varchar2(2000))
        || ',' || cast(stdev_value_all as varchar2(2000))
        || ',' || cast(mean_log_value_all as varchar2(2000))
        || ',' || cast(stdev_log_value_all as varchar2(2000))
        || ',' || cast(pts_ever_severe as varchar2(2000))
        || ',' || cast(mean_value_ever_severe as varchar2(2000))
        || ',' || cast(stdev_value_ever_severe as varchar2(2000))
        || ',' || cast(mean_log_value_ever_severe as varchar2(2000))
        || ',' || cast(stdev_log_value_ever_severe as varchar2(2000))
        || ',' || cast(pts_never_severe as varchar2(2000))
        || ',' || cast(mean_value_never_severe as varchar2(2000))
        || ',' || cast(stdev_value_never_severe as varchar2(2000))
        || ',' || cast(mean_log_value_never_severe as varchar2(2000))
        || ',' || cast(stdev_log_value_never_severe as varchar2(2000))
    from fource_LocalLabs
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 non-obfuscated local aggregate file: LocalDiagProcMed.csv
-- (the matching "spool off" immediately follows this statement in the file)
spool c:\Devtools\NCATS\covid\4cescripts\LocalDiagProcMed.csv
select s LocalDiagProcMedCSV from (
    select 0 z, 'siteid,cohort,concept_type,concept_code,pts_all_before_adm,pts_all_since_adm,pts_all_dayN14toN1,pts_all_day0to29,pts_all_day30to89,pts_all_day30plus,pts_all_day90plus,pts_all_1st_day0to29,pts_all_1st_day30plus,pts_all_1st_day90plus,pts_ever_severe_before_adm,pts_ever_severe_since_adm,pts_ever_severe_dayN14toN1,pts_ever_severe_day0to29,pts_ever_severe_day30to89,pts_ever_severe_day30plus,pts_ever_severe_day90plus,pts_ever_severe_1st_day0to29,pts_ever_severe_1st_day30plus,pts_ever_severe_1st_day90plus' s from dual
    union all
    select row_number() over (order by cohort,concept_type,concept_code) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(concept_type as varchar2(2000))
        || ',' || cast(concept_code as varchar2(2000))
        || ',' || cast(pts_all_before_adm as varchar2(2000))
        || ',' || cast(pts_all_since_adm as varchar2(2000))
        || ',' || cast(pts_all_dayN14toN1 as varchar2(2000))
        || ',' || cast(pts_all_day0to29 as varchar2(2000))
        || ',' || cast(pts_all_day30to89 as varchar2(2000))
        || ',' || cast(pts_all_day30plus as varchar2(2000))
        || ',' || cast(pts_all_day90plus as varchar2(2000))
        || ',' || cast(pts_all_1st_day0to29 as varchar2(2000))
        || ',' || cast(pts_all_1st_day30plus as varchar2(2000))
        || ',' || cast(pts_all_1st_day90plus as varchar2(2000))
        || ',' || cast(pts_ever_severe_before_adm as varchar2(2000))
        || ',' || cast(pts_ever_severe_since_adm as varchar2(2000))
        || ',' || cast(pts_ever_severe_dayN14toN1 as varchar2(2000))
        || ',' || cast(pts_ever_severe_day0to29 as varchar2(2000))
        || ',' || cast(pts_ever_severe_day30to89 as varchar2(2000))
        || ',' || cast(pts_ever_severe_day30plus as varchar2(2000))
        || ',' || cast(pts_ever_severe_day90plus as varchar2(2000))
        || ',' || cast(pts_ever_severe_1st_day0to29 as varchar2(2000))
        || ',' || cast(pts_ever_severe_1st_day30plus as varchar2(2000))
        || ',' || cast(pts_ever_severe_1st_day90plus as varchar2(2000))
    from fource_LocalDiagProcMed
    union all select 9999999 z, '' from dual
) t order by z;
-- Closes the LocalDiagProcMed spool opened by the preceding statement.
spool off

-- Phase 2 non-obfuscated local aggregate file: LocalRaceByLocalCode.csv
-- Same UNION ALL pattern as the other exports: z=0 header, z=1..n data rows,
-- z=9999999 trailing blank line.
spool c:\Devtools\NCATS\covid\4cescripts\LocalRaceByLocalCode.csv
select s LocalRaceByLocalCodeCSV from (
    select 0 z, 'siteid,cohort,race_local_code,race_4ce,pts_all,pts_ever_severe' s from dual
    union all
    select row_number() over (order by cohort,race_local_code) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(race_local_code as varchar2(2000))
        || ',' || cast(race_4ce as varchar2(2000))
        || ',' || cast(pts_all as varchar2(2000))
        || ',' || cast(pts_ever_severe as varchar2(2000))
    from fource_LocalRaceByLocalCode
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 non-obfuscated local aggregate file: LocalRaceBy4CECode.csv
spool c:\Devtools\NCATS\covid\4cescripts\LocalRaceBy4CECode.csv
select s LocalRaceBy4CECodeCSV from (
    select 0 z, 'siteid,cohort,race_4ce,pts_all,pts_ever_severe' s from dual
    union all
    select row_number() over (order by cohort,race_4ce) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(race_4ce as varchar2(2000))
        || ',' || cast(pts_all as varchar2(2000))
        || ',' || cast(pts_ever_severe as varchar2(2000))
    from fource_LocalRaceBy4CECode
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 patient-level file: LocalPatientClinicalCourse.csv
spool c:\Devtools\NCATS\covid\4cescripts\LocalPatientClinicalCourse.csv
select s LocalPatientClinicalCourseCSV from (
    select 0 z, 'siteid,cohort,patient_num,days_since_admission,calendar_date,in_hospital,severe,in_icu,dead' s from dual
    union all
    select row_number() over (order by cohort,patient_num,days_since_admission) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(patient_num as varchar2(2000))
        || ',' || cast(days_since_admission as varchar2(2000))
        -- FIX: was cast(calendar_date as varchar2(2000)), which renders the
        -- date in the session's NLS_DATE_FORMAT (often DD-MON-RR). Every other
        -- date column in these exports uses to_char(..., 'YYYY-MM-DD'); use
        -- the same ISO format here so the CSV is NLS-independent.
        || ',' || to_char(calendar_date, 'YYYY-MM-DD')
        || ',' || cast(in_hospital as varchar2(2000))
        || ',' || cast(severe as varchar2(2000))
        || ',' || cast(in_icu as varchar2(2000))
        || ',' || cast(dead as varchar2(2000))
    from fource_LocalPatientClinicalCourse
    union all select 9999999 z, '' from dual
) t order by z;
spool off
-- Phase 2 patient-level file: LocalPatientSummary.csv
-- Same UNION ALL pattern as the other exports: z=0 header, z=1..n data rows,
-- z=9999999 trailing blank line.
spool c:\Devtools\NCATS\covid\4cescripts\LocalPatientSummary.csv
select s LocalPatientSummaryCSV from (
    select 0 z, 'siteid,cohort,patient_num,admission_date,source_data_updated_date,days_since_admission,last_discharge_date,still_in_hospital,severe_date,severe,icu_date,icu,death_date,dead,age_group,age,sex' s from dual
    union all
    select row_number() over (order by cohort,patient_num) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(patient_num as varchar2(2000))
        -- FIX: was cast(admission_date as varchar2(2000)), which renders the
        -- date in the session's NLS_DATE_FORMAT (often DD-MON-RR). All the
        -- other date columns in this very row use to_char(..., 'YYYY-MM-DD');
        -- use the same ISO format so the CSV is consistent and NLS-independent.
        || ',' || to_char(admission_date, 'YYYY-MM-DD')
        || ',' || to_char(source_data_updated_date, 'YYYY-MM-DD')
        || ',' || cast(days_since_admission as varchar2(2000))
        || ',' || to_char(last_discharge_date, 'YYYY-MM-DD')
        || ',' || cast(still_in_hospital as varchar2(2000))
        || ',' || to_char(severe_date, 'YYYY-MM-DD')
        || ',' || cast(severe as varchar2(2000))
        || ',' || to_char(icu_date, 'YYYY-MM-DD')
        || ',' || cast(icu as varchar2(2000))
        || ',' || to_char(death_date, 'YYYY-MM-DD')
        || ',' || cast(dead as varchar2(2000))
        || ',' || cast(age_group as varchar2(2000))
        || ',' || cast(age as varchar2(2000))
        || ',' || cast(sex as varchar2(2000))
    from fource_LocalPatientSummary
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 patient-level file: LocalPatientObservations.csv
spool c:\Devtools\NCATS\covid\4cescripts\LocalPatientObservations.csv
select s LocalPatientObservationsCSV from (
    select 0 z, 'siteid,cohort,patient_num,days_since_admission,concept_type,concept_code,value' s from dual
    union all
    select row_number() over (order by cohort,patient_num,days_since_admission,concept_type,concept_code) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(patient_num as varchar2(2000))
        || ',' || cast(days_since_admission as varchar2(2000))
        || ',' || cast(concept_type as varchar2(2000))
        || ',' || cast(concept_code as varchar2(2000))
        || ',' || cast(value as varchar2(2000))
    from fource_LocalPatientObservations
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 patient-level file: LocalPatientRace.csv
spool c:\Devtools\NCATS\covid\4cescripts\LocalPatientRace.csv
select s LocalPatientRaceCSV from (
    select 0 z, 'siteid,cohort,patient_num,race_local_code,race_4ce' s from dual
    union all
    select row_number() over (order by cohort,patient_num,race_local_code) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(cohort as varchar2(2000))
        || ',' || cast(patient_num as varchar2(2000))
        || ',' || cast(race_local_code as varchar2(2000))
        || ',' || cast(race_4ce as varchar2(2000))
    from fource_LocalPatientRace
    union all select 9999999 z, '' from dual
) t order by z;
spool off

-- Phase 2 patient-level file: LocalPatientMapping.csv
spool c:\Devtools\NCATS\covid\4cescripts\LocalPatientMapping.csv
select s LocalPatientMappingCSV from (
    select 0 z, 'siteid,patient_num,study_num' s from dual
    union all
    select row_number() over (order by patient_num) z,
        cast(siteid as varchar2(2000))
        || ',' || cast(patient_num as varchar2(2000))
        || ',' || cast(study_num as varchar2(2000))
    from fource_LocalPatientMapping
    union all select 9999999 z, '' from dual
) t order by z;
spool off

--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
--###
--### Cleanup to drop temp tables (optional)
--###
--##############################################################################
--##############################################################################
--##############################################################################
--##############################################################################
/*
-- Main configuration table
drop table fource_config
-- Code mapping tables
drop table fource_code_map
drop table fource_med_map
drop table fource_proc_map
drop table fource_lab_map
drop table fource_lab_units_facts
drop table fource_lab_map_report
-- Admissions, ICU visits, and deaths
drop table fource_admissions
drop table fource_icu
drop table fource_death
-- COVID tests, cohort definitions and patients
drop table fource_cohort_config
drop table fource_covid_tests
drop table fource_first_covid_tests
drop table fource_date_list
drop table fource_cohort_patients
-- List of patients and observations mapped to 4CE codes
drop table fource_patients
drop table fource_observations
-- Used to create the CSV formatted tables
drop table fource_file_csv
-- Phase 1 obfuscated aggregate files
drop table fource_DailyCounts
drop table fource_ClinicalCourse
drop table fource_AgeSex
drop table fource_Labs
drop table fource_DiagProcMed
drop table fource_RaceByLocalCode
drop table fource_RaceBy4CECode
drop table fource_LabCodes
-- Phase 2 non-obfuscated local aggregate files
drop table fource_LocalDailyCounts
drop table fource_LocalClinicalCourse
drop table fource_LocalAgeSex
drop table fource_LocalLabs
drop table fource_LocalDiagProcMed
drop table fource_LocalRaceByLocalCode
drop table fource_LocalRaceBy4CECode
-- Phase 2 patient-level files
drop table fource_LocalPatientSummary
drop table fource_LocalPatientClinicalCourse
drop table fource_LocalPatientObservations
drop table fource_LocalPatientRace
drop table fource_LocalPatientMapping
*/
/** Not necessary for ORACLE as there #temp tables are not supported
a block to delete all or some of the tables will replace this block
-- NOTE(review): everything below is leftover SQL Server (T-SQL) code from the
-- MSSQL version of this script; it is not executable in Oracle and is kept
-- inside this block comment for reference only.
-- Optional: Run the commented code below to generate SQL for previously saved tables, rather than the temp tables.
-- Replace "dbo_fource_" with the prefix you used to save the tables.
-- Copy and paste the SQL strings into a query window and run the queries.
select file_index, file_name, replace(file_sql,'fource_','dbo_FourCE_') file_sql from fource_file_csv order by file_index
--******************************************************************************
--******************************************************************************
--*** OPTION #3: Save the data as tables.
--*** Make sure everything looks reasonable.
--*** Export the tables to csv files.
--******************************************************************************
--******************************************************************************
/*
-- delete the truly temp tables
-- delete the tables that contain the Phase 1.2 and Phase 2.2 data set
--TO DO: Delete this code???
--below is irrelevant in oracle
if exists (select * from fource_config where save_phase1_as_columns=1)
begin
	-- Drop existing tables
	declare @SavePhase1AsTablesSQL nvarchar(max)
	select @SavePhase1AsTablesSQL = ''
		--Phase 1 obfuscated aggregate files
		|| 'if (select object_id(''' || save_phase1_as_prefix || 'DailyCounts'', ''U'') from fource_config) is not null drop table ' || save_phase1_as_prefix || 'DailyCounts;'
		|| 'if (select object_id(''' || save_phase1_as_prefix || 'ClinicalCourse'', ''U'') from fource_config) is not null drop table ' || save_phase1_as_prefix || 'ClinicalCourse;'
		|| 'if (select object_id(''' || save_phase1_as_prefix || 'AgeSex'', ''U'') from fource_config) is not null drop table ' || save_phase1_as_prefix || 'AgeSex;'
		|| 'if (select object_id(''' || save_phase1_as_prefix || 'Labs'', ''U'') from fource_config) is not null drop table ' || save_phase1_as_prefix || 'Labs;'
		|| 'if (select object_id(''' || save_phase1_as_prefix || 'DiagProcMed'', ''U'') from fource_config) is not null drop table ' || save_phase1_as_prefix || 'DiagProcMed;'
		|| 'if (select object_id(''' || save_phase1_as_prefix || 'RaceByLocalCode'', ''U'') from fource_config) is not null drop table ' || save_phase1_as_prefix || 'RaceByLocalCode;'
		|| 'if (select object_id(''' || save_phase1_as_prefix || 'RaceBy4CECode'', ''U'') from fource_config) is not null drop table ' || save_phase1_as_prefix || 'RaceBy4CECode;'
		|| 'if (select object_id(''' || save_phase1_as_prefix || 'LabCodes'', ''U'') from fource_config) is not null drop table ' || save_phase1_as_prefix || 'LabCodes;'
	from fource_config
	exec sp_executesql @SavePhase1AsTablesSQL
	-- Save new tables
	select @SavePhase1AsTablesSQL = ''
		--Phase 1 obfuscated aggregate files
		|| 'select * into ' || save_phase1_as_prefix || 'DailyCounts from fource_DailyCounts;'
		|| 'select * into ' || save_phase1_as_prefix || 'ClinicalCourse from fource_ClinicalCourse;'
		|| 'select * into ' || save_phase1_as_prefix || 'AgeSex from fource_AgeSex;'
		|| 'select * into ' || save_phase1_as_prefix || 'Labs from fource_Labs;'
		|| 'select * into ' || save_phase1_as_prefix || 'DiagProcMed from fource_DiagProcMed;'
		|| 'select * into ' || save_phase1_as_prefix || 'RaceByLocalCode from fource_RaceByLocalCode;'
		|| 'select * into ' || save_phase1_as_prefix || 'RaceBy4CECode from fource_RaceBy4CECode;'
		|| 'select * into ' || save_phase1_as_prefix || 'LabCodes from fource_LabCodes;'
		|| '; alter table ' || save_phase1_as_prefix || 'DailyCounts add primary key (cohort, calendar_date, siteid);'
		|| '; alter table ' || save_phase1_as_prefix || 'ClinicalCourse add primary key (cohort, days_since_admission, siteid);'
		|| '; alter table ' || save_phase1_as_prefix || 'AgeSex add primary key (cohort, age_group, sex, siteid);'
		|| '; alter table ' || save_phase1_as_prefix || 'Labs add primary key (cohort, loinc, days_since_admission, siteid);'
		|| '; alter table ' || save_phase1_as_prefix || 'DiagProcMed add primary key (cohort, concept_type, concept_code, siteid);'
		|| '; alter table ' || save_phase1_as_prefix || 'RaceByLocalCode add primary key (cohort, race_local_code, siteid);'
		|| '; alter table ' || save_phase1_as_prefix || 'RaceBy4CECode add primary key (cohort, race_4ce, siteid);'
		|| '; alter table ' || save_phase1_as_prefix || 'LabCodes add primary key (fource_loinc, local_lab_code, local_lab_units, siteid);'
	from fource_config
	exec sp_executesql @SavePhase1AsTablesSQL
end
--if exists (select * from fource_config where save_phase2_as_columns=1)
--begin
	-- Drop existing tables
	-- declare @SavePhase2AsTablesSQL nvarchar(max)
	select @SavePhase2AsTablesSQL = '';
	select ''
		--Phase 2 non-obfuscated local aggregate files
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalDailyCounts'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalDailyCounts;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalClinicalCourse'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalClinicalCourse;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalAgeSex'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalAgeSex;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalLabs'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalLabs;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalDiagProcMed'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalDiagProcMed;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalRaceByLocalCode'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalRaceByLocalCode;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalRaceBy4CECode'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalRaceBy4CECode;'
		--Phase 2 patient-level files
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalPatientSummary'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalPatientSummary;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalPatientClinicalCourse'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalPatientClinicalCourse;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalPatientObservations'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalPatientObservations;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalPatientRace'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalPatientRace;'
		|| 'if (select object_id(''' || save_phase2_as_prefix || 'LocalPatientMapping'', ''U'') from fource_config) is not null drop table ' || save_phase2_as_prefix || 'LocalPatientMapping;'
	from fource_config;
	exec sp_executesql @SavePhase2AsTablesSQL
	-- Save new tables
	select save_phase2_as_prefix from fource_config;
	select * into ' || save_phase2_as_prefix || 'LocalDailyCounts from fource_LocalDailyCounts;
	--If you want to save the Phase 2 data as tables run this block
	select @SavePhase2AsTablesSQL = ''
		--Phase 2 non-obfuscated local aggregate files
		|| 'select * into ' || save_phase2_as_prefix || 'LocalDailyCounts from fource_LocalDailyCounts;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalClinicalCourse from fource_LocalClinicalCourse;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalAgeSex from fource_LocalAgeSex;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalLabs from fource_LocalLabs;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalDiagProcMed from fource_LocalDiagProcMed;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalRaceByLocalCode from fource_LocalRaceByLocalCode;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalRaceBy4CECode from fource_LocalRaceBy4CECode;'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalDailyCounts add primary key (cohort, calendar_date, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalClinicalCourse add primary key (cohort, days_since_admission, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalAgeSex add primary key (cohort, age_group, sex, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalLabs add primary key (cohort, loinc, days_since_admission, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalDiagProcMed add primary key (cohort, concept_type, concept_code, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalRaceByLocalCode add primary key (cohort, race_local_code, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalRaceBy4CECode add primary key (cohort, race_4ce, siteid);'
		--Phase 2 patient-level files
		|| 'select * into ' || save_phase2_as_prefix || 'LocalPatientSummary from fource_LocalPatientSummary;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalPatientClinicalCourse from fource_LocalPatientClinicalCourse;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalPatientObservations from fource_LocalPatientObservations;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalPatientRace from fource_LocalPatientRace;'
		|| 'select * into ' || save_phase2_as_prefix || 'LocalPatientMapping from fource_LocalPatientMapping;'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalPatientClinicalCourse add primary key (cohort, patient_num, days_since_admission, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalPatientMapping add primary key (patient_num, study_num, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalPatientObservations add primary key (cohort, patient_num, days_since_admission, concept_type, concept_code, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalPatientRace add primary key (cohort, patient_num, race_local_code, siteid);'
		|| '; alter table ' || save_phase2_as_prefix || 'LocalPatientSummary add primary key (cohort, patient_num, siteid);'
	from fource_config
	exec sp_executesql @SavePhase2AsTablesSQL
end
*/
-- FIX: the line previously began with the bare token "<gh_stars>1-10"
-- (repository-scrape metadata), which is not valid SQL and would abort the
-- script; it is preserved below as a comment instead.
-- <gh_stars>1-10
-- MySQL DDL: actor table with an auto-increment surrogate primary key.
CREATE TABLE t_actor (
    id         int          NOT NULL AUTO_INCREMENT COMMENT 'ID',
    first_name varchar(255) NULL COMMENT 'first_name',
    last_name  varchar(255) NOT NULL COMMENT 'last_name',
    PRIMARY KEY (id)
);
-- Distinct source codes (with their descriptions) from the mapping table for a
-- fixed list of codes, each paired with the constant concept_id 1.
-- NOTE(review): the meaning of concept_id = 1 (presumably a placeholder /
-- unmapped marker) is not visible here -- confirm against the target vocabulary.
SELECT DISTINCT
    SOURCE_CODE,
    1 AS concept_id,
    SOURCE_CODE_DESCRIPTION
FROM SOURCE_TO_CONCEPT_MAP
WHERE SOURCE_CODE IN ('798.1', '761.6', '798', 'E913.1', '798.2', 'E978', '798.9', '798.0')
/*==============================================================*/
/* DBMS          : PostgreSQL - SI3P0                           */
/* Creation date : 21/07/2021 22:26:29                          */
/*                                                              */
/* Review note: the generated "create unique index <table>_pk"  */
/* statements duplicated the unique index that PostgreSQL       */
/* already builds for every PRIMARY KEY constraint; they have   */
/* been removed so each table carries a single PK index.        */
/*==============================================================*/

-- SI3P0-specific schemas (m = model)
set search_path to m, public;

/*==============================================================*/
/* Table: etatavancement3v - progress state of a cycling link   */
/*==============================================================*/
create table etatavancement3v (
   codeetatavancement3v INT4    not null,
   description          VARCHAR null,
   constraint pketatavancement3v primary key (codeetatavancement3v)
);

-- Reference data: project / route fixed / works in progress / open
insert into etatavancement3v (codeetatavancement3v, description) values
   (1, 'Projet'),
   (2, 'Tracé arrêté'),
   (3, 'Travaux en cours'),
   (4, 'Ouvert');

/*==============================================================*/
/* Table: itinerairecyclable - a named cycling itinerary        */
/*==============================================================*/
create table itinerairecyclable (
   numeroitinerairecyclable VARCHAR not null,
   nomofficiel              VARCHAR null,
   nomusage                 VARCHAR null,
   depart                   VARCHAR null,
   arrivee                  VARCHAR null,
   estinscrit               BOOL    null,
   -- "is null or" is redundant (CHECK accepts NULL) but kept as generated
   niveauschema             VARCHAR null
      constraint ckcniveauschema check
         (niveauschema is null or (niveauschema in
            ('Infra-communal','Communal','Inter-communal','Départemental',
             'Régional','National','Européen','International'))),
   anneeinscription         INT4    null,
   siteweb                  VARCHAR null,
   anneeouverture           INT4    null,
   constraint pkitinerairecyclable primary key (numeroitinerairecyclable)
);

/*==============================================================*/
/* Table: portioncyclable - a portion (stage/variant) of an     */
/* itinerary                                                    */
/*==============================================================*/
create table portioncyclable (
   idportioncyclable       SERIAL  not null,
   codetypeportioncyclable VARCHAR null,
   nom                     VARCHAR null,
   description             VARCHAR null,
   constraint pkportioncyclable primary key (idportioncyclable)
);

-- FK-support index
create index portioncyclable_typeportioncyclable_fk on portioncyclable (
   codetypeportioncyclable
);

/*==============================================================*/
/* Table: portioncyclable_itinerairecyclable - N:M link between */
/* portions and itineraries, with an ordering rank              */
/*==============================================================*/
create table portioncyclable_itinerairecyclable (
   idportioncyclable        INT4    not null,
   numeroitinerairecyclable VARCHAR not null,
   ordre                    INT4    null,
   constraint pkportioncyclable_itinerairecyclable
      primary key (idportioncyclable, numeroitinerairecyclable)
);

-- FK-support indexes (one per side of the association)
create index itinerairecyclable_portioncyclable_fk on portioncyclable_itinerairecyclable (
   idportioncyclable
);

create index portioncyclable_itinerairecyclable_fk on portioncyclable_itinerairecyclable (
   numeroitinerairecyclable
);

/*==============================================================*/
/* Table: prcyclable - point of reference on the network        */
/*==============================================================*/
create table prcyclable (
   idprcyclable       SERIAL  not null,
   codetypeprcyclable VARCHAR null,
   libelle            VARCHAR null,
   numeroserie        VARCHAR null,
   constraint pkprcyclable primary key (idprcyclable)
);

-- 3D point geometry in RGF93 / Lambert-93 (EPSG:2154), with spatial index
select AddGeometryColumn('prcyclable', 'geom', 2154, 'POINT', 3);
create index prcyclable_geom_idx on prcyclable using gist (geom);

-- FK-support index
create index prcyclable_typeprcyclable_fk on prcyclable (
   codetypeprcyclable
);

/*==============================================================*/
/* Table: revetement3v - surface type of a segment              */
/*==============================================================*/
create table revetement3v (
   coderevetement3v VARCHAR not null,
   description      VARCHAR null,
   constraint pkrevetement3v primary key (coderevetement3v)
);

-- Reference data: smooth / rough / loose surface
insert into revetement3v (coderevetement3v, description) values
   ('LIS', 'Lisse'),
   ('RUG', 'Rugueux'),
   ('MEU', 'Meuble');

/*==============================================================*/
/* Table: segmentcyclable - elementary geometric segment        */
/*==============================================================*/
create table segmentcyclable (
   idsegmentcyclable    SERIAL  not null,
   codeetatavancement3v INT4    null,
   codestatut3v         VARCHAR null,
   coderevetement3v     VARCHAR null,
   anneeouverture       INT4    null,
   sensunique           BOOL    null,
   datesaisie           DATE    null,
   precisionestimee     VARCHAR null,
   sourcegeometrie      VARCHAR null,
   idgeometrie          VARCHAR null,
   datesource           DATE    null,
   fictif               BOOL    null,
   constraint pksegmentcyclable primary key (idsegmentcyclable)
);

-- 3D linestring geometry in RGF93 / Lambert-93 (EPSG:2154), with spatial index
select AddGeometryColumn('segmentcyclable', 'geom', 2154, 'LINESTRING', 3);
create index segmentcyclable_geom_idx on segmentcyclable using gist (geom);

-- FK-support indexes
create index segmentcyclable_etatavancement3v_fk on segmentcyclable (
   codeetatavancement3v
);

create index segmentcyclable_revetement3v_fk on segmentcyclable (
   coderevetement3v
);

create index segmentcyclable_statut3v_fk on segmentcyclable (
   codestatut3v
);

/*==============================================================*/
/* Table: segmentcyclable_gestionnaire - N:M link segment <->   */
/* managing body (identified by SIREN)                          */
/*==============================================================*/
create table segmentcyclable_gestionnaire (
   idsegmentcyclable INT4    not null,
   siren             VARCHAR not null,
   constraint pksegmentcyclable_gestionnaire
      primary key (idsegmentcyclable, siren)
);

-- NOTE(review): siren has an FK-support index but no FK constraint below;
-- presumably the organisation table lives in another script - verify.
create index gestionnaire_segmentcyclable_fk on segmentcyclable_gestionnaire (
   idsegmentcyclable
);

create index segmentcyclable_gestionnaire_fk on segmentcyclable_gestionnaire (
   siren
);

/*==============================================================*/
/* Table: segmentcyclable_portioncyclable - N:M link segment    */
/* <-> portion                                                  */
/*==============================================================*/
create table segmentcyclable_portioncyclable (
   idsegmentcyclable INT4 not null,
   idportioncyclable INT4 not null,
   constraint pksegmentcyclable_portioncyclable
      primary key (idsegmentcyclable, idportioncyclable)
);

-- FK-support indexes
create index portioncyclable_segmentcyclable_fk on segmentcyclable_portioncyclable (
   idsegmentcyclable
);

create index segmentcyclable_portioncyclable_fk on segmentcyclable_portioncyclable (
   idportioncyclable
);

/*==============================================================*/
/* Table: segmentcyclable_proprietaire - N:M link segment <->   */
/* owning body (identified by SIREN)                            */
/*==============================================================*/
create table segmentcyclable_proprietaire (
   idsegmentcyclable INT4    not null,
   siren             VARCHAR not null,
   constraint pksegmentcyclable_proprietaire
      primary key (idsegmentcyclable, siren)
);

-- NOTE(review): as for gestionnaire, siren is indexed but not FK-constrained.
create index proprietaire_segmentcyclable_fk on segmentcyclable_proprietaire (
   idsegmentcyclable
);

create index segmentcyclable_proprietaire_fk on segmentcyclable_proprietaire (
   siren
);

/*==============================================================*/
/* Table: statut3v - legal/road status of a segment             */
/*==============================================================*/
create table statut3v (
   codestatut3v VARCHAR not null,
   description  VARCHAR null,
   constraint pkstatut3v primary key (codestatut3v)
);

insert into statut3v (codestatut3v, description) values
   ('VV', 'Voie verte'),
   ('PCY', 'Piste cyclable'),
   ('ASP', 'Autre site propre'),
   ('RTE', 'Route'),
   ('BCY', 'Bande cyclable'),
   ('ICA', 'Itinéraire à circulation apaisée');

/*==============================================================*/
/* Table: typeportioncyclable - type of a portion               */
/*==============================================================*/
create table typeportioncyclable (
   codetypeportioncyclable VARCHAR not null,
   description             VARCHAR null,
   constraint pktypeportioncyclable primary key (codetypeportioncyclable)
);

insert into typeportioncyclable (codetypeportioncyclable, description) values
   ('ETP', 'Etape'),
   ('VAR', 'Variante'),
   ('PRV', 'Portion provisoire'),
   ('OBS', 'Portion observée');

/*==============================================================*/
/* Table: typeprcyclable - type of a reference point            */
/*==============================================================*/
create table typeprcyclable (
   codetypeprcyclable VARCHAR not null,
   description        VARCHAR null,
   constraint pktypeprcyclable primary key (codetypeprcyclable)
);

insert into typeprcyclable (codetypeprcyclable, description) values
   ('DFE', 'Début/fin d''étape'),
   ('INT', 'Intersection d''itinéraires'),
   ('BIF', 'Bifurcation'),
   ('APO', 'Accès POI'),
   ('CPT', 'Compteur'),
   ('PDL', 'Passage délicat'),
   ('PCT', 'Point de connexion transfrontalier');

/*==============================================================*/
/* Foreign keys (all RESTRICT on delete and update)             */
/*==============================================================*/
alter table portioncyclable
   add constraint fk_portioncyclable_typeportioncyclable
      foreign key (codetypeportioncyclable)
      references typeportioncyclable (codetypeportioncyclable)
      on delete restrict on update restrict;

alter table portioncyclable_itinerairecyclable
   add constraint fk_itinerairecyclable_portioncyclable
      foreign key (idportioncyclable)
      references portioncyclable (idportioncyclable)
      on delete restrict on update restrict;

alter table portioncyclable_itinerairecyclable
   add constraint fk_portioncyclable_itinerairecyclable
      foreign key (numeroitinerairecyclable)
      references itinerairecyclable (numeroitinerairecyclable)
      on delete restrict on update restrict;

alter table prcyclable
   add constraint fk_prcyclable_typeprcyclable
      foreign key (codetypeprcyclable)
      references typeprcyclable (codetypeprcyclable)
      on delete restrict on update restrict;

alter table segmentcyclable
   add constraint fk_segmentcyclable_etatavancement3v
      foreign key (codeetatavancement3v)
      references etatavancement3v (codeetatavancement3v)
      on delete restrict on update restrict;

alter table segmentcyclable
   add constraint fk_segmentcyclable_revetement3v
      foreign key (coderevetement3v)
      references revetement3v (coderevetement3v)
      on delete restrict on update restrict;

alter table segmentcyclable
   add constraint fk_segmentcyclable_statut3v
      foreign key (codestatut3v)
      references statut3v (codestatut3v)
      on delete restrict on update restrict;

alter table segmentcyclable_gestionnaire
   add constraint fk_gestionnaire_segment
      foreign key (idsegmentcyclable)
      references segmentcyclable (idsegmentcyclable)
      on delete restrict on update restrict;

alter table segmentcyclable_portioncyclable
   add constraint fk_portioncyclable_segmentcyclable
      foreign key (idsegmentcyclable)
      references segmentcyclable (idsegmentcyclable)
      on delete restrict on update restrict;

alter table segmentcyclable_portioncyclable
   add constraint fk_segmentcyclable_portioncyclable
      foreign key (idportioncyclable)
      references portioncyclable (idportioncyclable)
      on delete restrict on update restrict;

alter table segmentcyclable_proprietaire
   add constraint fk_proprietaire_segment
      foreign key (idsegmentcyclable)
      references segmentcyclable (idsegmentcyclable)
      on delete restrict on update restrict;
/* Object: dbo.Query_311
   Transaction: NCU:MCN
   Note: Before images (MCO) are used for consistency checks, and to delete
   the multi IP name connection between the IP-NAME-NUMBER of the PG/HR and
   the IP-BASE-NUMBER-ref. in the detail record. */
CREATE PROCEDURE [dbo].[Query_311]
    @HeaderID AS bigint
AS
SET NOCOUNT ON;

-- Review fix: the original wrapped this statement in sp_executesql although
-- nothing in it is dynamic (no variable identifiers); a static statement is
-- plan-cacheable, removes the doubled-quote noise and shrinks the injection
-- surface. Behavior and parameters are unchanged.
INSERT INTO ipi.IPName
    (RowID, ID, IPNN, NameType, Name, FirstName,
     AmendDate, AmendTime, CreationDate, CreationTime)
SELECT
    B.RowID,
    C.ID,
    B.IPNN,
    B.NameType,
    B.Name,
    B.FirstName,
    B.AmendDate,
    B.AmendTime,
    B.CreationDate,
    B.CreationTime
FROM dbo.[Row] AS H                    -- NCU header row
INNER JOIN dbo.[Row] AS A              -- MCO before image in the same header
    ON H.HeaderID = A.HeaderID
   AND H.HeaderCode = 'NCU'
   AND H.RowCode = 'NCU'
   AND A.RowCode = 'MCO'
INNER JOIN dbo.[Row] AS B              -- MCN/NCN row immediately after the MCO
    ON H.HeaderID = B.HeaderID
   AND A.RowID + 1 = B.RowID
   AND B.RowCode IN ('MCN', 'NCN')
INNER JOIN ipi.IP AS C
    ON H.IPBNNew = C.IPBN
WHERE H.HeaderID = @HeaderID
  AND H.IPBN <> H.IPBNNew;             -- the relink condition (from one IP to another)
GO
<reponame>MyintMyatKaungPHP/TandT<filename>travel.sql -- phpMyAdmin SQL Dump -- version 4.8.3 -- https://www.phpmyadmin.net/ -- -- Host: localhost -- Generation Time: Oct 06, 2018 at 02:52 PM -- Server version: 10.1.35-MariaDB -- PHP Version: 7.2.9 SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; SET AUTOCOMMIT = 0; START TRANSACTION; SET time_zone = "+00:00"; /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; -- -- Database: `travel` -- -- -------------------------------------------------------- -- -- Table structure for table `bookings` -- CREATE TABLE `bookings` ( `id` int(10) UNSIGNED NOT NULL, `user_id` int(11) NOT NULL, `package_id` int(11) NOT NULL, `route_id` int(11) DEFAULT NULL, `hotel_id` int(11) DEFAULT NULL, `departure_date` date NOT NULL, `qty` int(11) NOT NULL, `total_price` double NOT NULL, `confirm_price` double DEFAULT NULL, `user_msg` longtext COLLATE utf8mb4_unicode_ci, `admin_msg` longtext COLLATE utf8mb4_unicode_ci, `status` enum('pending','acknowledge','confirm') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT 'pending', `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `bookings` -- INSERT INTO `bookings` (`id`, `user_id`, `package_id`, `route_id`, `hotel_id`, `departure_date`, `qty`, `total_price`, `confirm_price`, `user_msg`, `admin_msg`, `status`, `created_at`, `updated_at`) VALUES (1, 3, 1, 16, 15, '2018-09-23', 5, 1370000, 1371000, 'Hello Admin', 'Price changed.', 'confirm', '2018-09-14 02:20:31', '2017-09-14 02:24:55'), (2, 2, 2, 9, 17, '2018-09-05', 2, 450000, 450000, 'Bar Nyar', 'ok', 'confirm', '2018-09-14 02:31:36', '2018-09-14 02:32:08'), (3, 4, 1, 1, 22, '2018-09-29', 3, 651000, 651000, 'Hello', 'Complete', 'confirm', '2018-09-14 
02:55:35', '2018-09-14 02:57:08'), (8, 2, 7, 44, 41, '2018-09-17', 2, 110000, 110000, 'Hello Admin', 'ok', 'confirm', '2018-09-16 22:27:25', '2018-09-16 22:28:56'), (10, 2, 4, 52, 32, '2018-09-28', 11, 4169000, 4169000, NULL, NULL, 'confirm', '2018-09-16 23:03:05', '2018-09-16 23:05:21'), (11, 2, 6, 54, 34, '2018-09-27', 3, 1344000, NULL, 'Hello Admin', NULL, 'pending', '2018-09-16 23:31:20', '2018-09-16 23:31:20'), (12, 2, 12, 20, 23, '2018-10-19', 3, 1566000, 1566000, 'dfasfd', 'safa', 'acknowledge', '2018-10-06 06:15:45', '2018-10-06 06:16:42'); -- -------------------------------------------------------- -- -- Table structure for table `cities` -- CREATE TABLE `cities` ( `id` int(10) UNSIGNED NOT NULL, `name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `del_status` enum('1','0') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '0', `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `cities` -- INSERT INTO `cities` (`id`, `name`, `del_status`, `created_at`, `updated_at`) VALUES (1, 'Yangon', '0', '2018-08-30 14:48:10', '2018-08-30 14:48:10'), (2, 'Mandalay', '0', '2018-08-30 14:48:13', '2018-08-30 14:48:13'), (3, '<NAME>', '0', '2018-08-30 14:48:22', '2018-08-30 14:48:22'), (4, '<NAME>', '0', '2018-08-30 14:48:41', '2018-08-30 14:48:41'), (5, '<NAME>', '0', '2018-08-30 14:48:50', '2018-08-30 14:48:50'), (6, 'Bagan', '0', '2018-08-30 14:59:14', '2018-08-30 14:59:14'), (7, 'Ayayarwady', '0', '2018-08-30 15:05:52', '2018-08-30 15:05:52'), (8, 'Sittway', '0', '2018-08-30 15:06:05', '2018-08-30 20:53:35'), (9, 'Pathein', '0', '2018-08-30 15:14:45', '2018-08-30 15:14:45'), (10, 'Taungoo', '0', '2018-08-30 20:46:47', '2018-08-30 20:46:47'), (11, 'Pyay', '1', '2018-08-30 20:47:20', '2018-08-30 20:50:49'), (12, '<NAME>', '0', '2018-09-13 23:20:48', '2018-09-13 23:20:48'), (13, '<NAME>', '0', '2018-09-13 23:21:50', '2018-09-13 23:21:50'), (14, 
'Ngapali', '0', '2018-09-13 23:22:02', '2018-09-13 23:22:02'), (15, 'Sightseeing', '0', '2018-09-14 00:39:03', '2018-09-14 00:39:03'); -- -------------------------------------------------------- -- -- Table structure for table `hotels` -- CREATE TABLE `hotels` ( `id` int(10) UNSIGNED NOT NULL, `name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `place` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `link` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `price` double NOT NULL, `del_status` enum('1','0') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '0', `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `hotels` -- INSERT INTO `hotels` (`id`, `name`, `place`, `link`, `price`, `del_status`, `created_at`, `updated_at`) VALUES (11, '<NAME>', 'Yangon', 'http://www.shangri-la.com', 85000, '0', '2018-09-06 22:12:29', '2018-09-06 22:17:32'), (12, 'Novotel Yangon Max Hotel', 'Yangon', 'https://www.accorhotels.com', 90000, '0', '2018-09-06 22:15:31', '2018-09-06 22:17:46'), (13, 'Hotel Marvel', 'Mandalay', 'https://www.agoda.com/hotel-marvel/hotel/mandalay-mm.html', 65000, '1', '2018-09-06 22:17:10', '2018-09-06 22:43:49'), (14, 'Mandalay Karaweik Mobile Hotel', 'Mandalay', 'https://www.agoda.com/mandalay-karaweik-mobile-hotel/hotel/mandalay-mm.html', 70000, '1', '2018-09-06 22:18:49', '2018-09-06 22:44:07'), (15, 'Hotel A1', 'Mandalay', 'http://hotela1mandalay.com/', 68000, '0', '2018-09-06 22:21:49', '2018-09-06 22:42:48'), (16, 'Hotel Akimomi', 'Pyin Oo Lwin', 'https://www.akimomi.com', 78000, '0', '2018-09-06 22:23:38', '2018-09-06 22:23:38'), (17, 'Royal Park View Hotel', 'Pyin Oo Lwin', 'http://royalparkview.hotelspyinoolwin.com/', 55000, '0', '2018-09-06 22:28:20', '2018-09-06 22:41:24'), (18, 'Royal Flower Guest Hotel', '<NAME>', 'https://www.royalflowerguesthouse.com/en-us', 45000, '0', '2018-09-06 22:29:11', '2018-09-06 
22:39:48'), (19, 'Myanmar Treasure Inle Lake', '<NAME>', 'https://www.tripadvisor.com/Hotel_Review-g303658-d1023658-Reviews-Myanmar_Treasure_Inle_Lake-Taunggyi_Shan_State.html', 75000, '1', '2018-09-06 22:31:28', '2018-09-06 22:44:19'), (20, 'Vision Hotel', '<NAME>', 'https://www.tripadvisor.com/Hotel_Review-g303658-d9805447-Reviews-Vision_Hotel-Taunggyi_Shan_State.html', 88000, '1', '2018-09-06 22:32:46', '2018-09-06 22:44:42'), (21, 'Aureum Palace Hotel & Resort Bagan', 'Bagan', 'https://www.booking.com/hotel/mm/aureum-palace-amp-resort-bagan.en-us.html', 67500, '1', '2018-09-06 22:36:36', '2018-09-06 22:44:53'), (22, 'Hotel Mandalay', 'Mandalay', 'http://hotelmandalaymyanmar.com/', 55000, '0', '2018-09-06 22:45:56', '2018-09-06 22:45:56'), (23, 'Shwe Kyun Hotel', '<NAME>', 'https://www.shwekyunhotel-myanmar.com/en-us', 77000, '0', '2018-09-06 22:48:20', '2018-09-06 22:48:20'), (24, 'Royal Star Hotel', '<NAME>', 'https://royalstarhotel.business.site/', 58000, '0', '2018-09-06 22:49:14', '2018-09-06 22:49:14'), (25, 'Thande Hotel', 'Bagan', 'http://www.thandehotel.com/bagan/', 87983, '0', '2018-09-06 22:50:47', '2018-09-06 22:50:47'), (26, 'The Hotel UMBRA', 'Bagan', 'https://www.thehotelumbrabagan.com/en-us', 89000, '0', '2018-09-06 22:52:13', '2018-09-06 22:52:13'), (27, 'Hotel Yadanarbon', 'Bagan', 'http://www.hotelyadanarbonbagan.com/', 99000, '0', '2018-09-06 22:53:35', '2018-09-06 22:53:35'), (28, 'Beller Resort Hotel', '<NAME>', 'http://www.belleresorts.com/', 87000, '0', '2018-09-06 22:56:51', '2018-09-06 22:56:51'), (29, 'Amazing Chaung Thar', '<NAME>', 'http://www.amazingchaungtharesort.com/', 99000, '0', '2018-09-06 22:58:09', '2018-09-06 22:58:09'), (30, 'Grand Hotel', '<NAME>', 'http://www.grandhotelchaungtha.com/', 81000, '0', '2018-09-06 22:59:18', '2018-09-06 22:59:18'), (31, 'Yuzana Reosrt Hotel', '<NAME>', 'http://www.yuzanahotels.com/index.php/contactus-yuzana-hotel', 79000, '0', '2018-09-06 23:00:55', '2018-09-06 23:00:55'), (32, 'Ocean 
Paradise Hotel', '<NAME>', 'http://www.sunnyparadiseresort.net/', 88000, '0', '2018-09-06 23:02:22', '2018-09-06 23:02:22'), (33, 'Eastern Palace Hotel', 'Mandalay', 'http://www.easternpalacehotels.com/', 65000, '0', '2018-09-13 23:48:47', '2018-09-13 23:48:47'), (34, 'NgapaliParadise Hotel', 'Ngapali', 'http://www.ngapaliparadisehotel.com/', 66000, '0', '2018-09-13 23:49:35', '2018-09-13 23:49:35'), (35, 'Amazing Ngaoali Resort', 'Ngapali', 'http://www.amazingngapaliresort.com/', 77000, '0', '2018-09-13 23:50:01', '2018-09-13 23:50:01'), (36, 'The Villager Lodge', 'Ngapali', 'http://www.thevillagerlodgengapali.com/', 80000, '0', '2018-09-13 23:50:24', '2018-09-13 23:50:24'), (37, 'Emerald Sea Resort', '<NAME>', 'http://www.emeraldseahotel.com/', 97000, '0', '2018-09-13 23:51:30', '2018-09-13 23:51:30'), (38, 'Aureum Resort & Spa', '<NAME>', 'http://www.aureumplacehotel.com/', 150000, '0', '2018-09-13 23:52:18', '2018-09-13 23:52:18'), (39, 'Ocean Blue Beach Hotel', '<NAME>', 'http://www.oceanbluengwesaung.com/', 70000, '0', '2018-09-13 23:53:03', '2018-09-13 23:53:03'), (40, 'Bay of Bengal Hotel Resort', '<NAME>', 'http://www.bayofbengalresort.com/', 65000, '0', '2018-09-13 23:54:22', '2018-09-13 23:54:22'), (41, 'Sightseeing', 'Sightseeing', 'Not Available', 0, '0', '2018-09-14 00:39:23', '2018-09-14 00:39:23'); -- -------------------------------------------------------- -- -- Table structure for table `migrations` -- CREATE TABLE `migrations` ( `id` int(10) UNSIGNED NOT NULL, `migration` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `batch` int(11) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `migrations` -- INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES (14, '2018_08_30_082518_create_cities_table', 1), (15, '2018_08_30_113709_create_packages_table', 1), (16, '2018_08_30_161447_create_hotels_table', 1), (17, '2018_08_30_211536_create_routes_table', 1), (19, 
'2018_09_04_065302_create_users_table', 2), (20, '2018_09_11_163937_create_bookings_table', 3); -- -------------------------------------------------------- -- -- Table structure for table `packages` -- CREATE TABLE `packages` ( `id` int(10) UNSIGNED NOT NULL, `title` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `city_id` int(11) NOT NULL, `price` double NOT NULL, `type` enum('normal','honeymoon','vacation','sightseeing') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT 'normal', `description` longtext COLLATE utf8mb4_unicode_ci NOT NULL, `itinerary` longtext COLLATE utf8mb4_unicode_ci NOT NULL, `geo_location` longtext COLLATE utf8mb4_unicode_ci NOT NULL, `duration` int(11) NOT NULL, `images` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `del_status` enum('1','0') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '0', `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `packages` -- INSERT INTO `packages` (`id`, `title`, `city_id`, `price`, `type`, `description`, `itinerary`, `geo_location`, `duration`, `images`, `del_status`, `created_at`, `updated_at`) VALUES (1, '<NAME>', 2, 40000, 'normal', '<p>Mandalay is the second largest city in Myanmar and was established in 1857. It lies on the east bank of the Ayeyarwaddy River and in the upper part of Myanmar. Mandalay has the Royal Palace of the last Konbaung Dynasty. 
Mandalay inherits many cultural heritage from the ancient Myanmar Kingdoms and beautiful places to visit.</p>', '<p>Day1</p>\r\n<ul>\r\n<li>-Arrive yangon</li>\r\n<li>-Visit Shwedagon Pagoda</li>\r\n<li>-Walk around Sule Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Feel tranquillity at Swal Taw Pagoda&nbsp;</li>\r\n<li>-Visit Kyauk Taw Gyi Temple</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>Day2&nbsp;</p>\r\n<ul>\r\n<li>-Visit Musmeah Yeshua Synagogue</li>\r\n<li>-Visit the wet market on 17th Street</li>\r\n<li>-Visit Take a stroll along Inya Lake</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Ride Yangon&rsquo;s Circular Train</li>\r\n<li>-Gaze at beautiful paintings at the Yangon Gallery</li>\r\n<li>-Dinner</li>\r\n<li>&nbsp;</li>\r\n</ul>\r\n<p>day3</p>\r\n<ul>\r\n<li>-Arrive Mandaly</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Pont U-Bain Amarapura</li>\r\n<li>-Visit Cultural Museum Mandalay</li>\r\n<li>-Lunch&nbsp;</li>\r\n<li>-Visit Myan Nan San Kyaw, or Royal Palace</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>ERVICE INCLUDES:</p>\r\n<p>&nbsp;</p>\r\n<p>&nbsp; &nbsp; Sightseeing Program in Bagan Half-day tour with English Speaking Guide</p>\r\n<p>&nbsp; &nbsp; Half Day Cycling Tour in Bagan.</p>\r\n<p>&nbsp; &nbsp; Bicycle Fees</p>\r\n<p>&nbsp; &nbsp; Tour Program Sightseeing with private air- condition and good experienced car driver.</p>\r\n<p>&nbsp; &nbsp; Purify drinking water and snow Towel along in program.</p>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d118427.04428191797!2d96.00578337954974!3d21.940504297770303!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30cb6d23f0d27411%3A0x24146be01e4e5646!2sMandalay!5e0!3m2!1sen!2smm!4v1536904095266', 3, '93c3aec62f29d19871d13c9523a4d606.jpg,430145e051fb9ae6d43bc5d021cbacae.jpg,ee8def548f1c9a7740177e63821864f7.jpg', '0', '2018-09-14 01:17:55', '2018-10-06 05:30:37'), (2, '<NAME>', 5, 45000, 'normal', '<p>Pyin Oo Lwin&nbsp;</p>', '<p>day1</p>\r\n<p>-Arrive Pyin Oo 
Lwin</p>\r\n<p>-Breakfast&nbsp;</p>\r\n<p>-Visit Maymyo Botanical Garden (National Kandawgyi Park)</p>\r\n<p>-Lunch</p>\r\n<p>-Visit Pyin Oo Lwin Trek</p>\r\n<p>-Visit Peik Chin Myaung Cave</p>\r\n<p>day2</p>\r\n<p>-Arrive yangon</p>\r\n<p>-Visit Shwedagon Pagoda</p>\r\n<p>-Walk around Sule Pagoda</p>\r\n<p>-Lunch</p>\r\n<p>-Visit Feel tranquillity at Swal Taw Pagoda&nbsp;</p>\r\n<p>-Visit Kyauk Taw Gyi Temple</p>\r\n<p>-Dinner</p>\r\n<p>day3</p>\r\n<p>-Breakfast&nbsp;</p>\r\n<p>-Visiting near yangon place</p>\r\n<p>-Lunch</p>\r\n<p>-Yangon Night see view&nbsp;</p>\r\n<p>-Dinner</p>\r\n<p>ERVICE INCLUDES:</p>\r\n<p>&nbsp;</p>\r\n<p>&nbsp; &nbsp; Sightseeing Program in Bagan Half-day tour with English Speaking Guide</p>\r\n<p>&nbsp; &nbsp; Half Day Cycling Tour in Bagan.</p>\r\n<p>&nbsp; &nbsp; Bicycle Fees</p>\r\n<p>&nbsp; &nbsp; Tour Program Sightseeing with private air- condition and good experienced car driver.</p>\r\n<p>&nbsp; &nbsp; Purify drinking water and snow Towel along in program.</p>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d118348.97178300636!2d96.40465733116375!3d22.034083792473744!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30cc9e9c4e50f127%3A0xe9303d2a683f438f!2sPyin+Oo+Lwin!5e0!3m2!1sen!2smm!4v1536904264534', 3, '09676f237f1d934f4e61a8e33614bdc4.jpg,b9201de1a4501109e3b7ce2b0f87bd8f.jpg,3a7f603e6b64d1726aa4986ec88e0101.jpg', '0', '2018-09-14 01:23:50', '2018-09-14 01:23:50'), (3, 'Asian Bagan', 6, 58000, 'normal', '<p>The main tourist destination in Myanmar is Bagan, capital of the first Myanmar Empire; one of the richest archaeological sites in South-east Asia. Situated on the eastern bank of the Ayeyawaddy River. 
The Magic of Bagan has inspired visitors to Myanmar for nearly 1000 years.</p>', '<p>day1</p>\r\n<ul>\r\n<li>-Arrive Bagan</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Seinnyet Nyima Paya &amp; Seinnyet Ama Pahto&nbsp;</li>\r\n<li>-Visit Htilominlo Pahto Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Gawdawpalin Pahto</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>day2</p>\r\n<ul>\r\n<li>-Arrive yangon</li>\r\n<li>-Visit Shwedagon Pagoda</li>\r\n<li>-Walk around Sule Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Feel tranquillity at Swal Taw Pagoda&nbsp;</li>\r\n<li>-Visit Kyauk Taw Gyi Temple</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>day3</p>\r\n<ul>\r\n<li>&nbsp; &nbsp; -Breakfast&nbsp;</li>\r\n<li>-Visiting near yangon place</li>\r\n<li>-Lunch</li>\r\n<li>-Yangon Night see view&nbsp;</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>ERVICE INCLUDES:</p>\r\n<p>&nbsp;</p>\r\n<p>&nbsp; &nbsp; Sightseeing Program in Bagan Half-day tour with English Speaking Guide</p>\r\n<p>&nbsp; &nbsp; Half Day Cycling Tour in Bagan.</p>\r\n<p>&nbsp; &nbsp; Bicycle Fees</p>\r\n<p>&nbsp; &nbsp; Tour Program Sightseeing with private air- condition and good experienced car driver.</p>\r\n<p>&nbsp; &nbsp; Purify drinking water and snow Towel along in program.</p>\r\n<p>&nbsp;</p>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d7441.0035729029805!2d94.85448717343688!3d21.172216498338816!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30b5e50393d8ca61%3A0x371a4717c3e89981!2sOld+Bagan!5e0!3m2!1sen!2smm!4v1536904207340', 4, '4d0f2462d90086da3ec61e504ccbf011.jpg,ffcd54b60b2f3c170df3ce129f1ebd6a.jpg,ba6512b9ecf52ed511f48495e9bceec3.jpg', '0', '2018-09-14 01:27:11', '2018-09-14 01:27:11'), (4, '<NAME>', 12, 15000, 'honeymoon', '<p>Chaung-tha Beach ,is located 40 km to the west of Pathein (Bassein) in Ayeyarwaddy Division. It is a very convenient drive from Yangon via Pathein taking about 5 hours. 
There are bungalow type hotels along the beach with modern facilities.&nbsp;</p>\r\n<p>&nbsp;</p>', '<p>Day 1</p>\r\n<ul>\r\n<li>- breakfast at hotel</li>\r\n<li>- visit to Lovers Island</li>\r\n<li>- Annawar Patheing Sea Food Restaurant (Recommend restaurant for lunch)</li>\r\n<li>- The Seagate Restaurant &amp; Bar (Recommend restaurant for dinner)</li>\r\n</ul>\r\n<p>Day 2</p>\r\n<ul>\r\n<li>- breakfast at hotel</li>\r\n<li>- William Restaurant &amp; Guest House (Recommend restaurant for lunch)</li>\r\n<li>- Mister George (Recommend restaurant for dinner)</li>\r\n</ul>\r\n<p>Day 3</p>\r\n<ul>\r\n<li>- breakfast at hotel</li>\r\n<li>- return back to Yangon</li>\r\n</ul>\r\n<p>&nbsp;</p>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d15264.832880292797!2d94.4354938230367!3d16.964305237421755!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30bfb478a92ed553%3A0xc3d8bf0753adfb47!2sChaung+Thar!5e0!3m2!1sen!2smm!4v1536904444761', 4, '57d66c01c297a13a5c4ee89c3d449144.jpg,d9b8d720a90564101f1796d2e918efe1.jpg,f2ea86683dcde71777778f200d448569.jpg', '0', '2018-09-14 01:31:04', '2018-09-14 01:31:04'), (5, 'Ngwe Saung Beach', 13, 20000, 'honeymoon', '<p>Ngwe Saung Beach Resort, opened in the year 2,000, is one of the loveliest and most pleasant beach resorts in Myanmar. 
Located in the Ayeyarwady Division, some 48 kilometres from the town of Pathein, <NAME>, with a beach frontage on the Bay of Bengal with its clear blue waters, its white crested waves, sandy beaches and unspoiled and pollution-free natural surroundings, is indeed one of the best places to select for a holiday interlude of rest and relaxation.</p>', '<p>Day1</p>\r\n<ul>\r\n<li>- breakfast at hotel</li>\r\n<li>- Soe KoKo Restaruant (Recommend restaurant for lunch)</li>\r\n<li>- Golden Heart Restaurant (Recommend restaurant for dinner)</li>\r\n</ul>\r\n<p>Day2</p>\r\n<ul>\r\n<li>- breakfast at hotel</li>\r\n<li>- Garden Breeze Restaurant &amp; Bar (Recommend restaurant for lunch)</li>\r\n<li>- Brizo Restaurant (Recommend restaurant for dinner)</li>\r\n</ul>\r\n<p>Day 3</p>\r\n<ul>\r\n<li>- breakfast at hotel</li>\r\n<li>- return back to Yangon</li>\r\n</ul>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d61097.33481158732!2d94.35311717968088!3d16.847009730223775!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30be35516a81d903%3A0x71c4ba09c8725022!2sNgwesaung!5e0!3m2!1sen!2smm!4v1536904516760', 4, '799bad5a3b514f096e69bbc4a7896cd9.jpg,d0096ec6c83575373e3a21d129ff8fef.jpg,6ac0c67390b9cf28574e74a0109f46b2.jpg', '0', '2018-09-14 01:33:11', '2018-09-14 01:33:11'), (6, 'Ngapali Beach', 14, 35000, 'honeymoon', '<p>It is famous for its natural and unspotted beauty up to this day. The beach stretches about 3 km with soft white sand fringed by coconut palms. 
Ngapali Beach is one of the loveliest places in Myanmar.</p>\r\n<p>&nbsp;</p>', '<p>day1</p>\r\n<ul>\r\n<li>-Arrive yangon</li>\r\n<li>-Visit Shwedagon Pagoda</li>\r\n<li>-Walk around Sule Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Feel tranquillity at Swal Taw Pagoda&nbsp;</li>\r\n<li>-Visit Kyauk Taw Gyi Temple</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day2&nbsp;</p>\r\n<ul>\r\n<li>&nbsp; &nbsp; -Breakfast&nbsp;</li>\r\n<li>-Visiting near yangon place</li>\r\n<li>-Lunch</li>\r\n<li>-Yangon Night see view&nbsp;</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day3</p>\r\n<ul>\r\n<li>-Arrive yangon</li>\r\n<li>-Visit Shwedagon Pagoda</li>\r\n<li>-Walk around Sule Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Feel tranquillity at Swal Taw Pagoda&nbsp;</li>\r\n<li>-Visit Kyauk Taw Gyi Temple</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day4</p>\r\n<ul>\r\n<li>- breakfast at hotel</li>\r\n<li>- Take in the ocean scenery during the boat ride to Maung Shwe Lay</li>\r\n<li>- Sunset Viwe Beach Bar &amp; Restaruant (Recommend restaurant for lunch)</li>\r\n<li>- Learn about local history and marine life from the guide</li>\r\n<li>- Ngapali Kitchen (Recommend restaurant for dinner)</li>\r\n</ul>\r\n<p>day5</p>\r\n<ul>\r\n<li>- breakfast at hotel</li>\r\n<li>- Visit a coastal village in Myanmar that retains its fishing traditions</li>\r\n<li>- Ocean Pearl (Recommend restaurant for lunch)</li>\r\n<li>- Pleasant View Islet Restaurant (Recommend restaurant for dinner)</li>\r\n</ul>\r\n<p>SERVICE INCLUDES:</p>\r\n<p>&nbsp;</p>\r\n<p>&nbsp; &nbsp; Sightseeing Program in Bagan Half-day tour with English Speaking Guide</p>\r\n<p>&nbsp; &nbsp; Half Day Cycling Tour in Bagan.</p>\r\n<p>&nbsp; &nbsp; Bicycle Fees</p>\r\n<p>&nbsp; &nbsp; Tour Program Sightseeing with private air- condition and good experienced car driver.</p>\r\n<p>&nbsp; &nbsp; Purify drinking water and snow Towel along in program.</p>', 
'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d60550.949548910125!2d94.30389328492848!3d18.46397474913527!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30b91e6590582d9d%3A0x12274c0421c8e487!2sThandwe!5e0!3m2!1sen!2smm!4v1536904482618', 6, '44d8ac7e1355972e48156085e6127362.jpg,0d5b1c4c7f720f698946c7f6ab08f687.jpg,e5d67599bbb698e0cb7ec7a0713419e2.jpg', '0', '2018-09-14 01:35:23', '2018-09-14 01:35:23'), (7, 'Yangon Sightseeing Trip', 15, 55000, 'sightseeing', '<p>Yangon, the commercial capital, is the main gateway to Myanmar. Evergreen and cool with lush tropical trees, shady parks and beautiful lakes, Yangon has earned the name of \"The Garden City of the East\". Yangon was founded by King Alaungpaya on the site of a small settlement called Dagon when he conquered Lower Myanmar in 1755.</p>', '<p>&nbsp;</p>\r\n<p>&nbsp;</p>\r\n<ul>\r\n<li>-Visiting Tample&nbsp;</li>\r\n<li>-Visiting near yangon place</li>\r\n<li>-Lunch&nbsp;</li>\r\n<li>-Visiting famous plase in Yangon</li>\r\n<li>-Yangon Night see view&nbsp;</li>\r\n<li>-Dinner&nbsp;</li>\r\n</ul>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d244307.58873964142!2d96.01189219913104!3d16.910187746895044!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30c1949e223e196b%3A0x56fbd271f8080bb4!2sYangon!5e0!3m2!1sen!2smm!4v1536903947075', 1, '7fdc1a630c238af0815181f9faa190f5.jpg,3e2bba0d1f2c5de6d7ef6ea20ed0c8ae.jpg,4575d15550def67719bf8293c5b91237.jpg', '0', '2018-09-14 01:38:09', '2018-09-16 20:28:34'), (8, 'Yangon-Mandalay-Py<NAME>', 5, 65000, 'vacation', '<p>&nbsp;</p>\r\n<p>Mandalay is the second largest city in Myanmar and was established in 1857. It lies on the east bank of the Ayeyarwaddy River and in the upper part of Myanmar. Mandalay has the Royal Palace of the last Konbaung Dynasty. 
Mandalay inherits many cultural heritage from the ancient Myanmar Kingdoms and beautiful places to visit.</p>', '<p>day1</p>\r\n<ul>\r\n<li>-Arrive Yangon</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visiting Tample&nbsp;</li>\r\n<li>-Lunch&nbsp;</li>\r\n<li>-Visiting famous plase in Yangon</li>\r\n<li>-Dinner&nbsp;</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>day2</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visiting near yangon place</li>\r\n<li>-Lunch</li>\r\n<li>-Yangon Night see view&nbsp;</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>&nbsp;</p>\r\n<p>day3</p>\r\n<ul>\r\n<li>-Arrive Mandaly</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Pont U-Bain Amarapura</li>\r\n<li>-Visit Cultural Museum Mandalay</li>\r\n<li>-Lunch&nbsp;</li>\r\n<li>-Visit Myan Nan San Kyaw, or Royal Palace</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>day4</p>\r\n<ul>\r\n<li>-Breakfast</li>\r\n<li>-Visit Sandamuni Pagoda</li>\r\n<li>-visit Kyauktawgyi Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Kuthodaw Pagoda</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day5</p>\r\n<ul>\r\n<li>&nbsp; &nbsp; -Breakfast</li>\r\n<li>-Cycling to Nyaung U Market Gubyauk Gyi,</li>\r\n<li>-Lunch</li>\r\n<li>&nbsp; &nbsp; -Shwezigone and Ahlopye Stupa</li>\r\n<li>&nbsp; &nbsp; -Htilominlo, Min Oh Chan Thar Pagoda</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>day6</p>\r\n<ul>\r\n<li>-Arrive Pyin Oo Lwin</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Maymyo Botanical Garden (National Kandawgyi Park)</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Pyin Oo Lwin Trek</li>\r\n<li>-Visit Peik Chin Myaung Cave</li>\r\n</ul>\r\n<p>day7</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Maha Ant Htoo Kan Thar Pagoda</li>\r\n<li>-Hampshire Falls and Caves (Pwe Kauk)</li>\r\n<li>-Lunch</li>\r\n<li>-Visit All Saints Anglican Church</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>&nbsp;</p>\r\n<p>day8</p>\r\n<ul>\r\n<li>-Breakfast</li>\r\n<li>-Visit Aung Chantha 
Paya</li>\r\n<li>-Lunch</li>\r\n<li>-Visit HTOO Orange Farm</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>SERVICE INCLUDES:</p>\r\n<p>&nbsp;</p>\r\n<p>&nbsp; &nbsp; Sightseeing Program in Bagan Half-day tour with English Speaking Guide</p>\r\n<p>&nbsp; &nbsp; Half Day Cycling Tour in Bagan.</p>\r\n<p>&nbsp; &nbsp; Bicycle Fees</p>\r\n<p>&nbsp; &nbsp; Tour Program Sightseeing with private air- condition and good experienced car driver.</p>\r\n<p>&nbsp; &nbsp; Purify drinking water and snow Towel along in program.</p>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d118348.97178300636!2d96.40465733116375!3d22.034083792473744!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30cc9e9c4e50f127%3A0xe9303d2a683f438f!2sPyin+Oo+Lwin!5e0!3m2!1sen!2smm!4v1536904264534', 9, '2760735506a5bc187a35f6c829fae70d.jpg,881ab63b3db7d21d9ad32c6c5a19597b.jpg,b8d602ab421ea80896c7eb01bd7e14e3.jpg', '0', '2018-09-14 01:42:20', '2018-09-14 01:42:20'), (9, 'Mandalay-Yangon', 1, 55000, 'vacation', '<p>Yangon, the commercial capital, is the main gateway to Myanmar. Evergreen and cool with lush tropical trees, shady parks and beautiful lakes, Yangon has earned the name of \"The Garden City of the East\". 
Yangon was founded by King Alaungpaya on the site of a small settlement called Dagon when he conquered Lower Myanmar in 1755.</p>', '<p>day1</p>\r\n<ul>\r\n<li>-Arrive Mandaly</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Pont U-Bain Amarapura</li>\r\n<li>-Visit Cultural Museum Mandalay</li>\r\n<li>-Lunch&nbsp;</li>\r\n<li>-Visit Myan Nan San Kyaw, or Royal Palace</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day2</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Seinnyet Nyima Paya &amp; Seinnyet Ama Pahto&nbsp;</li>\r\n<li>-Visit Htilominlo Pahto Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Gawdawpalin Pahto</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day 3</p>\r\n<ul>\r\n<li>-Arrive yangon</li>\r\n<li>-Visit Shwedagon Pagoda</li>\r\n<li>-Walk around Sule Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Feel tranquillity at Swal Taw Pagoda&nbsp;</li>\r\n<li>-Visit Kyauk Taw Gyi Temple</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day4</p>\r\n<ul>\r\n<li>-Arrive Mandaly</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Pont U-Bain Amarapura</li>\r\n<li>-Visit Cultural Museum Mandalay</li>\r\n<li>-Lunch&nbsp;</li>\r\n<li>-Visit Myan Nan San Kyaw, or Royal Palace</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day5</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Seinnyet Nyima Paya &amp; Seinnyet Ama Pahto&nbsp;</li>\r\n<li>-Visit Htilominlo Pahto Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Gawdawpalin Pahto</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day 6</p>\r\n<ul>\r\n<li>-Arrive yangon</li>\r\n<li>-Visit Shwedagon Pagoda</li>\r\n<li>-Walk around Sule Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Feel tranquillity at Swal Taw Pagoda&nbsp;</li>\r\n<li>-Visit Kyauk Taw Gyi Temple</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>&nbsp;</p>\r\n<p>ERVICE INCLUDES:</p>\r\n<p>&nbsp;</p>\r\n<p>&nbsp; &nbsp; Sightseeing Program in Bagan Half-day tour with English Speaking Guide</p>\r\n<p>&nbsp; &nbsp; Half Day Cycling Tour in Bagan.</p>\r\n<p>&nbsp; &nbsp; Bicycle 
Fees</p>\r\n<p>&nbsp; &nbsp; Tour Program Sightseeing with private air- condition and good experienced car driver.</p>\r\n<p>&nbsp; &nbsp; Purify drinking water and snow Towel along in program.</p>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d244307.58873964142!2d96.01189219913104!3d16.910187746895044!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30c1949e223e196b%3A0x56fbd271f8080bb4!2sYangon!5e0!3m2!1sen!2smm!4v1536903947075', 7, '430145e051fb9ae6d43bc5d021cbacae.jpg,93c3aec62f29d19871d13c9523a4d606.jpg,ee8def548f1c9a7740177e63821864f7.jpg', '0', '2018-09-14 01:45:50', '2018-10-06 05:06:19'), (10, '<NAME>', 1, 43000, 'vacation', '<p>Yangon, the commercial capital, is the main gateway to Myanmar. Evergreen and cool with lush tropical trees, shady parks and beautiful lakes, Yangon has earned the name of \"The Garden City of the East\". Yangon was founded by King Alaungpaya on the site of a small settlement called Dagon when he conquered Lower Myanmar in 1755.</p>', '<p>day1</p>\r\n<ul>\r\n<li>-Arrive Pyin Oo Lwin</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Maymyo Botanical Garden (National Kandawgyi Park)</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Pyin Oo Lwin Trek</li>\r\n<li>-Visit Peik Chin Myaung Cave</li>\r\n</ul>\r\n<p>day2</p>\r\n<ul>\r\n<li>-Arrive yangon</li>\r\n<li>-Visit Shwedagon Pagoda</li>\r\n<li>-Walk around Sule Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Feel tranquillity at Swal Taw Pagoda&nbsp;</li>\r\n<li>-Visit Kyauk Taw Gyi Temple</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day3</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visiting near yangon place</li>\r\n<li>-Lunch</li>\r\n<li>-Yangon Night see view&nbsp;</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day4</p>\r\n<ul>\r\n<li>-Arrive Pyin Oo Lwin</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Maymyo Botanical Garden (National Kandawgyi Park)</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Pyin Oo Lwin Trek</li>\r\n<li>-Visit Peik Chin Myaung 
Cave</li>\r\n</ul>\r\n<p>day5</p>\r\n<ul>\r\n<li>-Arrive yangon</li>\r\n<li>-Visit Shwedagon Pagoda</li>\r\n<li>-Walk around Sule Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Feel tranquillity at Swal Taw Pagoda&nbsp;</li>\r\n<li>-Visit Kyauk Taw Gyi Temple</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day6</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visiting near yangon place</li>\r\n<li>-Lunch</li>\r\n<li>-Yangon Night see view&nbsp;</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>ERVICE INCLUDES:</p>\r\n<p>&nbsp;</p>\r\n<p>&nbsp; &nbsp; Sightseeing Program in Bagan Half-day tour with English Speaking Guide</p>\r\n<p>&nbsp; &nbsp; Half Day Cycling Tour in Bagan.</p>\r\n<p>&nbsp; &nbsp; Bicycle Fees</p>\r\n<p>&nbsp; &nbsp; Tour Program Sightseeing with private air- condition and good experienced car driver.</p>\r\n<p>&nbsp; &nbsp; Purify drinking water and snow Towel along in program.</p>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d244307.58873964142!2d96.01189219913104!3d16.910187746895044!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30c1949e223e196b%3A0x56fbd271f8080bb4!2sYangon!5e0!3m2!1sen!2smm!4v1536903947075', 7, 'f3ccdd27d2000e3f9255a7e3e2c48800.jpg,7fdc1a630c238af0815181f9faa190f5.jpg,601aea1a28bf16e6a6f33e42c42c9226.jpg', '0', '2018-09-14 01:48:22', '2018-09-16 20:33:13'), (11, 'Yangon-Bangan', 6, 38000, 'honeymoon', '<p>The main tourist destination in Myanmar is Bagan, capital of the first Myanmar Empire; one of the richest archaeological sites in South-east Asia. Situated on the eastern bank of the Ayeyawaddy River. 
The Magic of Bagan has inspired visitors to Myanmar for nearly 1000 years.</p>', '<p>day1&nbsp;</p>\r\n<ul>\r\n<li>-Arrive Yangon</li>\r\n<li>&nbsp; -Breakfast&nbsp;</li>\r\n<li>-Visiting Tample&nbsp;</li>\r\n<li>-Lunch&nbsp;</li>\r\n<li>-Visiting famous plase in Yangon</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day2</p>\r\n<ul>\r\n<li>&nbsp; &nbsp; -Breakfast&nbsp;</li>\r\n<li>-Visiting near yangon place</li>\r\n<li>-Lunch</li>\r\n<li>-Yangon Night see view&nbsp;</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day3</p>\r\n<ul>\r\n<li>-Arrive Bagan</li>\r\n<li>-Visit Ananda Pahto pagoda</li>\r\n<li>-Visit Shwesandaw Paya pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Thatbyinnyu Pahto pagoda</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>day4</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Seinnyet Nyima Paya &amp; Seinnyet Ama Pahto&nbsp;</li>\r\n<li>-Visit Htilominlo Pahto Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Gawdawpalin Pahto</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>day5</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit three-storey building pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Dhammayazika Pagoda</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>ERVICE INCLUDES:</p>\r\n<p>&nbsp;</p>\r\n<p>&nbsp; &nbsp; Sightseeing Program in Bagan Half-day tour with English Speaking Guide</p>\r\n<p>&nbsp; &nbsp; Half Day Cycling Tour in Bagan.</p>\r\n<p>&nbsp; &nbsp; Bicycle Fees</p>\r\n<p>&nbsp; &nbsp; Tour Program Sightseeing with private air- condition and good experienced car driver.</p>\r\n<p>&nbsp; &nbsp; Purify drinking water and snow Towel along in program.</p>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d7441.0035729029805!2d94.85448717343688!3d21.172216498338816!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30b5e50393d8ca61%3A0x371a4717c3e89981!2sOld+Bagan!5e0!3m2!1sen!2smm!4v1536904207340', 6, 
'5938f7e89ba34b7c252d2069d7c64b55.jpg,4d0f2462d90086da3ec61e504ccbf011.jpg,ffcd54b60b2f3c170df3ce129f1ebd6a.jpg', '0', '2018-09-14 01:50:40', '2018-09-14 01:50:40'), (12, 'Taunggyi Trip', 3, 40000, 'normal', '<p>Taunggyi is situated in the southern Shan State and is the capital town of Shan States. It is 4.712feet above the sea level and has a moderate climate.&nbsp;</p>\r\n<p>It is situated on a high plateau surrounded by high mountains. As Taunggyi lies on a hill region. the atmosphere is conducive to good health especially by means of the salubrious mountain air. It usually rains in Taunggyi from June to November and average annual rainfall is 32.68 inches. To reach Taunggyi you first have to fly to Heho. about 40 kilometers to the West. because the town itself does not boast an airfield.</p>', '<p>day1</p>\r\n<ul>\r\n<li>-Arrive Yangon</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visiting Tample&nbsp;</li>\r\n<li>-Lunch&nbsp;</li>\r\n<li>-Visiting famous plase in Yangon</li>\r\n<li>-Dinner&nbsp;</li>\r\n</ul>\r\n<p>day2</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visiting near yangon place</li>\r\n<li>-Lunch</li>\r\n<li>-Yangon Night see view&nbsp;</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day3</p>\r\n<ul>\r\n<li>-Arrive Taunggyi</li>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit Myanmar Vineyard</li>\r\n<li>-Visit Shwe Phone Pye Pagoda</li>\r\n<li>-Lunch</li>\r\n<li>-Visit Taunggyi\'s main downtown pagoda.</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p>day4</p>\r\n<ul>\r\n<li>-Breakfast&nbsp;</li>\r\n<li>-Visit St Joseph\'s Cathedral&nbsp;</li>\r\n<li>-Lunch</li>\r\n<li>-Visit St George Anglican Church</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>day5</p>\r\n<ul>\r\n<li>&nbsp; &nbsp; -Breakfast</li>\r\n<li>-Visit Inle lake</li>\r\n<li>-Lunch</li>\r\n<li>-Dinner</li>\r\n</ul>\r\n<p>&nbsp;</p>\r\n<p><span style=\"white-space: pre;\">S</span>ERVICE INCLUDES:</p>\r\n<p>&nbsp;</p>\r\n<p>&nbsp; &nbsp; Sightseeing Program in Bagan Half-day tour with English 
Speaking Guide</p>\r\n<p>&nbsp; &nbsp; Half Day Cycling Tour in Bagan.</p>\r\n<p>&nbsp; &nbsp; Bicycle Fees</p>\r\n<p>&nbsp; &nbsp; Tour Program Sightseeing with private air- condition and good experienced car driver.</p>\r\n<p>&nbsp; &nbsp; Purify drinking water and snow Towel along in program.</p>', 'https://www.google.com/maps/embed?pb=!1m18!1m12!1m3!1d59681.692653221886!2d97.01499914351612!3d20.787007594429326!2m3!1f0!2f0!3f0!3m2!1i1024!2i768!4f13.1!3m3!1m2!1s0x30ce86381a88cc0f%3A0xf53083c4263eb24b!2sTaunggyi!5e0!3m2!1sen!2smm!4v1536904373816', 6, '62b9a8da1ce40b29f257d33eeac3d332.jpg,75b67de2a3e6c3872d618ff9e3ea8cbc.jpg,5cfff6830d6625922b9b343c6562f849.jpg', '0', '2018-09-14 01:55:36', '2018-09-14 01:55:36'); -- -------------------------------------------------------- -- -- Table structure for table `routes` -- CREATE TABLE `routes` ( `id` int(10) UNSIGNED NOT NULL, `from_city` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `to_city` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `type` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL, `price` double NOT NULL, `del_status` enum('1','0') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '0', `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `routes` -- INSERT INTO `routes` (`id`, `from_city`, `to_city`, `type`, `price`, `del_status`, `created_at`, `updated_at`) VALUES (1, 'Yangon', 'Mandalay', 'Car', 12000, '0', '2018-08-30 15:31:20', '2018-09-13 23:28:30'), (2, 'Yangon', 'Mandalay', 'Ship', 8000, '0', '2018-08-30 15:31:37', '2018-09-13 23:29:25'), (3, 'Yangon', 'Mandalay', 'Locomotive', 7500, '0', '2018-08-30 15:31:51', '2018-08-30 15:50:36'), (4, 'Yangon', 'Mandalay', 'Flight', 65000, '0', '2018-08-30 15:32:32', '2018-08-30 15:32:32'), (5, 'Mandalay', 'Yangon', 'Car', 5500, '0', '2018-08-30 15:56:19', '2018-08-30 15:56:19'), (6, 'Mandalay', 'Yangon', 'Ship', 5500, '0', '2018-08-30 
15:56:29', '2018-08-30 15:56:29'), (7, 'Mandalay', 'Yangon', 'Locomotive', 7000, '0', '2018-08-30 15:56:43', '2018-08-30 15:56:43'), (8, 'Mandalay', 'Yangon', 'Flight', 65000, '0', '2018-08-30 15:56:56', '2018-08-30 15:56:56'), (9, 'Yangon', '<NAME>', 'Car', 15000, '0', '2018-08-30 15:57:10', '2018-08-30 15:57:10'), (10, '<NAME>', 'Yangon', 'Car', 15000, '0', '2018-08-30 15:57:24', '2018-08-30 15:57:24'), (11, 'Mandalay', 'Pyin O<NAME>', 'Locomotive', 5000, '0', '2018-08-30 15:57:46', '2018-08-30 18:16:12'), (12, 'Mandalay', '<NAME>', 'Car', 5000, '0', '2018-08-30 15:57:59', '2018-08-30 15:57:59'), (13, 'Yangon', 'Bagan', 'Car', 25000, '0', '2018-08-30 15:59:18', '2018-08-30 15:59:18'), (14, 'Bagan', 'Yangon', 'Car', 25000, '0', '2018-08-30 15:59:26', '2018-08-30 15:59:26'), (15, 'Mandalay', 'Bagan', 'Car', 30000, '0', '2018-08-30 15:59:54', '2018-08-30 15:59:54'), (16, 'Bagan', 'Mandalay', 'Car', 30000, '0', '2018-08-30 16:00:02', '2018-08-30 16:00:02'), (17, 'Mandalay', 'Bagan', 'Ship', 16000, '0', '2018-08-30 16:00:18', '2018-08-30 16:00:18'), (18, 'Yangon', 'Bagan', 'Locomotive', 22000, '0', '2018-08-30 16:00:37', '2018-08-31 02:33:11'), (19, 'Yangon', '<NAME>', 'Car', 18000, '0', '2018-08-30 18:17:39', '2018-08-30 18:17:39'), (20, 'Mandalay', '<NAME>', 'Car', 20000, '0', '2018-08-30 18:17:51', '2018-08-30 18:17:51'), (21, 'Yangon', '<NAME>', 'Flight', 85000, '0', '2018-08-30 18:18:02', '2018-08-30 18:18:02'), (22, 'Mandalay', '<NAME>', 'Flight', 85000, '0', '2018-08-30 18:18:15', '2018-08-30 18:18:15'), (23, 'Yangon', 'Pathein', 'Car', 10000, '0', '2018-08-30 18:28:43', '2018-08-30 18:28:43'), (24, 'Mandalay', 'Pathein', 'Car', 12000, '0', '2018-08-30 18:28:57', '2018-08-30 18:28:57'), (25, 'Sightseeing', 'Sightseeing', 'Car', 0, '0', '2018-08-31 02:25:30', '2018-09-16 22:31:26'), (26, 'Yangon', 'Bagan', 'Flight', 65000, '0', '2018-08-31 02:34:37', '2018-08-31 02:34:37'), (27, 'Yangon', 'Bagan', 'Flight', 65000, '1', '2018-08-31 02:35:37', '2018-08-31 
02:36:18'), (28, 'Yangon', '<NAME>', 'Locomotive', 21000, '0', '2018-08-31 02:37:38', '2018-08-31 02:37:38'), (29, 'Yangon', 'Ayayarwady', 'Car', 12000, '0', '2018-08-31 02:44:33', '2018-08-31 02:44:33'), (30, 'Yangon', 'Ayayarwady', 'Ship', 10000, '0', '2018-08-31 02:45:16', '2018-08-31 02:45:16'), (33, '<NAME>', 'Yangon', 'Locomotive', 14000, '0', '2018-08-31 02:53:47', '2018-08-31 02:53:47'), (34, '<NAME>', 'Yangon', 'Flight', 45000, '0', '2018-08-31 02:55:28', '2018-08-31 02:55:28'), (35, '<NAME>', 'Mandalay', 'Car', 5500, '0', '2018-08-31 02:57:38', '2018-08-31 02:57:38'), (36, '<NAME>', '<NAME>', 'Car', 0, '0', '2018-08-31 02:57:59', '2018-08-31 02:57:59'), (37, 'Mandalay', 'Bagan', 'Locomotive', 25000, '0', '2018-09-02 07:41:30', '2018-09-02 07:41:30'), (38, 'Mandalay', 'Bagan', 'Flight', 85000, '0', '2018-09-02 07:41:50', '2018-09-02 07:41:50'), (40, 'Bagan', 'Yangon', 'Locomotive', 22000, '0', '2018-09-02 07:44:58', '2018-09-02 07:44:58'), (41, 'Bagan', 'Yangon', 'Flight', 75000, '0', '2018-09-02 07:45:17', '2018-09-02 07:45:17'), (42, 'Bagan', 'Mandalay', 'Locomotive', 24000, '0', '2018-09-02 07:45:43', '2018-09-02 07:45:43'), (43, 'Bagan', 'Mandalay', 'Flight', 85000, '0', '2018-09-02 07:46:02', '2018-09-02 07:46:02'), (45, '<NAME>', 'Yangon', 'Car', 18000, '0', '2018-09-02 07:47:55', '2018-09-02 07:47:55'), (46, '<NAME>', 'Yangon', 'Locomotive', 22000, '0', '2018-09-02 07:48:20', '2018-09-02 07:48:20'), (47, '<NAME>', 'Yangon', 'Flight', 85000, '0', '2018-09-02 07:48:39', '2018-09-02 07:48:39'), (52, 'Yangon', '<NAME>', 'Car', 12000, '0', '2018-09-13 23:25:06', '2018-09-13 23:25:06'), (53, 'Yangon', '<NAME>', 'Car', 12000, '0', '2018-09-13 23:25:29', '2018-09-13 23:25:29'), (54, 'Yangon', 'Ngapali', 'Car', 17000, '0', '2018-09-13 23:25:48', '2018-09-13 23:25:48'); -- -------------------------------------------------------- -- -- Table structure for table `users` -- CREATE TABLE `users` ( `id` int(10) UNSIGNED NOT NULL, `bank_id` int(11) NOT NULL, `name` 
varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `email` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `password` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `role` enum('admin','editor','user') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT 'user', `phone` varchar(11) COLLATE utf8mb4_unicode_ci NOT NULL, `address` longtext COLLATE utf8mb4_unicode_ci NOT NULL, `image` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `ban_status` enum('0','1') COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '0', `remember_token` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `users` -- INSERT INTO `users` (`id`, `bank_id`, `name`, `email`, `password`, `role`, `phone`, `address`, `image`, `ban_status`, `remember_token`, `created_at`, `updated_at`) VALUES (1, 4, 'Mr. Admin', '<EMAIL>', <PASSWORD>Lmsk0EADkX4PpalvVmVf5kq9ysy', 'admin', '0944445525', 'My Address', '1fc8381ccac933612936bb617a5ae906.png', '0', 'vpb9Gs6USCdm0GO4TVycjz1nmqts8L4V0UsYXz0E1W9M5cU84o6p6NxKDXGu', '2018-09-14 01:07:13', '2018-09-14 01:07:13'), (2, 2, '<NAME>', '<EMAIL>', <PASSWORD>', 'editor', '0944445525', 'My Address', '5a9877864354995ef1ed8d27160b5e9e.jpg', '0', 'T67ipb5hUAkHYym25TulX0X6ocbnM9Yk3zs7CwAg51BiIIcqXlQWxOCnNd9j', '2018-09-14 02:11:29', '2018-09-18 15:44:40'), (3, 6, '<NAME>', '<EMAIL>', <PASSWORD>', 'user', '0944445525', 'Address', 'e0682e4e7bfb76b132e54d87ba1ec579.jpg', '0', 'uPNC1cKguccxGVFy7x7ltCKWxt16rZo5NufmtlktFqVSQDTijFqlUpDAcMju', '2017-09-14 02:17:16', '2018-09-14 02:17:16'), (4, 7, '<NAME>', '<EMAIL>', <PASSWORD>', 'user', '0944445525', 'Yangon', 'e0682e4e7bfb76b132e54d87ba1ec579.jpg', '0', '', '2018-09-14 02:54:06', '2018-09-14 02:54:06'), (5, 7, 'Editor', '<EMAIL>', <PASSWORD>', 'user', '9999999999', 'akdj;aldksdf', '1197b29c66a036a4909afb9bfc03d54c.jpg', '0', 
'CZ9h9pqktbhz31rxF8L1zdSvgRMWFZhyO03hzGFohYncvMMUP45TTMgpPcET', '2018-10-06 05:34:02', '2018-10-06 05:34:02'); -- -- Indexes for dumped tables -- -- -- Indexes for table `bookings` -- ALTER TABLE `bookings` ADD PRIMARY KEY (`id`); -- -- Indexes for table `cities` -- ALTER TABLE `cities` ADD PRIMARY KEY (`id`); -- -- Indexes for table `hotels` -- ALTER TABLE `hotels` ADD PRIMARY KEY (`id`); -- -- Indexes for table `migrations` -- ALTER TABLE `migrations` ADD PRIMARY KEY (`id`); -- -- Indexes for table `packages` -- ALTER TABLE `packages` ADD PRIMARY KEY (`id`); -- -- Indexes for table `routes` -- ALTER TABLE `routes` ADD PRIMARY KEY (`id`); -- -- Indexes for table `users` -- ALTER TABLE `users` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `users_email_unique` (`email`); -- -- AUTO_INCREMENT for dumped tables -- -- -- AUTO_INCREMENT for table `bookings` -- ALTER TABLE `bookings` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=13; -- -- AUTO_INCREMENT for table `cities` -- ALTER TABLE `cities` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=16; -- -- AUTO_INCREMENT for table `hotels` -- ALTER TABLE `hotels` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=42; -- -- AUTO_INCREMENT for table `migrations` -- ALTER TABLE `migrations` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=21; -- -- AUTO_INCREMENT for table `packages` -- ALTER TABLE `packages` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=13; -- -- AUTO_INCREMENT for table `routes` -- ALTER TABLE `routes` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=55; -- -- AUTO_INCREMENT for table `users` -- ALTER TABLE `users` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6; COMMIT; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- phpMyAdmin SQL Dump -- version 4.8.5 -- https://www.phpmyadmin.net/ -- -- Host: 127.0.0.1 -- Generation Time: Jul 03, 2019 at 02:00 PM -- Server version: 10.1.38-MariaDB -- PHP Version: 7.2.16 SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; SET AUTOCOMMIT = 0; START TRANSACTION; SET time_zone = "+00:00"; /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; -- -- Database: `lms` -- -- -------------------------------------------------------- -- -- Table structure for table `exams` -- CREATE TABLE `exams` ( `id` int(11) NOT NULL, `name` varchar(191) DEFAULT NULL, `created_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP, `updated_at` timestamp NULL DEFAULT CURRENT_TIMESTAMP ) ENGINE=InnoDB DEFAULT CHARSET=latin1; -- -- Dumping data for table `exams` -- INSERT INTO `exams` (`id`, `name`, `created_at`, `updated_at`) VALUES (2, '<NAME>', '2019-07-03 10:42:24', '2019-07-03 10:42:24'); -- -------------------------------------------------------- -- -- Table structure for table `migrations` -- CREATE TABLE `migrations` ( `id` int(10) UNSIGNED NOT NULL, `migration` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `batch` int(11) NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `migrations` -- INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES (1, '2014_10_12_000000_create_users_table', 1), (2, '2014_10_12_100000_create_password_resets_table', 1), (3, '2019_06_21_130336_create_table_stds', 1), (4, '2019_06_21_130419_create_table_subjects', 1), (5, '2019_06_21_130441_create_table_parents', 1), (6, '2019_06_21_130502_create_table_students', 1); -- -------------------------------------------------------- -- -- Table structure for table `parents` -- CREATE TABLE `parents` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE 
utf8mb4_unicode_ci NOT NULL, `email` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `email_verified_at` timestamp NULL DEFAULT NULL, `password` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `parents` -- INSERT INTO `parents` (`id`, `name`, `email`, `email_verified_at`, `password`, `created_at`, `updated_at`) VALUES (5, '<NAME>', '<EMAIL>', NULL, '$2y$10$oA2I/iEM.SmzYzYUt/lKwuxJUOOoQC.0hhrVUUtGzYb30SBlu8rJy', '2019-06-23 04:26:12', '2019-06-23 04:26:12'), (23, '<NAME>', '<EMAIL>', NULL, '$2y$10$8bathSjqG34.5HuU53FSle8b7HUnU4p3M5czkjyzsLRBl6M6q/39e', '2019-06-23 09:48:53', '2019-06-23 09:48:53'), (32, 'sadat', '<EMAIL>', NULL, '$2y$10$2dos9T3x8mWtsJ83a5CdEeui5Y4QlrCM8SzZAI7UW/q1rRiZ8i36m', '2019-06-24 09:23:42', '2019-06-24 09:23:42'); -- -------------------------------------------------------- -- -- Table structure for table `password_resets` -- CREATE TABLE `password_resets` ( `email` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `token` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -------------------------------------------------------- -- -- Table structure for table `questions` -- CREATE TABLE `questions` ( `id` int(11) NOT NULL, `subject_id` int(11) DEFAULT NULL, `std_id` int(11) DEFAULT NULL, `test_id` int(11) DEFAULT NULL, `title` text, `a` text, `b` text, `c` text, `d` text, `ans` varchar(5) DEFAULT NULL, `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `updated_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ) ENGINE=InnoDB DEFAULT CHARSET=latin1; -- -- Dumping data for table `questions` -- INSERT INTO `questions` (`id`, `subject_id`, `std_id`, `test_id`, `title`, `a`, `b`, `c`, `d`, `ans`, `created_at`, `updated_at`) VALUES (1, 4, 13, 2, 'This 
is Test Question.', '10', '20', '30', '80', 'c', '2019-07-02 06:26:01', '2019-07-02 06:26:01'); -- -------------------------------------------------------- -- -- Table structure for table `stds` -- CREATE TABLE `stds` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `stds` -- INSERT INTO `stds` (`id`, `name`, `created_at`, `updated_at`) VALUES (10, 'Class IV', '2019-07-02 10:00:09', '2019-07-02 10:00:09'), (13, 'Class II', '2019-07-02 10:04:23', '2019-07-02 10:04:23'), (14, 'Class III', '2019-07-02 10:05:20', '2019-07-02 10:05:20'), (18, 'Class I', '2019-07-02 10:09:36', '2019-07-02 10:09:36'); -- -------------------------------------------------------- -- -- Table structure for table `students` -- CREATE TABLE `students` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `parent_id` int(11) NOT NULL, `password` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping data for table `students` -- INSERT INTO `students` (`id`, `name`, `parent_id`, `password`, `created_at`, `updated_at`) VALUES (4, '<NAME>', 32, '$2y$10$9WBLEo9VInGxZC/Pba3gQeGH7Bb1<PASSWORD>2kFQ/Ex<PASSWORD>yZqEd<PASSWORD>', '2019-06-29 03:36:30', '2019-06-29 03:36:30'); -- -------------------------------------------------------- -- -- Table structure for table `subjects` -- CREATE TABLE `subjects` ( `id` bigint(20) UNSIGNED NOT NULL, `std_id` int(11) NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Dumping 
data for table `subjects` -- INSERT INTO `subjects` (`id`, `std_id`, `name`, `created_at`, `updated_at`) VALUES (3, 10, 'Science', '2019-07-02 03:35:38', '2019-07-02 03:35:38'), (4, 13, 'Maths', '2019-07-02 03:36:11', '2019-07-02 03:36:11'), (7, 18, 'Maths', '2019-07-03 02:46:24', '2019-07-03 02:46:24'); -- -------------------------------------------------------- -- -- Table structure for table `users` -- CREATE TABLE `users` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `email` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `email_verified_at` timestamp NULL DEFAULT NULL, `password` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `remember_token` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Indexes for dumped tables -- -- -- Indexes for table `exams` -- ALTER TABLE `exams` ADD PRIMARY KEY (`id`); -- -- Indexes for table `migrations` -- ALTER TABLE `migrations` ADD PRIMARY KEY (`id`); -- -- Indexes for table `parents` -- ALTER TABLE `parents` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `parents_email_unique` (`email`); -- -- Indexes for table `password_resets` -- ALTER TABLE `password_resets` ADD KEY `password_resets_email_index` (`email`); -- -- Indexes for table `questions` -- ALTER TABLE `questions` ADD PRIMARY KEY (`id`); -- -- Indexes for table `stds` -- ALTER TABLE `stds` ADD PRIMARY KEY (`id`); -- -- Indexes for table `students` -- ALTER TABLE `students` ADD PRIMARY KEY (`id`); -- -- Indexes for table `subjects` -- ALTER TABLE `subjects` ADD PRIMARY KEY (`id`); -- -- Indexes for table `users` -- ALTER TABLE `users` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `users_email_unique` (`email`); -- -- AUTO_INCREMENT for dumped tables -- -- -- AUTO_INCREMENT for table `exams` -- ALTER TABLE `exams` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4; 
-- -- AUTO_INCREMENT for table `migrations` -- ALTER TABLE `migrations` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7; -- -- AUTO_INCREMENT for table `parents` -- ALTER TABLE `parents` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=33; -- -- AUTO_INCREMENT for table `questions` -- ALTER TABLE `questions` MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3; -- -- AUTO_INCREMENT for table `stds` -- ALTER TABLE `stds` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=19; -- -- AUTO_INCREMENT for table `students` -- ALTER TABLE `students` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5; -- -- AUTO_INCREMENT for table `subjects` -- ALTER TABLE `subjects` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8; -- -- AUTO_INCREMENT for table `users` -- ALTER TABLE `users` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT; COMMIT; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- phpMyAdmin SQL Dump -- version 5.0.4 -- https://www.phpmyadmin.net/ -- -- Servidor: 127.0.0.1 -- Tiempo de generación: 17-02-2021 a las 13:59:54 -- Versión del servidor: 10.4.17-MariaDB -- Versión de PHP: 7.4.13 SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO"; START TRANSACTION; SET time_zone = "+00:00"; /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; /*!40101 SET NAMES utf8mb4 */; -- -- Base de datos: `mdeis_grupo4_cars_insurance` -- -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `branch_offices` -- CREATE TABLE `branch_offices` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `address` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `discount_min` double(12,2) NOT NULL, `state_id` bigint(20) UNSIGNED NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `branch_offices` -- INSERT INTO `branch_offices` (`id`, `name`, `address`, `discount_min`, `state_id`, `created_at`, `updated_at`) VALUES (1, 'Sucursal Sz 1', 'address', 10.00, 1, '2021-02-17 00:21:00', '2021-02-17 09:31:18'), (2, 'Sucursal Sz 2', 'address', 2.50, 1, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (3, 'Sucursal Lpaz 1', 'address', 2.50, 2, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (4, 'Sucursal Lpaz 2', 'address', 2.50, 2, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (5, 'Sucursal Cbba 1', 'address', 2.50, 3, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (6, 'Sucursal Cbba 2', 'address', 2.50, 3, '2021-02-17 00:21:46', '2021-02-17 00:21:46'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `data_rows` -- CREATE TABLE `data_rows` ( `id` int(10) UNSIGNED NOT 
NULL, `data_type_id` int(10) UNSIGNED NOT NULL, `field` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `type` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `display_name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `required` tinyint(1) NOT NULL DEFAULT 0, `browse` tinyint(1) NOT NULL DEFAULT 1, `read` tinyint(1) NOT NULL DEFAULT 1, `edit` tinyint(1) NOT NULL DEFAULT 1, `add` tinyint(1) NOT NULL DEFAULT 1, `delete` tinyint(1) NOT NULL DEFAULT 1, `details` text COLLATE utf8mb4_unicode_ci DEFAULT NULL, `order` int(11) NOT NULL DEFAULT 1 ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `data_rows` -- INSERT INTO `data_rows` (`id`, `data_type_id`, `field`, `type`, `display_name`, `required`, `browse`, `read`, `edit`, `add`, `delete`, `details`, `order`) VALUES (1, 1, 'id', 'number', 'ID', 1, 0, 0, 0, 0, 0, NULL, 1), (2, 1, 'name', 'text', 'Name', 1, 1, 1, 1, 1, 1, NULL, 2), (3, 1, 'email', 'text', 'Email', 1, 1, 1, 1, 1, 1, NULL, 3), (4, 1, 'password', 'password', 'Password', 1, 0, 0, 1, 1, 0, NULL, 4), (5, 1, 'remember_token', 'text', 'Remember Token', 0, 0, 0, 0, 0, 0, NULL, 5), (6, 1, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 0, 0, 0, NULL, 6), (7, 1, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, NULL, 7), (8, 1, 'avatar', 'image', 'Avatar', 0, 1, 1, 1, 1, 1, NULL, 8), (9, 1, 'user_belongsto_role_relationship', 'relationship', 'Role', 0, 1, 1, 1, 1, 0, '{\"model\":\"TCG\\\\Voyager\\\\Models\\\\Role\",\"table\":\"roles\",\"type\":\"belongsTo\",\"column\":\"role_id\",\"key\":\"id\",\"label\":\"display_name\",\"pivot_table\":\"roles\",\"pivot\":0}', 10), (10, 1, 'user_belongstomany_role_relationship', 'relationship', 'Roles', 0, 1, 1, 1, 1, 0, '{\"model\":\"TCG\\\\Voyager\\\\Models\\\\Role\",\"table\":\"roles\",\"type\":\"belongsToMany\",\"column\":\"id\",\"key\":\"id\",\"label\":\"display_name\",\"pivot_table\":\"user_roles\",\"pivot\":\"1\",\"taggable\":\"0\"}', 11), (11, 1, 
'settings', 'hidden', 'Settings', 0, 0, 0, 0, 0, 0, NULL, 12), (12, 2, 'id', 'number', 'ID', 1, 0, 0, 0, 0, 0, NULL, 1), (13, 2, 'name', 'text', 'Name', 1, 1, 1, 1, 1, 1, NULL, 2), (14, 2, 'created_at', 'timestamp', 'Created At', 0, 0, 0, 0, 0, 0, NULL, 3), (15, 2, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, NULL, 4), (16, 3, 'id', 'number', 'ID', 1, 0, 0, 0, 0, 0, NULL, 1), (17, 3, 'name', 'text', 'Name', 1, 1, 1, 1, 1, 1, NULL, 2), (18, 3, 'created_at', 'timestamp', 'Created At', 0, 0, 0, 0, 0, 0, NULL, 3), (19, 3, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, NULL, 4), (20, 3, 'display_name', 'text', 'Display Name', 1, 1, 1, 1, 1, 1, NULL, 5), (21, 1, 'role_id', 'text', 'Role', 1, 1, 1, 1, 1, 1, NULL, 9), (22, 4, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (23, 4, 'name', 'text', 'Name', 1, 1, 1, 1, 1, 1, '{}', 2), (24, 4, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 1, 0, 1, '{}', 3), (25, 4, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 4), (26, 5, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (27, 5, 'name', 'text', 'Name', 1, 1, 1, 1, 1, 1, '{}', 3), (28, 5, 'code', 'text', 'Code', 1, 1, 1, 1, 1, 1, '{}', 4), (29, 5, 'vehicle_type_id', 'text', 'Vehicle Type Id', 1, 1, 1, 1, 1, 1, '{}', 2), (30, 5, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 1, 0, 1, '{}', 5), (31, 5, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 6), (32, 5, 'vehicle_model_belongsto_vehicle_type_relationship', 'relationship', 'Vehicle Type', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\VehicleType\",\"table\":\"vehicle_types\",\"type\":\"belongsTo\",\"column\":\"vehicle_type_id\",\"key\":\"id\",\"label\":\"name\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 7), (34, 6, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (35, 6, 'name', 'text', 'Name', 1, 1, 1, 1, 1, 1, '{}', 3), (36, 6, 'code', 'text', 'Code', 1, 1, 1, 1, 1, 1, '{}', 4), (37, 6, 'logo', 'image', 'Logo', 0, 1, 1, 1, 1, 1, '{}', 
5), (38, 6, 'vehicle_model_id', 'text', 'Vehicle Model Id', 1, 1, 1, 1, 1, 1, '{}', 2), (39, 6, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 1, 0, 1, '{}', 6), (40, 6, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 7), (41, 6, 'vehicle_make_belongsto_vehicle_model_relationship', 'relationship', 'Vehicle Model', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\VehicleModel\",\"table\":\"vehicle_models\",\"type\":\"belongsTo\",\"column\":\"vehicle_model_id\",\"key\":\"id\",\"label\":\"name\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 8), (42, 7, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (43, 7, 'license_plate', 'text', 'License Plate', 1, 1, 1, 1, 1, 1, '{}', 4), (44, 7, 'price_vehicle', 'text', 'Price Vehicle', 1, 1, 1, 1, 1, 1, '{}', 5), (45, 7, 'vehicle_make_id', 'text', 'Vehicle Make Id', 1, 1, 1, 1, 1, 1, '{}', 2), (46, 7, 'year_id', 'text', 'Year Id', 1, 1, 1, 1, 1, 1, '{}', 3), (47, 7, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 1, 0, 1, '{}', 6), (48, 7, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 7), (49, 7, 'vehicle_belongsto_vehicle_make_relationship', 'relationship', 'Vehicle Make', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\VehicleMake\",\"table\":\"vehicle_makes\",\"type\":\"belongsTo\",\"column\":\"vehicle_make_id\",\"key\":\"id\",\"label\":\"name\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 8), (50, 8, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (51, 8, 'name', 'text', 'Name', 1, 1, 1, 1, 1, 1, '{}', 2), (52, 8, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 1, 0, 1, '{}', 3), (53, 8, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 4), (54, 7, 'vehicle_belongsto_year_relationship', 'relationship', 'Year', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\Year\",\"table\":\"years\",\"type\":\"belongsTo\",\"column\":\"year_id\",\"key\":\"id\",\"label\":\"name\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 9), 
(55, 9, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (56, 9, 'name', 'text', 'Name', 1, 1, 1, 1, 1, 1, '{}', 2), (57, 9, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 1, 0, 1, '{}', 3), (58, 9, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 4), (59, 10, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (60, 10, 'percentage', 'text', 'Percentage', 1, 1, 1, 1, 1, 1, '{}', 4), (61, 10, 'vehicle_type_id', 'text', 'Vehicle Type Id', 1, 1, 1, 1, 1, 1, '{}', 2), (62, 10, 'state_id', 'text', 'State Id', 1, 1, 1, 1, 1, 1, '{}', 3), (63, 10, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 0, 0, 1, '{}', 5), (64, 10, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 6), (65, 10, 'state_vehicle_type_belongsto_vehicle_type_relationship', 'relationship', 'Vehicle Type', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\VehicleType\",\"table\":\"vehicle_types\",\"type\":\"belongsTo\",\"column\":\"vehicle_type_id\",\"key\":\"id\",\"label\":\"name\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 7), (66, 10, 'state_vehicle_type_belongsto_state_relationship', 'relationship', 'State', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\State\",\"table\":\"states\",\"type\":\"belongsTo\",\"column\":\"state_id\",\"key\":\"id\",\"label\":\"name\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 8), (67, 11, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (68, 11, 'first_name', 'text', 'First Name', 1, 1, 1, 1, 1, 1, '{}', 2), (69, 11, 'last_name', 'text', 'Last Name', 1, 1, 1, 1, 1, 1, '{}', 3), (70, 11, 'phone_number', 'number', 'Phone Number', 1, 1, 1, 1, 1, 1, '{}', 4), (71, 11, 'dni', 'text', 'Dni', 1, 1, 1, 1, 1, 1, '{}', 5), (72, 11, 'gender', 'text', 'Gender', 1, 1, 1, 1, 1, 1, '{}', 6), (73, 11, 'active', 'checkbox', 'Active', 1, 1, 1, 1, 1, 1, '{}', 7), (74, 11, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 1, 0, 1, '{}', 8), (75, 11, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 9), (76, 
12, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (77, 12, 'name', 'text', 'Name', 1, 1, 1, 1, 1, 1, '{}', 3), (78, 12, 'address', 'text', 'Address', 1, 1, 1, 1, 1, 1, '{}', 4), (79, 12, 'discount_min', 'number', 'Discount Min', 1, 1, 1, 1, 1, 1, '{}', 5), (80, 12, 'state_id', 'text', 'State Id', 1, 1, 1, 1, 1, 1, '{}', 2), (81, 12, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 1, 0, 1, '{}', 6), (82, 12, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 7), (83, 12, 'branch_office_belongsto_state_relationship', 'relationship', 'State', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\State\",\"table\":\"states\",\"type\":\"belongsTo\",\"column\":\"state_id\",\"key\":\"id\",\"label\":\"name\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 8), (84, 13, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (85, 13, 'policy_number', 'number', 'Policy Number', 0, 1, 1, 0, 0, 1, '{}', 5), (86, 13, 'policy_effective_date', 'timestamp', 'Policy Effective Date', 1, 1, 1, 0, 0, 1, '{}', 6), (87, 13, 'total_amount', 'number', 'Total Amount', 1, 1, 1, 0, 0, 1, '{}', 7), (88, 13, 'active', 'checkbox', 'Active', 1, 1, 1, 1, 0, 1, '{}', 8), (89, 13, 'date_start', 'timestamp', 'Date Start', 1, 1, 1, 1, 1, 1, '{}', 9), (90, 13, 'date_end', 'timestamp', 'Date End', 1, 1, 1, 1, 1, 1, '{}', 10), (91, 13, 'type', 'select_dropdown', 'Type', 1, 1, 1, 0, 0, 1, '{\"default\":\"1\",\"options\":{\"1\":\"Quotation\",\"2\":\"Policy\"}}', 11), (92, 13, 'discount', 'number', 'Discount', 1, 1, 1, 0, 0, 1, '{}', 12), (93, 13, 'amount', 'number', 'Amount', 1, 1, 1, 0, 0, 1, '{}', 13), (94, 13, 'vehicle_id', 'text', 'Vehicle Id', 1, 1, 1, 1, 1, 1, '{}', 2), (95, 13, 'driver_id', 'text', 'Driver Id', 1, 1, 1, 1, 1, 1, '{}', 3), (96, 13, 'branch_office_id', 'text', 'Branch Office Id', 1, 1, 1, 1, 1, 1, '{}', 4), (97, 13, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 0, 0, 1, '{}', 14), (98, 13, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 15), 
(99, 13, 'policy_belongsto_branch_office_relationship', 'relationship', 'BranchOffice', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\BranchOffice\",\"table\":\"branch_offices\",\"type\":\"belongsTo\",\"column\":\"branch_office_id\",\"key\":\"id\",\"label\":\"name\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 16), (100, 13, 'policy_belongsto_vehicle_relationship', 'relationship', 'Vehicle', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\Vehicle\",\"table\":\"vehicles\",\"type\":\"belongsTo\",\"column\":\"vehicle_id\",\"key\":\"id\",\"label\":\"license_plate\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 17), (101, 13, 'policy_belongsto_driver_relationship', 'relationship', 'Driver', 0, 1, 1, 1, 1, 1, '{\"model\":\"App\\\\Driver\",\"table\":\"drivers\",\"type\":\"belongsTo\",\"column\":\"driver_id\",\"key\":\"id\",\"label\":\"first_name\",\"pivot_table\":\"branch_offices\",\"pivot\":\"0\",\"taggable\":\"0\"}', 18), (102, 15, 'id', 'text', 'Id', 1, 0, 0, 0, 0, 0, '{}', 1), (103, 15, 'key', 'text', 'Key', 1, 1, 1, 1, 1, 1, '{}', 2), (104, 15, 'table_name', 'text', 'Table Name', 0, 1, 1, 1, 1, 1, '{}', 3), (105, 15, 'created_at', 'timestamp', 'Created At', 0, 1, 1, 0, 0, 1, '{}', 4), (106, 15, 'updated_at', 'timestamp', 'Updated At', 0, 0, 0, 0, 0, 0, '{}', 5); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `data_types` -- CREATE TABLE `data_types` ( `id` int(10) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `slug` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `display_name_singular` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `display_name_plural` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `icon` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `model_name` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `policy_name` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `controller` varchar(191) COLLATE 
utf8mb4_unicode_ci DEFAULT NULL, `description` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `generate_permissions` tinyint(1) NOT NULL DEFAULT 0, `server_side` tinyint(4) NOT NULL DEFAULT 0, `details` text COLLATE utf8mb4_unicode_ci DEFAULT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `data_types` -- INSERT INTO `data_types` (`id`, `name`, `slug`, `display_name_singular`, `display_name_plural`, `icon`, `model_name`, `policy_name`, `controller`, `description`, `generate_permissions`, `server_side`, `details`, `created_at`, `updated_at`) VALUES (1, 'users', 'users', 'User', 'Users', 'voyager-person', 'TCG\\Voyager\\Models\\User', 'TCG\\Voyager\\Policies\\UserPolicy', 'TCG\\Voyager\\Http\\Controllers\\VoyagerUserController', '', 1, 0, NULL, '2021-02-17 00:30:45', '2021-02-17 00:30:45'), (2, 'menus', 'menus', 'Menu', 'Menus', 'voyager-list', 'TCG\\Voyager\\Models\\Menu', NULL, '', '', 1, 0, NULL, '2021-02-17 00:30:45', '2021-02-17 00:30:45'), (3, 'roles', 'roles', 'Role', 'Roles', 'voyager-lock', 'TCG\\Voyager\\Models\\Role', NULL, 'TCG\\Voyager\\Http\\Controllers\\VoyagerRoleController', '', 1, 0, NULL, '2021-02-17 00:30:45', '2021-02-17 00:30:45'), (4, 'vehicle_types', 'vehicle-types', 'Vehicle Type', 'Vehicle Types', 'voyager-company', 'App\\VehicleType', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"name\",\"order_direction\":\"asc\",\"default_search_key\":\"id\"}', '2021-02-17 00:53:00', '2021-02-17 00:53:00'), (5, 'vehicle_models', 'vehicle-models', 'Vehicle Model', 'Vehicle Models', 'voyager-company', 'App\\VehicleModel', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"name\",\"order_direction\":\"asc\",\"default_search_key\":\"id\",\"scope\":null}', '2021-02-17 00:54:04', '2021-02-17 00:57:17'), (6, 'vehicle_makes', 'vehicle-makes', 'Vehicle 
Make', 'Vehicle Makes', 'voyager-company', 'App\\VehicleMake', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"name\",\"order_direction\":\"asc\",\"default_search_key\":\"id\",\"scope\":null}', '2021-02-17 01:03:17', '2021-02-17 01:04:32'), (7, 'vehicles', 'vehicles', 'Vehicle', 'Vehicles', 'voyager-company', 'App\\Vehicle', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"license_plate\",\"order_direction\":\"asc\",\"default_search_key\":\"id\",\"scope\":null}', '2021-02-17 01:05:50', '2021-02-17 01:08:35'), (8, 'years', 'years', 'Year', 'Years', 'voyager-company', 'App\\Year', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"name\",\"order_direction\":\"asc\",\"default_search_key\":\"id\"}', '2021-02-17 01:07:41', '2021-02-17 01:07:41'), (9, 'states', 'states', 'State', 'States', 'voyager-company', 'App\\State', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"name\",\"order_direction\":\"asc\",\"default_search_key\":\"id\"}', '2021-02-17 01:12:16', '2021-02-17 01:12:16'), (10, 'state_vehicle_types', 'state-vehicle-types', 'State Vehicle Type', 'State Vehicle Types', 'voyager-company', 'App\\StateVehicleType', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"id\",\"order_direction\":\"asc\",\"default_search_key\":\"id\",\"scope\":null}', '2021-02-17 01:13:49', '2021-02-17 09:30:45'), (11, 'drivers', 'drivers', 'Driver', 'Drivers', NULL, 'App\\Driver', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"first_name\",\"order_direction\":\"asc\",\"default_search_key\":\"id\"}', '2021-02-17 01:17:15', '2021-02-17 01:17:15'), (12, 'branch_offices', 'branch-offices', 'Branch Office', 'Branch Offices', 'voyager-company', 'App\\BranchOffice', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"id\",\"order_direction\":\"asc\",\"default_search_key\":\"id\",\"scope\":null}', 
'2021-02-17 01:21:40', '2021-02-17 01:22:27'), (13, 'policies', 'policies', 'Policy', 'Policies', 'voyager-company', 'App\\Policy', NULL, 'App\\Http\\Controllers\\PolicyController', NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"id\",\"order_direction\":\"asc\",\"default_search_key\":\"id\",\"scope\":null}', '2021-02-17 03:45:59', '2021-02-17 16:26:00'), (15, 'permissions', 'permissions', 'Permission', 'Permissions', 'voyager-lock', 'TCG\\Voyager\\Models\\Permission', NULL, NULL, NULL, 1, 1, '{\"order_column\":\"id\",\"order_display_column\":\"key\",\"order_direction\":\"asc\",\"default_search_key\":\"id\",\"scope\":null}', '2021-02-17 07:44:53', '2021-02-17 08:01:37'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `drivers` -- CREATE TABLE `drivers` ( `id` bigint(20) UNSIGNED NOT NULL, `first_name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `last_name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `phone_number` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `dni` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `gender` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `active` tinyint(4) NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `drivers` -- INSERT INTO `drivers` (`id`, `first_name`, `last_name`, `phone_number`, `dni`, `gender`, `active`, `created_at`, `updated_at`) VALUES (1, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (2, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (3, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (4, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (5, 'Name', 'Last Name', '70000000', 
'70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (6, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (7, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (8, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (9, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (10, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (11, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (12, 'Name', 'Last Name', '70000000', '70000000', 'M', 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `menus` -- CREATE TABLE `menus` ( `id` int(10) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `menus` -- INSERT INTO `menus` (`id`, `name`, `created_at`, `updated_at`) VALUES (1, 'admin', '2021-02-17 00:30:46', '2021-02-17 00:30:46'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `menu_items` -- CREATE TABLE `menu_items` ( `id` int(10) UNSIGNED NOT NULL, `menu_id` int(10) UNSIGNED DEFAULT NULL, `title` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `url` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `target` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT '_self', `icon_class` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `color` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `parent_id` int(11) DEFAULT NULL, `order` int(11) NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` 
timestamp NULL DEFAULT NULL, `route` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `parameters` text COLLATE utf8mb4_unicode_ci DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `menu_items` -- INSERT INTO `menu_items` (`id`, `menu_id`, `title`, `url`, `target`, `icon_class`, `color`, `parent_id`, `order`, `created_at`, `updated_at`, `route`, `parameters`) VALUES (1, 1, 'Dashboard', '', '_self', 'voyager-boat', NULL, NULL, 1, '2021-02-17 00:30:47', '2021-02-17 00:30:47', 'voyager.dashboard', NULL), (2, 1, 'Media', '', '_self', 'voyager-images', NULL, NULL, 5, '2021-02-17 00:30:47', '2021-02-17 07:45:06', 'voyager.media.index', NULL), (3, 1, 'Users', '', '_self', 'voyager-person', NULL, NULL, 4, '2021-02-17 00:30:47', '2021-02-17 07:45:06', 'voyager.users.index', NULL), (4, 1, 'Roles', '', '_self', 'voyager-lock', NULL, NULL, 2, '2021-02-17 00:30:47', '2021-02-17 00:30:47', 'voyager.roles.index', NULL), (5, 1, 'Tools', '', '_self', 'voyager-tools', NULL, NULL, 6, '2021-02-17 00:30:47', '2021-02-17 07:45:06', NULL, NULL), (6, 1, 'Menu Builder', '', '_self', 'voyager-list', NULL, 5, 1, '2021-02-17 00:30:47', '2021-02-17 03:58:14', 'voyager.menus.index', NULL), (7, 1, 'Database', '', '_self', 'voyager-data', NULL, 5, 2, '2021-02-17 00:30:47', '2021-02-17 03:58:14', 'voyager.database.index', NULL), (8, 1, 'Compass', '', '_self', 'voyager-compass', NULL, 5, 3, '2021-02-17 00:30:47', '2021-02-17 03:58:14', 'voyager.compass.index', NULL), (9, 1, 'BREAD', '', '_self', 'voyager-bread', NULL, 5, 4, '2021-02-17 00:30:47', '2021-02-17 03:58:14', 'voyager.bread.index', NULL), (10, 1, 'Settings', '', '_self', 'voyager-settings', NULL, NULL, 7, '2021-02-17 00:30:47', '2021-02-17 07:45:06', 'voyager.settings.index', NULL), (11, 1, 'Vehicle Types', '', '_self', 'voyager-company', NULL, 22, 1, '2021-02-17 00:53:01', '2021-02-17 03:59:14', 'voyager.vehicle-types.index', NULL), (12, 1, 'Vehicle Models', '', 
'_self', 'voyager-company', NULL, 22, 2, '2021-02-17 00:54:05', '2021-02-17 03:59:15', 'voyager.vehicle-models.index', NULL), (13, 1, 'Vehicle Makes', '', '_self', 'voyager-company', NULL, 22, 3, '2021-02-17 01:03:17', '2021-02-17 03:59:20', 'voyager.vehicle-makes.index', NULL), (14, 1, 'Vehicles', '', '_self', 'voyager-company', NULL, 22, 4, '2021-02-17 01:05:51', '2021-02-17 03:59:24', 'voyager.vehicles.index', NULL), (15, 1, 'Years', '', '_self', 'voyager-company', NULL, 21, 3, '2021-02-17 01:07:41', '2021-02-17 09:55:08', 'voyager.years.index', NULL), (16, 1, 'States', '', '_self', 'voyager-company', NULL, 21, 2, '2021-02-17 01:12:16', '2021-02-17 09:55:08', 'voyager.states.index', NULL), (17, 1, 'State Vehicle Types', '', '_self', 'voyager-company', NULL, 21, 4, '2021-02-17 01:13:49', '2021-02-17 09:55:08', 'voyager.state-vehicle-types.index', NULL), (18, 1, 'Drivers', '', '_self', 'voyager-company', '#000000', NULL, 10, '2021-02-17 01:17:15', '2021-02-17 07:45:06', 'voyager.drivers.index', 'null'), (19, 1, 'Branch Offices', '', '_self', 'voyager-company', NULL, NULL, 11, '2021-02-17 01:21:40', '2021-02-17 07:45:06', 'voyager.branch-offices.index', NULL), (20, 1, 'Policies', '', '_self', 'voyager-company', NULL, NULL, 12, '2021-02-17 03:45:59', '2021-02-17 07:45:06', 'voyager.policies.index', NULL), (21, 1, 'Config', '', '_self', 'voyager-settings', '#000000', NULL, 8, '2021-02-17 03:58:04', '2021-02-17 07:45:06', NULL, ''), (22, 1, 'Vehicles', '', '_self', 'voyager-company', '#000000', NULL, 9, '2021-02-17 03:59:02', '2021-02-17 07:45:06', NULL, ''), (23, 1, 'Permissions', '', '_self', 'voyager-lock', NULL, NULL, 3, '2021-02-17 07:44:53', '2021-02-17 07:45:06', 'voyager.permissions.index', NULL); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `migrations` -- CREATE TABLE `migrations` ( `id` int(10) UNSIGNED NOT NULL, `migration` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `batch` int(11) NOT NULL ) 
ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `migrations` -- INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES (1, '2014_10_12_000000_create_users_table', 1), (2, '2014_10_12_100000_create_password_resets_table', 1), (3, '2016_01_01_000000_add_voyager_user_fields', 1), (4, '2016_01_01_000000_create_data_types_table', 1), (5, '2016_05_19_173453_create_menu_table', 1), (6, '2016_10_21_190000_create_roles_table', 1), (7, '2016_10_21_190000_create_settings_table', 1), (8, '2016_11_30_135954_create_permission_table', 1), (9, '2016_11_30_141208_create_permission_role_table', 1), (10, '2016_12_26_201236_data_types__add__server_side', 1), (11, '2017_01_13_000000_add_route_to_menu_items_table', 1), (12, '2017_01_14_005015_create_translations_table', 1), (13, '2017_01_15_000000_make_table_name_nullable_in_permissions_table', 1), (14, '2017_03_06_000000_add_controller_to_data_types_table', 1), (15, '2017_04_21_000000_add_order_to_data_rows_table', 1), (16, '2017_07_05_210000_add_policyname_to_data_types_table', 1), (17, '2017_08_05_000000_add_group_to_settings_table', 1), (18, '2017_11_26_013050_add_user_role_relationship', 1), (19, '2017_11_26_015000_create_user_roles_table', 1), (20, '2018_03_11_000000_add_user_settings', 1), (21, '2018_03_14_000000_add_details_to_data_types_table', 1), (22, '2018_03_16_000000_make_settings_value_nullable', 1), (23, '2018_12_10_103745_create_vehicle_types_table', 1), (24, '2018_12_10_103746_create_velicle_models_table', 1), (25, '2018_12_10_103747_create_vehicle_makes_table', 1), (26, '2021_02_16_031302_create_states_table', 1), (27, '2021_02_16_031303_create_branch_offices_table', 1), (28, '2021_02_16_031304_create_state_vehicle_types_table', 1), (29, '2021_02_16_031305_create_years_table', 1), (30, '2021_02_16_031429_create_vehicles_table', 1), (31, '2021_02_16_031559_create_drivers_table', 1), (32, '2021_02_16_034024_create_policies_table', 1); -- 
-------------------------------------------------------- -- -- Estructura de tabla para la tabla `password_resets` -- CREATE TABLE `password_resets` ( `email` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `token` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `permissions` -- CREATE TABLE `permissions` ( `id` bigint(20) UNSIGNED NOT NULL, `key` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `table_name` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `permissions` -- INSERT INTO `permissions` (`id`, `key`, `table_name`, `created_at`, `updated_at`) VALUES (1, 'browse_admin', NULL, '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (2, 'browse_bread', NULL, '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (3, 'browse_database', NULL, '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (4, 'browse_media', NULL, '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (5, 'browse_compass', NULL, '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (6, 'browse_menus', 'menus', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (7, 'read_menus', 'menus', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (8, 'edit_menus', 'menus', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (9, 'add_menus', 'menus', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (10, 'delete_menus', 'menus', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (11, 'browse_roles', 'roles', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (12, 'read_roles', 'roles', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (13, 'edit_roles', 'roles', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (14, 'add_roles', 'roles', '2021-02-17 00:30:47', 
'2021-02-17 00:30:47'), (15, 'delete_roles', 'roles', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (16, 'browse_users', 'users', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (17, 'read_users', 'users', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (18, 'edit_users', 'users', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (19, 'add_users', 'users', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (20, 'delete_users', 'users', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (21, 'browse_settings', 'settings', '2021-02-17 00:30:48', '2021-02-17 00:30:48'), (22, 'read_settings', 'settings', '2021-02-17 00:30:48', '2021-02-17 00:30:48'), (23, 'edit_settings', 'settings', '2021-02-17 00:30:48', '2021-02-17 00:30:48'), (24, 'add_settings', 'settings', '2021-02-17 00:30:48', '2021-02-17 00:30:48'), (25, 'delete_settings', 'settings', '2021-02-17 00:30:48', '2021-02-17 00:30:48'), (26, 'browse_vehicle_types', 'vehicle_types', '2021-02-17 00:53:00', '2021-02-17 00:53:00'), (27, 'read_vehicle_types', 'vehicle_types', '2021-02-17 00:53:00', '2021-02-17 00:53:00'), (28, 'edit_vehicle_types', 'vehicle_types', '2021-02-17 00:53:00', '2021-02-17 00:53:00'), (29, 'add_vehicle_types', 'vehicle_types', '2021-02-17 00:53:00', '2021-02-17 00:53:00'), (30, 'delete_vehicle_types', 'vehicle_types', '2021-02-17 00:53:01', '2021-02-17 00:53:01'), (31, 'browse_vehicle_models', 'vehicle_models', '2021-02-17 00:54:05', '2021-02-17 00:54:05'), (32, 'read_vehicle_models', 'vehicle_models', '2021-02-17 00:54:05', '2021-02-17 00:54:05'), (33, 'edit_vehicle_models', 'vehicle_models', '2021-02-17 00:54:05', '2021-02-17 00:54:05'), (34, 'add_vehicle_models', 'vehicle_models', '2021-02-17 00:54:05', '2021-02-17 00:54:05'), (35, 'delete_vehicle_models', 'vehicle_models', '2021-02-17 00:54:05', '2021-02-17 00:54:05'), (36, 'browse_vehicle_makes', 'vehicle_makes', '2021-02-17 01:03:17', '2021-02-17 01:03:17'), (37, 'read_vehicle_makes', 'vehicle_makes', '2021-02-17 01:03:17', '2021-02-17 01:03:17'), 
(38, 'edit_vehicle_makes', 'vehicle_makes', '2021-02-17 01:03:17', '2021-02-17 01:03:17'), (39, 'add_vehicle_makes', 'vehicle_makes', '2021-02-17 01:03:17', '2021-02-17 01:03:17'), (40, 'delete_vehicle_makes', 'vehicle_makes', '2021-02-17 01:03:17', '2021-02-17 01:03:17'), (41, 'browse_vehicles', 'vehicles', '2021-02-17 01:05:50', '2021-02-17 01:05:50'), (42, 'read_vehicles', 'vehicles', '2021-02-17 01:05:50', '2021-02-17 01:05:50'), (43, 'edit_vehicles', 'vehicles', '2021-02-17 01:05:50', '2021-02-17 01:05:50'), (44, 'add_vehicles', 'vehicles', '2021-02-17 01:05:50', '2021-02-17 01:05:50'), (45, 'delete_vehicles', 'vehicles', '2021-02-17 01:05:50', '2021-02-17 01:05:50'), (46, 'browse_years', 'years', '2021-02-17 01:07:41', '2021-02-17 01:07:41'), (47, 'read_years', 'years', '2021-02-17 01:07:41', '2021-02-17 01:07:41'), (48, 'edit_years', 'years', '2021-02-17 01:07:41', '2021-02-17 01:07:41'), (49, 'add_years', 'years', '2021-02-17 01:07:41', '2021-02-17 01:07:41'), (50, 'delete_years', 'years', '2021-02-17 01:07:41', '2021-02-17 01:07:41'), (51, 'browse_states', 'states', '2021-02-17 01:12:16', '2021-02-17 01:12:16'), (52, 'read_states', 'states', '2021-02-17 01:12:16', '2021-02-17 01:12:16'), (53, 'edit_states', 'states', '2021-02-17 01:12:16', '2021-02-17 01:12:16'), (54, 'add_states', 'states', '2021-02-17 01:12:16', '2021-02-17 01:12:16'), (55, 'delete_states', 'states', '2021-02-17 01:12:16', '2021-02-17 01:12:16'), (56, 'browse_state_vehicle_types', 'state_vehicle_types', '2021-02-17 01:13:49', '2021-02-17 01:13:49'), (57, 'read_state_vehicle_types', 'state_vehicle_types', '2021-02-17 01:13:49', '2021-02-17 01:13:49'), (58, 'edit_state_vehicle_types', 'state_vehicle_types', '2021-02-17 01:13:49', '2021-02-17 01:13:49'), (59, 'add_state_vehicle_types', 'state_vehicle_types', '2021-02-17 01:13:49', '2021-02-17 01:13:49'), (60, 'delete_state_vehicle_types', 'state_vehicle_types', '2021-02-17 01:13:49', '2021-02-17 01:13:49'), (61, 'browse_drivers', 'drivers', 
'2021-02-17 01:17:15', '2021-02-17 01:17:15'), (62, 'read_drivers', 'drivers', '2021-02-17 01:17:15', '2021-02-17 01:17:15'), (63, 'edit_drivers', 'drivers', '2021-02-17 01:17:15', '2021-02-17 01:17:15'), (64, 'add_drivers', 'drivers', '2021-02-17 01:17:15', '2021-02-17 01:17:15'), (65, 'delete_drivers', 'drivers', '2021-02-17 01:17:15', '2021-02-17 01:17:15'), (66, 'browse_branch_offices', 'branch_offices', '2021-02-17 01:21:40', '2021-02-17 01:21:40'), (67, 'read_branch_offices', 'branch_offices', '2021-02-17 01:21:40', '2021-02-17 01:21:40'), (68, 'edit_branch_offices', 'branch_offices', '2021-02-17 01:21:40', '2021-02-17 01:21:40'), (69, 'add_branch_offices', 'branch_offices', '2021-02-17 01:21:40', '2021-02-17 01:21:40'), (70, 'delete_branch_offices', 'branch_offices', '2021-02-17 01:21:40', '2021-02-17 01:21:40'), (71, 'browse_policies', 'policies', '2021-02-17 03:45:59', '2021-02-17 03:45:59'), (72, 'read_policies', 'policies', '2021-02-17 03:45:59', '2021-02-17 03:45:59'), (73, 'edit_policies', 'policies', '2021-02-17 03:45:59', '2021-02-17 03:45:59'), (74, 'add_policies', 'policies', '2021-02-17 03:45:59', '2021-02-17 03:45:59'), (75, 'delete_policies', 'policies', '2021-02-17 03:45:59', '2021-02-17 03:45:59'), (76, 'browse_permissions', 'permissions', '2021-02-17 07:44:53', '2021-02-17 07:44:53'), (77, 'read_permissions', 'permissions', '2021-02-17 07:44:53', '2021-02-17 07:44:53'), (78, 'edit_permissions', 'permissions', '2021-02-17 07:44:53', '2021-02-17 07:44:53'), (79, 'add_permissions', 'permissions', '2021-02-17 07:44:53', '2021-02-17 07:44:53'), (80, 'delete_permissions', 'permissions', '2021-02-17 07:44:53', '2021-02-17 07:44:53'), (82, 'quotation_policies', 'policies', '2021-02-17 07:49:06', '2021-02-17 08:01:46'), (83, 'reports_policies', 'policies', '2021-02-17 16:55:07', '2021-02-17 16:55:34'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `permission_role` -- CREATE TABLE `permission_role` 
( `permission_id` bigint(20) UNSIGNED NOT NULL, `role_id` bigint(20) UNSIGNED NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `permission_role` -- INSERT INTO `permission_role` (`permission_id`, `role_id`) VALUES (1, 1), (1, 3), (2, 1), (3, 1), (4, 1), (5, 1), (6, 1), (7, 1), (8, 1), (9, 1), (10, 1), (11, 1), (12, 1), (13, 1), (14, 1), (15, 1), (16, 1), (17, 1), (18, 1), (19, 1), (20, 1), (21, 1), (22, 1), (23, 1), (24, 1), (25, 1), (26, 1), (27, 1), (28, 1), (29, 1), (30, 1), (31, 1), (32, 1), (33, 1), (34, 1), (35, 1), (36, 1), (37, 1), (38, 1), (39, 1), (40, 1), (41, 1), (42, 1), (43, 1), (44, 1), (45, 1), (46, 1), (47, 1), (48, 1), (49, 1), (50, 1), (51, 1), (52, 1), (53, 1), (54, 1), (55, 1), (56, 1), (57, 1), (58, 1), (59, 1), (60, 1), (61, 1), (62, 1), (63, 1), (64, 1), (65, 1), (66, 1), (67, 1), (68, 1), (69, 1), (70, 1), (71, 1), (71, 3), (72, 1), (72, 3), (73, 1), (73, 3), (74, 1), (74, 3), (75, 1), (75, 3), (76, 1), (77, 1), (78, 1), (79, 1), (80, 1), (82, 1), (82, 3), (83, 1); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `policies` -- CREATE TABLE `policies` ( `id` bigint(20) UNSIGNED NOT NULL, `policy_number` int(11) UNSIGNED DEFAULT NULL, `policy_effective_date` date NOT NULL, `total_amount` double(12,2) NOT NULL, `active` tinyint(4) NOT NULL, `date_start` date NOT NULL, `date_end` date NOT NULL, `type` tinyint(4) NOT NULL, `discount` double(12,2) NOT NULL, `amount` double(12,2) NOT NULL, `vehicle_id` bigint(20) UNSIGNED NOT NULL, `driver_id` bigint(20) UNSIGNED NOT NULL, `branch_office_id` bigint(20) UNSIGNED NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `policies` -- INSERT INTO `policies` (`id`, `policy_number`, `policy_effective_date`, `total_amount`, `active`, `date_start`, 
`date_end`, `type`, `discount`, `amount`, `vehicle_id`, `driver_id`, `branch_office_id`, `created_at`, `updated_at`) VALUES (1, 1001, '2021-02-16', 300.00, 0, '2021-02-16', '2022-02-16', 1, 0.00, 300.00, 1, 1, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (2, 1002, '2021-02-16', 600.00, 1, '2021-02-16', '2023-02-16', 2, 0.00, 300.00, 2, 2, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (3, 1003, '2021-02-16', 300.00, 0, '2021-02-16', '2022-02-16', 2, 0.00, 300.00, 3, 3, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (4, 1004, '2021-02-16', 400.00, 1, '2021-02-16', '2022-02-16', 2, 0.00, 400.00, 4, 4, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (5, 1005, '2021-02-16', 450.00, 1, '2021-02-16', '2022-02-16', 2, 0.00, 450.00, 5, 5, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (6, 1006, '2021-02-16', 1800.00, 0, '2021-02-16', '2024-02-16', 1, 0.00, 600.00, 6, 6, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (7, 1007, '2021-02-16', 300.00, 0, '2021-02-16', '2022-02-16', 1, 0.00, 300.00, 7, 7, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (8, 1008, '2021-02-16', 550.00, 0, '2021-02-16', '2022-02-16', 1, 0.00, 550.00, 8, 8, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (9, 1009, '2021-02-16', 345.00, 0, '2021-02-16', '2022-02-16', 1, 0.00, 345.00, 9, 9, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (10, 1010, '2021-02-16', 800.00, 1, '2021-02-16', '2022-02-16', 2, 0.00, 800.00, 10, 10, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (11, 1011, '2021-02-16', 450.00, 0, '2021-02-16', '2022-02-16', 2, 0.00, 450.00, 11, 11, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (12, 1012, '2021-02-16', 1500.00, 1, '2021-02-16', '2026-02-16', 2, 0.00, 1500.00, 12, 12, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (13, 1013, '2021-02-17', 31000.00, 1, '2021-02-16', '2022-07-07', 1, 0.00, 31000.00, 1, 1, 1, '2021-02-17 06:27:41', '2021-02-17 06:27:41'), (14, 1014, '2021-02-17', 0.00, 1, '2021-02-16', '2021-02-16', 1, 0.00, 0.00, 1, 1, 2, 
'2021-02-17 06:38:58', '2021-02-17 06:38:58'), (15, 1015, '2021-02-17', 0.00, 1, '2021-02-16', '2021-02-16', 1, 0.00, 0.00, 1, 1, 1, '2021-02-17 06:39:24', '2021-02-17 06:39:24'), (16, 1016, '2021-02-17', 8323.33, 1, '2021-02-16', '2021-03-16', 2, 833.33, 8333.33, 1, 1, 1, '2021-02-17 06:58:26', '2021-02-17 09:31:33'), (17, 1017, '2021-02-17', 25833.33, 1, '2021-02-16', '2021-12-23', 2, 0.00, 25833.33, 1, 1, 1, '2021-02-17 07:12:52', '2021-02-17 09:24:38'), (18, 1018, '2021-02-17', 99990.00, 1, '2021-02-17', '2022-12-29', 2, 10000.00, 100000.00, 1, 1, 1, '2021-02-17 09:34:02', '2021-02-17 09:35:47'), (19, 1019, '2021-02-17', 90000.00, 1, '2021-02-17', '2022-12-30', 2, 10000.00, 100000.00, 1, 1, 1, '2021-02-17 09:37:27', '2021-02-17 09:37:47'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `roles` -- CREATE TABLE `roles` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `display_name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `roles` -- INSERT INTO `roles` (`id`, `name`, `display_name`, `created_at`, `updated_at`) VALUES (1, 'admin', 'Administrator', '2021-02-17 00:26:49', '2021-02-17 00:26:49'), (2, 'user', 'Normal User', '2021-02-17 00:30:47', '2021-02-17 00:30:47'), (3, 'insurer', 'Insurer', '2021-02-17 16:41:14', '2021-02-17 16:41:14'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `settings` -- CREATE TABLE `settings` ( `id` int(10) UNSIGNED NOT NULL, `key` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `display_name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `value` text COLLATE utf8mb4_unicode_ci DEFAULT NULL, `details` text COLLATE utf8mb4_unicode_ci DEFAULT NULL, `type` varchar(191) COLLATE 
utf8mb4_unicode_ci NOT NULL, `order` int(11) NOT NULL DEFAULT 1, `group` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `settings` -- INSERT INTO `settings` (`id`, `key`, `display_name`, `value`, `details`, `type`, `order`, `group`) VALUES (1, 'site.title', 'Site Title', 'Site Title', '', 'text', 1, 'Site'), (2, 'site.description', 'Site Description', 'Site Description', '', 'text', 2, 'Site'), (3, 'site.logo', 'Site Logo', '', '', 'image', 3, 'Site'), (4, 'site.google_analytics_tracking_id', 'Google Analytics Tracking ID', '', '', 'text', 4, 'Site'), (5, 'admin.bg_image', 'Admin Background Image', '', '', 'image', 5, 'Admin'), (6, 'admin.title', 'Admin Title', 'Voyager', '', 'text', 1, 'Admin'), (7, 'admin.description', 'Admin Description', 'Welcome to Voyager. The Missing Admin for Laravel', '', 'text', 2, 'Admin'), (8, 'admin.loader', 'Admin Loader', '', '', 'image', 3, 'Admin'), (9, 'admin.icon_image', 'Admin Icon Image', '', '', 'image', 4, 'Admin'), (10, 'admin.google_analytics_client_id', 'Google Analytics Client ID (used for admin dashboard)', '', '', 'text', 1, 'Admin'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `states` -- CREATE TABLE `states` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `states` -- INSERT INTO `states` (`id`, `name`, `created_at`, `updated_at`) VALUES (1, 'Santa Cruz', '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (2, 'La Paz', '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (3, 'Cochabamba', '2021-02-17 00:21:46', '2021-02-17 00:21:46'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla 
`state_vehicle_types` -- CREATE TABLE `state_vehicle_types` ( `id` bigint(20) UNSIGNED NOT NULL, `percentage` double(12,2) NOT NULL, `vehicle_type_id` bigint(20) UNSIGNED NOT NULL, `state_id` bigint(20) UNSIGNED NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `state_vehicle_types` -- INSERT INTO `state_vehicle_types` (`id`, `percentage`, `vehicle_type_id`, `state_id`, `created_at`, `updated_at`) VALUES (1, 10.00, 1, 1, '2021-02-17 00:21:00', '2021-02-17 09:30:27'), (2, 3.00, 1, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (3, 2.80, 1, 3, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (4, 3.50, 2, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (5, 2.50, 2, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (6, 2.40, 2, 3, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (7, 3.50, 3, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (8, 2.50, 3, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (9, 2.40, 3, 3, '2021-02-17 00:21:47', '2021-02-17 00:21:47'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `translations` -- CREATE TABLE `translations` ( `id` int(10) UNSIGNED NOT NULL, `table_name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `column_name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `foreign_key` int(10) UNSIGNED NOT NULL, `locale` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `value` text COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `users` -- CREATE TABLE `users` ( `id` bigint(20) UNSIGNED NOT NULL, `role_id` bigint(20) UNSIGNED DEFAULT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT 
NULL, `email` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `avatar` varchar(191) COLLATE utf8mb4_unicode_ci DEFAULT 'users/default.png', `email_verified_at` timestamp NULL DEFAULT NULL, `password` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `remember_token` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL, `settings` text COLLATE utf8mb4_unicode_ci DEFAULT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `users` -- INSERT INTO `users` (`id`, `role_id`, `name`, `email`, `avatar`, `email_verified_at`, `password`, `remember_token`, `settings`, `created_at`, `updated_at`) VALUES (1, 1, 'Admin', '<EMAIL>', 'users/default.png', NULL, '$2y$10$/cvMDrVUYzC1xIogQWEkJuss0t90Hl2hjKwCJjJ4UXVGFUkb6ids6', NULL, NULL, '2021-02-17 00:24:03', '2021-02-17 00:26:49'), (2, 3, 'Insurer', '<EMAIL>', 'users/default.png', NULL, '$2y$10$v/i2/7Nx6RZzRhy8wDb0pOLCMI1qISJ/OdhMo/GqUhLKXmiEAOp6i', NULL, '{\"locale\":\"en\"}', '2021-02-17 16:41:41', '2021-02-17 16:41:41'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `user_roles` -- CREATE TABLE `user_roles` ( `user_id` bigint(20) UNSIGNED NOT NULL, `role_id` bigint(20) UNSIGNED NOT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `vehicles` -- CREATE TABLE `vehicles` ( `id` bigint(20) UNSIGNED NOT NULL, `license_plate` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `price_vehicle` double(12,2) NOT NULL, `vehicle_make_id` bigint(20) UNSIGNED NOT NULL, `year_id` bigint(20) UNSIGNED NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `vehicles` -- INSERT INTO `vehicles` 
(`id`, `license_plate`, `price_vehicle`, `vehicle_make_id`, `year_id`, `created_at`, `updated_at`) VALUES (1, 'PLC001', 10000.00, 1, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (2, 'PLC002', 12000.00, 2, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (3, 'PLC003', 13000.00, 3, 3, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (4, 'PLC004', 15000.00, 4, 4, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (5, 'PLC005', 17000.00, 5, 5, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (6, 'PLC006', 22000.00, 6, 6, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (7, 'PLC007', 10000.00, 1, 2, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (8, 'PLC008', 12000.00, 2, 3, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (9, 'PLC009', 13000.00, 3, 4, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (10, 'PLC0010', 15000.00, 4, 5, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (11, 'PLC0011', 17000.00, 5, 6, '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (12, 'PLC0012', 22000.00, 6, 1, '2021-02-17 00:21:47', '2021-02-17 00:21:47'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `vehicle_makes` -- CREATE TABLE `vehicle_makes` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `code` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `logo` text COLLATE utf8mb4_unicode_ci DEFAULT NULL, `vehicle_model_id` bigint(20) UNSIGNED NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `vehicle_makes` -- INSERT INTO `vehicle_makes` (`id`, `name`, `code`, `logo`, `vehicle_model_id`, `created_at`, `updated_at`) VALUES (1, 'Vehiculo 1', '001', NULL, 1, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (2, 'Vehiculo 2', '002', NULL, 2, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (3, 'Vehiculo 3', '003', NULL, 3, '2021-02-17 00:21:46', '2021-02-17 
00:21:46'), (4, 'Vehiculo 4', '004', NULL, 4, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (5, 'Vehiculo 5', '005', NULL, 5, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (6, 'Vehiculo 6', '006', NULL, 6, '2021-02-17 00:21:46', '2021-02-17 00:21:46'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `vehicle_models` -- CREATE TABLE `vehicle_models` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `code` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `vehicle_type_id` bigint(20) UNSIGNED NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `vehicle_models` -- INSERT INTO `vehicle_models` (`id`, `name`, `code`, `vehicle_type_id`, `created_at`, `updated_at`) VALUES (1, 'Modelo 1', '001', 1, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (2, 'Modelo 2', '002', 1, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (3, 'Modelo 3', '003', 1, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (4, 'Modelo 4', '004', 2, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (5, 'Modelo 5', '005', 3, '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (6, 'Modelo 6', '006', 3, '2021-02-17 00:21:46', '2021-02-17 00:21:46'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `vehicle_types` -- CREATE TABLE `vehicle_types` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `vehicle_types` -- INSERT INTO `vehicle_types` (`id`, `name`, `created_at`, `updated_at`) VALUES (1, 'Vagoneta', '2021-02-17 00:21:46', '2021-02-17 00:21:46'), (2, 'Jeep', '2021-02-17 00:21:46', '2021-02-17 
00:21:46'), (3, 'Camioneta', '2021-02-17 00:21:46', '2021-02-17 00:21:46'); -- -------------------------------------------------------- -- -- Estructura de tabla para la tabla `years` -- CREATE TABLE `years` ( `id` bigint(20) UNSIGNED NOT NULL, `name` varchar(191) COLLATE utf8mb4_unicode_ci NOT NULL, `created_at` timestamp NULL DEFAULT NULL, `updated_at` timestamp NULL DEFAULT NULL ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci; -- -- Volcado de datos para la tabla `years` -- INSERT INTO `years` (`id`, `name`, `created_at`, `updated_at`) VALUES (1, '2016', '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (2, '2017', '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (3, '2018', '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (4, '2019', '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (5, '2020', '2021-02-17 00:21:47', '2021-02-17 00:21:47'), (6, '2021', '2021-02-17 00:21:47', '2021-02-17 00:21:47'); -- -- Índices para tablas volcadas -- -- -- Indices de la tabla `branch_offices` -- ALTER TABLE `branch_offices` ADD PRIMARY KEY (`id`), ADD KEY `branch_offices_state_id_foreign` (`state_id`); -- -- Indices de la tabla `data_rows` -- ALTER TABLE `data_rows` ADD PRIMARY KEY (`id`), ADD KEY `data_rows_data_type_id_foreign` (`data_type_id`); -- -- Indices de la tabla `data_types` -- ALTER TABLE `data_types` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `data_types_name_unique` (`name`), ADD UNIQUE KEY `data_types_slug_unique` (`slug`); -- -- Indices de la tabla `drivers` -- ALTER TABLE `drivers` ADD PRIMARY KEY (`id`); -- -- Indices de la tabla `menus` -- ALTER TABLE `menus` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `menus_name_unique` (`name`); -- -- Indices de la tabla `menu_items` -- ALTER TABLE `menu_items` ADD PRIMARY KEY (`id`), ADD KEY `menu_items_menu_id_foreign` (`menu_id`); -- -- Indices de la tabla `migrations` -- ALTER TABLE `migrations` ADD PRIMARY KEY (`id`); -- -- Indices de la tabla `password_resets` -- ALTER TABLE `password_resets` ADD KEY 
`password_resets_email_index` (`email`); -- -- Indices de la tabla `permissions` -- ALTER TABLE `permissions` ADD PRIMARY KEY (`id`), ADD KEY `permissions_key_index` (`key`); -- -- Indices de la tabla `permission_role` -- ALTER TABLE `permission_role` ADD PRIMARY KEY (`permission_id`,`role_id`), ADD KEY `permission_role_permission_id_index` (`permission_id`), ADD KEY `permission_role_role_id_index` (`role_id`); -- -- Indices de la tabla `policies` -- ALTER TABLE `policies` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `policy_number` (`policy_number`), ADD KEY `policies_vehicle_id_foreign` (`vehicle_id`), ADD KEY `policies_driver_id_foreign` (`driver_id`), ADD KEY `policies_branch_office_id_foreign` (`branch_office_id`); -- -- Indices de la tabla `roles` -- ALTER TABLE `roles` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `roles_name_unique` (`name`); -- -- Indices de la tabla `settings` -- ALTER TABLE `settings` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `settings_key_unique` (`key`); -- -- Indices de la tabla `states` -- ALTER TABLE `states` ADD PRIMARY KEY (`id`); -- -- Indices de la tabla `state_vehicle_types` -- ALTER TABLE `state_vehicle_types` ADD PRIMARY KEY (`id`), ADD KEY `state_vehicle_types_vehicle_type_id_foreign` (`vehicle_type_id`), ADD KEY `state_vehicle_types_state_id_foreign` (`state_id`); -- -- Indices de la tabla `translations` -- ALTER TABLE `translations` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `translations_table_name_column_name_foreign_key_locale_unique` (`table_name`,`column_name`,`foreign_key`,`locale`); -- -- Indices de la tabla `users` -- ALTER TABLE `users` ADD PRIMARY KEY (`id`), ADD UNIQUE KEY `users_email_unique` (`email`), ADD KEY `users_role_id_foreign` (`role_id`); -- -- Indices de la tabla `user_roles` -- ALTER TABLE `user_roles` ADD PRIMARY KEY (`user_id`,`role_id`), ADD KEY `user_roles_user_id_index` (`user_id`), ADD KEY `user_roles_role_id_index` (`role_id`); -- -- Indices de la tabla `vehicles` -- ALTER TABLE `vehicles` ADD PRIMARY KEY (`id`), 
ADD KEY `vehicles_vehicle_make_id_foreign` (`vehicle_make_id`), ADD KEY `vehicles_year_id_foreign` (`year_id`); -- -- Indices de la tabla `vehicle_makes` -- ALTER TABLE `vehicle_makes` ADD PRIMARY KEY (`id`), ADD KEY `vehicle_makes_vehicle_model_id_foreign` (`vehicle_model_id`); -- -- Indices de la tabla `vehicle_models` -- ALTER TABLE `vehicle_models` ADD PRIMARY KEY (`id`), ADD KEY `vehicle_models_vehicle_type_id_foreign` (`vehicle_type_id`); -- -- Indices de la tabla `vehicle_types` -- ALTER TABLE `vehicle_types` ADD PRIMARY KEY (`id`); -- -- Indices de la tabla `years` -- ALTER TABLE `years` ADD PRIMARY KEY (`id`); -- -- AUTO_INCREMENT de las tablas volcadas -- -- -- AUTO_INCREMENT de la tabla `branch_offices` -- ALTER TABLE `branch_offices` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7; -- -- AUTO_INCREMENT de la tabla `data_rows` -- ALTER TABLE `data_rows` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=107; -- -- AUTO_INCREMENT de la tabla `data_types` -- ALTER TABLE `data_types` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=16; -- -- AUTO_INCREMENT de la tabla `drivers` -- ALTER TABLE `drivers` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=13; -- -- AUTO_INCREMENT de la tabla `menus` -- ALTER TABLE `menus` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2; -- -- AUTO_INCREMENT de la tabla `menu_items` -- ALTER TABLE `menu_items` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=25; -- -- AUTO_INCREMENT de la tabla `migrations` -- ALTER TABLE `migrations` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=33; -- -- AUTO_INCREMENT de la tabla `permissions` -- ALTER TABLE `permissions` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=84; -- -- AUTO_INCREMENT de la tabla `policies` -- ALTER TABLE `policies` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=20; -- 
-- AUTO_INCREMENT de la tabla `roles` -- ALTER TABLE `roles` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4; -- -- AUTO_INCREMENT de la tabla `settings` -- ALTER TABLE `settings` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=11; -- -- AUTO_INCREMENT de la tabla `states` -- ALTER TABLE `states` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4; -- -- AUTO_INCREMENT de la tabla `state_vehicle_types` -- ALTER TABLE `state_vehicle_types` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=10; -- -- AUTO_INCREMENT de la tabla `translations` -- ALTER TABLE `translations` MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT; -- -- AUTO_INCREMENT de la tabla `users` -- ALTER TABLE `users` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3; -- -- AUTO_INCREMENT de la tabla `vehicles` -- ALTER TABLE `vehicles` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=13; -- -- AUTO_INCREMENT de la tabla `vehicle_makes` -- ALTER TABLE `vehicle_makes` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7; -- -- AUTO_INCREMENT de la tabla `vehicle_models` -- ALTER TABLE `vehicle_models` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7; -- -- AUTO_INCREMENT de la tabla `vehicle_types` -- ALTER TABLE `vehicle_types` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4; -- -- AUTO_INCREMENT de la tabla `years` -- ALTER TABLE `years` MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7; -- -- Restricciones para tablas volcadas -- -- -- Filtros para la tabla `branch_offices` -- ALTER TABLE `branch_offices` ADD CONSTRAINT `branch_offices_state_id_foreign` FOREIGN KEY (`state_id`) REFERENCES `states` (`id`); -- -- Filtros para la tabla `data_rows` -- ALTER TABLE `data_rows` ADD CONSTRAINT `data_rows_data_type_id_foreign` FOREIGN KEY (`data_type_id`) REFERENCES 
`data_types` (`id`) ON DELETE CASCADE ON UPDATE CASCADE; -- -- Filtros para la tabla `menu_items` -- ALTER TABLE `menu_items` ADD CONSTRAINT `menu_items_menu_id_foreign` FOREIGN KEY (`menu_id`) REFERENCES `menus` (`id`) ON DELETE CASCADE; -- -- Filtros para la tabla `permission_role` -- ALTER TABLE `permission_role` ADD CONSTRAINT `permission_role_permission_id_foreign` FOREIGN KEY (`permission_id`) REFERENCES `permissions` (`id`) ON DELETE CASCADE, ADD CONSTRAINT `permission_role_role_id_foreign` FOREIGN KEY (`role_id`) REFERENCES `roles` (`id`) ON DELETE CASCADE; -- -- Filtros para la tabla `policies` -- ALTER TABLE `policies` ADD CONSTRAINT `policies_branch_office_id_foreign` FOREIGN KEY (`branch_office_id`) REFERENCES `branch_offices` (`id`), ADD CONSTRAINT `policies_driver_id_foreign` FOREIGN KEY (`driver_id`) REFERENCES `drivers` (`id`), ADD CONSTRAINT `policies_vehicle_id_foreign` FOREIGN KEY (`vehicle_id`) REFERENCES `vehicles` (`id`); -- -- Filtros para la tabla `state_vehicle_types` -- ALTER TABLE `state_vehicle_types` ADD CONSTRAINT `state_vehicle_types_state_id_foreign` FOREIGN KEY (`state_id`) REFERENCES `states` (`id`), ADD CONSTRAINT `state_vehicle_types_vehicle_type_id_foreign` FOREIGN KEY (`vehicle_type_id`) REFERENCES `vehicle_types` (`id`); -- -- Filtros para la tabla `users` -- ALTER TABLE `users` ADD CONSTRAINT `users_role_id_foreign` FOREIGN KEY (`role_id`) REFERENCES `roles` (`id`); -- -- Filtros para la tabla `user_roles` -- ALTER TABLE `user_roles` ADD CONSTRAINT `user_roles_role_id_foreign` FOREIGN KEY (`role_id`) REFERENCES `roles` (`id`) ON DELETE CASCADE, ADD CONSTRAINT `user_roles_user_id_foreign` FOREIGN KEY (`user_id`) REFERENCES `users` (`id`) ON DELETE CASCADE; -- -- Filtros para la tabla `vehicles` -- ALTER TABLE `vehicles` ADD CONSTRAINT `vehicles_vehicle_make_id_foreign` FOREIGN KEY (`vehicle_make_id`) REFERENCES `vehicle_makes` (`id`), ADD CONSTRAINT `vehicles_year_id_foreign` FOREIGN KEY (`year_id`) REFERENCES `years` (`id`); 
-- -- Filtros para la tabla `vehicle_makes` -- ALTER TABLE `vehicle_makes` ADD CONSTRAINT `vehicle_makes_vehicle_model_id_foreign` FOREIGN KEY (`vehicle_model_id`) REFERENCES `vehicle_models` (`id`); -- -- Filtros para la tabla `vehicle_models` -- ALTER TABLE `vehicle_models` ADD CONSTRAINT `vehicle_models_vehicle_type_id_foreign` FOREIGN KEY (`vehicle_type_id`) REFERENCES `vehicle_types` (`id`); COMMIT; /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- <gh_stars>0  (scraper metadata left over from dataset extraction; converted
-- to a comment so the file parses as SQL)

/*
 * CREATE TABLE scraped_data
 *
 * One row per scraped catalogue item. `codigo_item` is the natural key
 * (UNIQUE); `id` is the surrogate auto-increment primary key.
 */
CREATE TABLE `scraped_data` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `codigo_item` varchar(30) DEFAULT NULL,    -- external item code; nullable, but unique when present
  `nombre` varchar(255) DEFAULT NULL,        -- item name
  `descripcion` varchar(2000) DEFAULT NULL,  -- item description
  `imagen` varchar(100) DEFAULT NULL,        -- image path/filename
  `precio` decimal(8,2) DEFAULT NULL,        -- price; DECIMAL (not FLOAT) keeps monetary values exact
  PRIMARY KEY (`id`),
  UNIQUE KEY `codigo_item_UNIQUE` (`codigo_item`)
) ENGINE=InnoDB AUTO_INCREMENT=6535 DEFAULT CHARSET=latin1;

/*
 * CREATE TABLE proxy_server
 *
 * Pool of HTTP proxies used by the scraper; username/userpwd are NULL for
 * proxies that require no authentication.
 */
CREATE TABLE `proxy_server` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `ip` varchar(20) NOT NULL,
  `port` int(11) NOT NULL,
  `username` varchar(50) DEFAULT NULL,
  `userpwd` varchar(50) DEFAULT NULL,
  `enabled` varchar(45) NOT NULL DEFAULT '1',  -- NOTE(review): string flag; TINYINT(1) would be more natural -- confirm callers before changing
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
-- source: PaulACoroneos/CS50x  (stray <reponame> marker converted to a
-- comment so the file parses as SQL)

-- List everyone who starred in a movie together with the target actor
-- (the actor's name is redacted as <NAME> in this dump), excluding the
-- actor themself.
--
-- Fix: string literals now use standard single quotes. Double quotes are the
-- ANSI identifier quote; SQLite only falls back to treating "..." as a string
-- when no matching column/identifier exists, which is fragile and non-portable.
SELECT name
FROM people
JOIN stars ON people.id = stars.person_id
WHERE stars.movie_id IN (
    -- all movies in which the target actor (born 1958) appeared
    SELECT movies.id
    FROM movies
    JOIN stars ON movies.id = stars.movie_id
    JOIN people ON people.id = stars.person_id
    WHERE name = '<NAME>'
      AND birth = '1958'  -- NOTE(review): compared as a string; if `birth` is numeric, unquoted 1958 is cleaner -- confirm schema
)
AND NOT name = '<NAME>';  -- exclude the target actor from the co-star list
# <gh_stars>0  (scraper metadata left over from dataset extraction; kept as a
# comment line so Play Evolutions can parse the script)

# --- Created by Ebean DDL
# To stop Ebean DDL generation, remove this comment and start using Evolutions

# --- !Ups

# image_entry stores one uploaded image blob per row; ids are allocated from
# image_entry_seq by the application layer (Ebean), not by the database.
create table image_entry (
  id bigint not null,
  image blob,
  constraint pk_image_entry primary key (id)
);

create sequence image_entry_seq;

# --- !Downs

# Reverse of !Ups: drop the table and its id sequence.
drop table if exists image_entry;

drop sequence if exists image_entry_seq;
-- phpMyAdmin SQL Dump
-- version 4.6.6deb5
-- https://www.phpmyadmin.net/
--
-- Host: localhost:3306
-- Generation Time: Oct 30, 2019 at 08:58 AM
-- Server version: 5.7.27-0ubuntu0.18.04.1
-- PHP Version: 7.3.10-1+ubuntu18.04.1+deb.sury.org+1

SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;

--
-- Database: `stock_db`
--
-- Dump of a Laravel-based stock application: migration bookkeeping, password
-- reset tokens, roles, and user accounts (passwords are bcrypt hashes;
-- <EMAIL>/<NAME>/<PASSWORD> are redaction placeholders left by the dump tool).

-- --------------------------------------------------------

--
-- Table structure for table `migrations`
--
-- Laravel's migration bookkeeping table.

CREATE TABLE `migrations` (
  `id` int(10) UNSIGNED NOT NULL,
  `migration` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `batch` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

--
-- Dumping data for table `migrations`
--

INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES
(1, '2014_10_12_000000_create_users_table', 1),
(2, '2014_10_12_100000_create_password_resets_table', 1);

-- --------------------------------------------------------

--
-- Table structure for table `password_resets`
--
-- Laravel password-reset tokens; no primary key (Laravel default), only an
-- index on email (added in the Indexes section below).

CREATE TABLE `password_resets` (
  `email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `token` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `created_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

-- --------------------------------------------------------

--
-- Table structure for table `roles`
--
-- NOTE(review): `create_at` (sic) auto-updates on every row change because of
-- ON UPDATE CURRENT_TIMESTAMP, so it behaves like an "updated" timestamp.

CREATE TABLE `roles` (
  `id` int(11) NOT NULL,
  `name` varchar(70) COLLATE utf8mb4_unicode_ci NOT NULL,
  `active` tinyint(4) NOT NULL DEFAULT '1',
  `create_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

--
-- Dumping data for table `roles`
--
-- NOTE(review): id 4 is missing (deleted row) and 'Bookeeper' is a typo for
-- 'Bookkeeper' in the live data; left as-is since this is a faithful dump.

INSERT INTO `roles` (`id`, `name`, `active`, `create_at`) VALUES
(1, 'root level', 1, '2019-10-18 16:30:11'),
(2, 'administrator', 1, '2019-10-18 16:30:11'),
(3, 'Receptionist', 1, '2019-10-23 16:40:00'),
(5, 'Bookeeper', 1, '2019-10-23 16:41:52');

-- --------------------------------------------------------

--
-- Table structure for table `users`
--
-- `role_id` references roles.id (no FK constraint declared in this dump);
-- `active` = 0 marks a disabled account.

CREATE TABLE `users` (
  `id` bigint(20) UNSIGNED NOT NULL,
  `name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `username` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
  `email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `email_verified_at` timestamp NULL DEFAULT NULL,
  `password` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
  `photo` varchar(500) COLLATE utf8mb4_unicode_ci NOT NULL DEFAULT 'default.png',
  `role_id` int(11) NOT NULL,
  `remember_token` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
  `created_at` timestamp NULL DEFAULT NULL,
  `updated_at` timestamp NULL DEFAULT NULL,
  `active` tinyint(4) NOT NULL DEFAULT '1'
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;

--
-- Dumping data for table `users`
--

INSERT INTO `users` (`id`, `name`, `username`, `email`, `email_verified_at`, `password`, `photo`, `role_id`, `remember_token`, `created_at`, `updated_at`, `active`) VALUES
(1, 'administrator', 'admin', '<EMAIL>', NULL, '$2y$10$cNJjIJDsIvXfxlrHOY2GfecwigRxqB/Br1sOVJj0VknEOJTs046ZO', 'images/images/user.png', 2, NULL, NULL, NULL, 1),
(2, 'buntheoun sok', 'buntheoun', '<EMAIL>', NULL, '$2y$10$cNJjIJDsIvXfxlrHOY2GfecwigRxqB/Br1sOVJj0VknEOJTs046ZO', 'default.png', 1, NULL, NULL, NULL, 1),
(3, 'dara', 'dara1', '<EMAIL>', NULL, '$2y$10$CRh5zR3MzunmomfNzkd9P.OZ5nwAfuQWnPiMT4jwoVhKrdI85Plk2', 'uploads/users/mFi7X4QMySE6zni5CwZRgihLWqUQBMkAcfhANn4F.png', 1, NULL, NULL, NULL, 0),
(4, 'sok', '<PASSWORD>', '<EMAIL>', NULL, '$2y$10$YN5XmhJGw/7Av2e829igBux3hyPKhfagzmZRgXcEwdVtKHteGab/u', 'uploads/users/5QfapvWqyg2NNYe8mIDV77X4erpekoMqnIsuhtjN.png', 1, NULL, NULL, NULL, 0),
(5, '<NAME>', 'kheng12', '<EMAIL>', NULL, '$2y$10$P/9fLqbywx8k0bAznX9Ipuxo7Rd22t6X0pPAKVdCZRck/aJP2NXam', 'uploads/users/64yY6HPnWrmLkr0LwYfqnuaxOS50Kv8wjt8bv7I4.png', 1, NULL, NULL, NULL, 1),
(6, '<NAME>', 'neng123', '<EMAIL>', NULL, '$2y$10$qr7mwTw5KMoX3fiv2OEGqOcN4kWNE34PsjhmSpLDOM8sN0s8JdO5S', 'uploads/users/2N3kRD1JccGXJW56cpiH8G7kgPnLR7he93lPhJIN.png', 1, NULL, NULL, NULL, 1),
(7, '<NAME>', 'mora', '<EMAIL>', NULL, '$2y$10$PEZY0x5cqw0QvISKWhjlt.dfjdOSKTJ6DICUGbaYM19jjxjrpotMy', 'uploads/users/MipSjGoTbAbeGlXFxeJvIk7lGw1GojIJjRqIpw1l.jpeg', 1, NULL, NULL, NULL, 1),
(8, '<NAME>', 'dengM12', '<EMAIL>', NULL, '$2y$10$TEWjDd/gK0GXPDYkz69nNefcRJojnRdtPhSoSYpq7j1qWdN9DNv.q', 'uploads/users/zamxobcEiEIRuUItB4pXy4DwQaYZi1ouIxlwTSYu.png', 1, NULL, NULL, NULL, 1),
(9, 'sokbuntheoun vai', 'ffffffff', '<EMAIL>', NULL, '$2y$10$AR5AMvVBpRE2zmszKSfsguItNakp1mDU9/KogqHhaPMzD0epqXtyK', 'uploads/users/ZjtZpF5de5zo9zfcmZTr4OepUzIpsHRVExR4Y7pQ.jpeg', 2, NULL, NULL, NULL, 0),
(11, 'sokbuntheoun vai', 'admin123', '<EMAIL>', NULL, '$2y$10$KvtMTzv36MA4VXK46RNsFuzRgo3fvPW9L78IbqEleK23IB5ZTRQhC', 'uploads/users/ZOxE8ittfTb877iM5EvhIjsJlLDuM7bLGCFw1UTE.jpeg', 2, NULL, NULL, NULL, 1);

--
-- Indexes for dumped tables
--

--
-- Indexes for table `migrations`
--
ALTER TABLE `migrations`
  ADD PRIMARY KEY (`id`);

--
-- Indexes for table `password_resets`
--
ALTER TABLE `password_resets`
  ADD KEY `password_resets_email_index` (`email`);

--
-- Indexes for table `roles`
--
ALTER TABLE `roles`
  ADD PRIMARY KEY (`id`),
  ADD UNIQUE KEY `name` (`name`);

--
-- Indexes for table `users`
--
ALTER TABLE `users`
  ADD PRIMARY KEY (`id`),
  ADD UNIQUE KEY `users_email_unique` (`email`),
  ADD UNIQUE KEY `username` (`username`);

--
-- AUTO_INCREMENT for dumped tables
--

--
-- AUTO_INCREMENT for table `migrations`
--
ALTER TABLE `migrations`
  MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;

--
-- AUTO_INCREMENT for table `roles`
--
ALTER TABLE `roles`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;

--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
  MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=12;

/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
-- Adminer 4.7.0 MySQL dump
--
-- Dump of an Indonesian flower-shop web app: order confirmations, shipping
-- cities, user levels, orders, products, users, and an income-report view.
-- NOTE(review): user passwords are stored in plain text, and all tables are
-- MyISAM (no FK enforcement) -- flag for the application owner.

SET NAMES utf8;
SET time_zone = '+00:00';
SET foreign_key_checks = 0;
SET sql_mode = 'NO_AUTO_VALUE_ON_ZERO';

-- Payment confirmations uploaded by buyers for an order (id_pesan).
DROP TABLE IF EXISTS `konfirmasi_pesanan`;
CREATE TABLE `konfirmasi_pesanan` (
  `id_konfirmasi` int(11) NOT NULL AUTO_INCREMENT,
  `id_pesan` int(11) NOT NULL,
  `nm_pembayar` varchar(50) NOT NULL,
  `no_rekening` varchar(30) NOT NULL,
  `gambar_bukti` text NOT NULL,
  `status` enum('Diproses','Diterima','Ditolak') DEFAULT 'Diproses',
  PRIMARY KEY (`id_konfirmasi`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;

-- Destination cities with flat shipping tariff (tarif, in rupiah).
DROP TABLE IF EXISTS `kota`;
CREATE TABLE `kota` (
  `id_kota` int(15) NOT NULL AUTO_INCREMENT,
  `nm_kota` varchar(25) NOT NULL,
  `tarif` int(15) NOT NULL,
  PRIMARY KEY (`id_kota`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;

INSERT INTO `kota` (`id_kota`, `nm_kota`, `tarif`) VALUES
(1, 'Padang', 15000),
(2, 'Bukittinggi', 25000);

-- Adminer emits a placeholder TABLE with the view's column signature here;
-- the real VIEW replaces it at the end of the dump (dependency ordering).
DROP VIEW IF EXISTS `laporan_pemasukan`;
CREATE TABLE `laporan_pemasukan` (`tgl_pesan` timestamp, `nm_produk` varchar(50), `harga` int(11));

-- User access levels (1 = Admin, 2 = Pembeli/buyer).
DROP TABLE IF EXISTS `level_user`;
CREATE TABLE `level_user` (
  `id_level` int(11) NOT NULL AUTO_INCREMENT,
  `nm_level` varchar(30) NOT NULL,
  PRIMARY KEY (`id_level`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;

INSERT INTO `level_user` (`id_level`, `nm_level`) VALUES
(1, 'Admin'),
(2, 'Pembeli');

-- Orders: one product per order row (id_produk), buyer (id_user), shipping
-- city (id_kota) and quantity (jumlah).
DROP TABLE IF EXISTS `pesanan`;
CREATE TABLE `pesanan` (
  `id_pesan` int(11) NOT NULL AUTO_INCREMENT,
  `id_produk` int(11) NOT NULL,
  `id_user` int(11) NOT NULL,
  `tgl_pesan` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `status` enum('Belum Selesai','Sudah Selesai') NOT NULL DEFAULT 'Belum Selesai',
  `alamat` varchar(200) NOT NULL,
  `nomor_hp` varchar(20) NOT NULL,
  `id_kota` int(11) NOT NULL,
  `jumlah` int(5) NOT NULL,
  PRIMARY KEY (`id_pesan`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;

INSERT INTO `pesanan` (`id_pesan`, `id_produk`, `id_user`, `tgl_pesan`, `status`, `alamat`, `nomor_hp`, `id_kota`, `jumlah`) VALUES
(31, 5, 9, '2018-05-24 09:17:09', 'Sudah Selesai', 'Padang', '081345890990', 1, 2);

-- Product catalogue ('<NAME>' entries are redaction placeholders left by
-- the dump tool).
DROP TABLE IF EXISTS `produk`;
CREATE TABLE `produk` (
  `id_produk` int(11) NOT NULL AUTO_INCREMENT,
  `nm_produk` varchar(50) NOT NULL,
  `deskripsi` varchar(200) NOT NULL,
  `harga` int(11) NOT NULL,
  `gambar` text NOT NULL,
  `stok` int(5) NOT NULL,
  PRIMARY KEY (`id_produk`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;

INSERT INTO `produk` (`id_produk`, `nm_produk`, `deskripsi`, `harga`, `gambar`, `stok`) VALUES
(12, '<NAME>', 'tinggi 80 cm', 250000, '9.jpg', 2),
(5, 'Bunga Ester', 'merah', 75000, '3.jpg', 35),
(6, 'Bambu Kuning', 'Bambu tinggi 2,5 m', 350000, '26.jpg', 3),
(7, '<NAME>', 'merah', 80000, '21.jpg', 8),
(8, '<NAME>', 'putih', 70000, '11.jpg', 30),
(9, 'bunga taman', 'besar', 500000, '12.jpg', 6),
(10, '<NAME>', 'Kuning', 75000, '1.jpg', 30),
(11, '<NAME>', 'baik', 300000, '25.jpg', 10),
(14, 'bunga kertas', 'baik-baik saja', 50000, '4.jpg', 15),
(17, '<NAME>', 'baik-baik saja', 150000, '27.jpg', 20);

-- Application users; id_level references level_user.id_level.
DROP TABLE IF EXISTS `user`;
CREATE TABLE `user` (
  `id_user` int(11) NOT NULL AUTO_INCREMENT,
  `username` varchar(50) NOT NULL,
  `pass` varchar(30) NOT NULL,
  `nm_lengkap` varchar(50) NOT NULL,
  `id_level` int(11) NOT NULL,
  `email` varchar(50) NOT NULL,
  PRIMARY KEY (`id_user`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;

INSERT INTO `user` (`id_user`, `username`, `pass`, `nm_lengkap`, `id_level`, `email`) VALUES
(9, 'admin', 'admin', '', 1, ''),
(23, 'Udin', '1234', 'Si udin', 2, ''),
(20, 'afdhal', '1234', '', 1, ''),
(27, 'jarwo', '1234', '<NAME>', 2, '<EMAIL>');

-- Income report view: one row per completed ('Sudah Selesai') order with the
-- product name and its unit price. Replaces the placeholder table above.
DROP TABLE IF EXISTS `laporan_pemasukan`;
CREATE ALGORITHM=UNDEFINED SQL SECURITY DEFINER VIEW `laporan_pemasukan` AS select `a`.`tgl_pesan` AS `tgl_pesan`,`b`.`nm_produk` AS `nm_produk`,`b`.`harga` AS `harga` from (`pesanan` `a` join `produk` `b` on((`a`.`id_produk` = `b`.`id_produk`))) where (`a`.`status` = 'Sudah Selesai');

-- 2019-01-07 14:21:08
-- v_rekap_asatid: per-teacher (asatid) recap of teaching-journal entries
-- (t_jurnal) joined to their teaching-session rows (t_kbm).
--
-- t_jurnal.tgl stores the date as a pre-formatted string (day name, comma,
-- then "DD Month YYYY"); the SUBSTRING_INDEX calls slice that string into
-- separate day-name / day / month / year columns.
--
-- NOTE(review): the SELECT list contains columns (id_asatid, id_jurnal,
-- materi, id_kelas, ...) that are neither aggregated nor listed in GROUP BY.
-- This only executes with MySQL's ONLY_FULL_GROUP_BY disabled and returns an
-- arbitrary row per (tgl, id_mapel, jamke) group -- confirm that is intended.
CREATE VIEW `v_rekap_asatid` AS
SELECT
  `k`.`id_asatid` AS `id_asatid`,
  `j`.`id_jurnal` AS `id_jurnal`,
  `j`.`kbm_id` AS `kbm_id`,
  `j`.`tgl` AS `waktu`,
  -- day name: everything before the first comma
  SUBSTRING_INDEX(`j`.`tgl`,',',1) AS `hari`,
  -- 2nd, 3rd and 4th space-separated words: day-of-month, month, year
  SUBSTRING_INDEX(SUBSTRING_INDEX(`j`.`tgl`,' ',2),' ',-1) AS `tgl`,
  SUBSTRING_INDEX(SUBSTRING_INDEX(`j`.`tgl`,' ',3),' ',-1) AS `bulan`,
  SUBSTRING_INDEX(SUBSTRING_INDEX(`j`.`tgl`,' ',4),' ',-1) AS `tahun`,
  `j`.`materi` AS `materi`,
  `k`.`id_kelas` AS `id_kelas`,
  `k`.`jamke` AS `jamke`,
  `k`.`id_mapel` AS `id_mapel`
FROM (`t_jurnal` `j` JOIN `t_kbm` `k` ON (`j`.`kbm_id` = `k`.`id_kbm`))
GROUP BY `j`.`tgl`,`k`.`id_mapel`,`k`.`jamke`
ORDER BY `k`.`id_asatid`,SUBSTRING_INDEX(SUBSTRING_INDEX(`j`.`tgl`,' ',2),' ', - 1)
-- Purchase flow (T-SQL): user 'Alex' buys a fixed basket of six items in the
-- game 'Edinburgh' inside one transaction, then the final SELECT lists every
-- item he owns in that game.

-- Resolve the ids involved and the basket's total price up front.
-- NOTE(review): none of these lookups are checked for NULL; a missing
-- user/game would surface later as a failed insert/update -- confirm intent.
DECLARE @UserId INT = (SELECT Id FROM Users WHERE Username = 'Alex')
DECLARE @GameId INT = (SELECT Id FROM Games WHERE [Name] = 'Edinburgh')
DECLARE @UserGameId INT = (SELECT Id FROM UsersGames WHERE GameId = @GameId AND UserId = @UserId)
DECLARE @ItemsTotalCost DECIMAL(15, 4) = (SELECT SUM(Price) FROM Items WHERE [Name] IN ('Blackguard', 'Bottomless Potion of Amplification', 'Eye of Etlich (Diablo III)', 'Gem of Efficacious Toxin', 'Golden Gorget of Leoric', 'Hellfire Amulet'))

BEGIN
BEGIN TRANSACTION

-- Step 1: grant the basket items; on failure roll back and abort the batch.
BEGIN TRY
INSERT INTO UserGameItems
SELECT Id, @UserGameId FROM Items
WHERE [Name] IN ('Blackguard', 'Bottomless Potion of Amplification', 'Eye of Etlich (Diablo III)', 'Gem of Efficacious Toxin', 'Golden Gorget of Leoric', 'Hellfire Amulet')
END TRY
BEGIN CATCH
ROLLBACK
RETURN
END CATCH

-- Step 2: charge the user's in-game cash (T-SQL compound assignment);
-- any error undoes Step 1 as well, keeping items and cash consistent.
BEGIN TRY
UPDATE UsersGames
SET Cash -= @ItemsTotalCost
WHERE Id = @UserGameId
END TRY
BEGIN CATCH
ROLLBACK
RETURN
END CATCH

COMMIT
END

-- Final report: all items the user now owns in this game, ordered by item name.
SELECT u.Username, g.[Name], ug.Cash, i.[Name] AS [Item Name]
FROM UsersGames AS ug
JOIN Users AS u ON u.Id = ug.UserId
JOIN Games AS g ON g.Id = ug.GameId
JOIN UserGameItems AS ugi ON ugi.UserGameId = ug.Id
JOIN Items AS i ON i.Id = ugi.ItemId
WHERE GameId = @GameId
ORDER BY i.[Name]
-- `music`: catalogue of playable tracks for a web music player.
-- `artwork` and `source` hold absolute URLs to the cover art and the MP3.
CREATE TABLE IF NOT EXISTS `music` (
  `id` int(10) NOT NULL AUTO_INCREMENT,
  `song` varchar(50) NOT NULL,
  `artist` varchar(50) NOT NULL,
  `album` varchar(100) NOT NULL,
  `genre` varchar(50) NOT NULL,
  `artwork` varchar(255) NOT NULL,
  `source` varchar(255) NOT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1 AUTO_INCREMENT=52 ;

-- Seed rows. NOTE(review): '<NAME>' is a redaction placeholder left by the
-- dump tool, and the puu.sh-hosted MP3 URLs may be dead links -- verify.
INSERT INTO `music` (`id`, `song`, `artist`, `album`, `genre`, `artwork`, `source`) VALUES
(1, '50 Shades of Pop', 'DJ Earworm', 'Poppin Sounds', 'Pop', 'https://i1.sndcdn.com/artworks-000138397627-vej5ik-t500x500.jpg', 'http://puu.sh/lYhyJ/82683bb254.mp3'),
(2, 'Rustic Vinyl', 'Jukedeck', 'Techno Tunes', 'Techno', 'https://index.tnwcdn.com/images/10187d8c89fad233d81d4fe5655ed9af2f194005.png', 'http://puu.sh/lY9L6/a37c40867f.mp3'),
(3, '<NAME>', 'Starchild', 'World Wonders', 'World', 'https://i1.sndcdn.com/artworks-000009510124-98eee7-t500x500.jpg', 'http://puu.sh/lYhwA/01b4f293d1.mp3'),
(4, 'Miniature Vortex', 'Jukedeck', 'Techno Tunes', 'Techno', 'https://index.tnwcdn.com/images/10187d8c89fad233d81d4fe5655ed9af2f194005.png', 'https://puu.sh/lY9L6/a37c40867f.mp3'),
(5, 'Miniscule Brew', 'Jukedeck', 'Techno Tunes', 'Techno', 'https://index.tnwcdn.com/images/10187d8c89fad233d81d4fe5655ed9af2f194005.png', 'https://puu.sh/lY9KA/ae53f567e4.mp3');
# source: cerad/cerad  (stray <reponame> marker converted to a comment so the
# file parses as SQL; '#' line comments are MySQL-specific)
# Ad-hoc reporting queries for the AYSO S5 Games 2013 project.

# List of emails for registered people
select distinct email
from person
left join person_plan on person_plan.person_id = person.id
where person_plan.project_key = 'AYSOS5Games2013';

# Distinct list of physical teams
select distinct level.age, level.sex, game_team.name
from game
left join project on project.id = game.project_id
left join game_team on game_team.game_id = game.id
left join level on level.id = game_team.level_id
where project.hash = 'AYSOS5Games2013' and game.pool = 'PP'
order by level.age, level.sex, game_team.name
;

# Slots assigned so far
select count(*)
from game_person
left join game on game.id = game_person.game_id
left join project on project.id = game.project_id
where project.hash = 'AYSOS5Games2013' and game_person.person_id IS NOT NULL
;

# Total slots
select count(*)
from game_person
left join game on game.id = game_person.game_id
left join project on project.id = game.project_id
where project.hash = 'AYSOS5Games2013'
;

# Referees for a given game
select game.id,game.num,game.dt_beg,game_person.role,game_person.name,person.email
from game_person
left join game on game.id = game_person.game_id
left join project on project.id = game.project_id
left join person on person.id = game_person.person_id
where project.hash = 'AYSOS5Games2013' and game.num = 224
;

# delete a game
# NOTE(review): the statement below has no WHERE clause and no terminating
# semicolon -- as written it would delete EVERY row in game_person. It appears
# truncated in the original file; do NOT run it as-is.
delete from game_person
-- source: h-ssiqueira/HackerRank-problems  (stray <reponame> marker converted
-- to a comment so the file parses as SQL)

-- Manhattan distance between the points (min LAT_N, min LONG_W) and
-- (max LAT_N, max LONG_W) in STATION, rounded to 4 decimal places.
-- ABS() is kept for clarity even though MAX(x) - MIN(x) is never negative.
SELECT ROUND(ABS(MAX(LAT_N) - MIN(LAT_N)) + ABS(MAX(LONG_W) - MIN(LONG_W)), 4)
FROM STATION;
-- User/authority schema in the style of Spring Security's default JDBC
-- UserDetailsService, adapted for MySQL (TINYINT enabled flag).

-- One row per login account. `password` is expected to hold an encoded hash
-- (e.g. bcrypt, ~60 chars), hence VARCHAR(100) -- NOTE(review): confirm the
-- configured PasswordEncoder's output fits this length.
CREATE TABLE users (
    username VARCHAR(50) NOT NULL,
    password VARCHAR(100) NOT NULL,
    enabled TINYINT NOT NULL DEFAULT 1,  -- 1 = account enabled, 0 = disabled
    PRIMARY KEY (username)
);

-- Granted authorities (roles) per user; many rows per username.
CREATE TABLE authorities (
    username VARCHAR(50) NOT NULL,
    authority VARCHAR(50) NOT NULL,
    FOREIGN KEY (username) REFERENCES users(username)
);

-- Prevents granting the same authority to a user twice.
CREATE UNIQUE INDEX ix_auth_username ON authorities (username,authority);
/* ag_parse_* helper functions for PL/pgSQL: extract a named parameter from a
   URL-encoded query string (e.g. 'a=1&b=2&c=3'), with typed variants that
   fall back to NULL on invalid input. */

-- ag_parse_get: return the URL-decoded value of p_param inside the query
-- string p_get, or p_default when the parameter is absent or empty.
create or replace function ag_parse_get (p_get varchar, p_param varchar, p_default varchar default '')
returns varchar as
$body$
declare
  -- wrap the string in '&' so every parameter, including the first and last,
  -- can be located by searching for '&name='
  v_get varchar := '&' || p_get || '&';
  v_param varchar := '&' || p_param || '=';
  x integer;
begin
  x = position (v_param in v_get);
  if x = 0 then
    -- parameter not present at all
    return p_default;
  end if;
  -- drop everything up to and including '&name='
  v_get := substring (v_get from x+length(v_param));
  x = position ('&' in v_get);
  if x = 1 then
    -- parameter present but empty ('name=&...')
    return p_default;
  end if;
  -- keep the value up to (but not including) the next '&'
  v_get := substring (v_get from 1 for x-1);
  -- db_urldecode is defined elsewhere in this project
  return db_urldecode(v_get);
end;
$body$
language plpgsql STABLE COST 100;

-- ag_parse_get_date: like ag_parse_get, but cast the value to DATE;
-- an unparsable value yields NULL rather than raising.
create or replace function ag_parse_get_date (
  p_get varchar, p_param varchar, p_default varchar default ''
) returns date as
$body$
declare
  v varchar := ag_parse_get(p_get, p_param, p_default);
  d date;
begin
  begin
    d := v::date;
  exception when others then
    -- swallow cast errors: invalid input becomes NULL by design
    d := null::date;
  end;
  return d;
end;
$body$
language plpgsql STABLE COST 100;

-- ag_parse_get_timestamptz: typed variant returning TIMESTAMPTZ, NULL on
-- invalid input.
create or replace function ag_parse_get_timestamptz (
  p_get varchar, p_param varchar, p_default varchar default ''
) returns timestamptz as
$body$
declare
  v varchar := ag_parse_get(p_get, p_param, p_default);
  d timestamptz;
begin
  begin
    d := v::timestamptz;
  exception when others then
    -- swallow cast errors: invalid input becomes NULL by design
    d := null::timestamptz;
  end;
  return d;
end;
$body$
language plpgsql STABLE COST 100;

-- ag_parse_get_json: typed variant returning JSON, NULL on invalid input.
create or replace function ag_parse_get_json (
  p_get varchar, p_param varchar, p_default varchar default ''
) returns json as
$body$
declare
  v varchar := ag_parse_get(p_get,p_param,p_default);
  j json;
begin
  begin
    j := v::json;
  exception when others then
    -- swallow cast errors: invalid input becomes NULL by design
    j := null::json;
  end;
  return j;
end;
$body$
language plpgsql STABLE COST 100;
-- source: abnesh/transportManagement  (stray <reponame> marker converted to a
-- comment so the file parses as SQL)

-- Seed data for driverdb.driver.
-- Column order inferred from the values: id, name, licence number, id-card
-- number, phone, vehicle registration number -- NOTE(review): confirm against
-- the table definition; an explicit column list would make these inserts
-- robust to future schema changes.
insert into driverdb.driver values(101,'Ram','RAMKU6787R','1234-5678-9876-5432',9999912345,'KA03-MA-6754');
insert into driverdb.driver values(102,'Shyam','SHYKU1232S','4585-9846-3434-3432',1234567890,'KA10-FA-0098');
insert into driverdb.driver values(103,'Ghanshyam','GHNKU6339G','4535-7854-5521-9856',8888999952,'TN07-TZ-0014');
insert into driverdb.driver values(104,'Ramu','RAMUK2312R','2365-6975-4578-5422',9988776655,'KA03-IQ-1412');
SET NAMES UTF8; DROP DATABASE IF EXISTS dessert; CREATE DATABASE dessert CHARSET=UTF8; USE dessert; /**创建用户表**/ CREATE TABLE user( uid INT PRIMARY KEY AUTO_INCREMENT, uname VARCHAR(20), upwd VARCHAR(15) ); INSERT INTO user VALUES(null,"root","<PASSWORD>"); /**创建类别表**/ CREATE TABLE type( tid INT PRIMARY KEY AUTO_INCREMENT, tname VARCHAR(20)#类别名称 ); INSERT INTO type VALUES(null,"早餐"); INSERT INTO type VALUES(null,"川菜"); INSERT INTO type VALUES(null,"湘菜"); INSERT INTO type VALUES(null,"粤菜"); INSERT INTO type VALUES(null,"甜点"); /**创建菜单表**/ CREATE TABLE menu( mid INT PRIMARY KEY AUTO_INCREMENT, mname VARCHAR(20),#菜单名称 type INT,#菜单所属分类 mimg VARCHAR(500),#成品图片 tips VARCHAR(1000),#小贴士/注意事项 collect INT,#收藏 browse INT#浏览人数 ); /*早餐*/ INSERT INTO menu VALUES(null,"早餐鸡蛋饼",1,"http://i2.chuimg.com/1c83091c273a11e7947d0242ac110002_1775w_2394h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","慢慢卷起来就好了(注意:饼很烫!!!)。",0,0); INSERT INTO menu VALUES(null,"牛油果三明治",1,"http://i1.chuimg.com/573a093d005c41c99cbefab1bac9dddf_1124w_1124h.jpg@2o_50sh_1pr_1l_640w_520h_1c_1e_90q_1wh.jpg",null,0,0); INSERT INTO menu VALUES(null,"淡奶油软面包",1,"http://i2.chuimg.com/f58395aa882b11e6b87c0242ac110003_584w_560h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg"," 1.大家在制作的过程中也可以加入葡萄干或内馅,如果把淡奶油换成牛奶要减量,否则会使面团太湿,糖可以结合自己的喜好适当增减,本人不喜欢太甜。2.有厨友提问如何鉴别面包是否熟了,从烤箱取出之前,用一个钢勺背压一下面包侧面,如果回弹证明熟了。",0,0); INSERT INTO menu VALUES(null,"快手煎饺",1,"http://i2.chuimg.com/289dce20888f11e6b87c0242ac110003_3894w_2386h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg"," 1.最初无水煎时火不要太小2.焖煮时不要随意翻动,容易破",0,0); INSERT INTO menu VALUES(null,"黄瓜奶香燕麦豆浆",1,"http://i2.chuimg.com/bb8a8d0b422d4d638bf21e3ed5a96415_817w_817h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","1、甜度根据喜好,燕麦也根据喜好,多放点就浓郁些,不喜欢浓郁的就少放点燕麦2、放炼乳味道会更好哦",0,0); INSERT INTO menu VALUES(null,"开胃早餐面",1,"http://i2.chuimg.com/929c47fa88c311e6b87c0242ac110003_640w_640h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",null,0,0); INSERT INTO menu 
VALUES(null,"老油条之一学就会",1,"http://i2.chuimg.com/ca442ee856cb4cc69da5aa0163dc7ebd_640w_565h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","面团一定要发至面团的两倍大,炸时要不停地翻动。",0,0); INSERT INTO menu VALUES(null,"双色红薯杂粮包",1,"http://i2.chuimg.com/7145ddb96e63433fb1d420a2c5dd5d96_1126w_1500h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","这个配方可以做20多个馒头,还是一样的冷冻保存,给每日早餐增加新花样!简单易上手,关键是颜值担当!",0,0); INSERT INTO menu VALUES(null,"滑腻柔软的美式炒蛋",1,"http://i2.chuimg.com/7443250aa4954e2cb2b148b0668a4020_1500w_1124h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","炒蛋的时候火一定要小一点,可以一边冥想一边等待。在蛋液没有全部凝结的时候一定要离火,这样炒出来的蛋才能滑腻柔软,因为蛋液从熟到老,真的只是如同人生一般的匆匆一瞬间哦。",0,0); INSERT INTO menu VALUES(null,"土豆丝鸡蛋饼",1,"http://i2.chuimg.com/9622c1568b8911e6a9a10242ac110002_1920w_1080h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","普通面粉、饺子粉、小麦粉之类的都可以;高粉、低粉、淀粉我没有试过哈,尝试过的小伙伴可以留言参考吖。",0,0); /*川菜*/ INSERT INTO menu VALUES(null,"麻婆豆腐",2,"http://i2.chuimg.com/7eb024afed0e456c90f330247225ffe4_2044w_1080h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","大家记得收藏点赞关注哦",0,0); INSERT INTO menu VALUES(null,"川菜鱼香肉丝",2,"http://i2.chuimg.com/7a756ebc882911e6b87c0242ac110003_650w_650h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","青菜,肉的量可以根据自己的口味随意增减 ,这道菜非常简单,非常好吃,极力推荐大家试试~",0,0); INSERT INTO menu VALUES(null,"宫保鸡丁",2,"http://i2.chuimg.com/fada9853311d4a9ca3f9aaab4d3dcd9e_1080w_864h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","1.这里可以把配菜换成:1、莴笋丁、黄葱、干辣椒;2、炸过的土豆丁和洋葱,料汁不换,味道都很不错。 2,翻炒可以快一点,因为鸡丁也是滑油过了,其他也要保持脆度,这样鸡丁才会入口特别嫩。 3.不建议用鸡腿,用鸡腿代替鸡胸这个菜谱会比较腻。",0,0); INSERT INTO menu VALUES(null,"麻辣水煮鱼",2,"http://i2.chuimg.com/44f69a40889c11e6a9a10242ac110002_638w_640h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","买鱼时注意把鱼肉和鱼头骨分开装,先将豆芽和鱼骨捞出是为了更好的煮鱼片,不然一大锅煮,太满会把鱼片弄碎,而且豆芽就是做底菜用,所以提前捞出,注意鱼片不要煮太长时间,放入鱼片后水开一会儿就可以了",0,0); INSERT INTO menu VALUES(null,"水煮肉片",2,"http://i2.chuimg.com/b9e01a07f78e443e8640b295f7c6227b_1080w_810h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",null,0,0); INSERT INTO menu 
VALUES(null,"夫妻肺片",2,"http://i2.chuimg.com/b801635dbe244743bc2f9fc087cd2168_4464w_2976h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","1. 煮好的牛腱、牛杂浸在卤水中直至冷却,可以更好地入味。<br>2. 辣椒碎比较容易糊,要放在最下面,避免与热油直接接触。",0,0); INSERT INTO menu VALUES(null,"辣子鸡丁",2,"http://i1.chuimg.com/85f1baa954f340adbfe794bc838d7935_2048w_2048h.jpg@2o_50sh_1pr_1l_640w_520h_1c_1e_90q_1wh.jpg",null,0,0); INSERT INTO menu VALUES(null,"回锅肉",2,"http://i1.chuimg.com/c60f7fc5f1f9417faaa572b454e56101_1152w_864h.jpg@2o_50sh_1pr_1l_640w_520h_1c_1e_90q_1wh.jpg","1.豆瓣酱选用郫县豆瓣酱<br>2.肉选五花肉,肥瘦相间口感佳。<br>3.肉片要煎过就不会很油腻了。<br>4.除了煮五花肉,其它步骤不能加水。<br>5.青椒可以选择自己喜欢的任意辣度的品种。",0,0); INSERT INTO menu VALUES(null,"东坡肘子",2,"http://i1.chuimg.com/ba4b222a2d6b4367ad68b3a3b7423756_1146w_960h.jpg@2o_50sh_1pr_1l_640w_520h_1c_1e_90q_1wh.jpg",null,0,0); INSERT INTO menu VALUES(null,"钵钵鸡",2,"http://i2.chuimg.com/928d51eaa7e5457c9b362f5b5ad91126_4464w_2976h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg","1. 把油烧热后直接浇在辣椒面上,如果担心这样香气不够或者不好控制温度的话,可以分多次浇油,一次比一次温度高。这样对辣椒有所控制性,不至于糊或者程度不够,并且味道比普通的做法香很多。<br>2. 不建议汤里加酱油、蚝油等颜色较深的调料,要的就是清透的汤底和纯正的红油色。<br>3. 食材和鸡汤一定要放凉再拌,汤里盐一定要加够,蔬菜的浸泡时间可以稍微长一点,食材才能更入味。<br>4. 
配菜可以替换成自己喜欢的。",0,0); /*湘菜*/ INSERT INTO menu VALUES(null,"湘菜经典辣椒小炒肉",3,"http://i1.chuimg.com/5b05cbd80c4840e6a2edf5345b70e537_1836w_2448h.jpg@2o_50sh_1pr_1l_640w_520h_1c_1e_90q_1wh.jpg","大家记得收藏点赞关注哦",0,0); INSERT INTO menu VALUES(null,"湘味腊鸭",3,"http://i2.chuimg.com/395ac0208a8c11e6a9a10242ac110002_1125w_1125h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",null,0,0); INSERT INTO menu VALUES(null,"湘菜~紫苏煎黄瓜",3,"http://i2.chuimg.com/76de42fec2924c1b95765d5aaec6a497_1242w_1656h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",null,0,0); INSERT INTO menu VALUES(null,"辣子鸡丁",3,"http://i2.chuimg.com/b7a37d191bb6493c90122a933d280e3b_1242w_994h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",null,0,0); INSERT INTO menu VALUES(null,"家常牛蛙",3,"http://i2.chuimg.com/186f9017d6ee4ff3931556c1b9dd8631_1242w_1656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg",null,0,0); INSERT INTO menu VALUES(null,"麻辣小龙虾",3,"http://i2.chuimg.com/e55937298f2549b3ab87550bed33ec40_1035w_827h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",null,0,0); INSERT INTO menu VALUES(null,"水煮牛肉",3,"http://i1.chuimg.com/93a9aaddbb1049b7ab91299384bb3901_750w_1000h.jpg@2o_50sh_1pr_1l_640w_520h_1c_1e_90q_1wh.jpg",null,0,0); INSERT INTO menu VALUES(null,"干锅香辣虾",3,"http://i2.chuimg.com/33389c92876411e6b87c0242ac110003_384w_512h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",null,0,0); INSERT INTO menu VALUES(null,"香煎土豆片",3,"http://i2.chuimg.com/571701c609884df0aa79a753297829a0_750w_1000h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",null,0,0); INSERT INTO menu VALUES(null,"剁椒鱼头",3,"http://i2.chuimg.com/529d5add5ea6459390fcb6c114b1a947_1080w_864h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",null,0,0); /*4粤菜*/ INSERT INTO menu VALUES(null,"香香排骨煲","4","http://i2.chuimg.com/e5568eb79dfe41f39092bf9abb9d4d39_640w_360h.jpg?imageView2/1/w/215/h/136/interlace/1/q/90",null,"0","0"); INSERT INTO menu 
VALUES(null,"广式糯米卷","4","http://i2.chuimg.com/446ccd9511814adb8e546242516172ce_1215w_2160h.jpg?imageView2/1/w/215/h/136/interlace/1/q/90",null,"0","0"); INSERT INTO menu VALUES(null,"广式鸡煲","4","http://i2.chuimg.com/2d4f90f36fad4fe9b650a8ed92f30718_5184w_3456h.jpg?imageView2/2/w/660/interlace/1/q/90",null,"0","0"); INSERT INTO menu VALUES(null,"广式腊味煲仔饭","4","http://i2.chuimg.com/0ca3bff2311f4ac39d00ed4ea6554ebe_750w_760h.jpg?imageView2/1/w/215/h/136/interlace/1/q/90",null,"0","0"); INSERT INTO menu VALUES(null,"广式猪脚姜","4","http://i2.chuimg.com/416fa74d2faa40df878cc4a261003a9b_750w_600h.jpg?imageView2/1/w/215/h/136/interlace/1/q/90",null,"0","0"); INSERT INTO menu VALUES(null,"红烧乳鸽","4","http://i2.chuimg.com/4ea3f6d5b44b43c8be273632849935e9_550w_440h.jpg?imageView2/2/w/660/interlace/1/q/90",null,"0","0"); INSERT INTO menu VALUES(null,"广式凉拌莲藕","4","http://i1.chuimg.com/bed9c7007c8d4e9daaf1d18568ac261f_750w_1000h.jpg@2o_50sh_1pr_1l_215w_136h_1c_1e_90q_1wh",null,"0","0"); INSERT INTO menu VALUES(null,"柠檬鸡翅","4","http://i2.chuimg.com/0ffe98fa25ff4f9b983a4b3c0ea4c197_2048w_1536h.jpg?imageView2/1/w/215/h/136/interlace/1/q/90",null,"0","0"); INSERT INTO menu VALUES(null,"广式猪骨汤","4","http://i1.chuimg.com/821211388a4b49f8b6a49c3f8a4c05a5_2886w_3072h.jpg@2o_50sh_1pr_1l_215w_136h_1c_1e_90q_1wh",null,"0","0"); INSERT INTO menu VALUES(null,"广式脆皮烧肉","4","http://i2.chuimg.com/43ddc2e1d8764287a5b2f02f5a54a473_1080w_864h.jpg?imageView2/1/w/215/h/136/interlace/1/q/90",null,"0","0"); /*5甜点*/ INSERT INTO menu VALUES(null, '秒杀哈根达斯无冰渣冰激凌',5,'http://i2.chuimg.com/f7ee4cb431f5454788c6b8c761020008_1280w_1024h.jpg?imageView2/1/w/215/h/136/interlace/1/q/90','原方来自哈尼K克,她的是50克糖,我觉得太甜了,减了10克,最后蛋黄糊和淡奶油混合后再打发一下,这样做出来的冰激凌才能真正无冰渣,你们也试试吧,孩子很爱吃',0,0); INSERT INTO menu VALUES(null, '雪媚娘(超详细)',5,'http://i2.chuimg.com/2bc96a728aca11e6a9a10242ac110002_1242w_1221h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg','糯米粉买水磨的,奶油选择动物性淡奶油',0,0); INSERT INTO menu VALUES(null, 
'冻酸奶芝士蛋糕',5,'http://i1.chuimg.com/2e023d4c874511e6a9a10242ac110002_800w_533h.jpg@2o_50sh_1pr_1l_640w_520h_1c_1e_90q_1wh.jpg','1. 这款冻芝士是可以不用烤箱的,不过我做的时候还是用烤箱烤了戚风蛋糕代替了饼底,我觉得消化饼干太硬了,还是绵软的蛋糕配上芝士口感更好。<br> 2. 家里没有朗姆酒可以用白葡萄酒代替,味道也不错~<br> 3. 柠檬汁的量可以自己控制,如果想酸一点就多加那么点 <br> 4. 这是6寸蛋糕的量',0,0); INSERT INTO menu VALUES(null, '双皮奶',5,'http://i1.chuimg.com/66477c7886f811e6b87c0242ac110003_457w_685h.jpg@2o_50sh_1pr_1l_280w_190h_1c_1e_90q_1wh','1.做双皮奶用了几个蛋清,那剩下的蛋黄怎么办呢?我用它们做了香酥的朗姆蛋黄小饼干。<br> 2.选奶很关键!在广州做双皮奶大多使用水牛奶,因为水牛奶的蛋白质含量很高,所以容易形成较厚的奶皮。没有水牛奶也没有关系,我们用普通的牛奶也可以做到,但是要注意要选全脂鲜奶。<br> 3.结第一次奶皮。这一步很关键,可以把牛奶煮一下,不过不能煮开,烧久了会破坏蛋白质,也结不起奶皮了,如果你向我一样不能把握好时间的话,可以把鲜牛奶盖上一层保鲜膜后放在蒸锅里蒸十分钟。注意一定呀盖上保鲜膜或者碗盖,不然蒸锅里的水滴到牛奶中就不会结奶皮了。<br> 4.奶不倒净,碗底留奶。奶凉晾后会形成一层奶皮,用尖刀沿着碗边划大概十厘米左右的口。倒出大部分牛奶,碗底留点底不要倒净,不然奶皮容易粘在碗上,待会儿就浮不起来了。<br> 5.快速搅拌蛋奶液,过筛,撇沫。鸡蛋清先用筷子搅拌均匀后倒入牛奶中加糖,搅匀后过筛。筛掉蛋清中尚未打发的部分。如果蛋奶液中有气泡和奶沫用勺子撇去,以免影响双皮奶的成型。<br> 6.回倒要耐心。把蛋清奶液缓缓沿着刚才划破的奶皮缺口倒会碗里,让奶皮浮在上面。做这一步的时候要沉得住气,别急,手稳。<br> 7.控制好火候。再次敷上保鲜膜上锅蒸,中火蒸十分钟后关火,此时不要掀开盖子焖五分钟让余温继续加热,五分钟后再取出。',0,0); INSERT INTO menu VALUES(null, '红糖姜番薯糖水',5,'http://i2.chuimg.com/d15b71bb173745578a6ec08257889350_1080w_1440h.jpg?imageView2/1/w/280/h/190/interlace/1/q/90',null,0,0); INSERT INTO menu VALUES(null, '无油无糖无面粉蛋糕~健康甜品??的做法',5,'http://i1.chuimg.com/b10de7c640f74a35ab82e1ce034fd182_1080w_810h.jpg@2o_50sh_1pr_1l_640w_520h_1c_1e_90q_1wh.jpg','配料不固定,爱吃什么放什么 <br> 快手好吃低脂健康的小甜品 <br> 期待开发更多的好吃的吧??',0,0); INSERT INTO menu VALUES(null, '蜜桃少女心(渐变气泡酒)',5,'http://i2.chuimg.com/7efb6bb489a311e6a9a10242ac110002_640w_640h.jpg?imageView2/1/w/215/h/136/interlace/1/q/90','如果雪碧或者rio是提前冷藏过的话,倒出来杯壁上雾蒙蒙的会更梦幻~',0,0); INSERT INTO menu VALUES(null, '玉米杯',5,'http://i1.chuimg.com/ded9a5a7d42242ff850d14ac2bfe5b28_1080w_864h.jpg@2o_50sh_1pr_1l_660w_90q_1wh','1. 其实速冻玉米粒也是可以的!只是新鲜玉米味道更香,吃起来和速冻玉米还是有很大的不同的。<br> 2. 教大家一个印尼玉米杯吃法,可以給搅拌好的玉米淋上巧克力酱、沙拉酱,或是放上起司,也是很美味的吃法喔。<br> 3. 
用新鲜玉米的话,剥掉玉米剩下的杆也可以拿来炖汤喔。',0,0); INSERT INTO menu VALUES(null, '提拉米苏',5,'http://i2.chuimg.com/637971136528478e931c6f0067d3f329_1922w_1081h.png?imageView2/1/w/215/h/136/interlace/1/q/90','提拉米苏的配比要求很低,奶油、奶酪、糖、利口酒、手指饼干这些食材在固定的比例里面都可以增加减少。<br> 我一般都是做sabayon,就是水浴加热,差不多83度蛋黄就熟了,这样可以保存时间较久。<br> 这次用的炸弹面糊,是一个甜品师朋友推荐的,相对于其他做法,口感会更加轻盈丝滑。<br> 而且因为糖浆的温度高,在搅拌的时候能将蛋黄煮熟,所以在冷却时它的结构会更稳定,保存时间也更久。',0,0); INSERT INTO menu VALUES(null, '椰奶冻',5,'http://i2.chuimg.com/dd016f3c872911e6a9a10242ac110002_690w_458h.jpg?imageView2/1/w/215/h/136/interlace/1/q/90','冷天的话,步骤6时不用放冰箱,室温就行。<br> 另外,放在冰箱大概5个小时左右即可。',0,0);
-- Ingredient table: one row per (menu, ingredient name, amount); tid is a surrogate key.
-- NOTE(review): the seed INSERTs in this file give no column list and mix single- and
-- double-quoted string literals; double quotes only work while MySQL's ANSI_QUOTES
-- SQL mode is off. mid has no declared FOREIGN KEY to menu — presumably intentional
-- for a simple seed script, but confirm.
/**创建用料表**/ CREATE TABLE materials(
 tid INT PRIMARY KEY AUTO_INCREMENT,
 mid INT,# menu this ingredient belongs to (menu's id)
 tname VARCHAR(20),# ingredient name
 dosage VARCHAR(20)# amount, free text (e.g. "50克", "少许")
 );
-- Ingredients for menus 1-3 (egg breakfast pancake / avocado toast / milk bread).
 INSERT INTO materials VALUES(null,1,"面粉","50克"); INSERT INTO materials VALUES(null,1,"冷水","100克"); INSERT INTO materials VALUES(null,1,"小葱","少许"); INSERT INTO materials VALUES(null,1,"熟黑芝麻","一把"); INSERT INTO materials VALUES(null,1,"甜面酱","一勺"); INSERT INTO materials VALUES(null,1,"香菜","少许"); INSERT INTO materials VALUES(null,1,"生菜","一大片"); INSERT INTO materials VALUES(null,1,"鸡蛋","一个"); /*****************************************/ INSERT INTO materials VALUES(null,2,"牛油果","半个"); INSERT INTO materials VALUES(null,2,"鸡蛋","1个"); INSERT INTO materials VALUES(null,2,"面包","1片"); INSERT INTO materials VALUES(null,2,"黑胡椒","少许"); INSERT INTO materials VALUES(null,2,"酸奶","180克"); INSERT INTO materials VALUES(null,2,"水果","若干"); INSERT INTO materials VALUES(null,2,"花生酱","1勺"); /***************************************/ INSERT INTO materials VALUES(null,3,"高筋粉","300g"); INSERT INTO materials VALUES(null,3,"低筋粉","120g"); INSERT INTO materials VALUES(null,3,"干酵母","4g"); INSERT INTO materials VALUES(null,3,"细砂糖","60g-70g"); INSERT INTO materials VALUES(null,3,"盐","5g"); INSERT INTO materials VALUES(null,3,"全蛋液","40g"); INSERT INTO materials VALUES(null,3,"淡奶油","120g"); INSERT INTO materials VALUES(null,3,"牛奶","120g"); INSERT INTO materials VALUES(null,3,"黄油","35g"); 
-- Ingredients for menus 3 (cont.) through 9; statements continue across physical lines.
INSERT INTO materials VALUES(null,3,"28*28不粘烤盘","一个"); INSERT INTO materials VALUES(null,3,"表面装饰","蛋液,杏仁片一小勺"); /**************************************/ INSERT INTO materials VALUES(null,4,"饺子","新鲜包的和速冻的皆可,根据自己的量"); INSERT INTO materials VALUES(null,4,"油","适量"); INSERT INTO materials VALUES(null,4,"芝麻","选择性"); INSERT INTO materials VALUES(null,4,"葱花","少许"); /****************************************/ INSERT INTO materials VALUES(null,5,"黄瓜","1根"); INSERT INTO materials VALUES(null,5,"即食麦片","20克"); INSERT INTO materials VALUES(null,5,"牛奶","30克"); INSERT INTO materials VALUES(null,5,"炼乳","少量"); INSERT INTO materials VALUES(null,5,"白开水","400毫升"); /************************************/ INSERT INTO materials VALUES(null,6,"湿面条/干面条/挂面","1碗"); INSERT INTO materials VALUES(null,6,"鸡蛋","一个"); INSERT INTO materials VALUES(null,6,"小油菜","一颗"); INSERT INTO materials VALUES(null,6,"海苔或紫菜","三片"); INSERT INTO materials VALUES(null,6,"火腿","一片"); INSERT INTO materials VALUES(null,6,"葱姜末","若干"); /******************************/ INSERT INTO materials VALUES(null,7,"中筋面粉","500克"); INSERT INTO materials VALUES(null,7,"无铝泡打粉","4克"); INSERT INTO materials VALUES(null,7,"小苏打","4克"); INSERT INTO materials VALUES(null,7,"酵母","1克"); INSERT INTO materials VALUES(null,7,"盐","8克"); INSERT INTO materials VALUES(null,7,"鸡蛋","1个"); INSERT INTO materials VALUES(null,7,"纯牛奶","300克"); /***************************/ INSERT INTO materials VALUES(null,8,"面粉","1000克"); INSERT INTO materials VALUES(null,8,"水","380克"); INSERT INTO materials VALUES(null,8,"牛奶","400克"); INSERT INTO materials VALUES(null,8,"酵母","10克"); INSERT INTO materials VALUES(null,8,"杂粮/芝麻","100克"); INSERT INTO materials VALUES(null,8,"紫薯","适量"); INSERT INTO materials VALUES(null,8,"红薯","适量"); INSERT INTO materials VALUES(null,8,"糯米粉","适量"); INSERT INTO materials VALUES(null,8,"黄油","100克"); INSERT INTO materials VALUES(null,8,"蜂蜜","适量"); /*********************/ INSERT INTO materials VALUES(null,9,"鸡蛋","2个"); INSERT INTO materials 
-- Ingredients for menus 9 (cont.) through 13 (start).
-- NOTE(review): "耗油" in the menu-12 row below is likely a typo for "蚝油"
-- (oyster sauce) — menu 15 and menu 31 use the correct spelling. Left
-- unchanged here; a data fix should be signed off by the content owner.
VALUES(null,9,"牛奶","10毫升"); INSERT INTO materials VALUES(null,9,"盐","适量"); INSERT INTO materials VALUES(null,9,"胡椒","若干"); INSERT INTO materials VALUES(null,9,"椰子油","一小勺"); /**************************/ INSERT INTO materials VALUES(null,10,"鸡蛋","1个"); INSERT INTO materials VALUES(null,10,"土豆","1只"); INSERT INTO materials VALUES(null,10,"普通面粉","少许"); INSERT INTO materials VALUES(null,10,"盐","少许"); INSERT INTO materials VALUES(null,10,"油","锅底一层"); /*麻婆豆腐*/ INSERT INTO materials VALUES(null,11,"嫩豆腐","1块"); INSERT INTO materials VALUES(null,11,"豆瓣酱","1勺半"); INSERT INTO materials VALUES(null,11,"花椒粉","少许"); INSERT INTO materials VALUES(null,11,"生抽","少许"); INSERT INTO materials VALUES(null,11,"肉末","200克"); INSERT INTO materials VALUES(null,11,"糖","3g左右"); INSERT INTO materials VALUES(null,11,"老抽","几滴即可"); INSERT INTO materials VALUES(null,11,"料酒","适量"); INSERT INTO materials VALUES(null,11,"葱姜蒜","适量"); INSERT INTO materials VALUES(null,11,"汁水","勾芡(下面有介绍怎么调汁)"); INSERT INTO materials VALUES(null,11,"鸡精","适量"); INSERT INTO materials VALUES(null,11,"小米椒","2个"); /*川菜鱼香肉丝*/ INSERT INTO materials VALUES(null,12,"猪肉","500g"); INSERT INTO materials VALUES(null,12,"胡萝卜","半根"); INSERT INTO materials VALUES(null,12,"青椒","1个"); INSERT INTO materials VALUES(null,12,"水发木耳","适量"); INSERT INTO materials VALUES(null,12,"香菜","适量"); INSERT INTO materials VALUES(null,12,"郫县豆瓣酱","少许"); INSERT INTO materials VALUES(null,12,"料酒","少许"); INSERT INTO materials VALUES(null,12,"糖","少许"); INSERT INTO materials VALUES(null,12,"醋","少许"); INSERT INTO materials VALUES(null,12,"生抽","少许"); INSERT INTO materials VALUES(null,12,"耗油","少许"); INSERT INTO materials VALUES(null,12,"葱,蒜","少许"); /*宫保鸡丁*/ INSERT INTO materials VALUES(null,13,"鸡胸肉","1块"); INSERT INTO materials VALUES(null,13,"黄瓜","半根"); INSERT INTO materials VALUES(null,13,"胡萝卜","半根"); INSERT INTO materials VALUES(null,13,"花生米","1把"); INSERT INTO materials VALUES(null,13,"黄葱","1根"); INSERT INTO materials VALUES(null,13,"豆瓣","1勺"); INSERT INTO 
-- Ingredients for menus 13 (cont.) through 16 (start); this line completes the
-- "INSERT INTO" fragment ending the previous physical line.
materials VALUES(null,13,"干辣椒","6个");
INSERT INTO materials VALUES(null,13,"花椒","10粒");
-- FIX: removed a second, byte-identical (13, 花椒, 10粒) row that was inserted
-- twice back-to-back — a copy-paste duplicate in the seed data.
/*麻辣水煮鱼*/
INSERT INTO materials VALUES(null,14,"大蒜","若干");
INSERT INTO materials VALUES(null,14,"生姜","若干");
INSERT INTO materials VALUES(null,14,"鸡蛋","一枚");
INSERT INTO materials VALUES(null,14,"花椒","若干");
INSERT INTO materials VALUES(null,14,"干辣椒","若干");
INSERT INTO materials VALUES(null,14,"郫县豆瓣酱","一勺");
INSERT INTO materials VALUES(null,14,"草鱼","一条(约2.5斤)");
INSERT INTO materials VALUES(null,14,"料酒","若干");
INSERT INTO materials VALUES(null,14,"豆芽","若干");
/*水煮肉片*/
INSERT INTO materials VALUES(null,15,"里脊肉","一块");
INSERT INTO materials VALUES(null,15,"郫县豆瓣酱","4-5勺");
INSERT INTO materials VALUES(null,15,"辣椒面","1勺");
INSERT INTO materials VALUES(null,15,"花椒","2勺");
INSERT INTO materials VALUES(null,15,"蚝油","1勺");
INSERT INTO materials VALUES(null,15,"盐","2勺");
INSERT INTO materials VALUES(null,15,"糖","3勺");
INSERT INTO materials VALUES(null,15,"干辣椒","1个");
INSERT INTO materials VALUES(null,15,"生菜、豆芽、木耳、洋葱","依自己喜好");
INSERT INTO materials VALUES(null,15,"蒜","5瓣");
INSERT INTO materials VALUES(null,15,"葱花","1把");
INSERT INTO materials VALUES(null,15,"白芝麻","1把");
INSERT INTO materials VALUES(null,15,"鸡蛋","1个");
INSERT INTO materials VALUES(null,15,"料酒","1勺");
INSERT INTO materials VALUES(null,15,"胡椒粉","1勺");
/* 夫妻肺片*/
INSERT INTO materials VALUES(null,16,"辣椒碎","100g");
INSERT INTO materials VALUES(null,16,"香叶","1片");
INSERT INTO materials VALUES(null,16,"草果","1个");
INSERT INTO materials VALUES(null,16,"桂皮","1片");
INSERT INTO materials VALUES(null,16,"菜籽油","600ml");
-- FIX: removed a second, byte-identical (16, 菜籽油, 600ml) row (duplicate).
INSERT INTO materials VALUES(null,16,"丁香","少量");
INSERT INTO materials VALUES(null,16,"八角","1个");
INSERT INTO materials VALUES(null,16,"花椒","少量");
INSERT INTO materials VALUES(null,16,"小茴香","少量");
INSERT INTO materials VALUES(null,16,"牛腱子","一条");
INSERT INTO materials VALUES(null,16,"牛肚","一片");
-- The next physical line in the file supplies this statement's VALUES(...) clause.
INSERT INTO materials
-- Ingredients for menus 16 (cont.) through 50, then the step-table DDL and the
-- first cooking-step row. Statements continue across physical lines.
VALUES(null,16,"牛心","1/4个"); INSERT INTO materials VALUES(null,16,"生抽","20g"); INSERT INTO materials VALUES(null,16,"土芹菜","2棵"); INSERT INTO materials VALUES(null,16,"蒜末","适量"); INSERT INTO materials VALUES(null,16,"姜片","适量"); INSERT INTO materials VALUES(null,16,"花椒面","适量"); INSERT INTO materials VALUES(null,16,"盐","大量"); /*辣子鸡丁*/ INSERT INTO materials VALUES(null,17,"鸡胸肉","1块"); INSERT INTO materials VALUES(null,17,"豆瓣酱","2勺"); INSERT INTO materials VALUES(null,17,"花椒","50粒"); INSERT INTO materials VALUES(null,17,"大蒜","5瓣"); INSERT INTO materials VALUES(null,17,"盐","2克"); INSERT INTO materials VALUES(null,17,"淀粉","适量"); INSERT INTO materials VALUES(null,17,"干辣椒","25克"); INSERT INTO materials VALUES(null,17,"姜","8片"); INSERT INTO materials VALUES(null,17,"油","100ml"); /*回锅肉*/ INSERT INTO materials VALUES(null,18,"五花肉","12块左右的肉"); INSERT INTO materials VALUES(null,18,"青椒","1只"); INSERT INTO materials VALUES(null,18,"郫县豆瓣酱","2勺"); INSERT INTO materials VALUES(null,18,"姜片","适量"); INSERT INTO materials VALUES(null,18,"料酒","适量"); INSERT INTO materials VALUES(null,18,"糖","1勺"); INSERT INTO materials VALUES(null,18,"花椒","8粒左右"); INSERT INTO materials VALUES(null,18,"鸡精","少量"); /*东坡肘子*/ INSERT INTO materials VALUES(null,19,"肘子","一个"); INSERT INTO materials VALUES(null,19,"冰糖","适量"); INSERT INTO materials VALUES(null,19,"生抽","适量"); INSERT INTO materials VALUES(null,19,"老抽","适量"); INSERT INTO materials VALUES(null,19,"料酒","一瓶"); INSERT INTO materials VALUES(null,19,"盐","适量"); INSERT INTO materials VALUES(null,19,"八角","2-3个"); INSERT INTO materials VALUES(null,19,"桂皮","一块"); INSERT INTO materials VALUES(null,19,"姜片 ","适量"); INSERT INTO materials VALUES(null,19,"葱","一根 "); /*钵钵鸡*/ INSERT INTO materials VALUES(null,20,"鸡","1整只"); INSERT INTO materials VALUES(null,20,"生姜片","若干"); INSERT INTO materials VALUES(null,20,"盐","适量"); INSERT INTO materials VALUES(null,20,"桂皮","1片"); INSERT INTO materials VALUES(null,20,"八角","2个"); INSERT INTO materials VALUES(null,20,"山奈","少量"); INSERT 
-- NOTE(review): "耗油" in the menu-22, 23, 26, 27(below) rows is likely a typo for
-- "蚝油" (oyster sauce), the spelling used elsewhere in this file. Left unchanged.
INTO materials VALUES(null,20,"土豆","1个"); INSERT INTO materials VALUES(null,20,"莲藕","半个"); INSERT INTO materials VALUES(null,20,"莴笋","1段"); INSERT INTO materials VALUES(null,20,"炸豆腐","2片"); INSERT INTO materials VALUES(null,20,"鱼丸","若干"); INSERT INTO materials VALUES(null,20,"木耳","若干"); INSERT INTO materials VALUES(null,20,"辣椒碎","适量"); INSERT INTO materials VALUES(null,20,"十三香","5g"); INSERT INTO materials VALUES(null,20,"芝麻酱","10g"); INSERT INTO materials VALUES(null,20,"糖","5g"); INSERT INTO materials VALUES(null,20,"花椒油","5g"); INSERT INTO materials VALUES(null,20,"麻油","5g"); /*辣椒小炒肉*/ INSERT INTO materials VALUES(null,21,"肉","150克"); INSERT INTO materials VALUES(null,21,"辣椒","6个"); INSERT INTO materials VALUES(null,21,"酱油","一勺"); INSERT INTO materials VALUES(null,21,"盐","适量"); INSERT INTO materials VALUES(null,21,"生抽","半勺"); INSERT INTO materials VALUES(null,21,"大蒜","三掰"); INSERT INTO materials VALUES(null,21,"小米椒","五个"); /*湘味腊鸭*/ INSERT INTO materials VALUES(null,22,"油","少许"); INSERT INTO materials VALUES(null,22,"青椒","大概6-7个的样子"); INSERT INTO materials VALUES(null,22,"腊鸭","半只"); INSERT INTO materials VALUES(null,22,"姜蒜","少许切成块"); INSERT INTO materials VALUES(null,22,"料酒","按口味放"); INSERT INTO materials VALUES(null,22,"生抽",""); INSERT INTO materials VALUES(null,22,"耗油",""); INSERT INTO materials VALUES(null,22,"醋",""); INSERT INTO materials VALUES(null,22,"鸡精",""); INSERT INTO materials VALUES(null,22,"剁辣椒",""); INSERT INTO materials VALUES(null,22,"萝卜干",""); /*紫苏煎黄瓜*/ INSERT INTO materials VALUES(null,23,"紫苏","4~5片"); INSERT INTO materials VALUES(null,23,"蒜","两掰"); INSERT INTO materials VALUES(null,23,"黄瓜","两根"); INSERT INTO materials VALUES(null,23,"耗油","半勺"); INSERT INTO materials VALUES(null,23,"盐","适量"); INSERT INTO materials VALUES(null,23,"鸡精","少许"); INSERT INTO materials VALUES(null,23,"红椒","少许"); INSERT INTO materials VALUES(null,23,"豆豉","少量"); /*辣子鸡丁*/ INSERT INTO materials VALUES(null,24,"大鸡腿肉","2只"); INSERT INTO materials VALUES(null,24,"干辣椒","1把"); 
INSERT INTO materials VALUES(null,24,"葱段","少许"); INSERT INTO materials VALUES(null,24,"花椒","少许"); INSERT INTO materials VALUES(null,24,"生姜","一块"); INSERT INTO materials VALUES(null,24,"大蒜","8掰"); INSERT INTO materials VALUES(null,24,"五香粉","少许"); INSERT INTO materials VALUES(null,24,"孜然粉","少许勺"); INSERT INTO materials VALUES(null,24,"生抽","少许"); /*家常牛蛙*/ INSERT INTO materials VALUES(null,25,"牛蛙","4只1250克"); INSERT INTO materials VALUES(null,25,"葱姜蒜","各15克"); INSERT INTO materials VALUES(null,25,"洋葱",""); INSERT INTO materials VALUES(null,25,"大红椒大青椒","各半个"); INSERT INTO materials VALUES(null,25,"嫩姜","约25克"); INSERT INTO materials VALUES(null,25,"泡椒","5个"); INSERT INTO materials VALUES(null,25,"酸菜","30克"); INSERT INTO materials VALUES(null,25,"大料","1片"); INSERT INTO materials VALUES(null,25,"菜籽油","150克左右"); INSERT INTO materials VALUES(null,25,"生抽","1勺"); INSERT INTO materials VALUES(null,25,"料酒","1勺"); INSERT INTO materials VALUES(null,25,"豆掰酱","1勺"); INSERT INTO materials VALUES(null,25,"开水","适量"); INSERT INTO materials VALUES(null,25,"花椒面","适量"); INSERT INTO materials VALUES(null,25,"鸡精味精","适量"); INSERT INTO materials VALUES(null,25,"花椒","1撮"); /*麻辣小龙虾*/ INSERT INTO materials VALUES(null,26,"小龙虾","2斤"); INSERT INTO materials VALUES(null,26,"植物油","适量"); INSERT INTO materials VALUES(null,26,"豆瓣酱","半勺"); INSERT INTO materials VALUES(null,26,"葱","2根"); INSERT INTO materials VALUES(null,26,"姜","7-8片"); INSERT INTO materials VALUES(null,26,"蒜","6-8瓣"); INSERT INTO materials VALUES(null,26,"干辣椒","3个"); INSERT INTO materials VALUES(null,26,"香叶","6-8片"); INSERT INTO materials VALUES(null,26,"八角","1个"); INSERT INTO materials VALUES(null,26,"花椒","1把"); INSERT INTO materials VALUES(null,26,"麻椒","1把"); INSERT INTO materials VALUES(null,26,"啤酒","2听"); INSERT INTO materials VALUES(null,26,"冰糖","1汤勺"); INSERT INTO materials VALUES(null,26,"酱油","2汤勺"); INSERT INTO materials VALUES(null,26,"耗油","2汤勺"); INSERT INTO materials VALUES(null,26,"胡椒粉","10克"); INSERT INTO materials 
VALUES(null,26,"盐","25-30克"); INSERT INTO materials VALUES(null,26,"柠檬皮","少许"); /*水煮牛肉*/ INSERT INTO materials VALUES(null,27,"豆皮",""); INSERT INTO materials VALUES(null,27,"豆芽",""); INSERT INTO materials VALUES(null,27,"青菜",""); INSERT INTO materials VALUES(null,27,"干辣椒","5个"); INSERT INTO materials VALUES(null,27,"花椒",""); INSERT INTO materials VALUES(null,27,"蒜末",""); INSERT INTO materials VALUES(null,27,"葱",""); INSERT INTO materials VALUES(null,27,"料酒","1勺"); INSERT INTO materials VALUES(null,27,"豆瓣酱","1勺"); INSERT INTO materials VALUES(null,27,"生抽","2勺"); INSERT INTO materials VALUES(null,27,"淀粉","2勺"); INSERT INTO materials VALUES(null,27,"辣椒面",""); /*干锅香辣虾*/ INSERT INTO materials VALUES(null,28,"虾","300克"); INSERT INTO materials VALUES(null,28,"土豆","1个"); INSERT INTO materials VALUES(null,28,"黄瓜","半根"); INSERT INTO materials VALUES(null,28,"香菜","1小把"); INSERT INTO materials VALUES(null,28,"干红辣椒","1小把"); INSERT INTO materials VALUES(null,28,"葱姜蒜","适量"); INSERT INTO materials VALUES(null,28,"四川辣酱","1茶勺"); INSERT INTO materials VALUES(null,28,"生抽","1汤勺"); INSERT INTO materials VALUES(null,28,"糖","1茶勺"); INSERT INTO materials VALUES(null,28,"料酒","1汤勺"); INSERT INTO materials VALUES(null,28,"麻辣花生","1汤勺"); INSERT INTO materials VALUES(null,28,"白芝麻","1汤勺"); INSERT INTO materials VALUES(null,28,"年糕条","适量"); /*香煎土豆片*/ INSERT INTO materials VALUES(null,29,"土豆","两个"); INSERT INTO materials VALUES(null,29,"小葱","两根"); INSERT INTO materials VALUES(null,29,"豆瓣酱","一勺"); INSERT INTO materials VALUES(null,29,"生抽","一勺"); INSERT INTO materials VALUES(null,29,"蒜头","三颗"); /*剁椒鱼头*/ INSERT INTO materials VALUES(null,30,"剁椒(菜市场有卖)","200克"); INSERT INTO materials VALUES(null,30,"蒸鱼鼓油","一汤勺"); INSERT INTO materials VALUES(null,30,"油","适量"); INSERT INTO materials VALUES(null,30,"白胡椒粉","适量"); INSERT INTO materials VALUES(null,30,"葱","五根"); INSERT INTO materials VALUES(null,30,"姜","五片"); INSERT INTO materials VALUES(null,30,"金针菇","一把"); INSERT INTO materials VALUES(null,30,"蒜","五颗"); 
-- NOTE(review): from menu 31 to 40 the mid value is written as a quoted string
-- ("31"); MySQL coerces it to INT, but an unquoted integer would match the
-- column type and the rest of the file. Keyword case also flips to lowercase here.
INSERT INTO materials VALUES(null,30,"辣椒油","一汤勺"); INSERT INTO materials VALUES(null,30,"料酒","三汤勺"); INSERT INTO materials VALUES(null,30,"鱼头","一个"); /*排骨*/ insert into materials values(null,"31","排骨","500克"); insert into materials values(null,"31","洋葱","1个"); insert into materials values(null,"31","蒜瓣","12个"); insert into materials values(null,"31","小葱","3根"); insert into materials values(null,"31","姜片","5片"); insert into materials values(null,"31","蚝油","15克"); insert into materials values(null,"31","生抽","15克"); insert into materials values(null,"31","老抽","5克"); insert into materials values(null,"31","米酒","5克"); insert into materials values(null,"31","淀粉","15克"); insert into materials values(null,"31","糖","5克"); insert into materials values(null,"31","白胡椒","2克"); /*糯米*/ insert into materials values(null,"32","糯米","300克"); insert into materials values(null,"32","香菇","100克"); insert into materials values(null,"32","胡萝卜","150克"); insert into materials values(null,"32","虾干或鱿鱼干","50克"); insert into materials values(null,"32","烤熟的花生","80克"); insert into materials values(null,"32","中筋面粉","500克"); insert into materials values(null,"32","水","275克"); insert into materials values(null,"32","糖","20克"); insert into materials values(null,"32","盐","3.5克"); insert into materials values(null,"32","酵母","5克"); insert into materials values(null,"32","白胡椒粉","1小勺"); insert into materials values(null,"32","蚝油","25克"); insert into materials values(null,"32","老抽","10克"); insert into materials values(null,"32","沙茶酱","35克"); /*鸡*/ insert into materials values(null,"33","干葱头","8-10颗"); insert into materials values(null,"33","新鲜鸡一只","2.5斤左右"); insert into materials values(null,"33","生姜","若干"); insert into materials values(null,"33","生抽","适量"); insert into materials values(null,"33","糖","4小勺"); insert into materials values(null,"33","料酒","适量"); insert into materials values(null,"33","盐","0.5小勺"); /*腊味煲仔饭*/ insert into materials values(null,"34","香肠","2根"); insert into materials 
values(null,"34","自制豆皮","若干张"); insert into materials values(null,"34","绿叶菜","一把"); insert into materials values(null,"34","鸡蛋","1个"); insert into materials values(null,"34","生抽","2勺"); insert into materials values(null,"34","老抽","1勺"); insert into materials values(null,"34","料酒","1勺"); insert into materials values(null,"34","香油","1勺"); insert into materials values(null,"34","糖","1勺"); insert into materials values(null,"34","蚝油","少许"); insert into materials values(null,"34","鱼露","少许"); /*猪脚姜*/ insert into materials values(null,"35","猪蹄","半只"); insert into materials values(null,"35","甜醋","半瓶"); insert into materials values(null,"35","红糖","一片"); insert into materials values(null,"35","姜","若干"); insert into materials values(null,"35","鹌鹑蛋","10只"); /*红烧乳鸽*/ insert into materials values(null,"36","乳鸽","2只"); insert into materials values(null,"36","十三香","适量"); insert into materials values(null,"36","生抽","适量"); insert into materials values(null,"36","老抽","适量"); insert into materials values(null,"36","麻油","半勺"); insert into materials values(null,"36","卤水","适量"); /*凉拌莲藕*/ insert into materials values(null,"37","藕","3节"); insert into materials values(null,"37","酱油","4勺"); insert into materials values(null,"37","陈醋","3勺"); insert into materials values(null,"37","盐","少许"); insert into materials values(null,"37","花生油","约一勺"); insert into materials values(null,"37","芝麻油","约一勺"); insert into materials values(null,"37","蒜头","5瓣"); insert into materials values(null,"37","香菜","3棵"); insert into materials values(null,"37","糖和鸡粉","适量"); insert into materials values(null,"37","冰水","适量"); /*柠檬鸡翅*/ insert into materials values(null,"38","鸡翅","6个"); insert into materials values(null,"38","柠檬","2个"); insert into materials values(null,"38","胡萝卜","半根"); insert into materials values(null,"38","料酒","2勺"); insert into materials values(null,"38","生抽","2勺"); insert into materials values(null,"38","盐","少许"); insert into materials values(null,"38","糖","少许"); insert into materials 
values(null,"38","蒜","3瓣"); /*猪骨汤*/ insert into materials values(null,"39","猪骨","250克"); insert into materials values(null,"39","姜","2片"); insert into materials values(null,"39","南瓜带皮","350克"); insert into materials values(null,"39","栗子","8颗"); insert into materials values(null,"39","北杏","5克"); /*脆皮烧肉*/ insert into materials values(null,"40","五花肉","1000克"); insert into materials values(null,"40","五香粉","5勺"); insert into materials values(null,"40","盐","2勺"); insert into materials values(null,"40","白醋","适量"); INSERT INTO materials VALUES(null,41,'淡奶油','200克'); INSERT INTO materials VALUES(null,41,'蛋黄','2个'); INSERT INTO materials VALUES(null,41,'糖','40克'); INSERT INTO materials VALUES(null,41,'牛奶','50克'); INSERT INTO materials VALUES(null,41,'盐','0.5克'); INSERT INTO materials VALUES(null,41,'柠檬汁','几滴'); /********************/ INSERT INTO materials VALUES(null,42,'糯米粉','50g'); INSERT INTO materials VALUES(null,42,'玉米淀粉','15g'); INSERT INTO materials VALUES(null,42,'细砂糖','25g'); INSERT INTO materials VALUES(null,42,'牛奶','85g'); INSERT INTO materials VALUES(null,42,'黄油','10g'); INSERT INTO materials VALUES(null,42,'淡奶油','120g'); INSERT INTO materials VALUES(null,42,'细砂糖','8g'); INSERT INTO materials VALUES(null,42,'熟糯米粉','适量'); INSERT INTO materials VALUES(null,42,'时令水果','适量'); /*********************/ INSERT INTO materials VALUES(null,43,'奶油奶酪','200g'); INSERT INTO materials VALUES(null,43,'酸奶','180g'); INSERT INTO materials VALUES(null,43,'牛奶','40ml'); INSERT INTO materials VALUES(null,43,'消化饼干','100g'); INSERT INTO materials VALUES(null,43,'蛋黄','1个'); INSERT INTO materials VALUES(null,43,'吉利丁片','10g'); INSERT INTO materials VALUES(null,43,'黄油','50g'); INSERT INTO materials VALUES(null,43,'细砂糖','75g'); INSERT INTO materials VALUES(null,43,'柠檬汁','15ml'); INSERT INTO materials VALUES(null,43,'朗姆酒','一勺'); /************************/ INSERT INTO materials VALUES(null,44,'牛奶','400毫升'); INSERT INTO materials VALUES(null,44,'鸡蛋清','2个'); INSERT INTO materials 
VALUES(null,44,'糖','2茶匙'); /************************/ INSERT INTO materials VALUES(null,45,'番薯','3条'); INSERT INTO materials VALUES(null,45,'红糖','适量'); INSERT INTO materials VALUES(null,45,'姜','3片'); /**************************/ INSERT INTO materials VALUES(null,46,'酸奶','200克'); INSERT INTO materials VALUES(null,46,'鸡蛋','1个'); INSERT INTO materials VALUES(null,46,'南瓜','100克'); INSERT INTO materials VALUES(null,46,'红薯','100克'); /***************************/ INSERT INTO materials VALUES(null,47,'水蜜桃口味Rio','适量'); INSERT INTO materials VALUES(null,47,'雪碧','适量'); INSERT INTO materials VALUES(null,47,'勺子','适量'); /***************************/ INSERT INTO materials VALUES(null,48,'玉米','2根'); INSERT INTO materials VALUES(null,48,'牛油','适量'); INSERT INTO materials VALUES(null,48,'盐','适量'); INSERT INTO materials VALUES(null,48,'糖','适量'); /***************************/ INSERT INTO materials VALUES(null,49,'鸡蛋','适量'); INSERT INTO materials VALUES(null,49,'马斯卡彭奶酪','适量'); INSERT INTO materials VALUES(null,49,'奶油','适量'); INSERT INTO materials VALUES(null,49,'浓缩咖啡','适量'); INSERT INTO materials VALUES(null,49,'手指饼干','适量'); INSERT INTO materials VALUES(null,49,'白砂糖','适量'); /****************************/ INSERT INTO materials VALUES(null,50,'椰浆','60g'); INSERT INTO materials VALUES(null,50,'牛奶','135g'); INSERT INTO materials VALUES(null,50,'淡奶油','95g'); INSERT INTO materials VALUES(null,50,'吉利丁片','2片'); INSERT INTO materials VALUES(null,50,'绵白糖','35g');
-- Cooking-step table: one row per step; display order presumably follows sid
-- insertion order (no explicit step-number column) — confirm with the app code.
/**创建步骤表**/ CREATE TABLE step(
 sid INT PRIMARY KEY AUTO_INCREMENT,
 mid INT,# menu this step belongs to (menu's id)
 simg VARCHAR(500),# image URL for this step (nullable in practice)
 title VARCHAR(500)# description text for this step
 );
/* 鸡蛋早餐饼*/ INSERT INTO step VALUES(null,1,"http://i2.chuimg.com/fd2a9b1a273a11e7947d0242ac110002_1992w_2656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","首先将需要的材料准备好: 面粉加水调成糊状(面粉和水的比例没有精确算过,一般就是50克粉兑100克水,反正调成流动的糊状即可,可以略微稀一点,便于均匀地摊开);小葱和香菜洗净后切碎;生菜洗净后沥干水分。"); INSERT INTO step 
-- Cooking steps for menus 1-3; this line completes the "INSERT INTO step"
-- fragment ending the previous physical line.
VALUES(null,1,"http://i1.chuimg.com/008064fc273b11e7bc9d0242ac110002_1992w_2656h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg ","开中火放上锅子,锅中倒一点点油,用耙子绕着锅边拉均匀(有油刷的可以刷一遍)。等锅微微热的时候调小火!!!倒入调好的面糊(锅切不可烧得过热,要不然面糊倒入后马上会凝结不利于摊开),用耙子慢慢晕开(此处请不要用工具不停反复划拉,要等面糊慢慢成形!)。");
INSERT INTO step VALUES(null,1,"http://i2.chuimg.com/049b9408273b11e7947d0242ac110002_1992w_2656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","开中火,等面糊结成片状微微泛白后,敲一个鸡蛋,也用耙子慢慢摊均匀。");
INSERT INTO step VALUES(null,1,"http://i2.chuimg.com/076da86a273b11e7947d0242ac110002_1992w_2656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","接着撒上葱花");
INSERT INTO step VALUES(null,1,"http://i1.chuimg.com/0a51e942273b11e7947d0242ac110002_1992w_2656h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","撒熟的黑芝麻,没有也可不撒。");
INSERT INTO step VALUES(null,1,"http://i1.chuimg.com/0d7a6f54273b11e7bc9d0242ac110002_1992w_2656h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","等饼皮完全成形后(翻面的时候饼皮不沾锅为佳,不要太软)小心地把饼皮翻面,抹上甜面酱");
INSERT INTO step VALUES(null,1,"http://i2.chuimg.com/0fe5a894273b11e7947d0242ac110002_1992w_2656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","撒上香菜,不喜欢这个味道的也可不撒(可是我觉得香菜和甜面酱是这个饼的灵魂哈哈)。");
INSERT INTO step VALUES(null,1,"http://i1.chuimg.com/12a5aeee273b11e7947d0242ac110002_1992w_2656h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","关火,放上生菜(及其它食材:比如黄瓜丝、煎好的培根、香肠、油条等)。");
/*牛油果*/
INSERT INTO step VALUES(null,2,"http://i2.chuimg.com/9eb4a14b91954e719f41d350fc8297e4_3072w_2304h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","准备好所有食材");
INSERT INTO step VALUES(null,2,"http://i2.chuimg.com/cc42243d1f4c4c94bd86fa83c576ba01_1124w_1124h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","鸡蛋加黑芝麻打散");
INSERT INTO step VALUES(null,2,"http://i2.chuimg.com/ec449e6b1432455bac6691d82b6c3d85_1280w_1023h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","半块牛油果加入少量盐和黑胡椒压成泥状备用");
INSERT INTO step VALUES(null,2,"http://i2.chuimg.com/a73fe0b9dfae4ccca08c0d34fd73fad1_1125w_900h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","将面包片裹上蛋液,煎至两面金黄后稍微放凉");
-- FIX: removed two byte-identical copies of the row above (same image URL and
-- text) — the "将面包片裹上蛋液…" step was inserted three times; only one kept.
INSERT INTO step VALUES(null,2,NULL,"煎好的面包依次涂上花生酱和牛油果");
INSERT INTO step VALUES(null,2,"http://i1.chuimg.com/9819512c29fd439baff06bfcd4c35b3b_1124w_899h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", "搭配喜欢的酸奶和水果即可");
/***************************/
INSERT INTO step VALUES(null,3,"http://i2.chuimg.com/2c0893808e0511e6b87c0242ac110003_853w_640h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "把除了黄油以外的原料放在一起,揉至扩展阶段,加入黄油揉至黄油充分吸收,能出膜效果会更好。");
INSERT INTO step VALUES(null,3,"http://i2.chuimg.com/2bccc2568e0511e6a9a10242ac110002_1901w_820h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "放在温暖处发酵至原来的2-2.5倍大,手指蘸面粉插入面团中,小洞不回弹,面团不塌陷,拉开发酵好的面团里面一丝一丝的,然后将面团排气,尽量少加干粉,否则影响粘合,盖上保鲜膜松弛15分钟。");
INSERT INTO step VALUES(null,3,"http://i2.chuimg.com/2b6596ee8e0511e6b87c0242ac110003_640w_853h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "面团重约800g,将面团分成16等份,50g一个滚圆摆入烤盘。");
INSERT INTO step VALUES(null,3,"http://i2.chuimg.com/2b3322908e0511e6a9a10242ac110002_640w_853h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "放在温度和湿度合适的地方进行二次醒发至1.5-2倍大。醒发好之后在表面刷上鸡蛋液(刷蛋液时要轻轻的,否则弄破了面团就会塌,如果一碰就塌说明醒发过度了)。撒上杏仁片。");
INSERT INTO step VALUES(null,3,"http://i2.chuimg.com/2a4cb2928e0511e6b87c0242ac110003_590w_713h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "烤箱180°预热5分钟,烤盘放入下数第二层,调节上火175°,下火160°,烤20-25分钟即可,烤至十几分钟上色满意后加盖锡纸.烤箱不同,会有温差,请大家自己调节好。");
INSERT INTO step VALUES(null,3,"http://i1.chuimg.com/2a208f328e0511e6b87c0242ac110003_270w_240h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", "掰开一个,拉丝效果不错,非常松软。蛤蛤蛤");
/*************************************************/
-- The next physical line in the file supplies this statement's VALUES(...) clause.
INSERT INTO step
-- Cooking steps for menus 4-7 (煎饺 / 黄瓜麦片饮 / 汤面 / 油条).
VALUES(null,4,"http://i2.chuimg.com/91720fce8ef211e6b87c0242ac110003_3854w_2569h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "饺子新鲜的和速冻的皆可,这次是用了速冻的。"); INSERT INTO step VALUES(null,4,null,"平底锅抹油,饺子无需解冻,平铺码好。"); INSERT INTO step VALUES(null,4,null,"中火开始煎,一两分钟后夹起一个观察,若底部焦黄就可倒入纯净水。水没过饺子三分之一(如果是新鲜的饺子,水量适当减少。)加盖,焖五分钟左右。"); INSERT INTO step VALUES(null,4,"http://i2.chuimg.com/93c80ef48ef211e6b87c0242ac110003_4288w_2848h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "水快收干时,倒入一小碗稀面粉水(面粉:水=1:10),撒芝麻和香葱(也可不撒)。家里没葱了,我撒了青椒末。"); INSERT INTO step VALUES(null,4,"http://i1.chuimg.com/95d1d4a08ef211e6b87c0242ac110003_3882w_2588h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", "小火煎干,完成。"); /**********************************************/ INSERT INTO step VALUES(null,5,"http://i2.chuimg.com/0b3d2d5f776c4620ada59a192c83f8c7_817w_817h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "准备原材:黄瓜洗干净用淡盐水泡一会,燕麦,奶粉,糖的量根据喜好随意放多少,我是为了拍照才这样分别秤量的"); INSERT INTO step VALUES(null,5,"http://i1.chuimg.com/fac55422005544748b65c8b45e7ab510_817w_817h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", "浸泡好的黄瓜再冲洗下,切片放料理机,加入奶粉,燕麦,白糖或者蜂蜜,炼乳都可以"); INSERT INTO step VALUES(null,5,"http://i1.chuimg.com/661d386077f7435c8ecb3414dd912c9c_817w_817h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", "加入白开水,小心烫哦(用开水豆浆就不会有青味了,凉白开的话豆浆会有生黄瓜的青味,我是怎么样都可以的,但是娃不喜欢有青味的,所以我用了白开水)"); INSERT INTO step VALUES(null,5,"http://i1.chuimg.com/51c4ab0c37524618be70ca6f393d3929_817w_817h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", "直接果汁键,2-3分钟就可以了,很香很浓郁顺滑,不管是搭配早餐还是午餐都是完美的"); /*************************************************/ INSERT INTO step VALUES(null,6,null,"锅中水烧开,下面条"); INSERT INTO step VALUES(null,6,"http://i2.chuimg.com/d8c4c3c48fd011e6b87c0242ac110003_640w_640h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "烧水煮面的同时,拿出一只吃面的碗,在碗底铺好准备的调料:葱姜末、虾米、熟芝麻、白胡椒粉、花椒油、麻油、生抽、醋"); INSERT INTO step VALUES(null,6,"http://i1.chuimg.com/d90b4c7c8fd011e6a9a10242ac110002_640w_640h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", 
"面条煮到7分熟的时候,加入切好的火腿片进锅中一起煮,快煮好面的最后半分钟加入小青菜,然后关火"); INSERT INTO step VALUES(null,6,"http://i2.chuimg.com/d9597c308fd011e6b87c0242ac110003_640w_640h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "煮面的同时,可另起一支平底锅,用小火煎一只漂亮的太阳蛋"); INSERT INTO step VALUES(null,6,"http://i2.chuimg.com/d9a19fa68fd011e6a9a10242ac110002_640w_640h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "面煮好关火,舀两大勺面汤进碗里冲开调料,捞面、最后一次铺上火腿、青菜、煎蛋,再加两片海苔,大功告成!"); /*****************************************/ INSERT INTO step VALUES(null,7,"http://i1.chuimg.com/bc21b09d50ed45aea29184d2f09f1563_480w_640h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", "1:盆中加入面粉,泡打粉,小苏打,酵母,盐,混合均匀。2:鸡蛋打成蛋液,与牛奶混合,分次加入面中把面和均匀,揉透,封上保鲜膜醒发5分钟,再次揉面,反复两次把面完全揉光。3:取20克色拉油,把面团表面涂均匀,封上保鲜膜醒发至面团2倍大时即成油条面胚。"); INSERT INTO step VALUES(null,7,"http://i2.chuimg.com/1369abc5052749c09e793b73ef98b8e7_640w_756h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "1:锅中加入宽油,一边烧油一边处理面团。2:案板撒一层铺面,取出面团用手掌用力按压排气。3:左右手分别托住面团两端,将其拖成长条,用擀面杖擀成1公分厚,20公分宽的长条再用刀切成3公分长条"); INSERT INTO step VALUES(null,7,"http://i1.chuimg.com/c08e2540e8974671b143e8d5ff8dd4c0_640w_750h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", "用擀面杖擀成长条"); INSERT INTO step VALUES(null,7,"http://i2.chuimg.com/b28406ff9c57485192a6df1c66af9866_640w_756h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "取两条面摞在一起,取一根筷子纵向从中间压实,压紧即成胚条。"); INSERT INTO step VALUES(null,7,"http://i2.chuimg.com/9eaca0bb7c62413d8c754b30b289573a_640w_756h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "1:双手捏住胚条两头,将面均匀拉成30公分左右的长条,轻轻放入200度左右的热油中,一边炸一边用筷子不停地翻动让油条受热均匀,待油条丰满膨胀,酥脆金黄时,捞出控油装盘即可。。。"); INSERT INTO step VALUES(null,7,"http://i2.chuimg.com/a240d0332fb94d668aa962e6104ed18f_640w_747h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "金黄酥脆"); INSERT INTO step VALUES(null,7,"http://i2.chuimg.com/8c32756475f7418a947fe8a639a82bff_640w_577h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "酥脆可口"); INSERT INTO step 
-- Cooking steps for menus 7 (end), 8 (双薯馒头) and 9 (start); this line completes
-- the "INSERT INTO step" fragment ending the previous physical line.
VALUES(null,7,"http://i1.chuimg.com/8db2239a9fcc4adcae16af21039fe799_640w_565h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg", "出锅完成");
/************************************************/
-- FIX: removed INSERT INTO step VALUES(null,8,null,"锅中水烧开,下面条") — that row
-- was byte-identical to menu 6's first noodle step and does not belong to menu 8
-- (a steamed-bun recipe: 面粉/酵母/紫薯/红薯); clearly a copy-paste artifact.
INSERT INTO step VALUES(null,8,"http://i1.chuimg.com/b38f206fa7bc42bfbaf74418f0cbafce_1126w_1500h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","之前买的小籽杂粮,放进密封袋保存啦,没了包装袋,完全忘记这叫什么名字。不过很多东西都可以替换,或是直接改成加一些杂粮粉。");
INSERT INTO step VALUES(null,8,"http://i2.chuimg.com/18599714011f45d5b29b9a3fc97326bf_1126w_1500h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "水、面、牛奶、酵母粉和这个不知名的东东混合一起,揉好后静止醒发。");
INSERT INTO step VALUES(null,8,"http://i2.chuimg.com/1676cc8a5197496d8f08f8f9e231433f_1126w_1500h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "紫薯蒸熟后去皮,压碎,用黄油炒至细腻少颗粒的状态。(这个紫薯很干,容易结块)。");
INSERT INTO step VALUES(null,8,"http://i2.chuimg.com/5fa61de7c9904888b7e92a6fcd81760a_1126w_1500h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "融合了黄油后的感觉,瞬间觉得有食欲啦,加入适量蜂蜜,放凉后备用。");
INSERT INTO step VALUES(null,8,"http://i2.chuimg.com/f794991e0fe04d81a839fa16c1f1246e_1126w_1500h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "红心的红薯蒸熟后,稍稍放凉后,加入适量糯米粉,以不粘手为准(也可加些面粉进去,如果糯米粉加完后,依然觉得粘手的话)。然后擀成饺子皮大小后,包入紫薯馅料备用。");
INSERT INTO step VALUES(null,8,"http://i2.chuimg.com/28a1ca1178224d5b874911dfa4b9f2e6_1126w_1500h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "面团发好后,切成合适大小,擀开包入之前备好的红薯球。");
INSERT INTO step VALUES(null,8,null, "蒸20分钟左右即可完成!");
INSERT INTO step VALUES(null,8,null, "完美的双薯配合!");
/*****************************************/
INSERT INTO step VALUES(null,9,"http://i2.chuimg.com/fcc7c8eb4a094e66ad53f53632aa60ca_1124w_1500h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "将鸡蛋和牛奶混合打散,加入少许盐和胡椒调味。");
INSERT INTO step VALUES(null,9,"http://i1.chuimg.com/a3de7130c2db452ea1017b537def7304_1124w_1500h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","平底锅火上预热,我用的是椰子油,让鸡蛋吃起来有一股淡淡椰香。油溶化后将火关至小火。");
-- The next physical line in the file supplies this statement's VALUES(...) clause.
INSERT INTO step
-- Cooking steps for menus 9 (cont.) through 16 (the final statement continues
-- past this section of the file).
VALUES(null,9,"http://i2.chuimg.com/3d2fecdc356a4794b7bc3b9525314bef_480w_384h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "现在,一定要有耐心,等蛋液边缘凝结,可用勺往中间推凝结的蛋液。大概有60%蛋液凝结的时候,把锅移开火焰。用余温将剩下的蛋液继续热熟。"); INSERT INTO step VALUES(null,9,"http://i2.chuimg.com/18768d4e2d1b4269bdc300a935ea62c5_1500w_1124h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "烤上两片面包,开始享受吧。"); /***************************************/ INSERT INTO step VALUES(null,10,"http://i1.chuimg.com/c5fb36be91cf11e6b87c0242ac110003_1080w_1080h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","土豆嗖成丝,用冷水冲一下。(是一位老阿姨教我的,别问为啥了,也可以不冲,看个人喜好哈)"); INSERT INTO step VALUES(null,10,"http://i2.chuimg.com/c5aa380491cf11e6b87c0242ac110003_1080w_1080h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "加入少许面粉,盐、鸡蛋,拌匀。面粉不要太多,不然太厚影响口感。搅拌成糊糊状,摇晃时有流动感是最好。盐根据自己的口味适量即可。"); INSERT INTO step VALUES(null,10,"http://i2.chuimg.com/c554db7091cf11e6a9a10242ac110002_1080w_1080h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "平底锅刷上一层薄薄的油,小火,倒入搅拌好的糊,摊平,小火煎至金黄翻面,两面金黄即可出锅。"); INSERT INTO step VALUES(null,10,"http://i2.chuimg.com/c4f1b2c091cf11e6a9a10242ac110002_1080w_1080h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg", "出锅喽。"); /*麻婆豆腐*/ INSERT INTO step VALUES(null,11,"http://i2.chuimg.com/ab03619633db4b4d8d3ece0750c18e70_2016w_1008h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","姜葱蒜切成粒,小米椒二个,嫩豆腐一块准备好,(200克猪肉剁成酱☞加入适量胡椒粉,淀粉调匀),准备好所有食材之后,开始下一步工序!"); INSERT INTO step VALUES(null,11,"http://i2.chuimg.com/8c90c5bbd7224f17b223a4e8d216e079_2016w_1008h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","嫩豆腐切成块状,大小保持一致,千万不要切坏了,准备好锅,加入水烧开之后加一勺盐,在倒入豆腐☞转小火2分钟左右捞出(主要是去除豆腐的腥味)然后调制好做麻婆豆腐的汁水,碗中加入生抽少许,老抽几滴即可,清水半碗,糖3g,鸡精少许,淀粉适量一起搅拌即可"); INSERT INTO step VALUES(null,11,"http://i2.chuimg.com/ea4605eff65f4331b2dbab6632acb310_480w_960h.gif?imageView2/1/w/800/h/600/q/90/format/gif","起锅烧油,放入葱姜蒜爆香,加入小米椒,肉沫炒制金黄色,在放入一勺半郫县豆瓣酱和料酒翻炒均匀,然后锅中加适量清水,(水淹过豆腐即可,也不要倒入太多,适量就好)"); INSERT INTO step 
VALUES(null,11,"http://i2.chuimg.com/d1be188143504da6b217689cb1c7281a_480w_960h.gif?imageView2/1/w/800/h/600/q/90/format/gif","在倒入豆腐推至均匀,千万不要用力过大,不然豆腐容易碎掉,二三分钟左右看到汤汁变少黏稠之后,在加入准备好的汁水勾芡即可出锅"); INSERT INTO step VALUES(null,11,"http://i1.chuimg.com/d61863498df44a98a4e54446c729343c_2044w_1080h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","撒上葱花,花椒粉点缀,一道好吃又好看的川味麻婆豆腐就大功告成"); /*川菜鱼香肉丝*/ INSERT INTO step VALUES(null,12,"http://i1.chuimg.com/9ba4a7988daa11e6b87c0242ac110003_380w_380h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","肉切成丝,用盐、料酒、淀粉抓匀"); INSERT INTO step VALUES(null,12,"http://i2.chuimg.com/9bc7758e8daa11e6a9a10242ac110002_557w_557h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","胡萝卜、青椒、木耳切丝,香菜切成段"); INSERT INTO step VALUES(null,12,"http://i2.chuimg.com/9bf5afb28daa11e6b87c0242ac110003_650w_650h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","2勺白糖、2勺醋、1勺生抽、1勺耗油、葱末、蒜末、水、淀粉调成碗汁"); INSERT INTO step VALUES(null,12,"http://i2.chuimg.com/9c3b5fbc8daa11e6a9a10242ac110002_650w_650h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","起锅,放油,把肉丝炒变色,盛出备用"); INSERT INTO step VALUES(null,12,"http://i2.chuimg.com/9c687f9c8daa11e6a9a10242ac110002_650w_650h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","起锅,放少许油,放入2勺郫县豆瓣酱,炒出红油"); INSERT INTO step VALUES(null,12,"http://i1.chuimg.com/9cb661ee8daa11e6b87c0242ac110003_650w_650h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","炒出红油后,放青椒丝、胡萝卜丝炒至断生"); INSERT INTO step VALUES(null,12,"http://i2.chuimg.com/9cf0244c8daa11e6a9a10242ac110002_650w_650h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","放肉丝、木耳丝、香菜段,翻炒一下,放入碗汁,炒匀,关火,出锅前再放入少许蒜末"); /*宫保鸡丁*/ INSERT INTO step VALUES(null,13,"http://i1.chuimg.com/309355b56fdc42b3aa0455835660e18a_750w_750h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","首先是鸡胸肉的处理,把表面的筋膜处理掉,然后用刀背拍一下,切小块,一勺料酒、一点胡椒粉、适量盐、淀粉,用手捞匀。"); INSERT INTO step VALUES(null,13,"http://i1.chuimg.com/1d916b40febd40a1b178a9ca6b3fccca_750w_750h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","配料处理好,黄瓜最好把芯去掉,这样炒出来不会太水。花生米我用的生的,有油炸过的当然可以替代。"); 
-- NOTE(review): "耗油" inside several step texts above/below (menus 12, 13) is
-- likely the same 蚝油 (oyster sauce) typo as in the materials data — user-facing
-- text, so a content-owner fix rather than a code fix.
INSERT INTO step VALUES(null,13,"http://i1.chuimg.com/f4b085b3b55b478baa621c8a147f9a05_750w_750h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","然后调汁,一勺醋,大半勺糖,一勺生抽,2勺料酒,一勺耗油,一点鸡粉,一点淀粉。(拍的时候还没加淀粉,加了会白一点)"); INSERT INTO step VALUES(null,13,"http://i1.chuimg.com/37966840ec494226b036c6773e4b92a9_750w_750h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","锅内下油,先把花生酥一下,然后再倒入鸡丁滑一下油,定型,变色捞出。"); INSERT INTO step VALUES(null,13,"http://i2.chuimg.com/c191e388294d4a99bbf29a337b9a6bb8_750w_750h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","然后锅内倒出多余的油,剩下一些炒菜的,倒入花椒,爆出香味过后,捞出。加入一勺豆瓣炒出红油,再下蒜瓣和干辣椒爆香,再下胡萝卜和黄瓜跟鸡丁,翻炒几下倒入黄葱。 加入调好的料汁,略微翻炒收汁,出锅!!"); /*麻辣水煮鱼*/ INSERT INTO step VALUES(null,14,null,"腌鱼:好人家腌鱼包、鸡蛋清、料酒、姜丝、盐少许、花生油少许。手抓均匀,静置15分钟(期间可以切配其他食材) 豆芽过开水后待用"); INSERT INTO step VALUES(null,14,null,"大蒜整颗稍微拍一下,姜切片,干辣椒切断(喜欢吃辣的亲可以用朝天椒拍扁代替),开始热锅下油,放入蒜、姜、辣椒、豆瓣酱一勺、翻炒后放入好人家水煮鱼调味包继续翻炒几下,放入提前备好的高汤(或开水),加一点盐到汤里(注意不要太多,前面的豆瓣酱和调味包已经有盐了),水开后先放鱼骨熬出味,放入豆芽,熟之捞出到盛鱼的大碗里(豆芽垫底做配菜)"); INSERT INTO step VALUES(null,14,null,"捞完锅里的鱼骨和豆芽后剩下的汤继续烧开,下鱼片两分钟水开即可盛盘(鱼片易熟,煮久易碎),汤和鱼全部盛到刚才盛有鱼骨和豆芽的大碗里"); INSERT INTO step VALUES(null,14,"http://i2.chuimg.com/ac3a4e9a8f2711e6b87c0242ac110003_640w_640h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","最后一个步骤:鱼面撒点花椒,热锅下油烧汤淋在花椒上面,再在上面摆上几根香菜,香喷喷的水煮鱼出锅啦!"); /*水煮肉片*/ INSERT INTO step VALUES(null,15,"http://i2.chuimg.com/e82ad0c8f1e3416bab5ee6c7dee93b0a_756w_1008h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","里脊肉洗净,与纹路垂直切成薄片,越薄越好;用胡椒粉、料酒腌制15分钟,倒入蛋清和淀粉抓匀,量上述都有"); INSERT INTO step VALUES(null,15,"http://i2.chuimg.com/8924d756509449a6bbb0f47aab16257b_1008w_756h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","蔬菜我用了生菜和洋葱。生菜只需要焯一头铺到碗底;木耳的话需要提前泡开"); INSERT INTO step VALUES(null,15,"http://i2.chuimg.com/49db83df1b174d2f924d10443175912b_1008w_756h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","将洋葱,蒜末,干辣椒切好待用"); INSERT INTO step 
VALUES(null,15,"http://i1.chuimg.com/2da6eff493c547039e865dba432e0d77_1008w_756h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","锅内倒入油,放入一半蒜末、一把花椒和郫县豆瓣酱,3勺颜色不够红但适合不会吃辣的人,如果要正宗点就4-5勺,爆香"); INSERT INTO step VALUES(null,15,"http://i2.chuimg.com/f3d75b886e114d0897ce8e14e7ef2701_1008w_756h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","下洋葱,翻炒均匀。有黑木耳的此时下锅一起炒。"); INSERT INTO step VALUES(null,15,"http://i2.chuimg.com/90d3782d719844a7b0d8a94a7bed2ff6_1008w_756h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","加入适量水,依次加入蚝油、糖、盐,盖上锅盖将洋葱煮熟。煮熟后一片片放入腌好摊开的肉片,煮到肉两面发白为止,大概2分钟,因为薄所以熟得块,不要煮太久肉会老,影响口感"); INSERT INTO step VALUES(null,15,"http://i2.chuimg.com/c0e360d327e445b394052f841d302414_1008w_756h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","装盘,倒在焯好的生菜上,上面放上一把花椒,一把葱花,一把芝麻,一把干辣椒段,一把蒜末,一勺辣椒面。锅洗净,重新倒入菜油,热至七成,迅速倒在上面,此时可以看到铺在上面的材料在热油里跳跃。一碗简单又美味的水煮肉片就做好了。"); /* 夫妻肺片*/ INSERT INTO step VALUES(null,16,"http://i2.chuimg.com/f2d3266a98df4aeb84d751ee70b24718_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","土芹菜洗净,切碎后大半码在盘子里,留小部分放在一个大碗里,一会儿放在拌卤好的牛肉里"); INSERT INTO step VALUES(null,16,"http://i2.chuimg.com/c40a56cf8afb4ebdb08c3d72b60c9b6c_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","牛心,牛腱子冷水下锅加姜片焯一下。 Tips:焯水用冷水下锅煮出杂质,姜片去腥。"); INSERT INTO step VALUES(null,16,"http://i2.chuimg.com/e423dab0b27d48e38f74975efe31779d_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","另起一锅水,加入卤水香料,加入20g生抽,20g冰糖,再烧开水,放入牛心,牛腱子"); INSERT INTO step VALUES(null,16,"http://i2.chuimg.com/4b18528624d648778d4fefb3e0ad94e4_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","加适量的盐,卤一个小时左右。 Tips:卤水需要咸一点,这样便于让食材入味。"); INSERT INTO step VALUES(null,16,"http://i1.chuimg.com/99ccbf9be00142caa4e30d8b3e6c0602_4464w_2976h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","熬红油:准备一个耐热的碗,碗内加入辣椒碎100g,香叶1片,草果1个,桂皮1片,起锅加入菜籽油,烧热至3-4成热后倒入一半的量到辣椒碎内,先让油激发一下辣椒的气味,加热20秒左右再次倒入热油,备用;这样能使辣椒和香料的香味充分释放出来"); INSERT INTO step 
VALUES(null,16,"http://i2.chuimg.com/baf2c225925a45bb85bf4f504fb04d09_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","食材卤好后充分放凉,切薄片大小可以根据自己喜好。Tips:食材充分放凉后再切会更容易切,不易散开。"); INSERT INTO step VALUES(null,16,"http://i2.chuimg.com/3bae836fd2bc4066ae6c272772fdafb8_1280w_720h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","切好的卤料放入装有芹菜的碗,然后加入适量的蒜末,花椒面,舀一大勺卤汁和适量红油拌匀"); INSERT INTO step VALUES(null,16,"http://i2.chuimg.com/00dcd6ffbc9242fdaf4695072dbdfe43_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","码到盘中土芹菜上,浇几勺红油在表面,撒上葱花,芝麻,芹菜叶点缀即可"); /*辣子鸡丁*/ INSERT INTO step VALUES(null,17,"http://i2.chuimg.com/65e0861ae3874992bad9572941da2eee_1242w_1656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","把鸡胸肉切好,加入盐和淀粉抓匀放置10-15分钟入味"); INSERT INTO step VALUES(null,17,"http://i2.chuimg.com/02c7531c11594a7f87bfc662e3b4d03f_1242w_1656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","准备好葱、姜、蒜、花椒、剪成段的干辣椒"); INSERT INTO step VALUES(null,17,"http://i1.chuimg.com/8c2ac14845d24bf680e35320727ab66e_1242w_1242h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","把鸡丁放入锅里煎(一般做法都是用油炸)"); INSERT INTO step VALUES(null,17,"http://i2.chuimg.com/b01cce7cb38046f095e1c1ed0cfb2414_2048w_2048h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","煎成金黄色,鸡丁盛出来,油留在锅里"); INSERT INTO step VALUES(null,17,"http://i2.chuimg.com/ec9318ef5fad4dde8f915bb9eb422c19_1242w_1656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","锅里放入郫县豆瓣酱、葱(葱白部分)、姜、蒜、花椒、干辣椒段炒出红油"); INSERT INTO step VALUES(null,17,"http://i2.chuimg.com/e7d2c75aed144edd918a3950c1ef6f5f_1242w_1656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","加入煎好的鸡丁"); INSERT INTO step VALUES(null,17,"http://i2.chuimg.com/0510be6d7a6e4578810482e78a579a89_1242w_1656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","翻炒均匀(如果喜欢加糖的,可以在此步骤加糖,鸡肉本身有鲜味,鸡精和味精不建议添加)"); INSERT INTO step VALUES(null,17,"http://i2.chuimg.com/594630f8391f4a87a01c8af56aa0394c_2048w_2048h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","出锅,大米饭配上!"); /*回锅肉*/ INSERT INTO step 
VALUES(null,18,"http://i1.chuimg.com/2d7083ed82914842ad4890d9eb53a093_1152w_864h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","选用肥瘦相间的五花肉,冷水冲干净后,锅里放水,加几片姜片、花椒、料酒与五花肉同煮,水烧开后再煮一会,用筷子戳一下没有血水就可以关火了,不用熟透的,待会还要烧了。"); INSERT INTO step VALUES(null,18,"http://i2.chuimg.com/dbf4d85cbe9541fabd5f81fed640a525_1152w_864h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","锅烧热放底油,改小火,放入肉片用铲子划散。"); INSERT INTO step VALUES(null,18,"http://i1.chuimg.com/34657d5b5a8d4735ac3023a9381f2379_864w_1152h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","加入料酒,煎到肉微微泛黄。"); INSERT INTO step VALUES(null,18,"http://i2.chuimg.com/899d85e865ca44b090c672076be31f89_864w_1152h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","加入郫县豆瓣酱,改中火翻炒一下,再加入青椒片和一勺糖,翻炒一下。"); INSERT INTO step VALUES(null,18,"http://i1.chuimg.com/fafbcc88f48b4b3da63a9ba1a590e38a_864w_1152h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","青椒断生后关火,加入少量鸡精翻炒一下。(鸡精不加也不要紧)"); INSERT INTO step VALUES(null,18,"http://i2.chuimg.com/e0a98f3e3d834f3c9696109e27d6ee00_1152w_864h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","出锅装盘,享用美食。"); /*东坡肘子*/ INSERT INTO step VALUES(null,19,"http://i2.chuimg.com/2575fa4c52834c2991e9f03d2e1b2338_780w_1040h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","肘子中间切开,不切断,为了入味,所以切开了"); INSERT INTO step VALUES(null,19,"http://i1.chuimg.com/d82a8cb0a0c84b79aa85bf431be6ba39_780w_1040h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","然后把葱,姜,八角,桂皮放砂锅底部,再把肘子皮朝下放进去"); INSERT INTO step VALUES(null,19,"http://i2.chuimg.com/77679061ca8d4fc8bd256797390b86fc_1040w_780h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","再加入其他所有调料,料酒是一瓶,冰糖要多,要微甜,要是液体没淹没食材就加水,直到淹没所有食材,烧开,转最小火,炖软 "); INSERT INTO step VALUES(null,19,"http://i1.chuimg.com/29c46f5247c74dd1836cdefe54d3797d_1146w_960h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","炖软后肘子捞出来,汁留下收汁浇上面即可"); /*钵钵鸡*/ INSERT INTO step 
VALUES(null,20,"http://i2.chuimg.com/e7ef69c62e584ba1be7a3710eaab16db_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","把整只鸡的骨肉分离,鸡胸肉切片。Tips:找好关节处就很容易分离鸡肉,分离鸡胸的时候,在表面的一大块就是鸡大胸肉,在骨架缝隙还有鸡小胸肉,这块肉会更加嫩些。"); INSERT INTO step VALUES(null,20,"http://i2.chuimg.com/ac13ebc623a74f13b476a3e58f04aebb_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","把鸡胸肉切片装碗;鸡骨,鸡爪,鸡腿,鸡翅冷水下锅,放入姜片,盐,八角,山奈熬煮1小时左右。Tips:煮开的时候可以撇去浮沫,关中小火慢炖。"); INSERT INTO step VALUES(null,20,"http://i2.chuimg.com/4187d12d115e4692a166dcf9f27b51a2_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","把藕切片,土豆切片,莴笋切片,其他素菜都处理好;用细竹签串好,鸡胸肉也串好"); INSERT INTO step VALUES(null,20,"http://i2.chuimg.com/d1e3faf3d7134f2781fa7350dadca8c8_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","烧一锅开水把鸡胸肉和素菜烫熟,蔬菜稍微烫熟过冷水后捞出备用。 Tips:过冷水是为了让食材快速降温,保持爽脆的口感,素菜切薄片所以很快就能熟。"); INSERT INTO step VALUES(null,20,"http://i2.chuimg.com/2d8a498cf0534ee8aec35aa2debc79ff_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","过滤出高汤备用;接着是红油的制作:把菜籽油烧热至6成熟,大碗内装入辣椒面,桂皮1片,八角1个,山奈1个,倒入热油这样可以激香辣椒面,再倒入芝麻和十三香粉拌匀"); INSERT INTO step VALUES(null,20,"http://i2.chuimg.com/32c4d048cf2d4cffb71a391c418c2224_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","鸡汤装碗,加入芝麻酱,糖,麻油,花椒油,盐,蒜末调味,倒入大量红油拌匀"); INSERT INTO step VALUES(null,20,"http://i2.chuimg.com/b345165827874767ba7b8934d03b86d4_4464w_2976h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","把食材泡到酱汤里1小时,让食材入味"); /*湘味腊鸭*/ INSERT INTO step VALUES(null,21,"http://i2.chuimg.com/186f9017d6ee4ff3931556c1b9dd8631_1242w_1656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","辣椒炒肉首选螺丝椒"); INSERT INTO step VALUES(null,21,"http://i2.chuimg.com/a84460a907c34840a6572244007447f8_1242w_1656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","或者香辣的青线椒"); INSERT INTO step VALUES(null,21,"http://i1.chuimg.com/094f489d22544cf2b7a6a1be5c0927ed_3072w_2304h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","辣椒洗净"); INSERT INTO step 
VALUES(null,21,"http://i2.chuimg.com/bfa5b6bf05274044933d70ceb07016ff_3072w_2304h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","滚刀切片,加了小米椒,提辣,肉切薄皮");
INSERT INTO step VALUES(null,21,"http://i2.chuimg.com/d4f4a889c91f4a9682c711003861b507_1656w_1242h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","热锅放油 肉片翻炒变色");
INSERT INTO step VALUES(null,21,"http://i2.chuimg.com/e3aa7b0b55764a40a903e3fa3cf54e74_1656w_1242h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","加一勺酱油调色 然后拨至一边 锅里如果锅油不多了加一点 ");
INSERT INTO step VALUES(null,21,"http://i1.chuimg.com/8ad26396af5a43ee8f593303990376ff_1656w_1242h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","下辣椒小米辣爆炒出虎皮");
INSERT INTO step VALUES(null,21,"http://i2.chuimg.com/65b42428bb30421b9439a6d89c318a4d_1656w_1242h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","加蒜末 葱翻炒出香味 最后加调料 适量盐 少量鸡精 半勺生抽 大火爆炒翻炒入味");
INSERT INTO step VALUES(null,21,"http://i2.chuimg.com/bacb6635def84ce1a55ed6c941acb235_1242w_1656h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","出锅");
/*湘味腊鸭*/
INSERT INTO step VALUES(null,22,"http://i2.chuimg.com/0b643dd6990f11e69ce70242ac110002_1125w_1125h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","最重要的一步,先将整个腊鸭放入锅中倒冷水,煮开,然后拿出来切快,这样不当好切了还可以使腊鸭不会那么的咸");
INSERT INTO step VALUES(null,22,"","热锅下油,先放姜蒜炒出一点湘味");
INSERT INTO step VALUES(null,22,"","接着放腊鸭,爆炒,把鸭皮李的油都炒出来,放料酒,生抽,耗油");
-- fix: this row had its values swapped (step text in the image-url column, "" in the
-- description column). Row pattern throughout this file is
-- VALUES(id, recipe_id, image_url, description) — cf. the two preceding recipe-22 rows,
-- which correctly put "" in the image-url column for image-less steps.
INSERT INTO step VALUES(null,22,"","炒一会后放温水大概可以掩盖住腊鸭,因为是腊制品,所以不需要放盐,煮到腊鸭可以咬的动的软度(如果有腌好的萝卜干也可以在这个时候放下一起煮)");
INSERT INTO step VALUES(null,22,"http://i1.chuimg.com/0b008d68990f11e6b2400242ac110002_1125w_1125h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","最后放青椒和大蒜叶,炒熟,出锅前先不要关火,放几滴醋。装盘出锅,开吃啦");
/* 紫苏煎黄瓜*/
INSERT INTO step VALUES(null,23,"http://i2.chuimg.com/a67a51e228c045d28c61c110d05be367_1080w_810h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","食材准备好,黄瓜,紫苏备用。");
INSERT INTO step VALUES(null,23,"http://i2.chuimg.com/273dd31c16874665a5d234f4fad2d3ed_497w_677h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","紫苏洗干净,沥干,黄瓜要切成有点儿厚度的片");
INSERT INTO step
VALUES(null,23,"http://i1.chuimg.com/c5bdd29615bc43979fc470575a5f353a_1080w_810h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","姜切丝,干辣椒切小段。"); INSERT INTO step VALUES(null,23,"http://i2.chuimg.com/ad7889a29f2d410f9456b2d017987e7d_1080w_810h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","起锅放油,放入姜和辣椒炒出香味。"); INSERT INTO step VALUES(null,23,"http://i2.chuimg.com/69125ecde8b74df9a8e772e4499a8f4b_497w_677h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","放入黄瓜翻炒,加紫苏炒匀,黄瓜片变软。"); INSERT INTO step VALUES(null,23,"http://i2.chuimg.com/b05954fd654949d38d20712cc4044841_1080w_810h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","放酱油,加一点点水,叫黄瓜充分吸收汤汁。"); INSERT INTO step VALUES(null,23,"http://i2.chuimg.com/f92455c19dca44c3ad6d44ea01ab375f_1080w_810h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","大火收汁。"); INSERT INTO step VALUES(null,23,"http://i2.chuimg.com/be30fcdd04854a84a9c0d641eae76d90_497w_677h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg"," 吃起来嘛,刚入口是绵软的,再一咬,又能感觉到一点爽脆,黄瓜的原汁搭上紫苏特有的香气,气味全部进到了黄瓜里,味道层次非常丰富"); /*辣子鸡丁*/ INSERT INTO step VALUES(null,24,"http://i2.chuimg.com/81eb5cac55954905a2b6e5c5aa92ad8f_1242w_994h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","1.鸡腿肉切小块,洗干净用盐,生抽,料酒,姜末,五香粉腌制片刻。(我腌了30分钟)"); INSERT INTO step VALUES(null,24,"http://i2.chuimg.com/31779570c38746e4a7a6efb800ee6128_1242w_994h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","2.准备炒鸡肉的调料。干辣椒段和鸡丁1:1,剪成段,葱段少许,花椒一把,大蒜切碎,生姜切成丝。"); INSERT INTO step VALUES(null,24,"http://i1.chuimg.com/c296a7310daf4bb596ee2c1fc99b7599_1242w_994h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","3.起锅烧热油,将鸡肉倒入油锅中炸。(油量刚好没过鸡丁)"); INSERT INTO step VALUES(null,24,"http://i2.chuimg.com/138c5185623f4ecdbf8c2e12d645b664_1242w_994h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","4.鸡丁炸至金黄,便可以捞出控油。"); INSERT INTO step VALUES(null,24,"http://i1.chuimg.com/61d3d415ece0451f9638ff1c6651308c_1242w_993h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg"," 5.起锅热油爆香蒜末,姜丝,花椒,辣椒段等调料。"); INSERT INTO step 
VALUES(null,24,"http://i2.chuimg.com/4cd04cd52c3d4367903464601d61cc6a_1242w_994h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","6.调料炒香,把鸡丁倒入锅内翻炒,并加入生抽,五香粉,孜然粉,盐(根据个人口味添加),鸡精,白糖等调味"); INSERT INTO step VALUES(null,24,"http://i2.chuimg.com/8140c5f464394804a6bacef32eb4764e_1242w_994h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","7.炒香后,最后放入葱段翻炒几下即可出锅。"); INSERT INTO step VALUES(null,24,"http://i2.chuimg.com/3a0597f75e634591bb59724883d61444_1242w_994h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","香喷喷,麻辣味儿十足得辣子鸡丁就做好啦!"); /*家常牛蛙*/ INSERT INTO step VALUES(null,25,"http://i2.chuimg.com/5d295ae4e46642689223a3386f242209_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","牛蛙宰块用3克盐,一撮花椒,一勺料酒,五克老姜碎腌制一旁待用。"); INSERT INTO step VALUES(null,25,"http://i2.chuimg.com/4e53144d72b340ef986c99c59affb401_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","葱切小节,蒜拍烂,姜切片,大料切碎。"); INSERT INTO step VALUES(null,25,"http://i2.chuimg.com/98241a61ec004c0d85ba3e6d45c5055a_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg"," 洋葱,青红椒,切方块。嫩姜切片。"); INSERT INTO step VALUES(null,25,"http://i1.chuimg.com/2fb24e471238407d8fef7fcb43db03a3_750w_1000h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","酸菜切小,泡海椒切碎。"); INSERT INTO step VALUES(null,25,"http://i2.chuimg.com/d0162fe8b31544f88d61d14800a971d4_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg"," 锅烧热,放入菜籽油150克左右。"); INSERT INTO step VALUES(null,25,"http://i1.chuimg.com/090feba54113436a9cdf645d3c00708a_1000w_750h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","油烧至8成热,关小火,放入酸菜,泡海椒,豆瓣酱,炒出红油。"); INSERT INTO step VALUES(null,25,"http://i1.chuimg.com/befff87ce2324cf58e98ad7656e6875a_750w_1000h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","然后放入葱姜蒜大料,爆香。"); INSERT INTO step VALUES(null,25,"http://i2.chuimg.com/f3879369fa0c460f8f9a74c449d35b6a_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","牛蛙蓖掉腌制的水分。"); INSERT INTO step 
VALUES(null,25,"http://i2.chuimg.com/99fa059a2c674a2c860d0fcb106e131e_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","放入锅中,开大火炒干水气。大概需要5-8分钟"); INSERT INTO step VALUES(null,25,"http://i2.chuimg.com/a6c0181931ff48729779d5d4d0f3ab5a_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","水气收干后,放入生抽一勺,大约5克。炒至牛蛙上色。"); INSERT INTO step VALUES(null,25,"http://i2.chuimg.com/7274115857554a8fa512d3100d23cd91_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","加入开水适量,半淹牛蛙为宜。其间需要翻炒几次,大火烧至入味。"); INSERT INTO step VALUES(null,25,"http://i2.chuimg.com/e58127b82b0d4c47812d33b4efd7d129_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","汤汁收浓后,放入青红椒,洋葱,嫩姜。翻炒至断生。"); INSERT INTO step VALUES(null,25,"http://i1.chuimg.com/0b9c8ea6c3884edf862131f855abdf87_750w_1000h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg"," 此时可以尝尝味道,看是否增加盐分。因为酸菜,泡海椒,豆瓣酱,生抽都有盐分。我的是加了2克盐的。"); INSERT INTO step VALUES(null,25,"http://i2.chuimg.com/d74d52cc543f488194b0342689df9543_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","蔬菜断生,汤汁收浓以后,关火。放入少量鸡精味精花椒面炒匀增鲜。"); INSERT INTO step VALUES(null,25,"http://i1.chuimg.com/88eb4535eb5344928d00aec81f39a155_750w_1000h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","色香味鲜嫩出锅。我家吃得不辣,这样的辣度孩子可以接受,牛蛙脂肪少,适合我的易胖体质,当然还必须控制少吃点饭"); /*麻辣小龙虾*/ INSERT INTO step VALUES(null,26,"http://i2.chuimg.com/db517b2eadb04dc89573217f19ec4fe5_1104w_828h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","小龙虾洗刷干净,剪掉爪子,拔下尾巴最中间一片,拽出虾线。"); INSERT INTO step VALUES(null,26,"http://i2.chuimg.com/9648d390eed6429c94b3a6103660e391_1104w_828h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","配料洗净"); INSERT INTO step VALUES(null,26,"http://i2.chuimg.com/ff7ae790d74d4d6b9910f4304c95f446_1104w_828h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","葱切小段;姜切厚片;蒜拍扁;八角掰碎;干辣椒切斜段。"); INSERT INTO step VALUES(null,26,"http://i2.chuimg.com/08d340d278314893b228474a6f288716_1104w_828h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","锅烧热,放入大量油,烧至六七成热。"); INSERT INTO step 
VALUES(null,26,"http://i2.chuimg.com/37949d0aa7a84a3f898c04833c614d5f_1104w_828h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","小龙虾倒入锅中,快速炸一遍,不要超过一分钟,不然肉就干了。");
INSERT INTO step VALUES(null,26,"http://i1.chuimg.com/0083e5e216fb47128d9d9a5769fedc44_1104w_828h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","捞出备用。");
INSERT INTO step VALUES(null,26,"http://i1.chuimg.com/732b6b3830634ea8b0e682353d3c911c_1104w_828h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","刚刚炸虾的油烧热,倒入豆瓣酱,大火煸香。");
INSERT INTO step VALUES(null,26,"http://i1.chuimg.com/ab8f1de0f4dd4480be7dab69a5d082c1_1104w_828h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg"," 然后倒入所有调料,大火煸香。");
INSERT INTO step VALUES(null,26,"http://i1.chuimg.com/aae9b8710ba74acb901f0d95c68de670_1104w_828h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","倒入炸好的虾,倒入2听啤酒");
INSERT INTO step VALUES(null,26,"http://i2.chuimg.com/f40b8460a01f4d829be474f5aad8c49d_1104w_828h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","30g盐、1勺冰糖、2勺蚝油、2勺酱油、10g胡椒粉加入锅中。");
INSERT INTO step VALUES(null,26,"http://i2.chuimg.com/252b77f177a643da8de1cd8f03342f1c_1104w_828h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg"," 从柠檬表面切一些黄色的皮下了,尽量不要带白色的,这是增香提鲜的秘诀。切碎后加入锅中。");
INSERT INTO step VALUES(null,26,"http://i1.chuimg.com/323fbfc551c845c2955fa7e0cfa96a28_1104w_828h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","大火烧开后,小火炖煮1小时。期间注意别烧干了,适当添水。");
INSERT INTO step VALUES(null,26,"http://i1.chuimg.com/66fdc188551d4791a6d6d403642de431_1104w_828h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","小火炖煮完后,大火收汁后即可出锅。出锅前可以适当加一些香葱碎和香菜段翻炒。");
INSERT INTO step VALUES(null,26,"http://i2.chuimg.com/7a2f2cd801db419dab1d46fcf158fe9a_1104w_828h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","锵锵锵~出锅~完活儿~");
/******/
-- fix: the following "准备配菜…" row was inserted twice back-to-back, byte-identical;
-- the accidental duplicate has been removed so recipe 27 gets the step only once.
INSERT INTO step VALUES(null,27,"","准备配菜,我的配菜是娃娃菜,豆芽,豆皮(也可以自己喜欢啥就放啥)很随意~");
INSERT INTO step VALUES(null,27,"","起锅烧油,放一勺豆瓣酱小火炒出红油。放凉水烧开。烧开后尝尝咸淡,加点鸡精味道会更好~烧开后放入配菜烫熟捞出放入碗里牛肉放入烫熟捞出随着汤汁一起倒入碗里");
INSERT
INTO step VALUES(null,27,"","放蒜末,葱花,花椒,干辣椒,辣椒面。淋上热油就完成啦"); /*干锅香辣虾*/ INSERT INTO step VALUES(null,28,"http://i2.chuimg.com/1bd0a8dd7744470e8af015f87e43ea56_920w_1632h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","虾洗净,姜切丝,大葱、干辣椒切段。"); INSERT INTO step VALUES(null,28,"http://i1.chuimg.com/c7b4d2577ced40c8a68fe0d746195ff6_720w_576h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","锅下少许油,依次下干辣椒段、蒜、姜丝、花椒、郫县豆瓣酱炒出香味,下放虾翻炒,加料酒、生抽、加水100毫升中火烧10分钟,收汁后放大葱炒熟后起锅。"); /*香煎土豆片*/ INSERT INTO step VALUES(null,29,"http://i2.chuimg.com/5a99aca6d2d4493983aa04234075f75e_750w_750h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","蒜头剁碎备用。土豆切片,尽量厚一点。厚一点(如图)。加清水浸泡5分钟左右。泡土豆的时间锅里烧开水。PS:土豆尽量不要选太小个的,煎起来麻烦呀大个的切出来一大片,吃起来也过瘾4不4"); INSERT INTO step VALUES(null,29,"http://i1.chuimg.com/c1ef524cae424a6d8252f071feca4259_1002w_750h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","把泡好的土豆片放入锅里,煮2-3分钟,完全变色即可。然后捞出晾下~可以手捧住篮子两边,抛一抛~甩干水分建议用不粘锅比较好操作"); INSERT INTO step VALUES(null,29,"http://i2.chuimg.com/737a4b008c1c43ebb965ada6941abd17_1002w_750h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","热锅下油,码入土豆片,中小火煎至两面金黄微微上色,忍不住偷吃了一块(・᷄ᵌ・᷅)"); INSERT INTO step VALUES(null,29,"http://i2.chuimg.com/3bdf3acf0bc84467ba4164bfa3883bb2_660w_528h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","锅里放油,放蒜末爆香,加入一勺豆瓣酱,炒出红油(罐子里的红油特别多,可以舀一勺出来炒土豆片哇 特好吃_(:з」∠)"); INSERT INTO step VALUES(null,29,"http://i2.chuimg.com/cdc779918037436ba8bcd93ec8413b63_938w_750h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","倒入土豆片,加1勺生抽,翻炒均匀,撒入芝麻葱花即可。注意▲(大家可以视土豆大小调整酱料哈。有小伙伴反映咸了真的很抱歉呢,我把酱油和豆瓣酱都减少了哈,大家可以放心做。然后土豆很小或者只做一个,酱油和豆瓣酱就要减半哦)"); INSERT INTO step VALUES(null,29,"http://i2.chuimg.com/9e6fff2e6cfc4b7eb134a6573da9d626_750w_1000h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","我爱吃香菜~就加了点儿(✺ω✺)"); /*剁椒鱼头(金针菇拌)*/ INSERT INTO step VALUES(null,30,"","买回来的鱼头用水清洗干净(务必去掉腹部的黑膜)塞入几片姜 再加入适量的盐跟料酒涂抹均匀腌制30分钟"); INSERT INTO step VALUES(null,30,"","水开后将鱼头放入锅中蒸6分钟左右(根据鱼头的大小)"); INSERT INTO step 
VALUES(null,30,"http://i2.chuimg.com/4f116cdc4ca047818ba010be7352ac5e_810w_1080h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","将金针菇洗净根部切掉1cm 平铺在碟子里 再放入葱头跟姜片"); INSERT INTO step VALUES(null,30,"http://i1.chuimg.com/a81fdb39d841465da8ef52c76c34e591_810w_1080h.jpg@2o_50sh_1pr_1l_800w_600h_1c_1e_90q_1wh.jpg","将蒸好的鱼头平铺在金针菇上 淋上1汤匙左右的蒸鱼豉油(不要太多 因为剁椒很咸)"); INSERT INTO step VALUES(null,30,"http://i2.chuimg.com/516132dc9add44e791199911803f5558_744w_992h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","起锅烧油 将准备好的姜蒜粒爆香后加入剁椒煸炒入味 再放入适量白胡椒粉 喜欢吃麻或者吃辣的朋友可以加入辣椒油或麻椒粉"); INSERT INTO step VALUES(null,30,"http://i2.chuimg.com/cbda2554170f4d1fa6a90063a6b5cc21_1080w_864h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","将炒好的剁椒均匀铺在鱼头上"); INSERT INTO step VALUES(null,30,"","再次入锅蒸六分钟左右"); INSERT INTO step VALUES(null,30,"http://i2.chuimg.com/39095f45557e4e6eb5a1569abc4e95a7_744w_992h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","关键的一步来了 为了更好的入味 将蒸好鱼的汤汁倒出 加入适量芝麻油搅拌均匀淋到鱼头上(因为之前用料酒腌制过 还放了葱姜 所以不用担心汤汁有腥味)"); INSERT INTO step VALUES(null,30,"http://i2.chuimg.com/07b0b7682253473a8aeebb556837f9bd_744w_992h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg",""); INSERT INTO step VALUES(null,30,"http://i2.chuimg.com/6701680e509141a198e535c12fcebadb_1080w_864h.jpg?imageView2/1/w/800/h/600/q/90/format/jpg","最后撒上葱花 一道美味的剁椒鱼头就做好啦- ̗̀(๑ᵔ⌔ᵔ๑)"); /*排骨*/ insert into step values(null,"31","http://i1.chuimg.com/1468d86d00b245d0879fcb3abf64b23d_972w_648h.gif@2o_50sh_1pr_1l_300w_90q_1wh.gif","1、洋葱切块,小葱切段。"); insert into step values(null,"31","http://i2.chuimg.com/34fc9a678f8648398cbdd99fd50006e3_972w_648h.gif?imageView2/2/w/300/interlace/1/q/90/format/gif/.gif","2、排骨调入1大勺蚝油,1大勺生抽,1小勺老抽,2大勺米酒,1大勺淀粉,1小勺糖,2克白胡椒粉,拌匀腌制半小时。"); insert into step values(null,"31","http://i1.chuimg.com/12eb1cd8673040f6832b56166ef2e22c_972w_648h.gif@2o_50sh_1pr_1l_300w_90q_1wh.gif","3、铸铁锅倒油烧热,放入姜片、葱段、洋葱、蒜瓣爆香,接着把排骨铺到上面。"); insert into step 
values(null,"31","http://i1.chuimg.com/7088289e7c364212828ca42b8fb00af6_972w_648h.gif@2o_50sh_1pr_1l_300w_90q_1wh.gif","4、再倒入腌排骨剩的汤汁,盖上盖子小火焖烧15-20分钟,如果汤汁少了可以适量加一点清水防止糊锅。"); insert into step values(null,"31","http://i2.chuimg.com/48c8dc87779441eb870f2e21fbb828b4_972w_648h.gif?imageView2/2/w/300/interlace/1/q/90/format/gif/.gif","5、烧好后开盖收个汁,翻炒至酱汁浓稠变少就关火。"); insert into step values(null,"31","http://i2.chuimg.com/d3604f65e8744aaca94c4ef7ba00f743_972w_648h.gif?imageView2/2/w/300/interlace/1/q/90/format/gif/.gif","6、出锅点缀点红椒碎和香菜,非常下饭。锅底的汤汁还可以用来拌面,特别好吃哦。"); /*糯米*/ insert into step values(null,"32","http://i1.chuimg.com/90fd7e825505447d87e21d2c6049ce1c_1280w_1706h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","1、花生烤熟,我都是用烤的,你们要用炒的也行,然后去皮,一分为二。不用弄碎,整粒从中间分开就可以了,这个可以提前做好。"); insert into step values(null,"32","http://i2.chuimg.com/d03c546e0af54db39774ec478d269926_864w_1152h.jpg?imageView2/2/w/300/interlace/1/q/90","2、很多人都问我沙茶是什么东西,我用的就是这个,超市有卖的,网上也有。首先先做糯米饭。我的糯米不是蒸的,不是蒸的,不是蒸的!我是用电饭煲像平时煮饭那样煮的,我不太喜欢蒸出来那种颗粒感比较饱满比较有嚼劲的糯米卷,我喜欢比较软糯的,这个也是为什么人家总是问我说我的糯米卷看起来怎么非常软糯的原因,这个看个人喜欢。如果你喜欢用蒸的,那么糯米就提前浸泡,最好浸泡6个小时以上或者隔夜。如果像我用电饭煲的就直接像煮米饭那样就行了,水可以不用放那么多。"); insert into step values(null,"32","http://i2.chuimg.com/3fe205b403744b23874219c69a98f264_1280w_1706h.jpg?imageView2/2/w/300/interlace/1/q/90","3、煮好的白糯米饭拿出来放一旁,然后开始炒糯米卷的馅料。胡萝卜切丁,泡好的香菇切丁,小虾干或者鱿鱼干,其实这个馅料大家也可以自行调整,例如喜欢吃肉的放一点肉或腊肠,喜欢吃各种豆豆的放一点豆,因为我小孩不喜欢吃我就没有放,可以根据家人喜欢吃的口感来放的。锅烧热,加入油,然后放入胡萝卜,香菇,小虾干,爆炒。炒至7,8成熟,加入沙茶酱和蚝油和老抽,然后翻炒均匀。"); insert into step values(null,"32","http://i2.chuimg.com/893e0f9b58d94b7ea4298655172952ef_1280w_1705h.jpg?imageView2/2/w/300/interlace/1/q/90","4、然后加入白胡椒粉(白胡椒粉千万不要省略),全部翻炒均匀。"); insert into step values(null,"32","http://i2.chuimg.com/4c2f263fecd74b36a21eefbe4f035bcb_1280w_1705h.jpg?imageView2/2/w/300/interlace/1/q/90","5、看到了吗?我做的糯米卷馅就是这样软软糯糯的,不喜欢干硬的那种。"); insert into step values(null,"32","http://i2.chuimg.com/a7b869c8cf2e4f9aacf30ffcd200b78e_1280w_1706h.jpg?imageView2/2/w/300/interlace/1/q/90","6、最后加入烤熟的花生,加入花生之后可以关掉火,然后拌均匀就可以。"); insert 
into step values(null,"32","http://i2.chuimg.com/2d7ffbc80b54419bb9e0e743c755f82f_1080w_1440h.jpg?imageView2/2/w/300/interlace/1/q/90","7、做好的糯米卷馅可以放进不沾的烤盘里,借助刮板按压到跟烤盘差不多平,然后放进冰箱冷藏。冷藏或冷冻都是为了后面好包。因为我是先做馅料,还没有开始揉面的,所以冷藏就行,如果你是揉好面再来炒馅,等一下就要操作的,就直接放进冷冻。"); insert into step values(null,"32","http://i1.chuimg.com/c08e84467409455eb56496f6ff21ad9e_1080w_1439h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","8、开始揉面,跟我们平时做馒头包子花卷是一样的,所有材料揉到面团光滑就可以,不用像做面包出手套膜,但是面一定要揉光滑了,我是用厨师机揉了10分钟。"); insert into step values(null,"32","http://i2.chuimg.com/9693c12fd0b64716a3ec37a779df9b61_1080w_1439h.jpg?imageView2/2/w/300/interlace/1/q/90","9、放在20多度的环境下进行基础发酵。"); insert into step values(null,"32","http://i2.chuimg.com/ee50e087d91a4569ac11ff13517e5bbc_1280w_1706h.jpg?imageView2/2/w/300/interlace/1/q/90","10、发好之后把面团的气泡揉出,然后把面团大概分为4份左右,当然了,3份5份都行,看个人操作,这个没有多大的要求。取一份擀成长度15×30左右的薄片,记得不要擀太薄,饼皮的尺寸大小可以按照自己喜欢的来,然后用刮板在金盘中切出一条炒好的糯米卷馅,直接放在擀好的饼皮上。馅料不要堆得太高太多,否则蒸的时候都会流出来。"); insert into step values(null,"32","http://i1.chuimg.com/775d12869ede4c28bde082e2e45dcc5d_1280w_2168h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","11、收口捏紧,把收口朝下。表面可以切菱形,但是我不喜欢,所以就没有切,这个无所谓,看个人操作。"); insert into step values(null,"32","http://i2.chuimg.com/30a9bd4efeee479ba40aea9e0e3aceeb_1280w_1706h.jpg?imageView2/2/w/300/interlace/1/q/90","12、用刮板切成自己喜欢的大小。如果家里锅够大的可以整条卷好全部蒸好再切,一般外面卖的都是蒸好再切的。"); insert into step values(null,"32","http://i2.chuimg.com/f3ed340489fb4928bb2d40240ad8cd7a_1280w_1706h.jpg?imageView2/2/w/300/interlace/1/q/90","13、配方中所有的材料可以做出来这么多,吃不了这么多的可以配方减半操作。我每次都做这么多,送一点给家人,送一点给对门的邻居,再留点当早餐,基本上全部消耗完。其实多的蒸熟放凉后可以放在冷冻保存,吃的时候蒸软就行了。蒸锅里放入水加热到50度左右(温水),然后把做好的糯米卷放入蒸锅内醒发15~20分钟。醒发好之后直接开火,水开后中大火蒸10分钟左右(因为内馅都是熟的了)焖三分钟再开盖出锅。"); insert into step values(null,"32","http://i1.chuimg.com/916792aba50148e09eb894d6b493a118_1215w_2160h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","14、趁热吃吧,哈哈~"); /*鸡*/ insert into step values(null,"33","http://i2.chuimg.com/adaf7b4a99dd11e6b2400242ac110002_2048w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90","1、准备好图上材料"); insert into step 
values(null,"33","http://i2.chuimg.com/aeb9ed6899dd11e6b2400242ac110002_2048w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90" ,"2、鸡洗净去内脏剁块"); insert into step values(null,"33","http://i2.chuimg.com/af76cc8a99dd11e69ce70242ac110002_2048w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90" ,"3、油锅热一下入姜。蒜爆炒下"); insert into step values(null,"33","http://i1.chuimg.com/b151a42699dd11e6b2400242ac110002_2048w_2048h.jpg@2o_50sh_1pr_1l_300w_90q_1wh" ,"4、这个是重点。鸡煲的精髓就在此。我通常是半只鸡下一勺。一整只鸡会下一勺半到两勺。平时喝汤的小陶瓷勺。加入爆炒开的姜。蒜里炒一下再入鸡肉"); insert into step values(null,"33","http://i2.chuimg.com/b219bc9a99dd11e69ce70242ac110002_2048w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90" ,"5、倒入鸡肉翻炒均匀稍微炒一下下。加入一小小茶鸡精。适量蚝油。酱油继续翻炒上色。此煲无需下盐"); insert into step values(null,"33","http://i2.chuimg.com/b34aeabc99dd11e69ce70242ac110002_2048w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90","6、大致炒至图中成色时准备上盖。上盖时加入一小半碗水水无需太多。很多时候我都没下水。但相对来说会没什么汤汁 倒入料酒要马上加盖。然后就待它慢慢焖煮吧。大概时间我没算。自己观察吧。久点就比较入味。上锅后就可以直吃。"); insert into step values(null,"33","http://i2.chuimg.com/b40651bc99dd11e6b2400242ac110002_2048w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90","7、焖煮好后就可以加入葱蒜青椒再翻炒下就可以上锅了。上锅后加上香菜这次忘了买红椒。加点红椒色像会更加分"); insert into step values(null,"33","http://i2.chuimg.com/b4aef89e99dd11e69ce70242ac110002_2048w_1365h.jpg?imageView2/2/w/300/interlace/1/q/90","8、上桌后就可以慢火开吃了。加杯小酒更爽哦。鸡煲是先吃肉然后再慢慢下水煮开再下配菜料。"); insert into step values(null,"33","http://i2.chuimg.com/b58ef84a99dd11e6b2400242ac110002_2048w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90","9、我一般是鸡肉吃到差不多了再下水大火煮开,然后就开始各种配菜开吃。"); /*腊味煲仔饭*/ insert into step values(null,"34","http://i2.chuimg.com/75e1607c993611e6b2400242ac110002_1440w_810h.jpg?imageView2/2/w/300/interlace/1/q/90","1、米洗净后浸泡一小时(米和水的比例为1:1.2)"); insert into step values(null,"34","http://i1.chuimg.com/759c6058993611e6b2400242ac110002_1440w_810h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","2、腊味在沸水中烫3分钟捞出"); insert into step 
values(null,"34","http://i1.chuimg.com/753d52c0993611e69ce70242ac110002_1440w_810h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","3、砂锅内侧刷上猪油");
insert into step values(null,"34","http://i1.chuimg.com/74d5eb1c993611e69ce70242ac110002_1440w_810h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","4、倒入浸好的水和米");
insert into step values(null,"34","http://i2.chuimg.com/746bd38a993611e69ce70242ac110002_1440w_810h.jpg?imageView2/2/w/300/interlace/1/q/90","5、大火煮沸,三分钟后放入腊味");
-- fix: the step-6 image URL contained a stray trailing space before the closing quote
-- ("…/q/90 "), which would be stored as part of the URL and break the image link;
-- the space has been removed.
insert into step values(null,"34","http://i2.chuimg.com/7401c68e993611e69ce70242ac110002_1440w_810h.jpg?imageView2/2/w/300/interlace/1/q/90","6、转小火,煮8分钟");
insert into step values(null,"34","http://i2.chuimg.com/738981e2993611e6b2400242ac110002_1440w_810h.jpg?imageView2/2/w/300/interlace/1/q/90","7、关火后焖15分钟");
insert into step values(null,"34","http://i2.chuimg.com/732f3642993611e69ce70242ac110002_1440w_810h.jpg?imageView2/2/w/300/interlace/1/q/90","8、用生抽、蚝油、高汤、白糖混合调汁");
insert into step values(null,"34","http://i2.chuimg.com/72cc127e993611e6b2400242ac110002_1440w_810h.jpg?imageView2/2/w/300/interlace/1/q/90","9、腊味切片,烫熟的菜心铺在饭上");
insert into step values(null,"34","http://i1.chuimg.com/72760474993611e69ce70242ac110002_1440w_810h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","10、将酱汁均匀浇在煲仔饭上,更加美味");
/*猪脚姜*/
insert into step values(null,"35","http://i2.chuimg.com/7e54eb9313ec44cd929b40dc01824009_2000w_2668h.jpg?imageView2/2/w/300/interlace/1/q/90","1、干锅烤姜");
insert into step values(null,"35","http://i2.chuimg.com/9d26f13e8bab45c4891f4cc433e3f5da_2000w_2668h.jpg?imageView2/2/w/300/interlace/1/q/90","2、猪脚焯水,冷水洗完待用");
insert into step values(null,"35","http://i2.chuimg.com/ca10258fad8e428aaac32d3e6c81ab88_2000w_2668h.jpg?imageView2/2/w/300/interlace/1/q/90","3、姜放砂锅底,放猪脚再放蛋,一整瓶甜醋倒下去,红糖也放进去,个人喜欢可以再加点陈醋,喜欢酸滴可以多加,煮3~4个小时,因为我的是小熊电炖锅火力比较小,煤气中中小火煮2个钟就差不多了");
/*红烧乳鸽*/
insert into step values(null,"36","http://i1.chuimg.com/5de973bcc4c2460797df12530eceeb71_720w_440h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","1、乳鸽买回来后一定要清洗干净。包括把胸腔里面的内脏,脖子的喉管都通通摘干净了,不然怎么煮都会有血水的。");
insert
into step values(null,"36","http://i1.chuimg.com/6ea84a34da184c1b9ddf5b9a318739a0_720w_440h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","2、锅里烧水,先焯水。焯水后顺便把还有残留的体毛脏东西都拔掉~~为了后面的工序,不要煮太长时间。"); insert into step values(null,"36","http://i1.chuimg.com/6dd88e566c914e93b545e919f2b8d0b9_718w_440h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","3、煮好以后,马上冷水冲洗干净,包括脏东西的浮沫。滴干水备用。"); insert into step values(null,"36","http://i2.chuimg.com/04a1a8b29f114aaa876a310502cb5c8a_720w_440h.jpg?imageView2/2/w/300/interlace/1/q/90","4、准备卤水。13香适量加水煮开,加入稍微多的生抽,适量的盐,自己试味,稍微咸一点没关系。"); insert into step values(null,"36","http://i2.chuimg.com/047b1b487a59429fac32d45b2fb45509_720w_440h.jpg?imageView2/2/w/300/interlace/1/q/90","5、放乳鸽入卤水中,煮开~为了颜色均匀,要记得给鸽子翻身。"); insert into step values(null,"36","http://i1.chuimg.com/ce8b7c87f86b464aaa5112bb2d107085_720w_440h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","6、可以借助勺子把卤汁倒入鸽子的胸腔内,这样容易入味呢。"); insert into step values(null,"36","http://i2.chuimg.com/e5926bb5f71c4589aa5f8e23311409d7_720w_440h.jpg?imageView2/2/w/300/interlace/1/q/90","7、大约煮了15-20分钟后,让乳鸽浸泡在卤水中。每隔一段时间翻身,让它上色均匀,充分吸收香味。待2个小时左右捞起,滴干汁液备用。"); insert into step values(null,"36","http://i2.chuimg.com/b0d8c7398c3a4fe192fbb23c404babfd_720w_440h.jpg?imageView2/2/w/300/interlace/1/q/90","8、锅里烧开热油。油约8-9成热,把控干卤汁的鸽子放入油锅。借助汤勺浇灌鸽子,让它充分受油热"); insert into step values(null,"36","http://i1.chuimg.com/ce8b7c87f86b464aaa5112bb2d107085_720w_440h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","9、两面都均匀的受热油炸过~鸽子皮变得有光泽就可以出锅啦!晾凉后就可以切开摆盘"); /*凉拌莲藕*/ insert into step values(null,"37","http://i2.chuimg.com/e5b7481bdc974b8e94acfb9edc1fc4f7_1000w_750h.jpg?imageView2/2/w/300/interlace/1/q/90","1、提前冷冻一盆水结冰"); insert into step values(null,"37","http://i2.chuimg.com/2db944ff78714705992d71c0f16ff836_750w_1000h.jpg?imageView2/2/w/300/interlace/1/q/90","2、鲜藕3节"); insert into step values(null,"37","http://i2.chuimg.com/db52b04f71ee48b98323e2bfc49a8655_1000w_750h.jpg?imageView2/2/w/300/interlace/1/q/90","3、鲜藕3节去掉两头去皮清洗干净"); insert into step 
values(null,"37","http://i2.chuimg.com/f22d4557139b44a48366ca3d5cf5cf88_1000w_750h.jpg?imageView2/2/w/300/interlace/1/q/90","4、水开了后,整个莲藕放入水中煮,小的约10分钟,大的约15分钟"); insert into step values(null,"37","http://i2.chuimg.com/d79ee6aaadb2408681b4b8e3396177fb_750w_1000h.jpg?imageView2/2/w/300/interlace/1/q/90","5、看藕变色了就捞出沥干水分"); insert into step values(null,"37","http://i2.chuimg.com/1dae3ad0d0e34cf88f73ff54ca1d1eb5_750w_1000h.jpg?imageView2/2/w/300/interlace/1/q/90","6、放入准备好的冰水中浸泡约20分钟,这步骤可以让莲藕爽脆。这个时候可以准备酱料"); insert into step values(null,"37","http://i2.chuimg.com/c6281a9e39654efcbbc95bbf16e2a4b5_751w_751h.jpg?imageView2/2/w/300/interlace/1/q/90","7、蒜头剁成泥"); insert into step values(null,"37","http://i1.chuimg.com/6c9752e4ad9d438896b3a24f28c765c1_750w_867h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","8、烧热花生油淋在蒜泥里,加入酱油4匙,陈醋3匙、盐、糖、鸡粉、1小匙芝麻油拌匀"); insert into step values(null,"37","http://i2.chuimg.com/97d7a2e52cdb4da4894f5a663d074865_1536w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90","9、调好酱汁,香菜切断"); insert into step values(null,"37","http://i2.chuimg.com/2b1cfe65abfa4ce7af772076b52a72f9_1000w_750h.jpg?imageView2/2/w/300/interlace/1/q/90","10、烧热花生油淋在蒜泥里,加入酱油4匙,陈醋3匙、盐、糖、鸡粉、1小匙芝麻油拌匀"); insert into step values(null,"37","http://i2.chuimg.com/463af8a3d9ad4acfba67433402b3d577_1536w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90","11、加入香菜和酱汁,带上一次性手套抓均匀即可"); insert into step values(null,"37","http://i2.chuimg.com/8b3ebf9efaf94bacbf5c2e6e318522a4_750w_1000h.jpg?imageView2/2/w/300/interlace/1/q/90","12、美美哒,超级好吃"); /*柠檬鸡翅*/ insert into step values(null,"38","http://i2.chuimg.com/87dc7649799c469f848f04066b1a949f_1536w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90","1、腌鸡翅:盆中加入2勺生抽、2勺料酒、少许盐,1只柠檬切小薄片加入盆中搅拌均匀,加入鸡翅腌30分钟,鸡翅两面各划几刀,容易腌制入味"); insert into step values(null,"38","http://i2.chuimg.com/86f940d8cf77438aa6ca7ca90ba34701_2048w_1536h.jpg?imageView2/2/w/300/interlace/1/q/90","2、半根胡萝卜切丝蒸熟备用,用于最后装盘的时候垫在盘底"); insert into step 
values(null,"38","http://i2.chuimg.com/86f940d8cf77438aa6ca7ca90ba34701_2048w_1536h.jpg?imageView2/2/w/300/interlace/1/q/90","3、不粘锅放油,油热后转小火,放入腌好的鸡翅,耐心煎到两面金黄色"); insert into step values(null,"38","http://i2.chuimg.com/27fa9ecf47d04f4a8fa18438d1c16930_1536w_2048h.jpg?imageView2/2/w/300/interlace/1/q/90","4、加入少量白糖、半碗清水,盖上锅盖焖至收汁。另一个柠檬切片,蒜拍扁切成小粒,等锅中汁收到差不多的时候加入柠檬片、蒜粒,稍微翻炒下即可出锅"); insert into step values(null,"38","http://i2.chuimg.com/0a961d1a519b4f9a9719706f9e52968f_2048w_1536h.jpg?imageView2/2/w/300/interlace/1/q/90","5、盘底垫上蒸好的胡萝卜丝,放上鸡翅,这道酸甜开胃的柠檬鸡翅就做好啦"); /*猪骨汤*/ insert into step values(null,"39","http://i1.chuimg.com/ac945bda904b481fb00b20d3a7948f7a_1280w_1024h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","1、准备以上食材,南瓜350克,猪骨250克,栗子8颗,北杏少许,两片姜。南瓜是连皮一起煲汤,因为南瓜皮也是很有营养,同时用于煲汤也不会使南瓜煮的太软烂,吃起来也方便。此份量为两人食用,大概4碗的量。"); insert into step values(null,"39","http://i2.chuimg.com/b51df5caef7a41ca93d84814e02b9dee_1280w_1024h.jpg?imageView2/2/w/300/interlace/1/q/90","2、猪骨先冷水下锅,开火加热。这样的目的是可以让猪骨慢慢出浮沫。"); insert into step values(null,"39","http://i2.chuimg.com/1be074a977ec490284f0b72681b6c062_480w_384h.jpg?imageView2/2/w/300/interlace/1/q/90","3、这个时候可以翻面,大概30秒后关火,把猪骨捞起备用。"); insert into step values(null,"39","http://i2.chuimg.com/63c8030937ec4cf48f6debf024bfc6ef_1280w_1024h.jpg?imageView2/2/w/300/interlace/1/q/90","4、依次将食材放进汤锅里,我用的是陶瓷锅,然后注入1.5L已烧开的水,大火先煮15分钟,然后小火焖煮一个半小时,关火前加盐调味即可。"); insert into step values(null,"39","http://i2.chuimg.com/9f5c0581cb014871bc8d177292d23dd9_1280w_1024h.jpg?imageView2/2/w/300/interlace/1/q/90","5、营养美味的南瓜栗子猪骨汤完成"); /*脆皮烧肉*/ insert into step values(null,"40","http://i2.chuimg.com/1f40a780b90a4e64840ad3b57923f3a2_640w_512h.jpg?imageView2/2/w/300/interlace/1/q/90","1、肉表面可以轻轻切几刀,这样腌起来更入味。不要切到猪皮!"); insert into step values(null,"40","http://i1.chuimg.com/b2091189ff6843c798eed99bd808c15d_1040w_780h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","2、五香粉和盐,搅拌均匀涂在肉的表面。只涂肉,不要碰到皮!轻轻切开的部位都要抹到,抹均匀!"); insert into step 
values(null,"40","http://i2.chuimg.com/b5f060dbfd134c84acc212ab13ba62fb_1040w_780h.jpg?imageView2/2/w/300/interlace/1/q/90","3、在烤盘中间倒一点白醋,把猪皮朝下,泡在醋里大概五分钟。不用担心这样肉会不会因为泡了醋变酸,因为一烤,醋就会挥发掉,只会让皮更脆,不过影响口味。"); insert into step values(null,"40","http://i2.chuimg.com/9f5c0581cb014871bc8d177292d23dd9_1280w_1024h.jpg?imageView2/2/w/300/interlace/1/q/90","4、营养美味的南瓜栗子猪骨汤完成"); insert into step values(null,"40","http://i2.chuimg.com/81620e71b2464fbd9228a06dddbab0f7_1040w_780h.jpg?imageView2/2/w/300/interlace/1/q/90","5、猪皮朝上,其他几面都用锡纸包好,尽量包紧一点。然后放进冰箱冷藏一天,到猪皮表面干了,就可以进烤箱了!"); insert into step values(null,"40","http://i1.chuimg.com/8ab95a0ee44f493e891776e0850cf7da_1080w_864h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","6、烤箱预热150度,上加热管和风扇模式。猪皮朝上放进去烤1小时,然后转190度-200度烤30分钟。最后30分钟多观察猪皮起泡情况可以调节一下温度,我一开始190度烤了十分钟,感觉有点不够,所以换成了200度"); insert into step values(null,"40","http://i1.chuimg.com/03b2a7a7649e40009a1091e101048b17_780w_1040h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","7、烤好的出炉了,烤出来的皮敲上去咔咔咔,非常的脆!切的时候也有小技巧,先切肉再切皮!"); insert into step values(null,"40","http://i1.chuimg.com/c8900f3546d5440e9ce91de38bed97d3_780w_1040h.jpg@2o_50sh_1pr_1l_300w_90q_1wh","8、切好的烧肉在菜板上码齐,肉汁还在不停的往下趟着!"); /********************/ INSERT INTO step VALUES(null,41,'http://i2.chuimg.com/568a77ff1100469c869c479131c1e999_1242w_994h.jpg?imageView2/2/w/300/interlace/1/q/90','1'); INSERT INTO step VALUES(null,41,'http://i2.chuimg.com/b061d3f332c84d38b0e3f2533ce20765_1242w_994h.jpg?imageView2/2/w/300/interlace/1/q/90','以上就是完成了蛋黄高温杀菌,从生变成了熟'); INSERT INTO step VALUES(null,41,'http://i2.chuimg.com/31fb2351e6544dfdb6c91a79dd1bf153_1242w_994h.jpg?imageView2/2/w/300/interlace/1/q/90','3'); INSERT INTO step VALUES(null,41,'http://i2.chuimg.com/2682b57c7f484ac8a2db99e334561cb3_1242w_994h.jpg?imageView2/2/w/300/interlace/1/q/90','加其他果肉的在这一步加上,可可粉累我觉得3克左右就好啦,最终还是要看你自己喜欢的程度'); INSERT INTO step VALUES(null,41,'http://i2.chuimg.com/628001a904204534b6a457b3f43bb3b1_3863w_3024h.jpg?imageView2/2/w/300/interlace/1/q/90','4'); INSERT INTO step 
VALUES(null,41,'http://i1.chuimg.com/832ddc5567924f4b85514cf6c33566e0_1200w_960h.jpg@2o_50sh_1pr_1l_300w_90q_1wh','口感细腻,毫无冰渣'); /*******************************/ INSERT INTO step VALUES(null,42,'http://i2.chuimg.com/8592eb8097e711e6b2400242ac110002_2448w_2448h.jpg?imageView2/2/w/300/interlace/1/q/90','糯米粉,玉米淀粉,细砂糖,牛奶,搅拌均匀,过筛一遍,盖上保鲜膜,大火蒸至奶糕凝固即可'); INSERT INTO step VALUES(null,42,'http://i2.chuimg.com/848af7b497e711e69ce70242ac110002_2448w_2448h.jpg?imageView2/2/w/300/interlace/1/q/90','蒸好的奶糕趁热加入黄油揉匀,黄油不用融化,奶糕这么烫黄油自然就化掉了,用手揉效果最好,别用筷子什么的,拌不匀的!这步很烫手,我是会戴一个皮手套再戴一次性手套,虽然还是会有些烫,我之前可是直接戴一次性手套,练成铁砂掌了都23333揉好之后放保鲜袋里,尽量摊开让它冷却,冷却到室温再放冰箱,时间急的话揉好直接扔急冻室,30分钟就可以拿出来了,but这样很伤冰箱!时间不急放保鲜也行,可以今天准备奶糕,第二天再擀皮包奶油水果,冻的时间越长越不粘手!'); INSERT INTO step VALUES(null,42,null,'取适量糯米粉小火炒熟,或者放微波炉叮一会,糯米粉发出香味就可以了,太过了会变黄,影响成品颜色!!炒的时候记得不停翻拌,不然糊了!用微波炉叮也是十秒五秒的样子拿出来翻一下,不然有水蒸气会起块!配方的量我一半是吃饭那种小碗半碗的量就够了,多炒一些也没关系,留着下次用,数糯米粉也称糕粉,能保存挺长时间的'); INSERT INTO step VALUES(null,42,null,'冻好的奶糕取出来分成25g一坨,新手可以分30g一坨,皮子重量一样做出来才均匀,如果你不介意可以随便揪一坨,这个量25g可以做7个,一个人吃完全够了好吗,不然你想胖死你自己吗!奶糕还是挺粘的,记得沾糕粉!!'); INSERT INTO step VALUES(null,42,null,'分好奶糕之后开始擀皮!!同样,你的手,案板,擀面杖,奶糕都请洒上糕粉,但不要太多,可以一点一点加,太多影响口感!这个皮不好擀,是糯米粉弄的,弹性很大,慢慢来,擀皮就不用我教了,这是个练耐心的玩意,要想做好看就老老实实擀皮!不然你就两手慢慢扯,扯出来厚薄不匀,丑死了!皮的大小跟小碗碗口差不多大就行了!'); INSERT INTO step VALUES(null,42,'http://i2.chuimg.com/8328061e97e711e69ce70242ac110002_1280w_1280h.jpg?imageView2/2/w/300/interlace/1/q/90','擀好皮可以打奶油了!量不多的情况下建议手动打发!奶油会细腻很多,100g奶油大概三五分钟能打好,天气热的时候奶油放冰上打发!!100g奶油加10g细砂糖我觉得已经很甜了,还要加水果,糖建议少放点。手动打发奶油我是抱着盆然后逆时针打,放在冰上的情况下,蛋抽跟盆底垂直打!千万别太狠,不然容易到处飞溅!!打好的奶油装进裱花袋,可以拿个大杯子把裱花袋套在上面,这样一个人也可以很方便的装奶油的,不会弄得到处都是,裱花口别剪太大,裱花袋也别选太大的,以免天气热的时候奶油才用一半你手握的地方就全化了!'); INSERT INTO step VALUES(null,42,null,'软性水果切成小丁,软性水果就是什么芒果啊草莓啊榴莲啊香蕉啊火龙果之类的,也可以选择酥性饼干碎,什么奥利奥啊或者谷优的玛丽亚还是啥来着'); INSERT INTO step 
VALUES(null,42,'http://i1.chuimg.com/82b0bc1297e711e6b2400242ac110002_2448w_2448h.jpg@2o_50sh_1pr_1l_300w_90q_1wh','一切准备就绪可以开始包雪媚娘了!!看图!!底铺满奶油再放水果,放了水果再挤一层奶油,然后跟包包子一样把皮捏紧,水果奶油的量看自己喜欢,但是奶油太少水果太多皮比较薄的情况下会破掉!!底部奶油多一点,顶部奶油可以少一点,裱花嘴剪小一点奶油的量好控制一点!我是用雪媚娘的模具,半球形的,这样大小能控制一样,没有模具就用小碗,看图!!'); INSERT INTO step VALUES(null,42,'http://i2.chuimg.com/81c2177497e711e69ce70242ac110002_1242w_1242h.jpg?imageView2/2/w/300/interlace/1/q/90','包好就完成了!!可以吃了!!这个是用模具做出来的,比较小,所以皮子只有18g左右,可以做10个!!'); INSERT INTO step VALUES(null,42,'http://i2.chuimg.com/80f694d297e711e6b2400242ac110002_1280w_1280h.jpg?imageView2/2/w/300/interlace/1/q/90','吃不完的记得放冰箱冷藏!!是冷藏!!保鲜那层!!不是冷冻!!不然奶油会冻坏!!这个最好一天吃完!!吃不完一定要密封!!不然皮会干掉!!分开放,不要挨在一起,记得撒粉,不然会粘!!'); /*****************************/ INSERT INTO step VALUES(null,43,null,'消化饼干放入保鲜袋中,用擀面杖擀碎'); INSERT INTO step VALUES(null,43,null,'黄油放入微波炉里融化,吉利丁片泡水软化,奶油奶酪室温软化'); INSERT INTO step VALUES(null,43,null,'把融化和黄油和压碎的饼干碎混合成饼底材料,把混合好的饼底材料倒入模具,压实后,放入冰箱冷藏备用'); INSERT INTO step VALUES(null,43,null,'奶油奶酪、细砂糖放在容器里,隔温水打发'); INSERT INTO step VALUES(null,43,null,'加入酸奶,搅拌成芝士糊'); INSERT INTO step VALUES(null,43,null,'将牛奶和淡奶油用微波炉加热,不要加热的太烫,将软化的吉利丁片沥干水,放入加热好的混合物中,搅拌融化'); INSERT INTO step VALUES(null,43,null,'然后分三次倒入芝士糊中,边搅拌均匀,再加入下一次'); INSERT INTO step VALUES(null,43,null,'最后将混合物倒入黄油饼底的蛋糕模中,轻轻震几下,可以让芝士糊的气泡消失'); INSERT INTO step VALUES(null,43,null,'然后放入冰箱中冷藏4个小时以上,芝士糊凝结就可以了'); INSERT INTO step VALUES(null,43,null,'脱模的时候可以用热毛巾捂一下,或者用吹风机吹一下,方面脱模'); /*****************************/ INSERT INTO step VALUES(null,44,'http://i2.chuimg.com/f5df9b408bbc11e6b87c0242ac110003_330w_223h.jpg?imageView2/2/w/300/interlace/1/q/90','全脂牛奶放入碗中,在碗上覆上一层保鲜膜,放入蒸锅中,蒸十分钟左右'); INSERT INTO step VALUES(null,44,'http://i2.chuimg.com/f602d1f08bbc11e6a9a10242ac110002_330w_223h.jpg?imageView2/2/w/300/interlace/1/q/90','把鸡蛋里的蛋清和蛋白分离,蛋清搅拌均匀即可,不用打很久不然就打发了'); INSERT INTO step 
VALUES(null,44,'http://i2.chuimg.com/f62aafea8bbc11e6b87c0242ac110003_330w_223h.jpg?imageView2/2/w/300/interlace/1/q/90','蒸好的牛奶拿出蒸锅放凉,放凉后上面会形成一层奶皮,用尖尖的刀子,沿着碗边划大概十厘米左右的口'); INSERT INTO step VALUES(null,44,'http://i2.chuimg.com/f650dcb08bbc11e6b87c0242ac110003_330w_223h.jpg?imageView2/2/w/300/interlace/1/q/90','倒出大部分牛奶,碗底留点底不要倒净,不然奶皮容易粘在碗上,待会儿就浮不起来了'); INSERT INTO step VALUES(null,44,'http://i2.chuimg.com/f67ab6f28bbc11e6b87c0242ac110003_330w_223h.jpg?imageView2/2/w/300/interlace/1/q/90','把搅拌均匀的蛋清和糖加入牛奶中'); INSERT INTO step VALUES(null,44,'http://i2.chuimg.com/f6a511908bbc11e6a9a10242ac110002_330w_223h.jpg?imageView2/2/w/300/interlace/1/q/90','蛋清奶液过筛,筛掉蛋清中尚未打发的部分。如果蛋奶液中有气泡和奶沫用勺子撇去'); INSERT INTO step VALUES(null,44,'http://i1.chuimg.com/f6d8c2b08bbc11e6b87c0242ac110003_330w_223h.jpg@2o_50sh_1pr_1l_300w_90q_1wh','把蛋清奶液缓缓沿着刚才划破的奶皮缺口倒会碗里,让奶皮浮在上面'); INSERT INTO step VALUES(null,44,'http://i2.chuimg.com/f713079a8bbc11e6b87c0242ac110003_330w_223h.jpg?imageView2/2/w/300/interlace/1/q/90','再次敷上保鲜膜上锅蒸,中火蒸十分钟后关火,此时不要掀开盖子焖五分钟再取出。其状如膏,色洁白,口感细腻嫩滑,如丝如绸,入口即化,口味香而不重,甜而不腻'); /***************************************/ INSERT INTO step VALUES(null,45,'http://i2.chuimg.com/3dc441259b574b87815b0cf7a1bdf213_1080w_1440h.jpg?imageView2/2/w/300/interlace/1/q/90','番薯去皮洗干净,切块'); INSERT INTO step VALUES(null,45,'http://i2.chuimg.com/81336ee1265942908a806a664d097674_1080w_1440h.jpg?imageView2/2/w/300/interlace/1/q/90','姜拍松'); INSERT INTO step VALUES(null,45,'http://i1.chuimg.com/e803ab5b7d334e32be4df29c61c4f210_1080w_1440h.jpg@2o_50sh_1pr_1l_300w_90q_1wh','番薯、姜、红糖放到电饭锅里,加入适量的水煮熟(冬天喝热乎的驱寒,夏天冰镇后解暑)'); /***********************************/ INSERT INTO step VALUES(null,46,'http://i2.chuimg.com/3f58fb9367674bc3ac8019ae35066980_1080w_810h.jpg?imageView2/2/w/300/interlace/1/q/90','南瓜,红薯去皮蒸熟'); INSERT INTO step VALUES(null,46,'http://i2.chuimg.com/c1b9d264786941439ae20f4c63111d3b_1080w_810h.jpg?imageView2/2/w/300/interlace/1/q/90','蒸熟的南瓜红薯和酸奶,鸡蛋放入料理机打成泥,倒入烤碗'); INSERT INTO step 
VALUES(null,46,'http://i2.chuimg.com/2524663af81b47dea5e961b23c35f38a_1080w_810h.jpg?imageView2/2/w/300/interlace/1/q/90','撒入配料,微波炉高火7分钟'); /******************************/ INSERT INTO step VALUES(null,47,null,'杯子里放入大概4/5的雪碧'); INSERT INTO step VALUES(null,47,'http://i1.chuimg.com/52757988991411e69ce70242ac110002_2448w_2448h.jpg@2o_50sh_1pr_1l_300w_90q_1wh','取一把比较大的勺子,凸出来那面向上,把勺子边缘放到杯子里紧挨着杯壁,勺子呈45—60度角放置,然后顺着勺子背面往杯子里缓缓倒入蜜桃味rio'); INSERT INTO step VALUES(null,47,null,'静置一会儿就好啦。'); /******************************/ INSERT INTO step VALUES(null,48,'http://i2.chuimg.com/398fcc8f0ca74377a9dd043399c6d4fb_1152w_864h.jpg?imageView2/2/w/300/interlace/1/q/90','玉米剥好,上锅大火蒸10分钟。牛油或马遮林提前取出来放软。'); INSERT INTO step VALUES(null,48,'http://i2.chuimg.com/3489516aacb24995ad7f8bbe7712e050_1152w_864h.jpg?imageView2/2/w/300/interlace/1/q/90','准备一个大小合适的碗,将适量的牛油、糖、盐和蒸好的玉米粒趁热搅拌均匀,可以根据自己的喜好适量增减。'); INSERT INTO step VALUES(null,48,'http://i1.chuimg.com/62232be3c40943aa950c25d1c6e39ef6_1152w_864h.jpg@2o_50sh_1pr_1l_300w_90q_1wh','香喷喷的玉米杯就做好啦!'); /*****************************/ INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/4af8ae08ff9b49aa83c263936d6b8438_740w_334h.bmp?imageView2/2/w/300/interlace/1/q/90','先做鸡蛋的部分,60g白砂糖,20g水,糖和水的比例是3:1。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/2f8f2c7958e5483f8adf1ee07cd0bfd2_740w_334h.bmp?imageView2/2/w/300/interlace/1/q/90','取2个蛋黄放入搅拌机里搅拌。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/71f364d849fe45098ccb6ac7963c797d_740w_333h.bmp?imageView2/2/w/300/interlace/1/q/90','同时煮糖浆的部分,煮到116度-120度之间即可。'); INSERT INTO step VALUES(null,49,'http://i1.chuimg.com/69435e1dd5e943e69fe3b89c44ed1e4d_740w_334h.bmp@2o_50sh_1pr_1l_300w_90q_1wh','中高速搅拌鸡蛋,沿着盆子慢慢加入糖浆。直到这个质感,然后摸一下盆底温度,是室温就可以了,炸蛋面糊部分完成。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/b55de3bd49b04ab193157b6e0631db55_740w_334h.bmp?imageView2/2/w/300/interlace/1/q/90','没有搅拌器的话,可以用Sabayon隔水加热。2-3个蛋黄,60g糖,稍微打发一下。'); INSERT INTO step 
VALUES(null,49,'http://i1.chuimg.com/46ac16a374b148dcac150284e77506cf_740w_333h.bmp@2o_50sh_1pr_1l_300w_90q_1wh','加入2小勺咖啡利口酒,再搅拌均匀。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/ba0948c194fb4851885623c08f492102_744w_334h.bmp?imageView2/2/w/300/interlace/1/q/90','然后烧一锅水煮沸后转小火,把盆子放在锅上,隔水加热。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/09d449cac5764ab696df5b8e34ea4d74_744w_334h.bmp?imageView2/2/w/300/interlace/1/q/90','慢慢打发到这个程度,整个过程大概是5分钟。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/8498356f0b364ed2ae89ef5b83adcc1c_740w_333h.bmp?imageView2/2/w/300/interlace/1/q/90','然后做奶酪糊的部分,250g马斯卡彭,奶油奶酪也可以。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/dd288e4ab34e428b9435cae067d24808_740w_333h.bmp?imageView2/2/w/300/interlace/1/q/90','马斯卡彭需要提前回温,可以放微波炉稍微加热一下,更容易搅拌至丝滑。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/d7f3cb2726e24691b90b773868c8b4d6_740w_334h.bmp?imageView2/2/w/300/interlace/1/q/90','加入刚刚的炸蛋面糊,搅拌。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/c8c6f588e103424280fb5efd335b38c6_740w_333h.bmp?imageView2/2/w/300/interlace/1/q/90','然后打发奶油,用200ml奶油,打发至不流动。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/6963caf43e2943e2ad55e9a0b5554624_740w_334h.bmp?imageView2/2/w/300/interlace/1/q/90','再把奶油加入到刚刚的面糊内,搅拌均匀,尽量减少消泡。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/78025d5f63a14cac9f16d8a890d804c0_740w_333h.bmp?imageView2/2/w/300/interlace/1/q/90','加入2小勺咖啡利口酒,搅拌均匀。'); INSERT INTO step VALUES(null,49,'http://i2.chuimg.com/d5f5a651725a4cd2a22a8430b1fb9897_740w_333h.bmp?imageView2/2/w/300/interlace/1/q/90','准备8个手指饼干,依次在冷却的浓缩咖啡里快速浸泡一下。'); INSERT INTO step VALUES(null,49,'http://i1.chuimg.com/815e520c11dc44d3b93bd69ffa169db4_740w_333h.bmp@2o_50sh_1pr_1l_300w_90q_1wh','再覆盖一层奶酪糊。'); INSERT INTO step VALUES(null,49,'http://i1.chuimg.com/e9b24834b92845a4893155ff028a2f50_740w_333h.bmp@2o_50sh_1pr_1l_300w_90q_1wh','抹均匀后,撒上可可粉,我用的是法芙娜的可可粉比较推荐。'); INSERT INTO step 
VALUES(null,49,'http://i1.chuimg.com/281b1eb52af24608bcfab4a6a4b36ae3_740w_333h.bmp@2o_50sh_1pr_1l_300w_90q_1wh','再重复一遍刚刚的步骤,冰箱冷藏过夜。'); INSERT INTO step VALUES(null,49,'http://i1.chuimg.com/3a4a0931d4054cea8a3a6b6e3b044d37_400w_225h.gif@2o_50sh_1pr_1l_300w_90q_1wh.gif','第二天吃之前撒上可可粉。'); /**************/ INSERT INTO step VALUES(null,50,'http://i2.chuimg.com/1a67628c8bfd11e6b87c0242ac110003_338w_230h.jpg?imageView2/2/w/300/interlace/1/q/90','把吉利丁片放入冰水里,浸泡15分钟左右至软后捞出挤净水分备用'); INSERT INTO step VALUES(null,50,'http://i2.chuimg.com/1a8b69e88bfd11e6a9a10242ac110002_345w_229h.jpg?imageView2/2/w/300/interlace/1/q/90','牛奶装进较大的容器里,加入淡奶油,椰浆,绵白糖拌匀'); INSERT INTO step VALUES(null,50,'http://i2.chuimg.com/1aac298a8bfd11e6a9a10242ac110002_344w_231h.jpg?imageView2/2/w/300/interlace/1/q/90','入锅,隔水加热至糖化。混合椰奶液的温度约为50度左右'); INSERT INTO step VALUES(null,50,'http://i1.chuimg.com/1ad451a88bfd11e6b87c0242ac110003_340w_227h.jpg@2o_50sh_1pr_1l_300w_90q_1wh','放入泡好的吉利丁片,搅拌至溶化关火'); INSERT INTO step VALUES(null,50,'http://i1.chuimg.com/1afedf5e8bfd11e6b87c0242ac110003_342w_230h.jpg@2o_50sh_1pr_1l_300w_90q_1wh','在密封盒里铺上一层保鲜膜'); INSERT INTO step VALUES(null,50,'http://i2.chuimg.com/1b24028e8bfd11e6a9a10242ac110002_343w_229h.jpg?imageView2/2/w/300/interlace/1/q/90','待椰奶液放冷后倒入,入冰箱冷藏至凝固'); INSERT INTO step VALUES(null,50,'http://i2.chuimg.com/1b42deca8bfd11e6a9a10242ac110002_340w_222h.jpg?imageView2/2/w/300/interlace/1/q/90','取出'); INSERT INTO step VALUES(null,50,'http://i2.chuimg.com/1b6beedc8bfd11e6b87c0242ac110003_344w_229h.jpg?imageView2/2/w/300/interlace/1/q/90','切小块'); INSERT INTO step VALUES(null,50,'http://i1.chuimg.com/1b898ab48bfd11e6b87c0242ac110003_342w_225h.jpg@2o_50sh_1pr_1l_300w_90q_1wh','表面醮一层椰蓉即可'); /**创建用户评论表**/ CREATE TABLE comment( cid INT PRIMARY KEY AUTO_INCREMENT, uid INT, #所属用户 mid INT, #所属菜单 time INT,#发表时间 content VARCHAR(250)#评论内容 ); /* 创建每日推荐表*/ CREATE TABLE recommend( rid INT PRIMARY KEY AUTO_INCREMENT, mname VARCHAR(20),#菜单名称 type INT,#菜单所属分类 mimg VARCHAR(500),#成品图片 mid 
INT ); INSERT INTO recommend VALUES(null,"早餐鸡蛋饼",1,"http://i2.chuimg.com/1c83091c273a11e7947d0242ac110002_1775w_2394h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",1); INSERT INTO recommend VALUES(null,"麻婆豆腐",2,"http://i2.chuimg.com/7eb024afed0e456c90f330247225ffe4_2044w_1080h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",11); INSERT INTO recommend VALUES(null,"麻辣小龙虾",3,"http://i2.chuimg.com/e55937298f2549b3ab87550bed33ec40_1035w_827h.jpg?imageView2/1/w/640/h/520/q/90/format/jpg",26); INSERT INTO recommend VALUES(null,"广式鸡煲",4,"http://i2.chuimg.com/2d4f90f36fad4fe9b650a8ed92f30718_5184w_3456h.jpg?imageView2/2/w/660/interlace/1/q/90",33); INSERT INTO recommend VALUES(null, '双皮奶',5,'http://i1.chuimg.com/66477c7886f811e6b87c0242ac110003_457w_685h.jpg@2o_50sh_1pr_1l_280w_190h_1c_1e_90q_1wh',44);
-- missing_data_user (view)
-- Lists the report dates on which at least one of the expected usage
-- metrics (Drive, Gmail, Google+) is missing, so those days can be
-- re-fetched/backfilled. BigQuery legacy SQL ([project:dataset.table]).
-- Fix: the original SELECT list ended with a trailing comma right before
-- FROM ("... AS gplus, FROM"), which is a syntax error; also removed the
-- no-op IFNULL(x, NULL) wrappers (identical semantics).
SELECT
  run_days.report_date AS report_date,
  gen_data.drive AS drive,
  gen_data.gmail AS gmail,
  gen_data.gplus AS gplus
FROM (
  -- every date for which a daily report run was recorded
  SELECT report_date
  FROM [YOUR_PROJECT_ID:raw_data.daily_report_status]
  GROUP BY 1
) run_days
LEFT JOIN (
  -- per-date aggregates of the three metrics we expect to be present;
  -- a NULL here means the metric never arrived for that date
  SELECT
    date,
    SUM(CASE WHEN parameters.name = 'drive:num_items_viewed'
             THEN parameters.intValue ELSE NULL END) AS drive,
    MAX(CASE WHEN parameters.name = 'gmail:last_interaction_time'
             THEN parameters.datetimeValue ELSE NULL END) AS gmail,
    SUM(CASE WHEN parameters.name = 'gplus:num_shares'
             THEN parameters.intValue ELSE NULL END) AS gplus
  FROM [YOUR_PROJECT_ID:raw_data.user_usage]
  GROUP BY 1
) gen_data
  ON gen_data.date = run_days.report_date
WHERE gen_data.drive IS NULL
   OR gen_data.gmail IS NULL
   OR gen_data.gplus IS NULL
ORDER BY 1 DESC
<reponame>reshke/gpdb
-- Remove any statement_timeout override from the cluster configuration,
-- then reload config on a running cluster (gpstop -u signals a reload
-- without restarting segments). !\retcode asserts the command exits 0.
!\retcode gpconfig -r statement_timeout;
!\retcode gpstop -u;
-- Additive migration: extend abuse_reports with client/add-on/environment
-- metadata. All new columns are nullable on purpose — rows created before
-- this migration have no values for them.
-- NOTE(review): the smallint UNSIGNED columns (addon_signature, application,
-- reason, addon_install_method, addon_install_entry_point) look like
-- enum-coded values maintained in application code — confirm against the app.
ALTER TABLE `abuse_reports`
    ADD COLUMN `client_id` varchar(64),
    ADD COLUMN `addon_name` varchar(255),
    ADD COLUMN `addon_summary` varchar(255),
    ADD COLUMN `addon_version` varchar(255),
    ADD COLUMN `addon_signature` smallint UNSIGNED,
    ADD COLUMN `application` smallint UNSIGNED,
    ADD COLUMN `application_version` varchar(255),
    ADD COLUMN `application_locale` varchar(255),
    ADD COLUMN `operating_system` varchar(255),
    ADD COLUMN `operating_system_version` varchar(255),
    ADD COLUMN `install_date` datetime(6),
    ADD COLUMN `reason` smallint UNSIGNED,
    ADD COLUMN `addon_install_origin` varchar(255),
    ADD COLUMN `addon_install_method` smallint UNSIGNED,
    ADD COLUMN `addon_install_entry_point` smallint UNSIGNED;
\connect upmugateway

SET default_tablespace = upmu_datastore;

-- Recreate the uPMU data-file tracking table from scratch.
-- The table must be dropped before the enum type it depends on.
DROP TABLE IF EXISTS upmu_data_file;
DROP TYPE IF EXISTS file_state;

-- Processing lifecycle of an ingested data file.
CREATE TYPE file_state AS ENUM ('abandoned', 'processing', 'released');

CREATE TABLE upmu_data_file (
    id                serial PRIMARY KEY,
    file_name         varchar(32) NOT NULL,
    -- FK column is a plain integer. The original declared it "serial",
    -- which attaches an unrelated auto-increment sequence whose default
    -- values would not match existing upmu_data_directory ids.
    directory         integer NOT NULL,
    state             file_state NOT NULL,
    access_start_time timestamp NOT NULL,
    release_time      timestamp,  -- NULL until the file reaches 'released'
    CONSTRAINT upmu_data_file_directory_fk
        FOREIGN KEY (directory) REFERENCES upmu_data_directory (id)
);
<filename>ExoCompteBancaire/Tables/Tables.sql
-- Bank schema: clients own accounts (comptes); accounts have operations.
-- Drops use IF EXISTS so the script is rerunnable on a fresh database
-- (a bare DROP TABLE fails when the table is absent), and run in reverse
-- dependency order. Tables are created parent-first with explicit, named
-- foreign keys — the original declared none, leaving orphan rows possible.
DROP TABLE IF EXISTS [bank_operation];
DROP TABLE IF EXISTS [bank_compte];
DROP TABLE IF EXISTS [bank_client];

CREATE TABLE [dbo].[bank_client] (
    [id]        INT          IDENTITY (1, 1) NOT NULL,
    [nom]       VARCHAR (50) NOT NULL,
    [prenom]    VARCHAR (50) NOT NULL,
    [telephone] VARCHAR (50) NOT NULL,
    PRIMARY KEY CLUSTERED ([id] ASC)
);

CREATE TABLE [dbo].[bank_compte] (
    [id]           INT             IDENTITY (1, 1) NOT NULL,
    [numero]       VARCHAR (50)    NOT NULL,
    [idClient]     INT             NOT NULL,
    [solde]        DECIMAL (10, 2) NOT NULL,
    [dateCreation] DATETIME        NOT NULL,
    PRIMARY KEY CLUSTERED ([id] ASC),
    -- account number must be unique so operations can reference it
    CONSTRAINT [bank_compte_numero_uq] UNIQUE ([numero]),
    CONSTRAINT [bank_compte_idClient_fk]
        FOREIGN KEY ([idClient]) REFERENCES [dbo].[bank_client] ([id])
);

CREATE TABLE [dbo].[bank_operation] (
    [id]           INT             IDENTITY (1, 1) NOT NULL,
    [numCompte]    VARCHAR (50)    NOT NULL,
    [dateCreation] DATETIME        NOT NULL,
    [type]         VARCHAR (50)    NOT NULL,
    [montant]      DECIMAL (10, 2) NOT NULL,
    PRIMARY KEY CLUSTERED ([id] ASC),
    CONSTRAINT [bank_operation_numCompte_fk]
        FOREIGN KEY ([numCompte]) REFERENCES [dbo].[bank_compte] ([numero])
);
<reponame>osamhack2020/WEB_AmongUs_0262
-- Seed data: one admin account plus eight test users for the user table.
-- NOTE(review): INSERT without an explicit column list is fragile — it breaks
-- silently if the user table's column order ever changes; confirm the order
-- against the table DDL. Passwords are stored in plain text here, which is
-- acceptable only for local test fixtures.
INSERT INTO user VALUES ("admin", "1q2w3e4r!","어드민", "111111", 1, 1, 8, "<EMAIL>", "01088888888", "2222");
INSERT INTO user VALUES ("user1", "1q2w3e4r!","유저1", "111111", 0, 0, 2, "<EMAIL>", "01088888888", "2222");
INSERT INTO user VALUES ("user2", "1q2w3e4r!","유저2", "111111", 0, 0, 2, "<EMAIL>", "01088888888", "2222");
INSERT INTO user VALUES ("user3", "1q2w3e4r!","유저3", "111111", 0, 0, 2, "<EMAIL>", "01088888888", "2222");
INSERT INTO user VALUES ("user4", "1q2w3e4r!","유저4", "111111", 0, 0, 1, "<EMAIL>", "01088888888", "2222");
INSERT INTO user VALUES ("user5", "1q2w3e4r!","유저5", "111111", 0, 0, 1, "<EMAIL>", "01088888888", "2222");
INSERT INTO user VALUES ("user6", "1q2w3e4r!","유저6", "111111", 0, 0, 1, "<EMAIL>", "01088888888", "2222");
INSERT INTO user VALUES ("user7", "1q2w3e4r!","유저7", "111111", 0, 0, 4, "<EMAIL>", "01088888888", "2222");
INSERT INTO user VALUES ("user8", "1q2w3e4r!","유저8", "111111", 0, 0, 4, "<EMAIL>", "01088888888", "2222");
<filename>opzet.sql
-- Contact / bookkeeping schema (MySQL).
-- Fix: the original link tables used inline column-level
-- "REFERENCES contactpersoon(id) ON DELETE CASCADE". MySQL parses but
-- silently IGNORES inline references, so no foreign key was ever created
-- and deleting a contact left orphaned phone/e-mail rows. Table-level
-- FOREIGN KEY constraints are required and are used below.
CREATE TABLE contactpersoon (
    id int not null auto_increment,
    contact_achternaam varchar(25),
    contact_voornaam varchar(25),
    contact_bedrijf varchar(25),
    contact_straatnaam varchar(50),
    contact_postcode varchar(25),
    contact_plaats varchar(25),
    contact_land varchar(25),
    contact_relatie varchar(30),
    contact_website varchar(100),
    primary key(id)
);

-- Link table: a contact can have several phone numbers.
CREATE TABLE telefoonnummer (
    id int not null auto_increment,
    telnr varchar(15),
    contact_id integer,
    primary key(id),
    CONSTRAINT telefoonnummer_contact_id_fk
        FOREIGN KEY (contact_id) REFERENCES contactpersoon(id)
        ON DELETE CASCADE
);

-- Link table: a contact can have several e-mail addresses.
CREATE TABLE email (
    id int not null auto_increment,
    email varchar(60),
    contact_id integer,
    primary key(id),
    CONSTRAINT email_contact_id_fk
        FOREIGN KEY (contact_id) REFERENCES contactpersoon(id)
        ON DELETE CASCADE
);

-- Quotation. Open question from the original author: when a contact is
-- deleted, should the linked assignment be deleted too??
-- NOTE(review): monetary amounts are DOUBLE (binary floating point) and
-- dates are VARCHAR; migrating to DECIMAL/DATE is recommended but is a
-- data migration, deliberately not done here.
CREATE TABLE offerte (
    id int not null auto_increment,
    datum VARCHAR(12),
    correspondentienummer INT(25),
    naamklant VARCHAR(25),
    uren DOUBLE(9, 2),
    btwPercentage DOUBLE(9, 2),
    kostenBruto DOUBLE(9, 2),
    kostenBTW DOUBLE(9, 2),
    kostenNetto DOUBLE(9, 2),
    primary key(id)
);

-- Invoice; id doubles as the invoice number.
CREATE TABLE factuur (
    id int not null auto_increment,
    datum VARCHAR(12),
    aflever_datum VARCHAR(12),
    factuur_omschrijving VARCHAR(250),
    bruto_kosten DOUBLE(9, 2),
    btw_percentage INT(2),
    btw_kosten DOUBLE(9, 2),
    netto_kosten DOUBLE(9, 2),
    primary key(id)
);

-- Expenses.
CREATE TABLE onkosten (
    id int not null auto_increment,
    onkosten_bedrijf VARCHAR(50),
    onkosten_datum VARCHAR(20),
    onkosten_kostenpost VARCHAR(50),
    onkosten_omschrijving VARCHAR(250),
    onkosten_bruto_kosten DOUBLE(9,2),
    onkosten_btw_percentage INT(2),
    onkosten_btw_kosten DOUBLE(9,2),
    onkosten_netto_kosten DOUBLE(9,2),
    primary key(id)
);

-- Expense categories.
CREATE TABLE kostenpost (
    id int not null auto_increment,
    kostenpost_kostenpost VARCHAR(50),
    primary key(id)
);

-- Letter (correspondence).
CREATE TABLE brief (
    id int not null auto_increment,
    datum VARCHAR(12),
    correspondentie int,
    betreft VARCHAR(25),
    adressering VARCHAR(50),
    verhaal TEXT,
    primary key(id)
);

-- High/low VAT percentages.
CREATE TABLE btw_percentage (
    id int not null auto_increment,
    btw_percentage_hoog INT(2),
    btw_percentage_laag INT(2),
    primary key(id)
);

-- Application user. wachtwoord is 64 chars — presumably a hex-encoded
-- SHA-256 hash; confirm against the application code.
CREATE TABLE gebruiker (
    id int not null auto_increment,
    gebruikersnaam varchar(25),
    email_adres varchar(25),
    wachtwoord varchar(64),
    rol varchar(25),
    primary key(id)
);
--create database vehicleloanproject
-- Vehicle-loan application schema (SQL Server).
-- Fixes relative to the original:
--  * RoleType is now created BEFORE UserRegistration; the original created
--    it after, so the fk_user_registration reference failed when the script
--    was run top-to-bottom.
--  * Removed the trailing comma before ")" in LoanDetails (syntax error).
--  * IdentityDocuments columns were bare "binary", which defaults to
--    binary(1) — a single byte — so no document could ever be stored;
--    they are now varbinary(max).

-- Lookup table of user roles.
create table RoleType(
    RoleId int identity(100,1),
    RoleName varchar(10) not null,
    constraint pk_user_type primary key (RoleId)
);

-- Login credentials; each user has exactly one role.
create table UserRegistration(
    UserId varchar(50) NOT NULL,
    Password varchar(50) not null,
    RoleId int not null,
    constraint pk_user_registration primary key (UserId),
    constraint fk_user_registration foreign key(RoleId) references RoleType(RoleId)
);

-- One row per loan applicant, linked to a login via UserId.
create table ApplicantDetails(
    CustomerId int identity(10000,1),
    FirstName varchar(20) not null,
    MiddleName varchar(20) not null,
    LastName varchar(20) not null,
    Age int not null,
    Gender varchar(10) not null,
    ContactNo bigint not null,
    EmailId varchar(50) not null,
    Address varchar(100) not null,
    State varchar(50) not null,
    City varchar(50) not null,
    Pincode int not null,
    TypeOfEmployement varchar(50) not null,
    YearlySalary decimal(18,2) not null,
    ExistingEmi decimal(18,2),  -- NULL when the applicant has no current EMI
    UserId varchar(50) not null,
    constraint pk_applicant_details primary key (CustomerId),
    constraint fk_applicant_details foreign key (UserId) references UserRegistration(UserId)
);

-- Vehicle the applicant wants financed.
create table Vehicle(
    VehicleId INT identity(100,1) NOT NULL,
    CarMake varchar(20) not null,
    CarModel varchar(20) not null,
    ExshowroomPrice decimal(18,2) default 0,
    OnroadPrice decimal(18,2) default 0,
    CustomerId int not null,
    constraint pk_vehicle primary key (VehicleId),
    constraint fk_vehicle foreign key (CustomerId) references ApplicantDetails(CustomerId)
);

-- Uploaded identity documents, stored inline as binary blobs.
create table IdentityDocuments(
    IdentityId int identity(100,1),
    Adharcard varbinary(max) not null,
    Pancard varbinary(max) not null,
    Photo varbinary(max) not null,
    Salaryslip varbinary(max) not null,
    CustomerId int not null,
    constraint pk_identity_documents primary key (IdentityId),
    constraint fk_identity_documents foreign key (CustomerId) references ApplicantDetails(CustomerId)
);

-- Lookup table of loan-application statuses.
create table ApplicationStatus(
    StatusId int identity(1,1),
    StatusName varchar(10) not null,
    constraint pk_application_status primary key (StatusId)
);

-- A customer's loan application and its current status.
create table LoanDetails(
    LoanAppId int identity(100,1),
    LoanAmount decimal(18,2) not null,
    LoanTenure int not null,
    InterestRate int not null,
    CustomerId int not null,
    StatusId int NOT NULL,
    constraint pk_loan_details primary key (LoanAppId),
    constraint fk_loan_details_custid foreign key (CustomerId) references ApplicantDetails(CustomerId),
    constraint fk_loan_details_status_id foreign key (StatusId) references ApplicationStatus(StatusId)
);

-- Loan scheme offered to a customer.
create table LoanScheme(
    SchemeId int identity(100,1),
    SchemeName varchar(50) not null,
    MaxLoanAmount decimal(18,2) not null,
    InterestRate int not null,
    Emi decimal(18,2) not null,
    ProcessingFee decimal(18,2) not null,
    AccountType varchar(50) not null,
    CustomerId int not null,
    constraint pk_loan_scheme primary key (SchemeId),
    constraint fk_loan_scheme foreign key (CustomerId) references ApplicantDetails(CustomerId)
);
-- MySQL dump 10.16  Distrib 10.1.26-MariaDB, for osx10.6 (i386)
--
-- Host: 127.0.0.1    Database: opentradeengine
-- ------------------------------------------------------
-- Server version	10.1.26-MariaDB
--
-- NOTE(review): this dump mixes latin1 and utf8 table charsets and seeds
-- '0000-00-00' zero dates; restoring requires strict SQL mode
-- (NO_ZERO_DATE / STRICT_TRANS_TABLES) to be off — confirm target server.
-- Fix applied: foreign-key column references now match the declared column
-- case exactly (Owner, LeftCurrency, RightCurrency); MySQL tolerated the
-- lowercase forms but case-sensitive tooling does not.

/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;

--
-- Table structure for table `Currencies`
-- Reference list of tradable currencies (e.g. USD, BTC).
--

DROP TABLE IF EXISTS `Currencies`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `Currencies` (
  `ID` int(11) NOT NULL AUTO_INCREMENT,
  `Symbol` char(10) NOT NULL,
  `Name` varchar(255) NOT NULL,
  PRIMARY KEY (`ID`),
  -- NOTE(review): this unique index duplicates the primary key; redundant but harmless.
  UNIQUE KEY `Currencies_ID_uindex` (`ID`),
  UNIQUE KEY `Currencies_Symbol_uindex` (`Symbol`),
  UNIQUE KEY `Currencies_Name_uindex` (`Name`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `Currencies`
--

LOCK TABLES `Currencies` WRITE;
/*!40000 ALTER TABLE `Currencies` DISABLE KEYS */;
INSERT INTO `Currencies` VALUES (1,'USD','US Dollar'),(2,'BTC','Bitcoin');
/*!40000 ALTER TABLE `Currencies` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `EXAMPLEBuys`
-- Open buy orders for the EXAMPLE trading pair.
--

DROP TABLE IF EXISTS `EXAMPLEBuys`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `EXAMPLEBuys` (
  `ID` int(11) NOT NULL AUTO_INCREMENT,
  `TS` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `Price` decimal(16,8) unsigned NOT NULL,
  `Quantity` decimal(16,8) unsigned NOT NULL,
  `Type` varchar(10) NOT NULL,
  `Owner` int(11) NOT NULL,
  `Symbol` int(11) NOT NULL,
  `FeePercent` decimal(6,3) NOT NULL DEFAULT '0.000',
  PRIMARY KEY (`ID`),
  KEY `EXAMPLEBuys_Traders_ID_fk` (`Owner`),
  KEY `EXAMPLEBuys_Symbols_ID_fk` (`Symbol`),
  CONSTRAINT `EXAMPLEBuys_Symbols_ID_fk` FOREIGN KEY (`Symbol`) REFERENCES `Symbols` (`ID`),
  CONSTRAINT `EXAMPLEBuys_Traders_ID_fk` FOREIGN KEY (`Owner`) REFERENCES `Traders` (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `EXAMPLEBuys`
--

LOCK TABLES `EXAMPLEBuys` WRITE;
/*!40000 ALTER TABLE `EXAMPLEBuys` DISABLE KEYS */;
/*!40000 ALTER TABLE `EXAMPLEBuys` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `EXAMPLESells`
-- Open sell orders; mirrors the schema of `EXAMPLEBuys`.
--

DROP TABLE IF EXISTS `EXAMPLESells`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `EXAMPLESells` (
  `ID` int(11) NOT NULL AUTO_INCREMENT,
  `TS` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `Price` decimal(16,8) unsigned NOT NULL,
  `Quantity` decimal(16,8) unsigned NOT NULL,
  `Type` varchar(10) NOT NULL,
  `Owner` int(11) NOT NULL,
  `Symbol` int(11) NOT NULL,
  `FeePercent` decimal(6,3) NOT NULL DEFAULT '0.000',
  PRIMARY KEY (`ID`),
  KEY `EXAMPLESells_Traders_ID_fk` (`Owner`),
  KEY `EXAMPLESells_Symbols_ID_fk` (`Symbol`),
  CONSTRAINT `EXAMPLESells_Symbols_ID_fk` FOREIGN KEY (`Symbol`) REFERENCES `Symbols` (`ID`),
  CONSTRAINT `EXAMPLESells_Traders_ID_fk` FOREIGN KEY (`Owner`) REFERENCES `Traders` (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `EXAMPLESells`
--

LOCK TABLES `EXAMPLESells` WRITE;
/*!40000 ALTER TABLE `EXAMPLESells` DISABLE KEYS */;
/*!40000 ALTER TABLE `EXAMPLESells` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `EXAMPLETrades`
-- Executed trades (matched buy/sell), with per-side fees and totals.
-- NOTE(review): Owner/ActingTraderID have no FK to Traders unlike the
-- order tables — presumably intentional for archival, but confirm.
--

DROP TABLE IF EXISTS `EXAMPLETrades`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `EXAMPLETrades` (
  `ID` int(11) NOT NULL AUTO_INCREMENT,
  `TS` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `Price` decimal(16,8) NOT NULL,
  `Quantity` decimal(16,8) NOT NULL,
  `Type` varchar(10) NOT NULL,
  `Side` varchar(4) NOT NULL,
  `Owner` int(11) NOT NULL,
  `ActingTraderID` int(11) NOT NULL,
  `Volume` decimal(16,8) NOT NULL,
  `BuyFee` decimal(16,8) NOT NULL,
  `SellFee` decimal(16,8) NOT NULL,
  `TotalRight` decimal(16,8) NOT NULL,
  `TotalLeft` decimal(16,8) NOT NULL,
  PRIMARY KEY (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `EXAMPLETrades`
--

LOCK TABLES `EXAMPLETrades` WRITE;
/*!40000 ALTER TABLE `EXAMPLETrades` DISABLE KEYS */;
/*!40000 ALTER TABLE `EXAMPLETrades` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `EXAMPLETransactions`
-- Transfers between traders (addresses plus resolved trader IDs).
--

DROP TABLE IF EXISTS `EXAMPLETransactions`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `EXAMPLETransactions` (
  `ID` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `TS` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `Sender` varchar(300) NOT NULL,
  `Receiver` varchar(300) NOT NULL,
  `Amount` decimal(16,8) NOT NULL DEFAULT '0.00000000',
  `ReceiverID` int(11) NOT NULL,
  `SenderID` int(11) NOT NULL,
  PRIMARY KEY (`ID`),
  KEY `EXAMPLETransactions_Traders_ID_fk` (`SenderID`),
  KEY `EXAMPLETransactions_Traders2_ID_fk` (`ReceiverID`),
  CONSTRAINT `EXAMPLETransactions_Traders2_ID_fk` FOREIGN KEY (`ReceiverID`) REFERENCES `Traders` (`ID`),
  CONSTRAINT `EXAMPLETransactions_Traders_ID_fk` FOREIGN KEY (`SenderID`) REFERENCES `Traders` (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `EXAMPLETransactions`
--

LOCK TABLES `EXAMPLETransactions` WRITE;
/*!40000 ALTER TABLE `EXAMPLETransactions` DISABLE KEYS */;
/*!40000 ALTER TABLE `EXAMPLETransactions` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `EXAMPLEWithdrawals`
-- Completed withdrawals to external addresses.
--

DROP TABLE IF EXISTS `EXAMPLEWithdrawals`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `EXAMPLEWithdrawals` (
  `ID` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `TS` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `Address` varchar(300) NOT NULL DEFAULT '',
  `Currency` int(11) NOT NULL,
  `Amount` decimal(16,8) NOT NULL DEFAULT '0.00000000',
  `Trader` int(11) NOT NULL DEFAULT '0',
  `TransactionID` varchar(300) NOT NULL DEFAULT '',
  PRIMARY KEY (`ID`),
  KEY `EXAMPLEWithdrawals_Currencies_ID_fk` (`Currency`),
  KEY `EXAMPLEWithdrawals_Traders_ID_fk` (`Trader`),
  CONSTRAINT `EXAMPLEWithdrawals_Currencies_ID_fk` FOREIGN KEY (`Currency`) REFERENCES `Currencies` (`ID`),
  CONSTRAINT `EXAMPLEWithdrawals_Traders_ID_fk` FOREIGN KEY (`Trader`) REFERENCES `Traders` (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `EXAMPLEWithdrawals`
--

LOCK TABLES `EXAMPLEWithdrawals` WRITE;
/*!40000 ALTER TABLE `EXAMPLEWithdrawals` DISABLE KEYS */;
/*!40000 ALTER TABLE `EXAMPLEWithdrawals` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `FeeTotals`
-- Accumulated exchange fee revenue per currency.
--

DROP TABLE IF EXISTS `FeeTotals`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `FeeTotals` (
  `Currency` int(11) NOT NULL,
  `Total` decimal(32,8) NOT NULL DEFAULT '0.00000000',
  PRIMARY KEY (`Currency`),
  -- NOTE(review): unique index duplicates the primary key; redundant but harmless.
  UNIQUE KEY `FeeTotals_Currency_uindex` (`Currency`),
  CONSTRAINT `FeeTotals_Currencies_ID_fk` FOREIGN KEY (`Currency`) REFERENCES `Currencies` (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `FeeTotals`
--

LOCK TABLES `FeeTotals` WRITE;
/*!40000 ALTER TABLE `FeeTotals` DISABLE KEYS */;
/*!40000 ALTER TABLE `FeeTotals` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `OrderErrors`
-- Order-processing error log.
--

DROP TABLE IF EXISTS `OrderErrors`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `OrderErrors` (
  `ID` int(11) NOT NULL AUTO_INCREMENT,
  `TS` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `Error` varchar(255) NOT NULL,
  PRIMARY KEY (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `OrderErrors`
--

LOCK TABLES `OrderErrors` WRITE;
/*!40000 ALTER TABLE `OrderErrors` DISABLE KEYS */;
/*!40000 ALTER TABLE `OrderErrors` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `Symbols`
-- Trading pairs: left/right currency plus maker/taker fee rates.
--

DROP TABLE IF EXISTS `Symbols`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `Symbols` (
  `ID` int(11) NOT NULL AUTO_INCREMENT,
  `Symbol` char(20) NOT NULL,
  `LeftCurrency` int(11) NOT NULL,
  `RightCurrency` int(11) NOT NULL,
  `MakerFee` decimal(16,8) NOT NULL DEFAULT '0.00000000',
  `TakerFee` decimal(16,8) NOT NULL DEFAULT '0.00000000',
  PRIMARY KEY (`ID`),
  -- NOTE(review): unique index duplicates the primary key; redundant but harmless.
  UNIQUE KEY `Symbols_ID_uindex` (`ID`),
  UNIQUE KEY `Symbols_code_uindex` (`Symbol`),
  KEY `Symbols_Currencies_ID_fk` (`LeftCurrency`),
  KEY `Symbols_Currencies2_ID_fk` (`RightCurrency`),
  CONSTRAINT `Symbols_Currencies2_ID_fk` FOREIGN KEY (`RightCurrency`) REFERENCES `Currencies` (`ID`),
  CONSTRAINT `Symbols_Currencies_ID_fk` FOREIGN KEY (`LeftCurrency`) REFERENCES `Currencies` (`ID`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `Symbols`
--

LOCK TABLES `Symbols` WRITE;
/*!40000 ALTER TABLE `Symbols` DISABLE KEYS */;
INSERT INTO `Symbols` VALUES (1,'EXAMPLE',1,2,0.01000000,0.00000000);
/*!40000 ALTER TABLE `Symbols` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `TraderCurrencies`
-- Per-trader per-currency balances (available, held, pending, completed).
--

DROP TABLE IF EXISTS `TraderCurrencies`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `TraderCurrencies` (
  `Currency` int(11) NOT NULL,
  `Balance` decimal(16,8) NOT NULL DEFAULT '0.00000000',
  `Trader` int(11) NOT NULL,
  `HeldBalance` decimal(16,8) NOT NULL DEFAULT '0.00000000',
  `PendingBalance` decimal(16,8) NOT NULL DEFAULT '0.00000000',
  `Completed` decimal(16,8) NOT NULL DEFAULT '0.00000000',
  -- NOTE(review): (Currency, Trader) acts as the key but is declared as a
  -- unique index rather than a PRIMARY KEY — consider promoting it.
  UNIQUE KEY `TraderCurrencies_Currency_Trader_pk` (`Currency`,`Trader`),
  KEY `TraderCurrencies_Traders__fk` (`Trader`),
  CONSTRAINT `TraderCurrencies_Currencies.ID__fk` FOREIGN KEY (`Currency`) REFERENCES `Currencies` (`ID`),
  CONSTRAINT `TraderCurrencies_Traders__fk` FOREIGN KEY (`Trader`) REFERENCES `Traders` (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `TraderCurrencies`
--

LOCK TABLES `TraderCurrencies` WRITE;
/*!40000 ALTER TABLE `TraderCurrencies` DISABLE KEYS */;
INSERT INTO `TraderCurrencies` VALUES (1,10000.00000000,4,0.00000000,0.00000000,0.00000000),(1,10000.00000000,5,0.00000000,0.00000000,0.00000000),(2,10000.00000000,4,0.00000000,0.00000000,0.00000000),(2,10000.00000000,5,0.00000000,0.00000000,0.00000000);
/*!40000 ALTER TABLE `TraderCurrencies` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `Traders`
-- User accounts, including KYC fields and abuse counters.
-- NOTE(review): BirthDate DEFAULT '0000-00-00' (and seeded zero dates below)
-- are rejected under NO_ZERO_DATE strict mode — confirm server settings.
--

DROP TABLE IF EXISTS `Traders`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `Traders` (
  `ID` int(11) NOT NULL AUTO_INCREMENT,
  `TS` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `UserName` varchar(30) NOT NULL,
  `FirstName` varchar(50) NOT NULL,
  `LastName` varchar(50) NOT NULL,
  `PasswordHash` char(60) NOT NULL,
  `BirthDate` date NOT NULL DEFAULT '0000-00-00',
  `PhoneNumber` varchar(22) NOT NULL,
  `SecurityQuestion` varchar(300) NOT NULL,
  `SecurityAnswer` varchar(255) NOT NULL,
  `PIN` char(4) NOT NULL,
  `Email` varchar(255) NOT NULL,
  `AddressLineOne` varchar(255) NOT NULL,
  `AddressLineTwo` varchar(255) NOT NULL,
  `PostCode` varchar(10) NOT NULL,
  `City` varchar(50) NOT NULL,
  `RegisterIP` varchar(45) NOT NULL,
  `Referrer` int(11) NOT NULL,
  `Activated` tinyint(1) unsigned NOT NULL DEFAULT '0',
  `AccountNumber` varchar(255) NOT NULL,
  `Points` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000',
  `PointsEarned` decimal(16,8) unsigned NOT NULL DEFAULT '0.00000000',
  `PinCount` tinyint(2) unsigned NOT NULL DEFAULT '0',
  `PassCount` tinyint(2) unsigned NOT NULL DEFAULT '0',
  `RecoverCount` tinyint(2) unsigned NOT NULL DEFAULT '0',
  `TransactionCount` int(10) unsigned NOT NULL DEFAULT '0',
  PRIMARY KEY (`ID`),
  UNIQUE KEY `UserName` (`UserName`),
  KEY `Traders_Traders_ID_fk` (`Referrer`),
  -- Self-referencing FK: every trader must name an existing referrer.
  CONSTRAINT `Traders_Traders_ID_fk` FOREIGN KEY (`Referrer`) REFERENCES `Traders` (`ID`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `Traders`
--

LOCK TABLES `Traders` WRITE;
/*!40000 ALTER TABLE `Traders` DISABLE KEYS */;
INSERT INTO `Traders` VALUES (3,'2017-10-21 17:51:54','test','test','test','test','2017-10-31','999999999','a','a','111','a','','a','a','a','a',3,0,'1111111111',0.00000000,0.00000000,0,0,0,0),(4,'2017-10-21 18:50:20','buyer','John','Smith','$2y$12$XJmVGSZz/gM.Ho3aHLlJEuSdHZO38QvyaFioKphljtnphdkch5vNa','2010-11-11','999-999-9999','Question','Answer','555','<EMAIL>','','','','','',3,0,'',0.00000000,0.00000000,0,0,0,0),(5,'2017-10-21 18:50:20','seller','Joe','Smithie','$2y$12$4.x9ZhanHsvtGr..4PZXduFX7zw1uocIVLNC8jbAtztRfVykQ8UBe','0000-00-00','999-888-9999','Question','Answer','444','<EMAIL>','','','','','',3,0,'',0.00000000,0.00000000,0,0,0,0);
/*!40000 ALTER TABLE `Traders` ENABLE KEYS */;
UNLOCK TABLES;

--
-- Table structure for table `WithdrawErrors`
-- Failed-withdrawal log with the original request parameters.
--

DROP TABLE IF EXISTS `WithdrawErrors`;
/*!40101 SET @saved_cs_client     = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `WithdrawErrors` (
  `ID` int(11) unsigned NOT NULL AUTO_INCREMENT,
  `TS` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
  `Amount` varchar(300) NOT NULL DEFAULT '',
  `Address` varchar(300) NOT NULL DEFAULT '',
  `Trader` int(11) NOT NULL DEFAULT '0',
  `Currency` int(11) NOT NULL DEFAULT '0',
  `Message` varchar(300) NOT NULL,
  PRIMARY KEY (`ID`),
  KEY `WithdrawErrors_Currencies_ID_fk` (`Currency`),
  KEY `WithdrawErrors_Traders_ID_fk` (`Trader`),
  CONSTRAINT `WithdrawErrors_Currencies_ID_fk` FOREIGN KEY (`Currency`) REFERENCES `Currencies` (`ID`),
  CONSTRAINT `WithdrawErrors_Traders_ID_fk` FOREIGN KEY (`Trader`) REFERENCES `Traders` (`ID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;

--
-- Dumping data for table `WithdrawErrors`
--

LOCK TABLES `WithdrawErrors` WRITE;
/*!40000 ALTER TABLE `WithdrawErrors` DISABLE KEYS */;
/*!40000 ALTER TABLE `WithdrawErrors` ENABLE KEYS */;
UNLOCK TABLES;

-- Restore the session settings saved at the top of the dump.
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;

-- Dump completed on 2018-01-21 14:32:01
-- source: db/delta/25-epg_claims.sql
-- mybatis-migrations delta: add per-claim EPG quality counters to the live
-- and daily claim tables. Statements above --//@UNDO are the "up" migration;
-- statements below it are the rollback.
--
ALTER TABLE `media_claims` ADD `no_epg` int not null default 0;
ALTER TABLE `media_claims` ADD `wrong_epg` int not null default 0;
ALTER TABLE `daily_media_claims` ADD `no_epg` int not null default 0;
ALTER TABLE `daily_media_claims` ADD `wrong_epg` int not null default 0;

--//@UNDO

-- Rollback: drop the four counters added above.
ALTER TABLE `media_claims` DROP `no_epg`;
ALTER TABLE `media_claims` DROP `wrong_epg`;
ALTER TABLE `daily_media_claims` DROP `no_epg`;
ALTER TABLE `daily_media_claims` DROP `wrong_epg`;
--
-- Bootstrap script for the gh1 URL-shortener database (PostgreSQL, run via psql).
-- Fix applied: the GRANT on schema public used to run in the maintenance
-- database *before* `gh1` existed, so it granted on the wrong database's
-- schema. It now runs after connecting to the freshly created database.

-- Create the role only if it does not exist yet (CREATE USER has no IF NOT EXISTS).
DO $body$
BEGIN
    IF NOT EXISTS (SELECT * FROM pg_catalog.pg_user WHERE usename = 'gh1') THEN
        CREATE USER gh1 WITH PASSWORD '<PASSWORD>';
    END IF;
END
$body$;

-- Recreate the database from scratch, owned by the application role.
DROP DATABASE IF EXISTS gh1;
CREATE DATABASE gh1
    WITH OWNER gh1
    TEMPLATE template0
    ENCODING 'UTF8'
    TABLESPACE pg_default
    LC_COLLATE 'en_US.utf8'
    LC_CTYPE 'en_US.utf8'
    CONNECTION LIMIT -1;

-- Switch into the new database, then grant schema access there.
\c gh1
GRANT usage ON SCHEMA public TO gh1;

-- Shortened-URL registry: one row per target URL, addressed by short `code`.
CREATE TABLE websites (
    id          serial not null primary key,
    title       varchar(250) not null,
    long_url    varchar(1000) not null,
    code        varchar(10) not null,          -- the short-link token
    created_at  date default current_date,
    last_access date default current_date,
    hits        integer default 0,             -- access counter
    is_visible  boolean not null default true,
    tags        varchar(100)[] default '{}',
    -- NOTE(review): constraint names `long_url`/`code` shadow the column
    -- names; kept for compatibility with existing migrations.
    constraint long_url unique(long_url),
    constraint code unique(code)
);
-- source: sql/booru_settings.sql
-- Per-guild image-board (booru) configuration, keyed by Discord guild id.
-- NOTE(review): unquoted camelCase identifiers fold to lowercase in
-- PostgreSQL (nsfwServer -> nsfwserver); confirm callers query them unquoted.
CREATE TABLE IF NOT EXISTS booru_settings (
    guild_id bigint PRIMARY KEY,
    tags text[] DEFAULT '{}',                -- default search tags
    sites text[] DEFAULT '{}',               -- enabled booru sites
    nsfwServer boolean DEFAULT FALSE,
    minScore integer DEFAULT null,           -- no score filter by default
    topicEnable boolean DEFAULT FALSE,
    disableNextImage boolean DEFAULT FALSE
);
-- List every book with its author and publisher.
-- Fix applied: all three selected columns were named `name`, producing an
-- ambiguous result set; explicit aliases disambiguate them for consumers.
SELECT
    authors.name    AS author_name,
    books.name      AS book_name,
    publishers.name AS publisher_name
FROM books
INNER JOIN authors
    ON books.author_id = authors.id
INNER JOIN publishers
    ON books.publisher_id = publishers.id;
-- file:privileges.sql ln:519 expect:true
-- Parser-test fixture (the header line above is a machine-read directive —
-- do not edit it): defines a custom infix operator over a domain type;
-- expected to parse/succeed. Intentionally unterminated, as extracted.
CREATE OPERATOR !+! (PROCEDURE = int4pl, LEFTARG = testdomain1, RIGHTARG = testdomain1)
-- source: kevinhillinger/openhack-serverless
-- Azure Stream Analytics job queries (ASA dialect: TumblingWindow appears in
-- GROUP BY; INTO names an output sink, FROM names an input stream).

-- 5-minute average sentiment per product, for the Power BI sentiment dashboard.
SELECT
    productId,
    productName,
    ROUND(AVG(sentimentScore), 2) as [averageSentimentScore],
    COUNT(*) AS [count]
INTO [powerbi-sentiment]
FROM [productsentiment]
GROUP BY productId, productName, TumblingWindow(minute, 5)

-- 5-minute sales totals per product from the distributor channel.
SELECT
    productId,
    productName,
    source,
    ROUND(SUM(purchaseTotal), 2) as [totalSales],
    COUNT(*) AS [count]
INTO [powerbi-distributor]
FROM [productpurchases]
WHERE source = 'distributor'
GROUP BY productId, productName, source, TumblingWindow(minute, 5)

-- 5-minute sales totals per product from the point-of-sale channel.
SELECT
    productId,
    productName,
    source,
    ROUND(SUM(purchaseTotal), 2) as [totalSales],
    COUNT(*) AS [count]
INTO [powerbi-pos]
FROM [productpurchases]
WHERE source = 'pos'
GROUP BY productId, productName, source, TumblingWindow(minute, 5)
-- source: danchris/DatabaseSystemsProject sql/triggers.sql
-- Triggers maintaining running totals in MySQL session variables.
-- Fix applied: an unset session variable is NULL, and NULL + x stays NULL,
-- so the counters silently never accumulated unless the session initialized
-- them first; IFNULL(..., 0) makes the triggers self-initializing.

-- Running sum of payment amounts inserted in this session (@tPA).
CREATE TRIGGER totalPaymentAmount
BEFORE INSERT ON Payment_Transaction
FOR EACH ROW
SET @tPA = IFNULL(@tPA, 0) + NEW.Payment_Amount;

-- Count of customers deleted in this session (@tDC).
CREATE TRIGGER totalDeletedCustomer
BEFORE DELETE ON Customer
FOR EACH ROW
SET @tDC = IFNULL(@tDC, 0) + 1;
-- ------------------------------------------------------------
-- ciudad
-- ------------------------------------------------------------
-- Fixes applied: the original separator lines were bare runs of dashes,
-- which MySQL does NOT treat as comments ("--" must be followed by
-- whitespace), and the final INSERT was missing its terminating semicolon.

CREATE TABLE `reservas`.`ciudad` (
    `ciudad` VARCHAR(25) NOT NULL,
    PRIMARY KEY (`ciudad`)
) ENGINE = InnoDB;

-- ------------------------------------------------------------
-- reservas
-- ------------------------------------------------------------
-- One row per round-trip booking; origin/destination reference `ciudad`.

CREATE TABLE `reservas`.`reservas` (
    `nro_reserva` BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
    `fecha_salida` DATE NOT NULL,
    `fecha_regreso` DATE NOT NULL,
    `ciudad_origen` VARCHAR(25) NOT NULL,
    `ciudad_destino` VARCHAR(25) NOT NULL,
    `nro_documento_pasajero` BIGINT UNSIGNED NOT NULL,
    `pasajero` VARCHAR(35) NOT NULL,
    `nro_vuelo_salida` BIGINT UNSIGNED NOT NULL,
    `nro_vuelo_regreso` BIGINT UNSIGNED NOT NULL,
    `hotel_reservado` VARCHAR(35) NOT NULL,
    PRIMARY KEY (`nro_reserva`),
    CONSTRAINT `fk_reservas_ciudad_origen`
        FOREIGN KEY `fk_reservas_ciudad_origen` (`ciudad_origen`)
        REFERENCES `ciudad` (`ciudad`)
        ON DELETE RESTRICT ON UPDATE CASCADE,
    CONSTRAINT `fk_reservas_ciudad_destino`
        FOREIGN KEY `fk_reservas_ciudad_destino` (`ciudad_destino`)
        REFERENCES `ciudad` (`ciudad`)
        ON DELETE RESTRICT ON UPDATE CASCADE
) ENGINE = InnoDB;

-- ------------------------------------------------------------
-- carga de datos (seed data)
-- ------------------------------------------------------------
-- NOTE(review): 'BU<NAME>' is a redaction placeholder from the source
-- repository (presumably 'BUENOS AIRES') — restore the real value before use.

INSERT INTO ciudad (ciudad) VALUES
    ('BU<NAME>'),
    ('CORDOBA'),
    ('NEUQUEN'),
    ('SALTA'),
    ('SANTA FE');
# --- Created by Ebean DDL
# To stop Ebean DDL generation, remove this comment and start using Evolutions
# Play evolutions file: the "!Ups"/"!Downs" markers below are parsed by the
# framework — apply/rollback sections, respectively.

# --- !Ups

create table image (
  id                        bigint not null,
  constraint pk_image primary key (id))
;

create table product (
  id                        bigint not null,
  name                      varchar(255),
  description               varchar(255),
  shine                     float,
  price                     float,
  rarity                    integer,
  color                     varchar(255),
  face                      integer,
  image_id                  bigint,
  constraint pk_product primary key (id))
;

create table review (
  id                        integer not null,
  constraint pk_review primary key (id))
;

create sequence image_seq;

create sequence users_id_seq;

create sequence review_seq;

alter table product add constraint fk_product_image_1 foreign key (image_id) references image (id);
create index ix_product_image_1 on product (image_id);

# --- !Downs

drop table if exists image cascade;

drop table if exists product cascade;

drop table if exists review cascade;

drop sequence if exists image_seq;

drop sequence if exists users_id_seq;

drop sequence if exists review_seq;
-- Copyright © 2018 <NAME>
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
--     http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.

-- HugSQL query file (SQLite dialect — INSERT OR REPLACE). The ":name"/":doc"
-- lines are HugSQL directives; do not edit their format.

-- :name target-karma :? :1
-- :doc Get the target's karma from the workspace.
SELECT karma FROM karma
WHERE workspace_id = :workspace_id AND target = :target

-- :name top-karma :? :*
-- :doc Get the top karma from the workspace.
SELECT target, karma FROM karma
WHERE workspace_id = :workspace_id
ORDER BY karma DESC
LIMIT :n

-- :name- increment-karma :! :n
-- :doc Increment the karma of a target.
-- NOTE(review): the INSERT..SELECT form inserts nothing when the target row
-- does not exist yet, so the first increment for a new target is a no-op —
-- presumably the caller seeds the row via set-karma first; confirm.
INSERT OR REPLACE INTO karma (workspace_id, target, karma)
SELECT workspace_id, target, karma + :karma FROM karma
WHERE workspace_id = :workspace_id AND target = :target

-- :name- decrement-karma :! :n
-- :doc Decrement the karma of a target.
-- NOTE(review): same missing-row caveat as increment-karma above.
INSERT OR REPLACE INTO karma (workspace_id, target, karma)
SELECT workspace_id, target, karma - :karma FROM karma
WHERE workspace_id = :workspace_id AND target = :target

-- :name- set-karma :! :n
-- :doc Set the karma of a target.
INSERT OR REPLACE INTO karma (workspace_id, target, karma)
VALUES (:workspace_id, :target, :karma)
-- Migration: add external identifiers and role metadata for collaborators.

-- Stable external identifier for a data submitter (ORCID recommended for individuals).
ALTER TABLE `collaborators`
    ADD COLUMN `external_id` varchar(255) NULL
    COMMENT 'An identifier for the data submitter. If that submitter is an individual, ORCID identifiers are recommended.'
    AFTER `phone`;

-- Free-form role(s) of a collaborator on a specific dataset.
ALTER TABLE `datasetcollaborators`
    ADD COLUMN `collaborator_roles` varchar(255) NULL
    COMMENT 'Type of contribution of the person to the investigation (e.g. data submitter; author; corresponding author)'
    AFTER `collaborator_id`;
-- source: webignorant/chipcore
/*
Navicat MySQL Data Transfer

Source Server         : localhost_mysql
Source Server Version : 50553
Source Host           : localhost:3306
Source Database       : chipcore_community

Target Server Type    : MYSQL
Target Server Version : 50553
File Encoding         : 65001

Date: 2017-12-28 00:39:22
*/

-- Schema dump for a social/community site: applications, friend links,
-- diaries, friends, media (images/music), messages, moods, comments,
-- visitors, and user profile tables. Column COMMENT literals are in Chinese
-- (they are data, left untouched); English summaries added per table below.

SET FOREIGN_KEY_CHECKS=0;

-- ----------------------------
-- Table structure for common_application
-- (registry of site "apps": name, url, logo, sort order, visibility, group)
-- ----------------------------
DROP TABLE IF EXISTS `common_application`;
CREATE TABLE `common_application` (
  `application_id` smallint(6) unsigned NOT NULL AUTO_INCREMENT COMMENT '编号',
  `displayorder` tinyint(3) NOT NULL DEFAULT '0' COMMENT '显示顺序',
  `name` varchar(100) NOT NULL COMMENT '应用名称',
  `url` varchar(255) NOT NULL COMMENT '应用地址',
  `description` mediumtext NOT NULL COMMENT '应用说明',
  `logo` varchar(255) DEFAULT NULL COMMENT '应用logo',
  `ifshow` smallint(1) NOT NULL DEFAULT '1' COMMENT '是否显示',
  `type` tinyint(3) NOT NULL DEFAULT '0' COMMENT '分组',
  PRIMARY KEY (`application_id`)
) ENGINE=MyISAM AUTO_INCREMENT=9 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of common_application
-- ----------------------------
INSERT INTO `common_application` VALUES ('4', '0', '照片', 'pictrue.php', '照片', 'public/image/icon/image.png', '1', '0');
INSERT INTO `common_application` VALUES ('1', '1', '心情', 'record.php', '心情记录', 'public/image/icon/mood.png', '1', '0');
INSERT INTO `common_application` VALUES ('2', '2', '日记', 'diary.php', '编写日记', 'public/image/icon/diary.png', '1', '0');
INSERT INTO `common_application` VALUES ('3', '3', '文章', 'article.php', '编写文章', 'public/image/icon/article.png', '0', '0');
INSERT INTO `common_application` VALUES ('5', '5', '音乐', 'music.php', '音乐', 'public/image/icon/music.png', '1', '0');
INSERT INTO `common_application` VALUES ('6', '6', '视频', 'video.php', '视频', 'public/image/icon/video.png', '0', '0');
INSERT INTO `common_application` VALUES ('7', '7', '群组', 'group.php', '群组', 'public/image/icon/groups.png', '0', '0');
INSERT INTO `common_application` VALUES ('8', '8', '圈子', 'circle.php', '圈子', 'public/image/icon/circle.png', '0', '0');

-- ----------------------------
-- Table structure for common_friendlink
-- (external friend/partner links shown on the site)
-- ----------------------------
DROP TABLE IF EXISTS `common_friendlink`;
CREATE TABLE `common_friendlink` (
  `friendlink_id` smallint(6) unsigned NOT NULL AUTO_INCREMENT COMMENT '编号',
  `displayorder` tinyint(3) NOT NULL DEFAULT '0' COMMENT '显示顺序',
  `name` varchar(100) NOT NULL COMMENT '站点名称',
  `url` varchar(255) NOT NULL COMMENT '站点地址',
  `description` mediumtext NOT NULL COMMENT '站点说明',
  `logo` varchar(255) DEFAULT NULL COMMENT '站点logo',
  `ifshow` smallint(1) NOT NULL DEFAULT '1' COMMENT '是否显示',
  `type` tinyint(3) NOT NULL DEFAULT '0' COMMENT '分组',
  PRIMARY KEY (`friendlink_id`)
) ENGINE=MyISAM AUTO_INCREMENT=4 DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of common_friendlink
-- ----------------------------
INSERT INTO `common_friendlink` VALUES ('1', '0', '百度一下', 'http://www.baidu.com', '百度中国', 'public/image/logo/baidu.jpg', '1', '0');
INSERT INTO `common_friendlink` VALUES ('2', '0', '谷歌搜索', 'http://www.google.com.hk', '谷歌香港', 'public/image/logo/google.jpg', '1', '0');

-- ----------------------------
-- Table structure for diary_info
-- (user diary entries: title, body, author, visibility, share/comment counts)
-- ----------------------------
DROP TABLE IF EXISTS `diary_info`;
CREATE TABLE `diary_info` (
  `diaryID` int(10) NOT NULL AUTO_INCREMENT COMMENT '日记编号',
  `title` varchar(40) DEFAULT NULL COMMENT '文章标题',
  `content` varchar(16000) DEFAULT NULL COMMENT '文章内容',
  `userID` int(10) DEFAULT NULL COMMENT '发表人ID',
  `addTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '发表时间',
  `typeID` int(3) DEFAULT '1' COMMENT '分类编号',
  `callPurview` varchar(1) NOT NULL COMMENT '访问权限 0不允许 1允许全部人 2允许好友',
  `forwardingNumber` int(10) DEFAULT NULL COMMENT '转发次数',
  `comments` int(10) DEFAULT NULL COMMENT '评论次数',
  PRIMARY KEY (`diaryID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of diary_info
-- ----------------------------

-- ----------------------------
-- Table structure for friendgroup
-- (per-user named friend groups)
-- ----------------------------
DROP TABLE IF EXISTS `friendgroup`;
CREATE TABLE `friendgroup` (
  `id` int(10) NOT NULL AUTO_INCREMENT COMMENT '表编号',
  `groupID` int(10) NOT NULL COMMENT '分组编号 1:我的好友',
  `userID` int(10) NOT NULL COMMENT '用户编号 FK UserInfo(userID)',
  `groupName` varchar(20) NOT NULL COMMENT '分组名称',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of friendgroup
-- ----------------------------

-- ----------------------------
-- Table structure for friends
-- (friendship edges; FKs documented in comments only — MyISAM cannot enforce them)
-- ----------------------------
DROP TABLE IF EXISTS `friends`;
CREATE TABLE `friends` (
  `id` int(10) NOT NULL AUTO_INCREMENT COMMENT '表编号',
  `friendID` int(10) NOT NULL COMMENT '好友编号 PK.FK UserInfo(UserID)',
  `userID` int(10) NOT NULL COMMENT '用户编号 PK.FK UserInfo(UserID)',
  `groupID` int(10) NOT NULL DEFAULT '1' COMMENT '好友组号',
  `friendComment` varchar(50) DEFAULT NULL COMMENT '好友备注',
  PRIMARY KEY (`id`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of friends
-- ----------------------------

-- ----------------------------
-- Table structure for image_info
-- (uploaded photos; NOTE(review): UNIQUE KEY on specialID limits an album
-- to a single image — presumably unintended; confirm against application code)
-- ----------------------------
DROP TABLE IF EXISTS `image_info`;
CREATE TABLE `image_info` (
  `imageID` int(10) NOT NULL AUTO_INCREMENT COMMENT '照片编号 PK Identity(1,1)',
  `imageName` varchar(50) NOT NULL COMMENT '照片名称 规则 Image+ I++',
  `userID` int(10) DEFAULT NULL COMMENT '拥有者编号',
  `addTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `specialID` int(10) DEFAULT NULL COMMENT '专辑编号',
  `imagePath` varchar(100) NOT NULL COMMENT '照片地址',
  `imageRemark` varchar(100) DEFAULT NULL COMMENT '照片描述',
  PRIMARY KEY (`imageID`),
  UNIQUE KEY `specialID` (`specialID`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of image_info
-- ----------------------------

-- ----------------------------
-- Table structure for message_short
-- (private messages between users, with ignore flag and two-sided delete status)
-- ----------------------------
DROP TABLE IF EXISTS `message_short`;
CREATE TABLE `message_short` (
  `messageID` int(10) NOT NULL AUTO_INCREMENT COMMENT '消息编号 PK identity(1,1)',
  `sendUserID` int(10) NOT NULL COMMENT '发送方编号 FK UserInfo(UserID)',
  `receiveUserID` int(10) NOT NULL COMMENT '接收方编号 FK UserInfo(UserID)',
  `sendTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '发送时间',
  `message` varchar(500) NOT NULL COMMENT '消息内容',
  `isIgnored` int(1) DEFAULT NULL COMMENT '是否忽略 1为忽略 0 为未忽略',
  `status` int(11) DEFAULT NULL COMMENT '消息状态 0为接收方已清除 1为发送方已清除 2为都已经清除',
  PRIMARY KEY (`messageID`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of message_short
-- ----------------------------

-- ----------------------------
-- Table structure for music_info
-- (uploaded music; same single-item-per-album UNIQUE KEY caveat as image_info)
-- ----------------------------
DROP TABLE IF EXISTS `music_info`;
CREATE TABLE `music_info` (
  `musicID` int(10) NOT NULL AUTO_INCREMENT COMMENT '音乐编号 PK Identity(1,1)',
  `musicName` varchar(50) NOT NULL COMMENT '音乐名称 规则 music+ I++',
  `userID` int(10) DEFAULT NULL COMMENT '拥有者编号',
  `addTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
  `specialID` int(10) DEFAULT NULL COMMENT '专辑编号',
  `musicPath` varchar(100) NOT NULL COMMENT '音乐地址',
  `musicRemark` varchar(100) DEFAULT NULL COMMENT '音乐描述',
  PRIMARY KEY (`musicID`),
  UNIQUE KEY `specialID` (`specialID`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of music_info
-- ----------------------------

-- ----------------------------
-- Table structure for record_info
-- (short "mood" status posts with an emotion code)
-- ----------------------------
DROP TABLE IF EXISTS `record_info`;
CREATE TABLE `record_info` (
  `recordID` int(10) NOT NULL AUTO_INCREMENT COMMENT '心情编号',
  `emotion` int(10) NOT NULL COMMENT '目前情感 1开心 2伤心 3郁闷 4愤怒',
  `content` varchar(50) NOT NULL COMMENT '内容',
  `userID` int(10) NOT NULL COMMENT '发表人编号',
  `addTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '发表时间',
  PRIMARY KEY (`recordID`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of record_info
-- ----------------------------

-- ----------------------------
-- Table structure for share_comment
-- (comments on any shared item; shareType discriminates the target table)
-- ----------------------------
DROP TABLE IF EXISTS `share_comment`;
CREATE TABLE `share_comment` (
  `commentID` int(10) NOT NULL AUTO_INCREMENT COMMENT '评论编号',
  `shareType` int(10) NOT NULL COMMENT '分享类型 1心情 2日记 3文章 4照片 5音乐 6视频 7文件',
  `shareUserID` int(10) NOT NULL COMMENT '分享者编号',
  `reviewersID` int(10) NOT NULL COMMENT '评论者编号',
  `shareID` int(10) NOT NULL COMMENT '被评论的分享编号',
  `content` varchar(200) NOT NULL COMMENT '评论内容',
  `addTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '评论时间',
  PRIMARY KEY (`commentID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of share_comment
-- ----------------------------

-- ----------------------------
-- Table structure for space_visitor
-- (visit counters per visitor per personal space)
-- ----------------------------
DROP TABLE IF EXISTS `space_visitor`;
CREATE TABLE `space_visitor` (
  `id` int(10) NOT NULL AUTO_INCREMENT COMMENT '编号',
  `visitorID` int(10) NOT NULL COMMENT '访客编号',
  `userID` int(10) NOT NULL COMMENT '用户编号',
  `spaceID` int(10) NOT NULL COMMENT '空间编号',
  `count` int(20) NOT NULL COMMENT '访问次数',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of space_visitor
-- ----------------------------

-- ----------------------------
-- Table structure for user_custom
-- (per-user site customization: theme and privacy)
-- ----------------------------
DROP TABLE IF EXISTS `user_custom`;
CREATE TABLE `user_custom` (
  `userID` int(10) NOT NULL COMMENT '用户编号',
  `theme` varchar(50) NOT NULL DEFAULT 'public/theme/Default.css' COMMENT '主题设置',
  `priateSet` int(5) NOT NULL DEFAULT '0' COMMENT '隐私设置',
  PRIMARY KEY (`userID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='用户自定义网站表';

-- ----------------------------
-- Records of user_custom
-- ----------------------------

-- ----------------------------
-- Table structure for user_dynamic
-- (activity feed: action type code plus a display string for the object)
-- ----------------------------
DROP TABLE IF EXISTS `user_dynamic`;
CREATE TABLE `user_dynamic` (
  `dynamicID` int(20) NOT NULL AUTO_INCREMENT COMMENT '动态编号',
  `userID` int(10) NOT NULL COMMENT '用户编号',
  `actionType` int(2) NOT NULL COMMENT '用户动作: 1分享心情 2分享日记 3分享文章 4分享照片 5分享音乐 6分享视频 7分享文件 8添加好友 9添加关注 10新人报道',
  `actionObject` varchar(200) NOT NULL COMMENT '动作对象 用户之间存用户名 分享之间存标题',
  `actionTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '动作时间',
  PRIMARY KEY (`dynamicID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of user_dynamic
-- ----------------------------

-- ----------------------------
-- Table structure for user_info
-- (core account + profile table; password stored in `password` —
-- NOTE(review): hashing scheme not visible in this dump, confirm in app code)
-- ----------------------------
DROP TABLE IF EXISTS `user_info`;
CREATE TABLE `user_info` (
  `userID` int(10) NOT NULL AUTO_INCREMENT COMMENT '用户编号 PK Identity(1,1)',
  `nickName` varchar(20) DEFAULT NULL COMMENT '昵称',
  `password` varchar(50) NOT NULL COMMENT '用户密码',
  `regTime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '注册时间',
  `lastTimeOnline` varchar(15) DEFAULT NULL COMMENT '上次登录时间',
  `statue` int(2) DEFAULT NULL COMMENT '状态 1为登录 0为下线 3正常 3为冻结',
  `photo` varchar(100) DEFAULT 'public/image/common/default_Avatar.png' COMMENT '头像地址',
  `priateSet` varchar(5) DEFAULT NULL COMMENT '隐私设置',
  `realName` varchar(20) NOT NULL COMMENT '真实姓名',
  `sex` varchar(5) NOT NULL COMMENT '性别',
  `birthday` datetime NOT NULL COMMENT '生日',
  `bloodType` varchar(2) DEFAULT NULL COMMENT '血型',
  `about` varchar(100) DEFAULT NULL COMMENT '简介',
  `status` int(3) DEFAULT NULL COMMENT '目前身份 1为学生 2为工作者 3为其他',
  `location` varchar(20) NOT NULL COMMENT '居住地址',
  `homeCity` varchar(20) DEFAULT NULL COMMENT '家乡',
  `email` varchar(25) NOT NULL COMMENT '电子邮件',
  `QQ` varchar(12) DEFAULT NULL COMMENT 'QQ',
  `MSN` varchar(30) DEFAULT NULL COMMENT 'MSN',
  PRIMARY KEY (`userID`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of user_info
-- ----------------------------

-- ----------------------------
-- Table structure for user_schoolinfo
-- (education history; NOTE(review): schoolID comment says Identity(1,1) but
-- the column has DEFAULT '0' and no AUTO_INCREMENT — confirm intent)
-- ----------------------------
DROP TABLE IF EXISTS `user_schoolinfo`;
CREATE TABLE `user_schoolinfo` (
  `schoolID` int(10) NOT NULL DEFAULT '0' COMMENT '编号 PK Identity(1,1)',
  `userID` int(10) DEFAULT NULL COMMENT '用户编号 FK UserInfo(UserID)',
  `schoolType` varchar(10) DEFAULT NULL COMMENT '毕业学校类型',
  `schoolName` varchar(20) DEFAULT NULL COMMENT '学校名称',
  `grade` varchar(20) DEFAULT NULL COMMENT '院系',
  `classes` varchar(20) DEFAULT NULL COMMENT '班级',
  `admissionTime` datetime DEFAULT NULL COMMENT '入学时间',
  PRIMARY KEY (`schoolID`),
  UNIQUE KEY `userID` (`userID`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of user_schoolinfo
-- ----------------------------

-- ----------------------------
-- Table structure for user_space
-- (personal space settings: cover image, access rights, flash background)
-- ----------------------------
DROP TABLE IF EXISTS `user_space`;
CREATE TABLE `user_space` (
  `spaceID` int(10) NOT NULL COMMENT '空间编号',
  `userID` int(10) NOT NULL COMMENT '用户编号',
  `frontCover` varchar(100) DEFAULT 'public/image/common/default_frontCover.png' COMMENT '空间封面',
  `callPurview` varchar(1) NOT NULL COMMENT '访问权限 0不允许 1允许全部人 2允许好友',
  `flashbg` varchar(50) NOT NULL DEFAULT 'public/flash/space/Pulley.swf' COMMENT 'Flash背景风格',
  PRIMARY KEY (`spaceID`),
  UNIQUE KEY `userID` (`userID`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of user_space
-- ----------------------------

-- ----------------------------
-- Table structure for user_worksinfo
-- (employment history, one row per user due to UNIQUE KEY on userID)
-- ----------------------------
DROP TABLE IF EXISTS `user_worksinfo`;
CREATE TABLE `user_worksinfo` (
  `workID` int(10) NOT NULL AUTO_INCREMENT COMMENT '工作编号 PK Identity(1,1)',
  `userID` int(10) NOT NULL COMMENT '用户编号 FK UserInfo(UserID)',
  `companyName` varchar(30) DEFAULT NULL COMMENT '工作单位',
  `departmentName` varchar(20) DEFAULT NULL COMMENT '部门名称',
  `joinTime` datetime DEFAULT NULL COMMENT '加入时间',
  `departureTime` datetime DEFAULT NULL COMMENT '离职时间',
  PRIMARY KEY (`workID`),
  UNIQUE KEY `userID` (`userID`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8;

-- ----------------------------
-- Records of user_worksinfo
-- ----------------------------
-- db_patches: record this patch as applied.
INSERT INTO `system_patches` (`issue`, `created`) VALUES ('POCOR-3728', NOW());

-- alerts table: point the existing Attendance alert at its renamed
-- background-process class.
UPDATE `alerts` SET `process_name` = 'AlertAttendance' WHERE `name` = 'Attendance';

-- Register the new staff-related alert processes.
INSERT INTO `alerts` (`name`, `process_name`, `process_id`, `modified_user_id`, `modified`, `created_user_id`, `created`) VALUES
('LicenseValidity', 'AlertLicenseValidity', NULL, NULL, NULL, '1', NOW()),
('RetirementWarning', 'AlertRetirementWarning', NULL, NULL, NULL, '1', NOW()),
('StaffEmployment', 'AlertStaffEmployment', NULL, NULL, NULL, '1', NOW()),
('StaffLeave', 'AlertStaffLeave', NULL, NULL, NULL, '1', NOW()),
('StaffType', 'AlertStaffType', NULL, NULL, NULL, '1', NOW());

-- alert_rules table: widen `threshold` so rules can hold non-numeric values.
-- MODIFY is the idiomatic form when the column keeps its name; CHANGE is
-- meant for renames and forces the name to be repeated.
ALTER TABLE `alert_rules` MODIFY `threshold` VARCHAR(100) NOT NULL;
-- gbif_vernacularnames: one row per (taxon, vernacular name) pairing,
-- flattening the GBIF backbone taxonomy against its vernacular names.
-- IF EXISTS keeps this script idempotent on a fresh database.
DROP MATERIALIZED VIEW IF EXISTS gbif_vernacularnames;

CREATE MATERIALIZED VIEW gbif_vernacularnames AS
SELECT
    t.taxonID,
    t.scientificName,
    t.scientificNameAuthorship,
    t.canonicalName,
    t.genericName,
    t.specificEpithet,
    t.infraspecificEpithet,
    t.taxonRank,
    t.nameAccordingTo,
    t.kingdom,
    t.phylum,
    t.class,
    t._order,
    t.family,
    t.genus,
    v.vernacularName,
    v.language,
    v.countryCode,
    v.source
FROM gbif_taxonomy_vernacularname v
-- Explicit join replaces the implicit comma join; same rows, clearer intent.
INNER JOIN gbif_taxonomy_taxon t
    ON v.taxonID = t.taxonID;
-- No good reason to drop the btree_gist extension again
-- Append-only table smoke test: rows inserted inside an aborted
-- transaction must be rolled back; a plain insert afterwards must be
-- visible.
drop table if exists ao1;
create table ao1(a int, b int) with (appendonly=true);

-- test insert and select from within same transaction
begin;
insert into ao1 select i, i from generate_series(1, 1000) i;
select * from ao1;
abort;

-- The aborted rows are gone; insert again outside a transaction and read back.
insert into ao1 select i, i from generate_series(1, 1000) i;
select * from ao1;
-- @optimizer_mode on
-- Regression test for MPP-23955: a window function computed in a derived
-- table over a CTE that is also referenced directly in the outer FROM
-- list, then grouped by the window result.
-- NOTE(review): the comma joins are intentional -- they reproduce the
-- original planner input, so do not rewrite them as ANSI joins.
with t (a,b,d) as (select 1,2,1 from pg_class limit 1)
SELECT cup.*
FROM t, (
    SELECT sum(t.b) OVER(PARTITION BY t.a ) AS e
    FROM (select 1 as a, 2 as b from pg_class limit 1) foo, t
) as cup
GROUP BY cup.e;
--
-- Flattened view over the location terminology definitions: the country
-- label plus each subnational level (1-5) pivoted into its own column,
-- for terminologies targeting the Location module.  Levels that are not
-- defined come back as NULL via the LEFT JOINs.
--
DROP VIEW IF EXISTS location_terminologies_v;
CREATE VIEW location_terminologies_v AS
SELECT
    t0.description AS country,
    t1.description AS subnational1,
    t2.description AS subnational2,
    t3.description AS subnational3,
    t4.description AS subnational4,
    t5.description AS subnational5
FROM terminologies t0
LEFT JOIN terminologies t1
    ON t1.target_module = t0.target_module AND t1.name = 'SUBNATIONAL1'
LEFT JOIN terminologies t2
    ON t2.target_module = t0.target_module AND t2.name = 'SUBNATIONAL2'
LEFT JOIN terminologies t3
    ON t3.target_module = t0.target_module AND t3.name = 'SUBNATIONAL3'
LEFT JOIN terminologies t4
    ON t4.target_module = t0.target_module AND t4.name = 'SUBNATIONAL4'
LEFT JOIN terminologies t5
    ON t5.target_module = t0.target_module AND t5.name = 'SUBNATIONAL5'
WHERE t0.target_module = 'Location'
  AND t0.name = 'COUNTRY';