Instruction
stringlengths
14
778
input_code
stringlengths
0
4.24k
output_code
stringlengths
1
5.44k
Add sql to create tables
-- Create syntax for TABLE 'article_tags' CREATE TABLE `article_tags` ( `id` int(11) unsigned NOT NULL AUTO_INCREMENT, `article_id` int(11) unsigned NOT NULL, `tag_id` int(11) unsigned NOT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- Create syntax for TABLE 'articles' CREATE TABLE `articles` ( `id` int(11) unsigned NOT NULL AUTO_INCREMENT, `title` text NOT NULL, `body` text NOT NULL, `created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `user_id` int(11) unsigned NOT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=12 DEFAULT CHARSET=utf8; -- Create syntax for TABLE 'tags' CREATE TABLE `tags` ( `id` int(11) unsigned NOT NULL AUTO_INCREMENT, `name` varchar(255) NOT NULL DEFAULT '', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- Create syntax for TABLE 'users' CREATE TABLE `users` ( `id` int(11) unsigned NOT NULL AUTO_INCREMENT, `name` varchar(256) NOT NULL DEFAULT '', `gender` int(1) unsigned DEFAULT NULL, `attr1` int(11) DEFAULT NULL, `attr2` int(11) DEFAULT NULL, `attr3` int(11) DEFAULT NULL, `attr4` int(11) DEFAULT NULL, `attr5` int(11) DEFAULT NULL, `attr6` int(11) DEFAULT NULL, `attr7` int(11) DEFAULT NULL, `attr8` int(11) DEFAULT NULL, `attr9` int(11) DEFAULT NULL, `attr10` int(11) DEFAULT NULL, `attr11` int(11) DEFAULT NULL, `attr12` int(11) DEFAULT NULL, `attr13` int(11) DEFAULT NULL, `attr14` int(11) DEFAULT NULL, `attr15` int(11) DEFAULT NULL, `attr16` int(11) DEFAULT NULL, `attr17` int(11) DEFAULT NULL, `attr18` int(11) DEFAULT NULL, `attr19` int(11) DEFAULT NULL, `attr20` int(11) DEFAULT NULL, `attr21` int(11) DEFAULT NULL, `attr22` int(11) DEFAULT NULL, `attr23` int(11) DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
Add script to create baseline table
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ -- Allows us to use gist in this database CREATE EXTENSION IF NOT EXISTS btree_gist; -- create baseline table CREATE TABLE baseline ( meter_id INT NOT NULL REFERENCES meters (id), apply_range tsrange NOT NULL, calc_range tsrange NOT NULL, baseline_value DOUBLE PRECISION NOT NULL, PRIMARY KEY (meter_id, apply_range), EXCLUDE USING GIST ( meter_id WITH =, apply_range WITH && ) );
Add SQL constraint to ensure unicity of emails
DO $$ BEGIN BEGIN CREATE UNIQUE INDEX sys_user_unique_email ON sys_user(email) WHERE email != ''; END; BEGIN CREATE UNIQUE INDEX cd_profiles_unique_email ON cd_profiles(email) WHERE email != ''; END; END; $$
Add SA table for VIRTA-JTP/Latausraportit/Duplikaatit
IF NOT EXISTS ( select * from INFORMATION_SCHEMA.TABLES where TABLE_SCHEMA='sa' and TABLE_NAME='sa_virta_jtp_latausraportit_duplikaatit' ) BEGIN CREATE TABLE sa.sa_virta_jtp_latausraportit_duplikaatit( id bigint IDENTITY(1,1) NOT NULL, duplikaattiID int null, organisaatiotunnus nvarchar(50) null, organisaationimi nvarchar(max) null, kuvaus nvarchar(max) null, julkaisunorgtunnus nvarchar(50) null, duplikaattijulkaisunorgtunnus nvarchar(50) null, julkaisunnimi nvarchar(max) null, duplikaattijulkaisunnimi nvarchar(max) null, julkaisutyyppikoodi varchar(4) null, tila int null, tarkistusID int null, luontipaivamaara varchar(50) null, ilmoitusvuosi int null, julkaisuvuosi int null, loadtime datetime2(4) NOT NULL, source nvarchar(255) NULL, username nvarchar(128) NOT NULL, CONSTRAINT PK__sa_virta_jtp_latausraportit_duplikaatit PRIMARY KEY CLUSTERED (id ASC) ) ; ALTER TABLE sa.sa_virta_jtp_latausraportit_duplikaatit ADD CONSTRAINT DF__sa_virta_jtp_latausraportit_duplikaatit__loadtime DEFAULT (getdate()) FOR loadtime ; ALTER TABLE sa.sa_virta_jtp_latausraportit_duplikaatit ADD CONSTRAINT DF__sa_virta_jtp_latausraportit_duplikaatit__username DEFAULT (suser_name()) FOR username ; END
Change mapstats path on mapfull-template
UPDATE portti_bundle SET startup='{ "bundlename" : "mapfull" "metadata" : { "Import-Bundle" : { "core-base" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "core-map" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "sandbox-base" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "sandbox-map" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "event-base" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "event-map" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "event-map-layer" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "request-base" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "request-map" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "request-map-layer" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "service-base" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "service-map" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "domain" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "mapmodule-plugin" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "mapwfs2" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "mapwmts" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "mapstats" : { "bundlePath" : "/Oskari/packages/mapping/ol2/" }, "mapuserlayers" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "mapanalysis" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "mapmyplaces" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "oskariui" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "mapfull" : { "bundlePath" : "/Oskari/packages/framework/bundle/" }, "ui-components": { "bundlePath": "/Oskari/packages/framework/bundle/" } } } }' where name = 'mapfull';
Handle unique constraint change during migration in 'issues/4197'
UPDATE IDN_OAUTH2_ACCESS_TOKEN SET TOKEN_SCOPE_HASH=(dbms_random.string('L', 32)) WHERE TOKEN_ID IN ( SELECT DISTINCT a.TOKEN_ID FROM IDN_OAUTH2_ACCESS_TOKEN a JOIN ( SELECT CONSUMER_KEY_ID, AUTHZ_USER, USER_DOMAIN, TENANT_ID, TOKEN_SCOPE_HASH, USER_TYPE, TOKEN_STATE, COUNT(*) FROM IDN_OAUTH2_ACCESS_TOKEN GROUP BY CONSUMER_KEY_ID, AUTHZ_USER, USER_DOMAIN, TENANT_ID, TOKEN_SCOPE_HASH, USER_TYPE, TOKEN_STATE HAVING COUNT(*)>1 AND TOKEN_STATE='ACTIVE' ) b ON a.CONSUMER_KEY_ID = b.CONSUMER_KEY_ID AND a.AUTHZ_USER = b.AUTHZ_USER AND a.USER_DOMAIN = b.USER_DOMAIN AND a.TENANT_ID = b.TENANT_ID AND a.TOKEN_SCOPE_HASH = b.TOKEN_SCOPE_HASH AND a.USER_TYPE = b.USER_TYPE AND a.TOKEN_STATE = b.TOKEN_STATE ) / ALTER TABLE IDN_OAUTH2_ACCESS_TOKEN DROP CONSTRAINT CON_APP_KEY / ALTER TABLE IDN_OAUTH2_ACCESS_TOKEN ADD CONSTRAINT CON_APP_KEY UNIQUE (CONSUMER_KEY_ID,AUTHZ_USER,TENANT_ID,USER_DOMAIN,USER_TYPE,TOKEN_SCOPE_HASH, TOKEN_STATE,TOKEN_STATE_ID) /
Add How old your backups script
/* Author: Tim Ford Original link: http://sqlmag.com/database-backup-and-recovery/how-old-are-your-backups */ WITH full_backups AS ( SELECT ROW_NUMBER() OVER(PARTITION BY BS.database_name, BS.type ORDER BY BS.database_name ASC, BS.backup_finish_date DESC ) AS [Row Number], D.name AS [database_name], BS.backup_set_id, BS.type AS backup_type, BS.backup_finish_date, D.recovery_model_desc FROM master.sys.databases AS D LEFT JOIN msdb.dbo.[backupset] AS BS ON D.name = BS.database_name /* FILTERING OPTIONS*/ --WHERE BS.[type] = '<backup_type,,D>' --WHERE BS.[name] = '<database_name,,Foo_DB>' ) SELECT FB.database_name, CASE FB.backup_type WHEN 'D' THEN 'Data' WHEN 'I' THEN 'Differential' WHEN 'L' THEN 'Transaction Log' END AS backup_type_desc, FB.recovery_model_desc, FB.backup_finish_date, BMF.physical_device_name, DATEDIFF(hour, FB.backup_finish_date, GETDATE()) AS backup_hours, DATEDIFF(minute, FB.backup_finish_date, GETDATE()) AS backup_minutes FROM full_backups FB LEFT JOIN msdb.dbo.[backupset] BS ON FB.backup_set_id = BS.backup_set_id LEFT JOIN msdb.dbo.backupmediafamily BMF ON BS.media_set_id = BMF.media_set_id WHERE FB.[Row Number] = 1 ORDER BY FB.database_name, FB.[Row Number], FB.backup_type;
Add sql script to delete /Old Internal Statistics folder and children
-- Delete the child folders of '/Old Internal Statistics' -- Additional sub-select needed to get round MySQL's inability to copy with having the table being -- deleted from in an immediate sub-select DELETE FROM FOLDER WHERE fk_folder_id IN ( SELECT id FROM ( SELECT * FROM FOLDER WHERE name = 'Old Internal Statistics' AND fk_folder_id IS NULL ) AS sub ); -- Delete the folder '/Old Internal Statistics' DELETE FROM FOLDER WHERE name = 'Old Internal Statistics' AND fk_folder_id IS NULL;
Add migration removing 'jetpack' tag
-- Uncomment this locally if you can't create files anymore. -- On dev/stage/prod, this isn't necessary as the column already has a default value. -- The column will be removed entirely in a future push. -- ALTER TABLE `files` MODIFY COLUMN `requires_chrome` tinyint(1) NOT NULL DEFAULT 0; DELETE `users_tags_addons` FROM `users_tags_addons` INNER JOIN `tags` ON ( `users_tags_addons`.`tag_id` = `tags`.`id` ) WHERE `tags`.`tag_text` = 'jetpack'; DELETE FROM `tags` WHERE `tag_text` = 'jetpack';
Create event_images table for storing info about images
-- table to store the available images for an event create table event_images ( id int auto_increment primary key, event_id int not null, type varchar(30) not null, url varchar(255) not null, width int, height int, index event_image_type (event_id, type) ); -- populate it also with our old-style images insert into event_images (event_id, type, url, width, height) select ID, "small", CONCAT("https://joind.in/inc/img/event_icons/", event_icon), 90, 90 from events where (event_icon is not null and event_icon <> ""); INSERT INTO patch_history SET patch_number = 65;
Use DELETE-INSERT instead of REPLACE for SQL updates
INSERT INTO version_db_world (`sql_rev`) VALUES ('1527682153565076000'); REPLACE INTO `smart_scripts` (`entryorguid`,`source_type`,`id`,`link`,`event_type`,`event_phase_mask`,`event_chance`,`event_flags`,`event_param1`,`event_param2`,`event_param3`,`event_param4`,`action_type`,`action_param1`,`action_param2`,`action_param3`,`action_param4`,`action_param5`,`action_param6`,`target_type`,`target_param1`,`target_param2`,`target_param3`,`target_x`,`target_y`,`target_z`,`target_o`,`comment`) VALUES (177385,1,0,1,70,0,100,0,2,0,0,0,9,0,0,0,0,0,0,14,43178,175610,0,0,0,0,0,'Torch - On Gossip Hello - Set Gameobject State'),(177385,1,1,0,61,0,100,0,0,0,0,0,106,16,0,0,0,0,0,20,176944,200,0,0,0,0,0,'Torch - On Gossip Hello - Remove Gameobject Flag');
INSERT INTO version_db_world (`sql_rev`) VALUES ('1527682153565076000'); DELETE FROM `smart_scripts` WHERE `entryorguid` = 177385; INSERT INTO `smart_scripts` (`entryorguid`,`source_type`,`id`,`link`,`event_type`,`event_phase_mask`,`event_chance`,`event_flags`,`event_param1`,`event_param2`,`event_param3`,`event_param4`,`action_type`,`action_param1`,`action_param2`,`action_param3`,`action_param4`,`action_param5`,`action_param6`,`target_type`,`target_param1`,`target_param2`,`target_param3`,`target_x`,`target_y`,`target_z`,`target_o`,`comment`) VALUES (177385,1,0,1,70,0,100,0,2,0,0,0,9,0,0,0,0,0,0,14,43178,175610,0,0,0,0,0,'Torch - On Gossip Hello - Set Gameobject State'), (177385,1,1,0,61,0,100,0,0,0,0,0,106,16,0,0,0,0,0,20,176944,200,0,0,0,0,0,'Torch - On Gossip Hello - Remove Gameobject Flag');
Make first version of clone_schema unusable.
-- http://wiki.postgresql.org/wiki/Clone_schema CREATE OR REPLACE FUNCTION clone_schema(source_schema text, dest_schema text) RETURNS void AS $$ DECLARE object text; buffer text; default_ text; column_ text; trigger_ text; view_ record; BEGIN EXECUTE 'CREATE SCHEMA ' || dest_schema ; -- TODO: Find a way to make this sequence's owner is the correct column. -- Not a huge priority. FOR object IN SELECT sequence_name::text FROM information_schema.SEQUENCES WHERE sequence_schema = source_schema LOOP EXECUTE 'CREATE SEQUENCE ' || dest_schema || '.' || object; END LOOP; -- Iterate through all tables in the source schema. FOR object IN SELECT table_name::text FROM information_schema.TABLES WHERE table_schema = source_schema AND table_type = 'BASE TABLE' LOOP -- Create a table with the relevant data in the new schema. buffer := dest_schema || '.' || object; EXECUTE 'CREATE TABLE ' || buffer || ' (LIKE ' || source_schema || '.' || object || ' INCLUDING CONSTRAINTS INCLUDING INDEXES INCLUDING DEFAULTS)'; -- Ensure any default values that refer to the old schema now refer to the new schema. FOR column_, default_ IN SELECT column_name::text, replace(column_default::text, source_schema, dest_schema) FROM information_schema.COLUMNS WHERE table_schema = dest_schema AND table_name = object AND column_default LIKE 'nextval(%' || source_schema || '%::regclass)' LOOP EXECUTE 'ALTER TABLE ' || buffer || ' ALTER COLUMN ' || column_ || ' SET DEFAULT ' || default_; END LOOP; -- Ensure any triggers also come across... -- We can do the same trick we did for the default values. FOR trigger_ IN SELECT replace(pg_catalog.pg_get_triggerdef(oid, false)::text, source_schema, dest_schema) FROM pg_catalog.pg_trigger WHERE tgrelid = (source_schema || '.' || object)::regclass::pg_catalog.oid AND NOT tgisinternal LOOP EXECUTE trigger_; END LOOP; END LOOP; -- Finally, repeat for any views. 
FOR view_ IN SELECT viewname, definition FROM pg_views WHERE schemaname = source_schema LOOP EXECUTE 'CREATE VIEW ' || dest_schema || '.' || quote_ident(view_.viewname) || ' AS ' || replace(view_.definition, source_schema || '.', dest_schema || '.'); END LOOP; END; $$ LANGUAGE plpgsql VOLATILE;
-- http://wiki.postgresql.org/wiki/Clone_schema CREATE OR REPLACE FUNCTION clone_schema(source_schema text, dest_schema text) RETURNS void AS $$ DECLARE BEGIN RAISE EXCEPTION 'This function is no longer supported. Please upgrade.'; END; $$ LANGUAGE plpgsql VOLATILE;
Use DELETE-INSERT instead of REPLACE for SQL updates
INSERT INTO version_db_world (`sql_rev`) VALUES ('1527682153565076000'); REPLACE INTO `smart_scripts` (`entryorguid`,`source_type`,`id`,`link`,`event_type`,`event_phase_mask`,`event_chance`,`event_flags`,`event_param1`,`event_param2`,`event_param3`,`event_param4`,`action_type`,`action_param1`,`action_param2`,`action_param3`,`action_param4`,`action_param5`,`action_param6`,`target_type`,`target_param1`,`target_param2`,`target_param3`,`target_x`,`target_y`,`target_z`,`target_o`,`comment`) VALUES (177385,1,0,1,70,0,100,0,2,0,0,0,9,0,0,0,0,0,0,14,43178,175610,0,0,0,0,0,'Torch - On Gossip Hello - Set Gameobject State'),(177385,1,1,0,61,0,100,0,0,0,0,0,106,16,0,0,0,0,0,20,176944,200,0,0,0,0,0,'Torch - On Gossip Hello - Remove Gameobject Flag');
INSERT INTO version_db_world (`sql_rev`) VALUES ('1527682153565076000'); DELETE FROM `smart_scripts` WHERE `entryorguid` = 177385; INSERT INTO `smart_scripts` (`entryorguid`,`source_type`,`id`,`link`,`event_type`,`event_phase_mask`,`event_chance`,`event_flags`,`event_param1`,`event_param2`,`event_param3`,`event_param4`,`action_type`,`action_param1`,`action_param2`,`action_param3`,`action_param4`,`action_param5`,`action_param6`,`target_type`,`target_param1`,`target_param2`,`target_param3`,`target_x`,`target_y`,`target_z`,`target_o`,`comment`) VALUES (177385,1,0,1,70,0,100,0,2,0,0,0,9,0,0,0,0,0,0,14,43178,175610,0,0,0,0,0,'Torch - On Gossip Hello - Set Gameobject State'), (177385,1,1,0,61,0,100,0,0,0,0,0,106,16,0,0,0,0,0,20,176944,200,0,0,0,0,0,'Torch - On Gossip Hello - Remove Gameobject Flag');
Add owner_app to owner_id unique index
DROP TYPE IF EXISTS app; CREATE TYPE app AS ENUM ('telegram', 'vk'); CREATE TABLE IF NOT EXISTS rooms ( id SERIAL PRIMARY KEY, owner_id BIGINT NOT NULL, owner_app app NOT NULL, guest_id BIGINT, guest_app app, active BOOLEAN NOT NULL DEFAULT TRUE, created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW() CHECK (owner_id <> guest_id OR owner_app <> guest_app) CHECK ((guest_id IS NOT NULL AND guest_app IS NOT NULL) OR (guest_id IS NULL AND guest_app IS NULL)) ); CREATE UNIQUE INDEX ON rooms (owner_id) WHERE active = 'true';
DROP TYPE IF EXISTS app; CREATE TYPE app AS ENUM ('telegram', 'vk'); CREATE TABLE IF NOT EXISTS rooms ( id SERIAL PRIMARY KEY, owner_id BIGINT NOT NULL, owner_app app NOT NULL, guest_id BIGINT, guest_app app, active BOOLEAN NOT NULL DEFAULT TRUE, created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW() CHECK (owner_id <> guest_id OR owner_app <> guest_app) CHECK ((guest_id IS NOT NULL AND guest_app IS NOT NULL) OR (guest_id IS NULL AND guest_app IS NULL)) ); CREATE UNIQUE INDEX ON rooms (owner_id, owner_app) WHERE active = 'true';
Add db index to improve speed of get_open_games
create table games ( id char(48) primary key, data jsonb );
create table games ( id char(48) primary key, data jsonb ); create index idx_get_open_games on games((data->>'active'), (data->>'ended_at'), (data->>'created_at'));
Add dropping of commodprices table to SQL migration scripts
DROP TABLE IF EXISTS `obj_load_min_level`; DROP TABLE IF EXISTS `shopgoldtmp`; DROP TABLE IF EXISTS `talenhistory`;
DROP TABLE IF EXISTS `obj_load_min_level`; DROP TABLE IF EXISTS `shopgoldtmp`; DROP TABLE IF EXISTS `talenhistory`; DROP TABLE IF EXISTS `commodprices`;
Fix SQL for database setup.
use memegendb; create table MemeInfo if not exists ( id bigint auto_increment primary key , filename varchar(256) unique, upvotes bigint, downvotes bigint, createdby varchar(256), createdon datetime ); create table MemeTags if not exists ( tagid bigint auto_increment primary key, memeid bigint, index (memeid), foreign key (memeid) refenreces MemeInfo(id), tag varchar(64) );
use memegendb; create table if not exists MemeInfo ( meme_id bigint auto_increment, filename varchar(256) unique, upvotes bigint, downvotes bigint, createdby varchar(256), createdon datetime, primary key (meme_id) ); create table if not exists MemeTags ( tag_id bigint auto_increment primary key, meme_id bigint, tag varchar(64), index using hash (meme_id), foreign key (meme_id) references MemeInfo(meme_id) on delete cascade on update cascade );
Drop function from public and create in pg_catalog
DROP FUNCTION master_update_shard_statistics(shard_id bigint); CREATE FUNCTION pg_catalog.master_update_shard_statistics(shard_id bigint) RETURNS bigint LANGUAGE C STRICT AS 'MODULE_PATHNAME', $$master_update_shard_statistics$$; COMMENT ON FUNCTION master_update_shard_statistics(bigint) IS 'updates shard statistics and returns the updated shard size';
DROP FUNCTION IF EXISTS public.master_update_shard_statistics(shard_id bigint); CREATE OR REPLACE FUNCTION pg_catalog.master_update_shard_statistics(shard_id bigint) RETURNS bigint LANGUAGE C STRICT AS 'MODULE_PATHNAME', $$master_update_shard_statistics$$; COMMENT ON FUNCTION master_update_shard_statistics(bigint) IS 'updates shard statistics and returns the updated shard size';
Include educators with no LASID in educators export file
use x2data SELECT 'state_id', 'local_id', 'full_name', 'staff_type', 'homeroom', 'status', 'login_name', 'school_local_id' UNION ALL SELECT STF_ID_STATE, STF_ID_LOCAL, stf_name_view, STF_STAFF_TYPE, STF_HOMEROOM, STF_STATUS, USR_LOGIN_NAME, SKL_SCHOOL_ID FROM staff INNER JOIN school ON staff.STF_SKL_OID=school.SKL_OID INNER JOIN person ON staff.STF_PSN_OID=person.PSN_OID INNER JOIN user_info ON person.PSN_OID=user_info.USR_PSN_OID WHERE STF_STATUS = 'Active' AND STF_ID_LOCAL IS NOT NULL INTO OUTFILE "E:/_BACKUP_MYSQL/CodeForAmerica/educators_export.txt" FIELDS TERMINATED BY ',' ENCLOSED BY '"' LINES TERMINATED BY '\r\n'
use x2data SELECT 'state_id', 'local_id', 'full_name', 'staff_type', 'homeroom', 'status', 'login_name', 'school_local_id' UNION ALL SELECT STF_ID_STATE, STF_ID_LOCAL, stf_name_view, STF_STAFF_TYPE, STF_HOMEROOM, STF_STATUS, USR_LOGIN_NAME, SKL_SCHOOL_ID FROM staff INNER JOIN school ON staff.STF_SKL_OID=school.SKL_OID INNER JOIN person ON staff.STF_PSN_OID=person.PSN_OID INNER JOIN user_info ON person.PSN_OID=user_info.USR_PSN_OID WHERE STF_STATUS = 'Active' INTO OUTFILE "E:/_BACKUP_MYSQL/CodeForAmerica/educators_export.txt" FIELDS TERMINATED BY ',' ENCLOSED BY '"' LINES TERMINATED BY '\r\n'
Change tag order to match infoQuestion.json order
-- Returns a JSON array describing the tags for question question_id. CREATE OR REPLACE FUNCTION tags_for_question (question_id bigint) RETURNS JSONB AS $$ SELECT JSONB_AGG(JSONB_BUILD_OBJECT( 'name',tag.name, 'id',tag.id, 'color',tag.color, 'number', tag.number ) ORDER BY tag.number, tag.id) FROM tags AS tag JOIN question_tags AS qt ON (qt.tag_id = tag.id AND qt.question_id = tags_for_question.question_id) $$ LANGUAGE SQL STABLE;
-- Returns a JSON array describing the tags for question question_id. CREATE OR REPLACE FUNCTION tags_for_question (question_id bigint) RETURNS JSONB AS $$ SELECT JSONB_AGG(JSONB_BUILD_OBJECT( 'name',tag.name, 'id',tag.id, 'color',tag.color ) ORDER BY qt.number, tag.id) FROM tags AS tag JOIN question_tags AS qt ON (qt.tag_id = tag.id AND qt.question_id = tags_for_question.question_id) $$ LANGUAGE SQL STABLE;
Remove unmatched migration down column
ALTER TABLE app_info DROP COLUMN IF EXISTS certificate; ALTER TABLE module_config DROP COLUMN IF EXISTS certificate CASCADE, DROP COLUMN IF EXISTS private_key CASCADE; DROP TABLE IF EXISTS app_certificates;
ALTER TABLE module_config DROP COLUMN IF EXISTS certificate CASCADE, DROP COLUMN IF EXISTS private_key CASCADE; DROP TABLE IF EXISTS app_certificates;
Add update timestamp to retrieved columns
DROP PROCEDURE IF EXISTS selectOrganization; DELIMITER $$ CREATE PROCEDURE selectOrganization(p_orgid INT UNSIGNED) BEGIN SELECT org.orgid, org.org_name, usr.person_name, usr.email, usr.email_is_verified, org.user_id, org.org_website, org.money_url, org.mission, org.abbreviated_name, org.customer_notice, org.customer_contact, org.admin_contact, org.active_ind FROM org INNER JOIN app_user usr on usr.user_id = org.user_id WHERE org.orgid = p_orgid; END $$ DELIMITER ; GRANT EXECUTE ON PROCEDURE selectOrganization TO movemusr@localhost;
DROP PROCEDURE IF EXISTS selectOrganization; DELIMITER $$ CREATE PROCEDURE selectOrganization(p_orgid INT UNSIGNED) BEGIN SELECT org.orgid, org.org_name, usr.person_name, usr.email, usr.email_is_verified, org.user_id, org.update_timestamp, org.org_website, org.money_url, org.mission, org.abbreviated_name, org.customer_notice, org.customer_contact, org.admin_contact, org.active_ind FROM org INNER JOIN app_user usr on usr.user_id = org.user_id WHERE org.orgid = p_orgid; END $$ DELIMITER ; GRANT EXECUTE ON PROCEDURE selectOrganization TO movemusr@localhost;
Remove postgres 9.6 specific settings
SET statement_timeout = 0; SET lock_timeout = 0; SET idle_in_transaction_session_timeout = 0; SET client_encoding = 'UTF8'; SET standard_conforming_strings = on; SET check_function_bodies = false; SET client_min_messages = warning; SET row_security = off; -- -- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: - -- CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; -- -- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: - -- COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'; -- -- PostgreSQL database dump complete -- SET search_path TO "$user", public;
SET statement_timeout = 0; SET lock_timeout = 0; SET client_encoding = 'UTF8'; SET standard_conforming_strings = on; SET check_function_bodies = false; SET client_min_messages = warning; -- -- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: - -- CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; -- -- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: - -- COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'; -- -- PostgreSQL database dump complete -- SET search_path TO "$user", public;
Fix for live_agents with not enough closer_campaign space.
# 07/07/2009 ALTER TABLE osdial_xfer_log ADD uniqueid VARCHAR(20) NOT NULL default ''; CREATE INDEX uniqueid ON osdial_xfer_log (uniqueid); ALTER TABLE osdial_agent_log ADD uniqueid VARCHAR(20) NOT NULL default ''; CREATE INDEX uniqueid ON osdial_agent_log (uniqueid); ALTER TABLE recording_log ADD uniqueid VARCHAR(20) NOT NULL default ''; CREATE INDEX uniqueid ON recording_log (uniqueid); UPDATE system_settings SET version='2.1.4.028'; UPDATE system_settings SET last_update_check=DATE_SUB(NOW(), INTERVAL 1 DAY);
# 07/07/2009 ALTER TABLE osdial_xfer_log ADD uniqueid VARCHAR(20) NOT NULL default ''; CREATE INDEX uniqueid ON osdial_xfer_log (uniqueid); ALTER TABLE osdial_agent_log ADD uniqueid VARCHAR(20) NOT NULL default ''; CREATE INDEX uniqueid ON osdial_agent_log (uniqueid); ALTER TABLE recording_log ADD uniqueid VARCHAR(20) NOT NULL default ''; CREATE INDEX uniqueid ON recording_log (uniqueid); ALTER TABLE osdial_live_agents MODIFY closer_campaigns TEXT default ''; UPDATE system_settings SET version='2.1.4.028'; UPDATE system_settings SET last_update_check=DATE_SUB(NOW(), INTERVAL 1 DAY);
Add new table to upgrade SQL file
CREATE TABLE IF NOT EXISTS `uploadfile` ( `id` int(11) NOT NULL auto_increment, `filename` varchar(255) NOT NULL, `filesize` int(11) NOT NULL DEFAULT '0', `sha1sum` varchar(40) NOT NULL, PRIMARY KEY(`id`), KEY `sha1sum` (`sha1sum`) ); CREATE TABLE IF NOT EXISTS `build2uploadfile` ( `fileid` bigint(11) NOT NULL, `buildid` bigint(11) NOT NULL, KEY `fileid` (`fileid`), KEY `buildid` (`buildid`) );
CREATE TABLE IF NOT EXISTS `uploadfile` ( `id` int(11) NOT NULL auto_increment, `filename` varchar(255) NOT NULL, `filesize` int(11) NOT NULL DEFAULT '0', `sha1sum` varchar(40) NOT NULL, PRIMARY KEY(`id`), KEY `sha1sum` (`sha1sum`) ); CREATE TABLE IF NOT EXISTS `build2uploadfile` ( `fileid` bigint(11) NOT NULL, `buildid` bigint(11) NOT NULL, KEY `fileid` (`fileid`), KEY `buildid` (`buildid`) ); CREATE TABLE IF NOT EXISTS client_jobschedule2submission ( scheduleid bigint(20) NOT NULL, submissionid bigint(11) NOT NULL, PRIMARY KEY (`submissionid`), UNIQUE KEY `scheduleid` (`scheduleid`) );
Add references oldstyle for easy understandings
-- Script: Tabelas -- Learn: http://www.postgresql.org/docs/current/static/sql-createtable.html CREATE TABLE usuarios ( username VARCHAR(30) PRIMARY KEY, password VARCHAR(30) ); CREATE TABLE batalha_usuario ( username VARCHAR(30) PRIMARY KEY FOREIGN KEY, password VARCHAR(30) PRIMARY KEY FOREIGN KEY ); CREATE TABLE batalha_usuario ( id INT PRIMARY KEY, turnos INT, vencedor VARCHAR(30) FOREIGN KEY );
-- Script: Tabelas -- Learn: http://www.postgresql.org/docs/current/static/sql-createtable.html CREATE TABLE USUARIOS ( username VARCHAR(30) PRIMARY KEY, password VARCHAR(30) ); CREATE TABLE BATALHA_USUARIO ( username VARCHAR(30), batalha_id INT, PRIMARY KEY (username, password), FOREIGN KEY batalha_id REFERENCES BATALHAS(id) FOREIGN KEY username REFERENCES USUARIOS(username) ); CREATE TABLE BATALHAS ( id INT, turnos INT, vencedor VARCHAR(30), PRIMARY KEY (id), FOREIGN KEY vencedor REFERENCES USUARIOS(username) ); delete from USUARIOS; delete from BATALHA_USUARIO; delete from BATALHAS;
Add comment (really,doing this mostly to kick off a new build).
DROP TABLE IF EXISTS LOAN_MONITORING; /* The table LOAN_MONITORING will contain the default behavior of the group loan with individual monitoring on the MFI - Configuration This information can not be configured through the UI, so it does need to be configured in the script */ CREATE TABLE LOAN_MONITORING ( LOAN_MONITORING_ID INTEGER NOT NULL, INDIVIDUAL_MONITORING_FLAG SMALLINT NOT NULL, CREATED_BY SMALLINT, CREATED_DATE DATE, UPDATED_BY SMALLINT, UPDATED_DATE DATE, VERSION_NO INTEGER NOT NULL, PRIMARY KEY(LOAN_MONITORING_ID) ) ENGINE=InnoDB CHARACTER SET utf8; -- 0 by default: Don't Allow group loan account with individual monitoring -- 1: Allow group loan account with individual monitoring INSERT INTO LOAN_MONITORING (LOAN_MONITORING_ID,INDIVIDUAL_MONITORING_FLAG,CREATED_BY,CREATED_DATE,UPDATED_BY,UPDATED_DATE,VERSION_NO) VALUES(1,0,NULL,NULL,NULL,NULL,1); UPDATE DATABASE_VERSION SET DATABASE_VERSION = 148 WHERE DATABASE_VERSION = 147;
DROP TABLE IF EXISTS LOAN_MONITORING; /* The table LOAN_MONITORING will contain the default behavior of the group loan with individual monitoring on the MFI - Configuration This information can not be configured through the UI, so it does need to be configured in the script */ CREATE TABLE LOAN_MONITORING ( LOAN_MONITORING_ID INTEGER NOT NULL, INDIVIDUAL_MONITORING_FLAG SMALLINT NOT NULL, CREATED_BY SMALLINT, CREATED_DATE DATE, UPDATED_BY SMALLINT, UPDATED_DATE DATE, VERSION_NO INTEGER NOT NULL, PRIMARY KEY(LOAN_MONITORING_ID) ) ENGINE=InnoDB CHARACTER SET utf8; -- 0: by default: Don't Allow group loan account with individual monitoring -- 1: Allow group loan account with individual monitoring INSERT INTO LOAN_MONITORING (LOAN_MONITORING_ID,INDIVIDUAL_MONITORING_FLAG,CREATED_BY,CREATED_DATE,UPDATED_BY,UPDATED_DATE,VERSION_NO) VALUES(1,0,NULL,NULL,NULL,NULL,1); UPDATE DATABASE_VERSION SET DATABASE_VERSION = 148 WHERE DATABASE_VERSION = 147;
Add comments on how to create a db and a db user
-- Type of spelling -- a classic reference table CREATE TABLE spelling ( id TINYINT PRIMARY KEY, name VARCHAR(256) NOT NULL ); CREATE TABLE words ( word VARCHAR(256) NOT NULL, hyphenation VARCHAR(256) NOT NULL, spelling TINYINT NOT NULL, PRIMARY KEY(word, spelling), FOREIGN KEY(spelling) REFERENCES spelling(id) ); INSERT INTO spelling VALUES (0, 'Orthographische Konferenz von 1901'), (1, 'Deutsche Rechtschreibreform von 1996');
-- Create a db -- CREATE DATABASE hyphenation CHARACTER SET utf8 COLLATE utf8_bin; -- Create a user -- CREATE USER 'hyphenation'@'localhost' IDENTIFIED BY 'sekret'; -- GRANT ALL ON hyphenation.* TO 'hyphenation'@'localhost'; -- FLUSH PRIVILEGES; -- Type of spelling -- a classic reference table CREATE TABLE spelling ( id TINYINT PRIMARY KEY, name VARCHAR(256) NOT NULL ); CREATE TABLE words ( word VARCHAR(256) NOT NULL, hyphenation VARCHAR(256) NOT NULL, spelling TINYINT NOT NULL, PRIMARY KEY(word, spelling), FOREIGN KEY(spelling) REFERENCES spelling(id) ); INSERT INTO spelling VALUES (0, 'Orthographische Konferenz von 1901'), (1, 'Deutsche Rechtschreibreform von 1996');
Create roll and grant priveleges
CREATE DATABASE FAM_SARP; \c fam_sarp CREATE TABLE participants ( subject smallserial NOT NULL, email character varying(40) UNIQUE NOT NULL, sessions_completed smallint NOT NULL, rng_seed integer NOT NULL, PRIMARY KEY(subject) ); CREATE TABLE stimuli ( id smallserial NOT NULL, target character varying(12) NOT NULL, semantic_cue_1 character varying(12) NOT NULL, semantic_cue_2 character varying(12) NOT NULL, semantic_cue_3 character varying(12) NOT NULL, episodic_cue character varying(12) NOT NULL, PRIMARY KEY(target) ); COPY stimuli(target, semantic_cue_1, semantic_cue_2, semantic_cue_3, episodic_cue) FROM 'C:\Users\will\source\FAM_SARP_experiment\db\stimuli_table.csv' DELIMITER ',' CSV HEADER;
CREATE DATABASE FAM_SARP; \c fam_sarp CREATE TABLE participants ( subject smallserial NOT NULL, email character varying(40) UNIQUE NOT NULL, sessions_completed smallint NOT NULL, rng_seed integer NOT NULL, PRIMARY KEY(subject) ); CREATE TABLE stimuli ( id smallserial NOT NULL, target character varying(12) NOT NULL, semantic_cue_1 character varying(12) NOT NULL, semantic_cue_2 character varying(12) NOT NULL, semantic_cue_3 character varying(12) NOT NULL, episodic_cue character varying(12) NOT NULL, PRIMARY KEY(target) ); COPY stimuli(target, semantic_cue_1, semantic_cue_2, semantic_cue_3, episodic_cue) FROM 'C:\Users\will\source\FAM_SARP_experiment\db\stimuli_table.csv' DELIMITER ',' CSV HEADER; CREATE ROLE will LOGIN; GRANT ALL PRIVILEGES ON DATABASE "fam_sarp" TO will; GRANT ALL PRIVILEGES ON SCHEMA public TO will; GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO will; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO will; GRANT ALL PRIVILEGES ON ALL FUNCTIONS IN SCHEMA public TO will;
Remove useless join which gobbled memory
begin; drop table if exists line_joints; create table line_joints ( synth_id varchar(64), objects varchar(64) array, terminal_id int[], area geometry(polygon, 3857) ); insert into line_joints (synth_id, terminal_id, area, objects) select concat('j', nextval('synthetic_objects')), array_agg(s.v) as terminal_id, st_union(t.area), array_agg(t.osm_id) from terminal_sets s join line_terminals t on t.id = s.v join power_line l on l.osm_id = t.osm_id group by s.k having count(*) > 2; insert into power_station (osm_id, power_name, location, area) select synth_id, 'joint', st_centroid(area), area from line_joints; insert into osm_objects (osm_id, objects) select synth_id, source_objects(objects) from line_joints; update power_line l set terminals = minimal_terminals(l.extent, j.area, l.terminals) from line_joints j where l.osm_id = any(j.objects); delete from terminal_intersections where id in ( select id from terminal_intersections i join line_joints j on i.src = any(j.terminal_id) or i.dst = any(j.terminal_id) ); delete from line_terminals where id in ( select unnest(terminal_id) from line_joints ); commit;
begin; drop table if exists line_joints; create table line_joints ( synth_id varchar(64), objects varchar(64) array, terminal_id int[], area geometry(polygon, 3857) ); insert into line_joints (synth_id, terminal_id, area, objects) select concat('j', nextval('synthetic_objects')), array_agg(s.v) as terminal_id, st_union(t.area), array_agg(t.osm_id) from terminal_sets s join line_terminals t on t.id = s.v group by s.k having count(*) > 2; insert into power_station (osm_id, power_name, location, area) select synth_id, 'joint', st_centroid(area), area from line_joints; insert into osm_objects (osm_id, objects) select synth_id, source_objects(objects) from line_joints; update power_line l set terminals = minimal_terminals(l.extent, j.area, l.terminals) from line_joints j where l.osm_id = any(j.objects); delete from terminal_intersections where id in ( select id from terminal_intersections i join line_joints j on i.src = any(j.terminal_id) or i.dst = any(j.terminal_id) ); delete from line_terminals where id in ( select unnest(terminal_id) from line_joints ); commit;
Fix 'dynamic theme' tag migration. The column blacklisted got renamed to denied.
INSERT INTO tags (tag_text, blacklisted, restricted) VALUES ('dynamic theme', 0, 1) ON DUPLICATE KEY UPDATE restricted = 1;
INSERT INTO tags (tag_text, denied, restricted) VALUES ('dynamic theme', 0, 1) ON DUPLICATE KEY UPDATE restricted = 1;
Fix column already exist issue
ALTER TABLE vaccine_distributions ADD COLUMN isNotificationSent Boolean default false;
DO $$ BEGIN BEGIN ALTER TABLE vaccine_distributions ADD COLUMN isNotificationSent Boolean default false; EXCEPTION WHEN duplicate_column THEN RAISE NOTICE 'column isNotificationSent already exists in vaccine_distributions.'; END; END; $$
Add 'used' attribute to DB
CREATE TABLE IF NOT EXISTS tickets ( code BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, firstname VARCHAR(50) NOT NULL, lastname VARCHAR(50) NOT NULL, street VARCHAR(50) NOT NULL, housenumber VARCHAR(5) NOT NULL, postalcode INT(5) UNSIGNED NOT NULL, town VARCHAR(50) NOT NULL, date TIMESTAMP NOT NULL, PRIMARY KEY (code) );
CREATE TABLE IF NOT EXISTS tickets ( code BIGINT UNSIGNED NOT NULL AUTO_INCREMENT, firstname VARCHAR(50) NOT NULL, lastname VARCHAR(50) NOT NULL, street VARCHAR(50) NOT NULL, housenumber VARCHAR(5) NOT NULL, postalcode INT(5) UNSIGNED NOT NULL, town VARCHAR(50) NOT NULL, date TIMESTAMP NOT NULL, used BOOLEAN NOT NULL DEFAULT false, PRIMARY KEY (code) );
Fix table def to autonumber id field.
CREATE USER itest; CREATE DATABASE itest OWNER itest; \c itest; /* Create test tables */ create table users ( id int not null, first_name varchar(80), last_name varchar(80), primary key (id) );
CREATE USER itest; CREATE DATABASE itest OWNER itest; \c itest; CREATE SEQUENCE users_id_sequence; /* Create test tables */ CREATE TABLE users ( id int DEFAULT nextval('users_id_sequence') PRIMARY KEY, first_name varchar(80), last_name varchar(80) );
Add default ReplayGain pref to schema
-- Up ALTER TABLE "media" ADD COLUMN "rgTrackGain" REAL; ALTER TABLE "media" ADD COLUMN "rgTrackPeak" REAL; -- Down
-- Up ALTER TABLE "media" ADD COLUMN "rgTrackGain" REAL; ALTER TABLE "media" ADD COLUMN "rgTrackPeak" REAL; INSERT INTO prefs (key,data) VALUES ('isReplayGainEnabled','false'); -- Down
Remove redundant CREATE TRIGGER calls
CREATE ROLE administrator; CREATE USER administrator_user PASSWORD 'password' IN ROLE administrator; GRANT SELECT, UPDATE, INSERT, DELETE ON TABLE users, contracts, calls TO administrator; CREATE OR REPLACE FUNCTION _check_time() RETURNS TRIGGER AS $$ BEGIN IF (current_user = 'administrator_user') THEN IF NOT (current_time BETWEEN '8:00:00' AND '15:00:00') THEN RAISE EXCEPTION 'Current time is %s, you can work between 8:00 and 15:00', current_time; END IF; END IF; RETURN NULL; END; $$ LANGUAGE plpgsql; CREATE TRIGGER check_time_trigger BEFORE INSERT OR UPDATE OR DELETE ON contracts EXECUTE PROCEDURE _check_time(); CREATE TRIGGER check_time_trigger BEFORE INSERT OR UPDATE OR DELETE ON users EXECUTE PROCEDURE _check_time(); CREATE TRIGGER check_time_trigger BEFORE INSERT OR UPDATE OR DELETE ON calls EXECUTE PROCEDURE _check_time();
CREATE ROLE administrator; CREATE USER administrator_user PASSWORD 'password' IN ROLE administrator; GRANT SELECT, UPDATE, INSERT, DELETE ON TABLE users, contracts, calls TO administrator; CREATE OR REPLACE FUNCTION _check_time() RETURNS TRIGGER AS $$ BEGIN IF (current_user = 'administrator_user') THEN IF NOT (current_time BETWEEN '8:00:00' AND '15:00:00') THEN RAISE EXCEPTION 'Current time is %s, you can work between 8:00 and 15:00', current_time; END IF; END IF; RETURN NULL; END; $$ LANGUAGE plpgsql; CREATE TRIGGER check_time_trigger BEFORE INSERT OR UPDATE OR DELETE ON contracts EXECUTE PROCEDURE _check_time();
Add dummy "paid_out" column to the payments SQL query.
select * from transactions join transaction_types on transaction_types.transaction_type_id=transactions.transaction_type_id where transaction_id in (select transaction_id from transactions join transaction_types on transaction_types.transaction_type_id=transactions.transaction_type_id where account_id=? and transaction_types.transaction_is_wage_payment order by transaction_id desc fetch next %d rows only) order by transaction_id
select t.*, tt.*, false as paid_out from transactions t join transaction_types tt on tt.transaction_type_id=t.transaction_type_id where t.transaction_id in (select transaction_id from transactions join transaction_types on transaction_types.transaction_type_id=transactions.transaction_type_id where account_id=? and transaction_types.transaction_is_wage_payment order by transaction_id desc fetch next %d rows only) order by t.transaction_id
Update equipment status line item table schema
CREATE TABLE equipment_status_line_items( id SERIAL PRIMARY KEY, facilityId INT NOT NULL REFERENCES facilities(id), programId INT NOT NULL REFERENCES programs(id), periodId INT NOT NULL REFERENCES processing_periods(id), operationalStatusId INT NOT NULL REFERENCES equipment_operational_status(id), testCount INT NULL, totalCount INT NULL, daysOutOfUse INT NOT NULL, remarks VARCHAR (200) NULL, createdBy INTEGER, createdDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP, modifiedBy INTEGER, modifiedDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP );
DROP TABLE IF EXISTS equipment_status_line_items; CREATE TABLE equipment_status_line_items( id SERIAL PRIMARY KEY, rnrId INT NOT NULL REFERENCES requisitions(id), code VARCHAR(200) NOT NULL, equipmentName VARCHAR(200) NOT NULL, equipmentCategory VARCHAR(200) NOT NULL, equipmentModel VARCHAR(200) NULL, equipmentSerial VARCHAR(200) NULL, equipmentInventoryId INT NOT NULL, operationalStatusId INT NOT NULL REFERENCES equipment_operational_status(id), testCount INT NULL, totalCount INT NULL, daysOutOfUse INT NOT NULL, remarks VARCHAR (2000) NULL, createdBy INTEGER, createdDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP, modifiedBy INTEGER, modifiedDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP );
Add new layer to polygons
drop table if exists minsk_polygons; create table minsk_polygons as ( with segments as ( select (ST_Dump( ST_Transform( ST_VoronoiPolygons( ST_Collect( ST_SetSRID( ST_MakePoint( ST_X( ST_Transform(point, 3857) )::bigint, ST_Y( ST_Transform(point, 3857) )::bigint ), 3857 ) ) ), 3857 ) )).geom as geom from minsk_points ) select ST_Transform(geom, 4326) as geom, greatest(ceil(duration/300.0), 1) as duration from segments left join minsk_points on ST_Intersects(geom, point) ); drop table if exists minsk_areas; create table minsk_areas as ( select duration, ST_Union(geom) as geom from minsk_polygons group by 1 ); update minsk_areas set geom = (select ST_Union(geom) from minsk_areas where duration >2) where duration = 3; delete from minsk_areas where duration>3;
drop table if exists minsk_polygons; create table minsk_polygons as ( with segments as ( select (ST_Dump( ST_Transform( ST_VoronoiPolygons( ST_Collect( ST_SetSRID( ST_MakePoint( ST_X( ST_Transform(point, 3857) )::bigint, ST_Y( ST_Transform(point, 3857) )::bigint ), 3857 ) ) ), 3857 ) )).geom as geom from minsk_points ) select ST_Transform(geom, 4326) as geom, greatest(ceil(duration/300.0), 1) as duration from segments left join minsk_points on ST_Intersects(geom, point) ); drop table if exists minsk_areas; create table minsk_areas as ( select duration, ST_Union(geom) as geom from minsk_polygons group by 1 ); update minsk_areas set geom = (select ST_Union(geom) from minsk_areas where duration >3) where duration = 4; delete from minsk_areas where duration>4;
Add Collate for user nickname
CREATE EXTENSION IF NOT EXISTS CITEXT; CREATE TABLE IF NOT EXISTS users ( id SERIAL PRIMARY KEY, nickname CITEXT NOT NULL UNIQUE, fullname TEXT NOT NULL, email CITEXT NOT NULL UNIQUE, about TEXT ); CREATE INDEX IF NOT EXISTS idx_users_nickname ON users (nickname);
CREATE EXTENSION IF NOT EXISTS CITEXT; CREATE TABLE IF NOT EXISTS users ( id SERIAL PRIMARY KEY, nickname CITEXT NOT NULL UNIQUE COLLATE UCS_BASIC, fullname TEXT NOT NULL, email CITEXT NOT NULL UNIQUE, about TEXT ); CREATE INDEX IF NOT EXISTS idx_users_nickname ON users (nickname);
Make result enum match ReportLine level strings.
# Table structure for healthcheck output database # Healthcheck running sessions. CREATE TABLE session ( session_id INT(10) UNSIGNED NOT NULL AUTO_INCREMENT, start_time DATETIME, end_time DATETIME, release INT, PRIMARY KEY (session_id), KEY release_idx(release) ); # Individual healthcheck reports CREATE TABLE report ( report_id INT(10) UNSIGNED NOT NULL AUTO_INCREMENT, session_id INT(10) UNSIGNED NOT NULL, species VARCHAR(255), testcase VARCHAR(255), result ENUM("pass", "fail"), text VARCHAR(255), PRIMARY KEY (report_id), KEY session_idx(session_id) ); # Store annotations about healthcheck results CREATE TABLE annotation ( report_id INT(10) UNSIGNED NOT NULL, person VARCHAR(255), action ENUM("ignore", "normal", "flag"), reason ENUM("not relevant", "will be fixed", "healthcheck bug"), comment VARCHAR(255) );
# Table structure for healthcheck output database # Healthcheck running sessions. CREATE TABLE session ( session_id INT(10) UNSIGNED NOT NULL AUTO_INCREMENT, start_time DATETIME, end_time DATETIME, release INT, PRIMARY KEY (session_id), KEY release_idx(release) ); # Individual healthcheck reports CREATE TABLE report ( report_id INT(10) UNSIGNED NOT NULL AUTO_INCREMENT, session_id INT(10) UNSIGNED NOT NULL, species VARCHAR(255), testcase VARCHAR(255), result ENUM("PROBLEM", "CORRECT", "WARNING", "INFO"), text VARCHAR(255), PRIMARY KEY (report_id), KEY session_idx(session_id) ); # Store annotations about healthcheck results CREATE TABLE annotation ( report_id INT(10) UNSIGNED NOT NULL, person VARCHAR(255), action ENUM("ignore", "normal", "flag"), reason ENUM("not relevant", "will be fixed", "healthcheck bug"), comment VARCHAR(255) );
Fix syntax errors in database schema.
create table entities ( id serial primary key, jid text not null unique ); create table nodes ( id serial primary key, node text not null unique, persistent boolean not null default true, deliver_payload boolean not null default true send_last_published_item text not null default 'on_sub' check (send_last_published_item in ('never', 'on_sub')), ); create table affiliations ( id serial primary key, entity_id integer not null references entities on delete cascade, node_id integer not null references nodes on delete cascade, affiliation text not null check (affiliation in ('outcast', 'publisher', 'owner')), unique (entity_id, node_id) ); create table subscriptions ( id serial primary key, entity_id integer not null references entities on delete cascade, resource text, node_id integer not null references nodes on delete cascade, subscription text not null default 'subscribed' check (subscription in ('subscribed', 'pending', 'unconfigured')), unique (entity_id, resource, node_id) ); create table items ( id serial primary key, node_id integer not null references nodes on delete cascade, item text not null, publisher text not null, data text, date timestamp with time zone not null default now(), unique (node_id, item) );
create table entities ( id serial primary key, jid text not null unique ); create table nodes ( id serial primary key, node text not null unique, persistent boolean not null default true, deliver_payload boolean not null default true, send_last_published_item text not null default 'on_sub' check (send_last_published_item in ('never', 'on_sub')) ); create table affiliations ( id serial primary key, entity_id integer not null references entities on delete cascade, node_id integer not null references nodes on delete cascade, affiliation text not null check (affiliation in ('outcast', 'publisher', 'owner')), unique (entity_id, node_id) ); create table subscriptions ( id serial primary key, entity_id integer not null references entities on delete cascade, resource text, node_id integer not null references nodes on delete cascade, subscription text not null default 'subscribed' check (subscription in ('subscribed', 'pending', 'unconfigured')), unique (entity_id, resource, node_id) ); create table items ( id serial primary key, node_id integer not null references nodes on delete cascade, item text not null, publisher text not null, data text, date timestamp with time zone not null default now(), unique (node_id, item) );
Fix up the votes db query so that it gets the winner correctly
select c.id, c.name, count(v.id) as votes from "choice" c left outer join ( select v.* from vote v join "round" r on v.round_id = r.id where r.id = $1 ) as v on v.choice_id = c.id where (c.added_in is null or c.added_in <= $1) and (c.removed_in is null or c.removed_in >= $1) group by c.id order by c.id asc;
select c.id, c.name, r.winning_choice_id = c.id as winner, count(v.id) as votes from choice c join round r on r.id = $1 left outer join vote v on v.choice_id = c.id and v.round_id = $1 where (c.added_in is null or c.added_in <= $1) and (c.removed_in is null or c.removed_in >= $1) group by c.id, r.winning_choice_id order by c.id asc;
Use temp tables for migration.
ALTER TABLE ${ohdsiSchema}.sec_permission ADD COLUMN for_role_id INTEGER; INSERT INTO ${ohdsiSchema}.sec_permission (id, value, for_role_id) SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), REPLACE('vocabulary:%s:concept:*:ancestorAndDescendant:get', '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')), role_id FROM ${ohdsiSchema}.sec_permission sp JOIN ${ohdsiSchema}.sec_role_permission srp on sp.id = srp.permission_id WHERE sp.value LIKE 'source:%:access'; INSERT INTO ${ohdsiSchema}.sec_role_permission (id, role_id, permission_id) SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), sp.for_role_id, sp.id FROM ${ohdsiSchema}.sec_permission sp WHERE sp.for_role_id IS NOT NULL; ALTER TABLE ${ohdsiSchema}.sec_permission DROP COLUMN for_role_id;
CREATE TEMP TABLE temp_migration ( from_perm_id int, new_value character varying(255) ); INSERT INTO temp_migration (from_perm_id, new_value) SELECT sp.id as from_id, REPLACE('vocabulary:%s:concept:*:ancestorAndDescendant:get', '%s', REPLACE(REPLACE(value, 'source:', ''), ':access', '')) as new_value FROM ${ohdsiSchema}.sec_permission sp WHERE sp.value LIKE 'source:%:access'; INSERT INTO ${ohdsiSchema}.sec_permission (id, value) SELECT nextval('${ohdsiSchema}.sec_permission_id_seq'), new_value FROM temp_migration; INSERT INTO ${ohdsiSchema}.sec_role_permission (id,role_id, permission_id) SELECT nextval('${ohdsiSchema}.sec_role_permission_sequence'), srp.role_id, sp.id as permission_id FROM temp_migration m JOIN ${ohdsiSchema}.sec_permission sp on m.new_value = sp.value JOIN ${ohdsiSchema}.sec_role_permission srp on m.from_perm_id = srp.permission_id;
Fix bug with null equivalence that led us to lose the first span.
-- -- Copyright 2020 The Android Open Source Project -- -- Licensed under the Apache License, Version 2.0 (the "License"); -- you may not use this file except in compliance with the License. -- You may obtain a copy of the License at -- -- https://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- See the License for the specific language governing permissions and -- limitations under the License. -- DROP VIEW IF EXISTS {{table_name}}_span; CREATE VIEW {{table_name}}_span AS SELECT ts, LEAD(ts, 1, (SELECT end_ts + 1 FROM trace_bounds)) OVER(ORDER BY ts) - ts AS dur, CAST(value AS INT) AS {{table_name}}_val FROM ( SELECT ts, value, LAG(value) OVER (ORDER BY ts) as lag_value FROM counter c JOIN counter_track t ON t.id = c.track_id WHERE t.type = 'counter_track' AND name = '{{counter_name}}' ) WHERE value != lag_value;
-- -- Copyright 2020 The Android Open Source Project -- -- Licensed under the Apache License, Version 2.0 (the "License"); -- you may not use this file except in compliance with the License. -- You may obtain a copy of the License at -- -- https://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- See the License for the specific language governing permissions and -- limitations under the License. -- DROP VIEW IF EXISTS {{table_name}}_span; CREATE VIEW {{table_name}}_span AS SELECT ts, LEAD(ts, 1, (SELECT end_ts + 1 FROM trace_bounds)) OVER(ORDER BY ts) - ts AS dur, CAST(value AS INT) AS {{table_name}}_val FROM ( SELECT ts, value, LAG(value) OVER (ORDER BY ts) as lag_value FROM counter c JOIN counter_track t ON t.id = c.track_id WHERE t.type = 'counter_track' AND name = '{{counter_name}}' ) WHERE value != lag_value OR lag_value IS NULL;
Add index on (channel, nick) for faster nick: searches.
DROP TABLE IF EXISTS `irclog`; CREATE TABLE `irclog` ( id INT auto_increment, channel VARCHAR(30), nick VARCHAR(40), opcode VARCHAR(20), timestamp INT, line TEXT, PRIMARY KEY(`id`) ) CHARSET=utf8 ENGINE=MyISAM; CREATE INDEX `irclog_channel_timestamp_index` ON `irclog` (channel, timestamp); CREATE INDEX `irclog_channel_opcode_index` ON `irclog` (channel, opcode); CREATE FULLTEXT INDEX `irclog_fulltext_index` ON `irclog` (nick, line);
DROP TABLE IF EXISTS `irclog`; CREATE TABLE `irclog` ( id INT auto_increment, channel VARCHAR(30), nick VARCHAR(40), opcode VARCHAR(20), timestamp INT, line TEXT, PRIMARY KEY(`id`) ) CHARSET=utf8 ENGINE=MyISAM; CREATE INDEX `irclog_channel_timestamp_index` ON `irclog` (channel, timestamp); CREATE INDEX `irclog_channel_opcode_index` ON `irclog` (channel, opcode); CREATE INDEX `irclog_channel_nick_index` ON `irclog` (channel, nick); CREATE FULLTEXT INDEX `irclog_fulltext_index` ON `irclog` (nick, line);
Fix for hierarchy to get the top level.
CREATE OR REPLACE FUNCTION ti.gettaxonhierarchy(_taxonname character varying) RETURNS TABLE(taxonid integer, taxonname character varying, valid boolean, highertaxonid integer) LANGUAGE sql AS $function$ WITH RECURSIVE lowertaxa AS (SELECT txa.taxonid, txa.highertaxonid FROM ndb.taxa AS txa WHERE (txa.taxonname ILIKE _taxonname) UNION ALL SELECT m.taxonid, m.highertaxonid FROM ndb.taxa AS m JOIN lowertaxa ON lowertaxa.highertaxonid = m.taxonid WHERE NOT m.taxonid = m.highertaxonid) SELECT txa.taxonid, txa.taxonname, txa.valid, txa.highertaxonid FROM lowertaxa AS taxa LEFT OUTER JOIN ndb.taxa AS txa ON txa.taxonid = taxa.taxonid $function$
CREATE OR REPLACE FUNCTION ti.gettaxonhierarchy(_taxonname character varying) RETURNS TABLE(taxonid integer, taxonname character varying, valid boolean, highertaxonid integer) LANGUAGE sql AS $function$ WITH RECURSIVE lowertaxa AS (SELECT txa.taxonid, txa.highertaxonid FROM ndb.taxa AS txa WHERE (txa.taxonname ILIKE _taxonname) UNION ALL SELECT m.taxonid, m.highertaxonid FROM ndb.taxa AS m JOIN lowertaxa ON lowertaxa.highertaxonid = m.taxonid WHERE NOT lowertaxa.taxonid = lowertaxa.highertaxonid) SELECT txa.taxonid, txa.taxonname, txa.valid, txa.highertaxonid FROM lowertaxa AS taxa LEFT OUTER JOIN ndb.taxa AS txa ON txa.taxonid = taxa.taxonid $function$
Update documentation for new chat access scopes
CREATE TABLE `pb_bot` ( `id` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT, `name` VARCHAR(64) NOT NULL COMMENT 'i.e. snusbot', `twitch_access_token` VARCHAR(64) NULL DEFAULT NULL COMMENT 'Bot level access-token', `twitch_refresh_token` VARCHAR(64) NULL DEFAULT NULL COMMENT 'Bot level refresh-token', PRIMARY KEY (`id`) ) COMMENT='Store available bot accouns, requires an access token with chat_login scope' COLLATE='utf8mb4_general_ci' ENGINE=InnoDB ;
CREATE TABLE `pb_bot` ( `id` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT, `name` VARCHAR(64) NOT NULL COMMENT 'i.e. snusbot', `twitch_access_token` VARCHAR(64) NULL DEFAULT NULL COMMENT 'Bot level access-token', `twitch_refresh_token` VARCHAR(64) NULL DEFAULT NULL COMMENT 'Bot level refresh-token', PRIMARY KEY (`id`) ) COMMENT='Store available bot accouns, requires an access token with all chat/PubSub scopes' COLLATE='utf8mb4_general_ci' ENGINE=InnoDB ;
Clean up temp precompute tables
vacuum (analyze, verbose);
DROP TABLE load_qa_status; DROP TABLE load_precomputed; DROP TABLE precompute_urs; DROP TABLE precompute_urs_taxid; DROP TABLE precompute_urs_accession; vacuum (analyze, verbose);
Fix substring -> substr in TPCH query set
SELECT cntrycode, count(*) AS numcust, sum(acctbal) AS totacctbal FROM ( SELECT substring(c.phone,1,2) AS cntrycode, c.acctbal FROM "${database}"."${schema}"."${prefix}customer" c WHERE substring(c.phone,1,2) IN ('13', '31', '23', '29', '30', '18', '17') AND c.acctbal > ( SELECT avg(c.acctbal) FROM "${database}"."${schema}"."${prefix}customer" c WHERE c.acctbal > 0.00 AND substring(c.phone,1,2) IN ('13', '31', '23', '29', '30', '18', '17') ) AND NOT EXISTS ( SELECT * FROM "${database}"."${schema}"."${prefix}orders" o WHERE o.custkey = c.custkey ) ) AS custsale GROUP BY cntrycode ORDER BY cntrycode ;
SELECT cntrycode, count(*) AS numcust, sum(acctbal) AS totacctbal FROM ( SELECT substr(c.phone,1,2) AS cntrycode, c.acctbal FROM "${database}"."${schema}"."${prefix}customer" c WHERE substr(c.phone,1,2) IN ('13', '31', '23', '29', '30', '18', '17') AND c.acctbal > ( SELECT avg(c.acctbal) FROM "${database}"."${schema}"."${prefix}customer" c WHERE c.acctbal > 0.00 AND substr(c.phone,1,2) IN ('13', '31', '23', '29', '30', '18', '17') ) AND NOT EXISTS ( SELECT * FROM "${database}"."${schema}"."${prefix}orders" o WHERE o.custkey = c.custkey ) ) AS custsale GROUP BY cntrycode ORDER BY cntrycode ;
Convert MyISAM table to InnoDB for consistency
-- Licensed to the Apache Software Foundation (ASF) under one -- or more contributor license agreements. See the NOTICE file -- distributed with this work for additional information -- regarding copyright ownership. The ASF licenses this file -- to you under the Apache License, Version 2.0 (the -- "License"); you may not use this file except in compliance -- with the License. You may obtain a copy of the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, -- software distributed under the License is distributed on an -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -- KIND, either express or implied. See the License for the -- specific language governing permissions and limitations -- under the License. --; -- Schema upgrade from 4.9.0 to 4.9.1; --; -- Fix default user role description UPDATE `cloud`.`roles` SET `description`='Default user role' WHERE `id`=4 AND `role_type`='User' AND `description`='Default Root Admin role';
-- Licensed to the Apache Software Foundation (ASF) under one -- or more contributor license agreements. See the NOTICE file -- distributed with this work for additional information -- regarding copyright ownership. The ASF licenses this file -- to you under the Apache License, Version 2.0 (the -- "License"); you may not use this file except in compliance -- with the License. You may obtain a copy of the License at -- -- http://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, -- software distributed under the License is distributed on an -- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -- KIND, either express or implied. See the License for the -- specific language governing permissions and limitations -- under the License. --; -- Schema upgrade from 4.9.0 to 4.9.1; --; -- Fix default user role description UPDATE `cloud`.`roles` SET `description`='Default user role' WHERE `id`=4 AND `role_type`='User' AND `description`='Default Root Admin role'; ALTER TABLE cloud.load_balancer_cert_map ENGINE=INNODB; ALTER TABLE cloud.monitoring_services ENGINE=INNODB; ALTER TABLE cloud.nic_ip_alias ENGINE=INNODB; ALTER TABLE cloud.sslcerts ENGINE=INNODB; ALTER TABLE cloud.op_lock ENGINE=INNODB; ALTER TABLE cloud.op_nwgrp_work ENGINE=INNODB; ALTER TABLE cloud_usage.quota_account ENGINE=INNODB;
Clear the fact tables before loading
/* truncate table synth_ma.synth_cousub_stats; truncate table synth_ma.synth_county_stats; truncate table synth_ma.synth_cousub_facts; truncate table synth_ma.synth_county_facts; */
/* truncate table synth_ma.synth_condition_facts; truncate table synth_ma.synth_disease_fact; truncate table synth_ma.synth_pop_facts; */
Add public printout method for testing
set define off create or replace public class Upi { public static String getUpi( long id ) { String str = Long.toHexString( id ).toUpperCase(); return "UPI0000000000".substring(0, 13 - str.length() ) + str; } } create or replace package upi is /* * Returns a new protein identifier. */ function get_upi ( in_id in number) return varchar2; pragma restrict_references (get_upi, RNDS, WNDS, RNPS, WNPS); end upi; / create or replace package body upi is function get_upi ( in_id in number) return varchar2 as language java name 'Upi.getUpi(long) return java.lang.String'; end upi; / set define on
set define off create or replace public class Upi { public static String getUpi( long id ) { String str = Long.toHexString( id ).toUpperCase(); return "UPI0000000000".substring(0, 13 - str.length() ) + str; } public static void main (String args[]) { long l = Long.parseLong(args[0]); System.out.println(getUpi(l)); } } create or replace package upi is /* * Returns a new protein identifier. */ function get_upi ( in_id in number) return varchar2; pragma restrict_references (get_upi, RNDS, WNDS, RNPS, WNPS); end upi; / create or replace package body upi is function get_upi ( in_id in number) return varchar2 as language java name 'Upi.getUpi(long) return java.lang.String'; end upi; / set define on
Set linux version to Trusty
CREATE TABLE IF NOT EXISTS `HardCache` ( `Id` varchar(255) NOT NULL, `Bucket` varchar(255) NOT NULL, `Value` longtext NOT NULL, `EndDate` datetime NOT NULL, `Created` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `Modified` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `TTL` int(11) NOT NULL, PRIMARY KEY (`Id`,`Bucket`) USING BTREE, KEY `k_EndDate` (`EndDate`) USING BTREE ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
CREATE TABLE IF NOT EXISTS `HardCache` ( `Id` varchar(255) NOT NULL, `Bucket` varchar(255) NOT NULL, `Value` longtext NOT NULL, `EndDate` datetime NOT NULL, `Created` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `Modified` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `TTL` int(11) NOT NULL, PRIMARY KEY (`Id`,`Bucket`) USING BTREE, KEY `k_EndDate` (`EndDate`) USING BTREE ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
Fix email overlap check so it looks at cohorts with transfer sender (but not approved)
CREATE PROCEDURE [dbo].[CheckForOverlappingEmailsForTable] @Emails [EmailCheckTable] READONLY, @CohortId BIGINT = NULL AS BEGIN SELECT E.RowId, A.Id, A.FirstName, A.LastName, A.DateOfBirth, A.CommitmentId AS CohortId, A.StartDate, A.EndDate, A.IsApproved, A.Email, dbo.CourseDatesOverlap(A.StartDate, dbo.GetEndDateForOverlapChecks(A.PaymentStatus, A.EndDate, A.StopDate, A.CompletionDate), E.StartDate, E.EndDate) AS OverlapStatus FROM Apprenticeship A JOIN @Emails E ON E.Email = A.Email WHERE CASE WHEN @CohortId IS NULL AND A.IsApproved = 1 THEN 1 WHEN @CohortId IS NOT NULL AND A.CommitmentId = @CohortId AND A.IsApproved = 0 AND A.StartDate IS NOT NULL AND A.EndDate IS NOT NULL THEN 1 WHEN A.IsApproved = 1 THEN 1 ELSE 0 END = 1 AND A.Id != ISNULL(E.ApprenticeshipId,0) AND A.Email = E.Email AND dbo.CourseDatesOverlap(A.StartDate, dbo.GetEndDateForOverlapChecks(A.PaymentStatus, A.EndDate, A.StopDate, A.CompletionDate), E.StartDate, E.EndDate) >= 1 END
CREATE PROCEDURE [dbo].[CheckForOverlappingEmailsForTable] @Emails [EmailCheckTable] READONLY, @CohortId BIGINT = NULL AS BEGIN SELECT E.RowId, A.Id, A.FirstName, A.LastName, A.DateOfBirth, A.CommitmentId AS CohortId, A.StartDate, A.EndDate, A.IsApproved, A.Email, dbo.CourseDatesOverlap(A.StartDate, dbo.GetEndDateForOverlapChecks(A.PaymentStatus, A.EndDate, A.StopDate, A.CompletionDate), E.StartDate, E.EndDate) AS OverlapStatus FROM Apprenticeship A JOIN @Emails E ON E.Email = A.Email JOIN Commitment C ON C.Id = A.CommitmentId WHERE CASE WHEN @CohortId IS NOT NULL AND A.CommitmentId = @CohortId AND A.IsApproved = 0 AND A.StartDate IS NOT NULL AND A.EndDate IS NOT NULL THEN 1 WHEN C.WithParty = 4 AND A.IsApproved = 0 THEN 1 WHEN A.IsApproved = 1 THEN 1 ELSE 0 END = 1 AND A.Id != ISNULL(E.ApprenticeshipId,0) AND A.Email = E.Email AND dbo.CourseDatesOverlap(A.StartDate, dbo.GetEndDateForOverlapChecks(A.PaymentStatus, A.EndDate, A.StopDate, A.CompletionDate), E.StartDate, E.EndDate) >= 1 END
Set download_url to longer length to match VO
-- -- Schema upgrade from 5.3.2 to 5.3.3; -- -- VirtIO-SCSI INSERT IGNORE INTO `cloud`.`guest_os` (id, uuid, category_id, display_name, created) VALUES (1002, UUID(), 7, 'VirtIO-SCSI capable OS (64-bit)', utc_timestamp()); INSERT IGNORE INTO `cloud`.`guest_os_hypervisor` (uuid,hypervisor_type, hypervisor_version, guest_os_name, guest_os_id, created, is_user_defined) VALUES (UUID(),'KVM', 'default', 'VirtIO-SCSI capable OS', 1002, utc_timestamp(), 0); INSERT IGNORE INTO `cloud`.`guest_os` (id, uuid, category_id, display_name, created) VALUES (1023, UUID(), 6, 'Windows VirtIO-SCSI', utc_timestamp()); INSERT IGNORE INTO `cloud`.`guest_os_hypervisor` (uuid,hypervisor_type, hypervisor_version, guest_os_name, guest_os_id, created, is_user_defined) VALUES (UUID(),'KVM', 'default', 'Windows VirtIO-SCSI', 1023, utc_timestamp(), 0);
-- -- Schema upgrade from 5.3.2 to 5.3.3; -- -- VirtIO-SCSI INSERT IGNORE INTO `cloud`.`guest_os` (id, uuid, category_id, display_name, created) VALUES (1002, UUID(), 7, 'VirtIO-SCSI capable OS (64-bit)', utc_timestamp()); INSERT IGNORE INTO `cloud`.`guest_os_hypervisor` (uuid,hypervisor_type, hypervisor_version, guest_os_name, guest_os_id, created, is_user_defined) VALUES (UUID(),'KVM', 'default', 'VirtIO-SCSI capable OS', 1002, utc_timestamp(), 0); INSERT IGNORE INTO `cloud`.`guest_os` (id, uuid, category_id, display_name, created) VALUES (1023, UUID(), 6, 'Windows VirtIO-SCSI', utc_timestamp()); INSERT IGNORE INTO `cloud`.`guest_os_hypervisor` (uuid,hypervisor_type, hypervisor_version, guest_os_name, guest_os_id, created, is_user_defined) VALUES (UUID(),'KVM', 'default', 'Windows VirtIO-SCSI', 1023, utc_timestamp(), 0); -- URL field length ALTER TABLE `cloud`.`template_store_ref` MODIFY COLUMN `download_url` varchar(2048);
Change primary key, add compressed column.
CREATE TABLE file ( ufn VARCHAR(64) NOT NULL, uhn VARCHAR(64) NOT NULL, hostname VARCHAR(255) NOT NULL, path TEXT NOT NULL, filename TEXT NOT NULL, crcsum VARCHAR(64) NOT NULL, block_size INTEGER NOT NULL ); ALTER TABLE file ADD CONSTRAINT file_pkey PRIMARY KEY (ufn); CREATE TABLE file_block ( file VARCHAR(64) NOT NULL, id INTEGER NOT NULL, crcsum VARCHAR(64) ); ALTER TABLE file_block ADD CONSTRAINT file_block_pkey PRIMARY KEY (file); ALTER TABLE file_block ADD CONSTRAINT file_block_fk FOREIGN KEY (file) REFERENCES file (ufn) ON DELETE CASCADE ON UPDATE CASCADE;
CREATE TABLE file ( ufn VARCHAR(64) NOT NULL, uhn VARCHAR(64) NOT NULL, hostname VARCHAR(255) NOT NULL, path TEXT NOT NULL, filename TEXT NOT NULL, crcsum VARCHAR(64) NOT NULL, block_size INTEGER NOT NULL ); ALTER TABLE file ADD CONSTRAINT file_pkey PRIMARY KEY (ufn); CREATE TABLE file_block ( file VARCHAR(64) NOT NULL, id INTEGER NOT NULL, crcsum VARCHAR(64) NOT NULL, compressed BOOLEAN NOT NULL DEFAULT FALSE ); ALTER TABLE file_block ADD CONSTRAINT file_block_pkey PRIMARY KEY (file,id); ALTER TABLE file_block ADD CONSTRAINT file_block_fk FOREIGN KEY (file) REFERENCES file (ufn) ON DELETE CASCADE ON UPDATE CASCADE;
Standardize on varchar rather than text (they're virtually identical)
-- Table names are flush left, and column definitions are -- indented by at least one space or tab. Blank lines and -- lines beginning with a double hyphen are comments. tracks id serial primary key artist varchar not null default '' title varchar not null default '' filename varchar not null default '' artwork bytea not null default '' length double precision not null default 0 xfade int not null default 0 itrim int not null default 0 otrim int not null default 0 lyrics text not null default '' story text not null default '' status smallint not null default 0 submitted timestamptz not null default now() submitter varchar not null default '' submitteremail varchar not null default '' comments varchar not null default '' enqueued int not null default 0 played int not null default 0 sequence int not null default 0 keywords varchar not null default '' url varchar not null default '' analysis varchar not null default '' userid int not null default 0 users id serial primary key username varchar not null unique email varchar not null unique password varchar not null default '' user_level int not null default 1 -- banned=0, user=1, admin=2 status int not null default 0 hex_key varchar not null default '' outreach id serial primary key message varchar not null default '' created timestamptz not null default now() processed int not null default 0
-- Table names are flush left, and column definitions are -- indented by at least one space or tab. Blank lines and -- lines beginning with a double hyphen are comments. tracks id serial primary key artist varchar not null default '' title varchar not null default '' filename varchar not null default '' artwork bytea not null default '' length double precision not null default 0 xfade int not null default 0 itrim int not null default 0 otrim int not null default 0 lyrics varchar not null default '' story varchar not null default '' status smallint not null default 0 submitted timestamptz not null default now() submitter varchar not null default '' submitteremail varchar not null default '' comments varchar not null default '' enqueued int not null default 0 played int not null default 0 sequence int not null default 0 keywords varchar not null default '' url varchar not null default '' analysis varchar not null default '' userid int not null default 0 users id serial primary key username varchar not null unique email varchar not null unique password varchar not null default '' user_level int not null default 1 -- banned=0, user=1, admin=2 status int not null default 0 hex_key varchar not null default '' outreach id serial primary key message varchar not null default '' created timestamptz not null default now() processed int not null default 0
Make expected timeline test case use the same sql joins as actual
-- -- Copyright 2020 The Android Open Source Project -- -- Licensed under the Apache License, Version 2.0 (the "License"); -- you may not use this file except in compliance with the License. -- You may obtain a copy of the License at -- -- https://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- See the License for the specific language governing permissions and -- limitations under the License. select ts, dur, process.pid as pid, display_frame_token, surface_frame_token, layer_name from expected_frame_timeline_slice join process_track on process_track.upid = expected_frame_timeline_slice.upid join process on process_track.upid = process.upid where process_track.name = 'Expected Timeline' order by ts
-- -- Copyright 2020 The Android Open Source Project -- -- Licensed under the Apache License, Version 2.0 (the "License"); -- you may not use this file except in compliance with the License. -- You may obtain a copy of the License at -- -- https://www.apache.org/licenses/LICENSE-2.0 -- -- Unless required by applicable law or agreed to in writing, software -- distributed under the License is distributed on an "AS IS" BASIS, -- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -- See the License for the specific language governing permissions and -- limitations under the License. select ts, dur, process.pid as pid, display_frame_token, surface_frame_token, layer_name from (select t.*, process_track.name as track_name from process_track left join expected_frame_timeline_slice t on process_track.id = t.track_id) s join process using(upid) where s.track_name = 'Expected Timeline' order by ts
Fix migration file to include a non null default date
alter table invoice_tracking_ids add column is_active boolean default true after record_date; alter table invoice_tracking_ids add column updated_by varchar(50) NOT NULL after created_date; alter table invoice_tracking_ids add column updated_date datetime NOT NULL after updated_by; create index invoice_tracking_invoice_id_idx on invoice_tracking_ids(tenant_record_id, invoice_id);
alter table invoice_tracking_ids add column is_active boolean default true after record_date; alter table invoice_tracking_ids add column updated_by varchar(50) NOT NULL after created_date; alter table invoice_tracking_ids add column updated_date datetime NOT NULL DEFAULT '1970-01-01 00:00:00' after updated_by; create index invoice_tracking_invoice_id_idx on invoice_tracking_ids(tenant_record_id, invoice_id);
Add migration comments about type altering
ALTER TABLE _auth_provider_password ALTER COLUMN auth_data TYPE text; ALTER TABLE _auth_provider_password RENAME COLUMN auth_data TO login_id; ALTER TABLE _auth_provider_password ADD login_id_key text;
/* This migration script only alter auth_data type from jsonb to text, and doesn't delete previous signup rows. */ ALTER TABLE _auth_provider_password ALTER COLUMN auth_data TYPE text; ALTER TABLE _auth_provider_password RENAME COLUMN auth_data TO login_id; ALTER TABLE _auth_provider_password ADD login_id_key text;
Add UUID to trekking views
CREATE OR REPLACE VIEW {# geotrek.trekking #}.v_treks AS ( SELECT e.geom, e.id, i.* FROM trekking_trek AS i, core_topology AS e WHERE i.topo_object_id = e.id AND e.deleted = FALSE ); CREATE OR REPLACE VIEW {# geotrek.trekking #}.v_pois AS ( SELECT e.geom, e.id, i.* FROM trekking_poi AS i, core_topology AS e WHERE i.topo_object_id = e.id AND e.deleted = FALSE );
CREATE OR REPLACE VIEW {# geotrek.trekking #}.v_treks AS ( SELECT e.geom, e.id, e.uuid, i.* FROM trekking_trek AS i, core_topology AS e WHERE i.topo_object_id = e.id AND e.deleted = FALSE ); CREATE OR REPLACE VIEW {# geotrek.trekking #}.v_pois AS ( SELECT e.geom, e.id, e.uuid, i.* FROM trekking_poi AS i, core_topology AS e WHERE i.topo_object_id = e.id AND e.deleted = FALSE );
Make the default user a whatif user.
INSERT INTO users (email, enabled, firstname, lastname, PASSWORD) values('##user##@##hostname##', '1', 'Whatif', 'User', '$2a$10$IMlSuqhi3F..6V4zG/Y78.DCg2DmXT.B7JsvZVpwf5d4FiLiNQo4K');
-- create user INSERT INTO users (email, enabled, firstname, lastname, PASSWORD) values('##user##@##hostname##', '1', 'Whatif', 'User', '$2a$10$IMlSuqhi3F..6V4zG/Y78.DCg2DmXT.B7JsvZVpwf5d4FiLiNQo4K'); -- give the user access to whatif as a user INSERT INTO user_apps (user_app_id, app_id, user_id) values(1, 1, 1); INSERT INTO user_roles (user_role_id, role_id, user_id) values(1, 2, 1);
Add PI to user details
SELECT DISTINCT pc.value AS practice_code, d.value AS email_address FROM redcap6170_briccsext.redcap_data d INNER JOIN redcap6170_briccsext.redcap_data pc ON pc.project_id = d.project_id AND pc.field_name = 'practice_code' AND pc.record = d.record WHERE d.project_id IN (29, 53) AND d.field_name IN ( 'practice_manager_email', 'sen_part_email', 'contact_email_add' ) ;
SELECT DISTINCT pc.value AS practice_code, d.value AS email_address FROM redcap6170_briccsext.redcap_data d INNER JOIN redcap6170_briccsext.redcap_data pc ON pc.project_id = d.project_id AND pc.field_name = 'practice_code' AND pc.record = d.record WHERE d.project_id IN (29, 53) AND d.field_name IN ( 'practice_manager_email', 'sen_part_email', 'contact_email_add', 'pi_email_add' ) ;
Add uuid-ossp extension to first migration.
-- A table that will store the whole transaction log of the database. CREATE TABLE IF NOT EXISTS txlog ( id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), created_at timestamptz DEFAULT current_timestamp, payload text ) WITH (OIDS=FALSE);
CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; -- A table that will store the whole transaction log of the database. CREATE TABLE IF NOT EXISTS txlog ( id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), created_at timestamptz DEFAULT current_timestamp, payload text ) WITH (OIDS=FALSE);
Add missing not null props to user table fields.
CREATE TABLE IF NOT EXISTS users ( id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), created_at timestamptz DEFAULT current_timestamp, modified_at timestamptz DEFAULT current_timestamp, photo text, username text, email text, password text ) WITH (OIDS=FALSE); CREATE UNIQUE INDEX users_username_idx ON users USING btree (username); CREATE UNIQUE INDEX users_email_idx ON users USING btree (email);
CREATE TABLE IF NOT EXISTS users ( id uuid PRIMARY KEY DEFAULT uuid_generate_v4(), created_at timestamptz DEFAULT current_timestamp, modified_at timestamptz DEFAULT current_timestamp, username text NOT NULL, email text NOT NULL, password text NOT NULL ) WITH (OIDS=FALSE); CREATE UNIQUE INDEX users_username_idx ON users USING btree (username); CREATE UNIQUE INDEX users_email_idx ON users USING btree (email);
Fix zoom from scale condition for NULL result
-- Maximum supported zoom level CREATE OR REPLACE FUNCTION _CDB_MaxSupportedZoom() RETURNS int LANGUAGE SQL IMMUTABLE AS $$ -- The maximum zoom level has to be limited for various reasons, -- e.g. zoom levels greater than 31 would require tile coordinates -- that would not fit in an INTEGER (which is signed, 32 bits long). -- We'll choose 20 as a limit which is safe also when the JavaScript shift -- operator (<<) is used for computing powers of two. SELECT 29; $$; CREATE OR REPLACE FUNCTION cartodb.CDB_ZoomFromScale(scaleDenominator numeric) RETURNS int LANGUAGE SQL IMMUTABLE AS $$ SELECT CASE -- Don't bother if the scale is larger than ~zoom level 0 WHEN scaleDenominator > 600000000 OR scaleDenominator = 0 THEN NULL WHEN scaleDenominator = 0 THEN _CDB_MaxSupportedZoom() ELSE CAST (LEAST(ROUND(LOG(2, 559082264.028/scaleDenominator)), _CDB_MaxSupportedZoom()) AS INTEGER) END; $$;
-- Maximum supported zoom level CREATE OR REPLACE FUNCTION _CDB_MaxSupportedZoom() RETURNS int LANGUAGE SQL IMMUTABLE AS $$ -- The maximum zoom level has to be limited for various reasons, -- e.g. zoom levels greater than 31 would require tile coordinates -- that would not fit in an INTEGER (which is signed, 32 bits long). -- We'll choose 20 as a limit which is safe also when the JavaScript shift -- operator (<<) is used for computing powers of two. SELECT 29; $$; CREATE OR REPLACE FUNCTION cartodb.CDB_ZoomFromScale(scaleDenominator numeric) RETURNS int LANGUAGE SQL IMMUTABLE AS $$ SELECT CASE WHEN scaleDenominator > 600000000 THEN -- Scale is smaller than zoom level 0 NULL WHEN scaleDenominator = 0 THEN -- Actual zoom level would be infinite _CDB_MaxSupportedZoom() ELSE CAST ( LEAST( ROUND(LOG(2, 559082264.028/scaleDenominator)), _CDB_MaxSupportedZoom() ) AS INTEGER) END; $$;
Increase counter after a successful DB migration (11 in this case)
BEGIN; ALTER TABLE subscribers RENAME TO sub; create table subscribers ( id serial primary key, msisdn varchar, name varchar, authorized smallint not null default 0, balance decimal not null default 0.00, subscription_status smallint not null default 0, subscription_date timestamp default current_timestamp, location varchar, created timestamp default current_timestamp ); INSERT INTO subscribers(id,msisdn,name,authorized,balance,subscription_status,subscription_date,created) SELECT id,msisdn,name,authorized,balance,subscription_status,subscription_date,created FROM sub; DROP TABLE sub; CREATE TABLE locations ( id serial primary key, name varchar not null ); COMMIT;
BEGIN; UPDATE meta SET value='11' WHERE key='db_revision'; ALTER TABLE subscribers RENAME TO sub; create table subscribers ( id serial primary key, msisdn varchar, name varchar, authorized smallint not null default 0, balance decimal not null default 0.00, subscription_status smallint not null default 0, subscription_date timestamp default current_timestamp, location varchar, created timestamp default current_timestamp ); INSERT INTO subscribers(id,msisdn,name,authorized,balance,subscription_status,subscription_date,created) SELECT id,msisdn,name,authorized,balance,subscription_status,subscription_date,created FROM sub; DROP TABLE sub; CREATE TABLE locations ( id serial primary key, name varchar not null ); COMMIT;
Change dbv revision file to set default status
ALTER TABLE `contest` ADD `visibility` ENUM( 'Visible', 'Hidden' ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT 'Hidden' AFTER `status`; UPDATE `contest` SET `startDate` = NOW(), `endDate` = NOW() + INTERVAL 3 YEAR WHERE `status` = 'RunningContest'; UPDATE `contest` SET `startDate` = NOW() + INTERVAL 2 YEAR, `endDate` = NOW() + INTERVAL 3 YEAR WHERE `status` = 'FutureContest'; UPDATE `contest` SET `startDate` = NOW() - INTERVAL 2 YEAR, `endDate` = NOW() WHERE `status` = 'PastContest'; UPDATE `contest` SET `visibilty` = 'Visible' WHERE `status` != 'Hidden'; ALTER TABLE `contest` CHANGE `status` `status` ENUM( 'Open', 'Closed' ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT 'Open';
ALTER TABLE `contest` ADD `visibility` ENUM( 'Visible', 'Hidden' ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT 'Hidden' AFTER `status`; UPDATE `contest` SET `startDate` = NOW(), `endDate` = NOW() + INTERVAL 3 YEAR WHERE `status` = 'RunningContest'; UPDATE `contest` SET `startDate` = NOW() + INTERVAL 2 YEAR, `endDate` = NOW() + INTERVAL 3 YEAR WHERE `status` = 'FutureContest'; UPDATE `contest` SET `startDate` = NOW() - INTERVAL 2 YEAR, `endDate` = NOW() WHERE `status` = 'PastContest'; UPDATE `contest` SET `visibility` = 'Visible' WHERE `status` != 'Hidden'; ALTER TABLE `contest` CHANGE `status` `status` ENUM( 'Open', 'Closed' ) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL DEFAULT 'Open'; UPDATE `contest` SET `status` = 'Open';
Drop the go annotation publications
COPY ( SELECT json_build_object( 'rna_id', anno.rna_id, 'go_annotations', array_agg( json_build_object( 'go_term_id', anno.ontology_term_id, 'qualifier', anno.qualifier, 'go_name', ont.name, 'assigned_by', anno.assigned_by, 'pubmed_ids', pubmed.ref_pubmed_id, 'dois', pubmed.doi ) ) ) FROM go_term_annotations anno JOIN ontology_terms ont ON ont.ontology_term_id = anno.ontology_term_id LEFT JOIN go_term_publication_map go_map ON go_map.go_term_annotation_id = anno.go_term_annotation_id LEFT JOIN ref_pubmed pubmed ON pubmed.ref_pubmed_id = go_map.ref_pubmed_id GROUP BY anno.rna_id ) TO STDOUT
COPY ( SELECT json_build_object( 'rna_id', anno.rna_id, 'go_annotations', array_agg( json_build_object( 'go_term_id', anno.ontology_term_id, 'qualifier', anno.qualifier, 'go_name', ont.name, 'assigned_by', anno.assigned_by ) ) ) FROM go_term_annotations anno JOIN ontology_terms ont ON ont.ontology_term_id = anno.ontology_term_id GROUP BY anno.rna_id ) TO STDOUT
Support regional geoblocking (Ops), revise copyright header
/* Copyright 2015 Cisco, Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ -- +goose Up -- SQL in section 'Up' is executed when this migration is applied alter table deliveryservice add regional_geo_blocking tinyint(1) not null; -- +goose Down -- SQL section 'Down' is executed when this migration is rolled back alter table deliveryservice drop column regional_geo_blocking;
/* Copyright 2015 Cisco Systems, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ -- +goose Up -- SQL in section 'Up' is executed when this migration is applied alter table deliveryservice add regional_geo_blocking tinyint(1) not null; -- +goose Down -- SQL section 'Down' is executed when this migration is rolled back alter table deliveryservice drop column regional_geo_blocking;
Add default image description for images that do not have a caption
CREATE OR REPLACE VIEW ObjectsImagesExport AS SELECT NULLIF(TRIM(o.ItemType), '') AS ItemType, NULLIF(TRIM(i.Accession_Full_ID), '') AS Accession_Full_ID, NULLIF(TRIM(o.ItemName), '') AS ItemName, NULLIF(TRIM(ObjectsImagesID), '') AS ObjectsImagesID, NULLIF(TRIM(CONCAT_WS('/', REPLACE( REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(ImageFilePath, '\\', '/'), 'Y:/', ''), 'Y:', ''), '//Foyer/c/Images Mosaic/', ''), '//Foyer/c/Images Mosaic/', ''), '//SERVER1/Images Mosaic/', ''), ImageFileName)), '') AS ImagePath, NULLIF(TRIM(ImageDescription), '') AS ImageDescription, NULLIF(TRIM(DisplayImage), '') AS DisplayImage, NULLIF(TRIM(DefaultImageIndicator), '') AS DefaultImageIndicator FROM ObjectsImages i JOIN Objects o ON (i.Accession_Full_ID = o.Accession_Full_ID) WHERE TRIM(ImageFileName) <> '' ORDER BY ObjectsImagesID;
CREATE OR REPLACE VIEW ObjectsImagesExport AS SELECT NULLIF(TRIM(o.ItemType), '') AS ItemType, NULLIF(TRIM(i.Accession_Full_ID), '') AS Accession_Full_ID, NULLIF(TRIM(o.ItemName), '') AS ItemName, NULLIF(TRIM(ObjectsImagesID), '') AS ObjectsImagesID, NULLIF(TRIM(CONCAT_WS('/', REPLACE( REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(ImageFilePath, '\\', '/'), 'Y:/', ''), 'Y:', ''), '//Foyer/c/Images Mosaic/', ''), '//Foyer/c/Images Mosaic/', ''), '//SERVER1/Images Mosaic/', ''), ImageFileName)), '') AS ImagePath, IFNULL(NULLIF(TRIM(ImageDescription), ''), TRIM(o.ItemName)) AS ImageDescription, NULLIF(TRIM(DisplayImage), '') AS DisplayImage, NULLIF(TRIM(DefaultImageIndicator), '') AS DefaultImageIndicator FROM ObjectsImages i JOIN Objects o ON (i.Accession_Full_ID = o.Accession_Full_ID) WHERE TRIM(ImageFileName) <> '' ORDER BY ObjectsImagesID;
Fix bug in migration to add hitl job status type
-- Add a table for human-in-the-loop jobs CREATE TYPE public.hitl_job_status AS ENUM ( 'NOTRUN', 'TORUN', 'RUNNING', 'RAN', 'FAILED' ); ALTER TYPE public.hitl_job_status OWNER TO rasterfoundry; CREATE TABLE public.hitl_jobs ( id UUID PRIMARY KEY, created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL, created_by CHARACTER VARYING(255) NOT NULL REFERENCES public.users(id) ON DELETE CASCADE, modified_at TIMESTAMP WITHOUT TIME ZONE NOT NULL, owner CHARACTER VARYING(255) NOT NULL REFERENCES public.users(id) ON DELETE CASCADE, campaign_id UUID NOT NULL REFERENCES public.campaigns(id) ON DELETE CASCADE, project_id UUID NOT NULL REFERENCES public.annotation_projects(id) ON DELETE CASCADE, status public.hitl_job_status NOT NULL, version int default 0 NOT NULL ); CREATE INDEX hitl_jobs_owner_idx ON public.hitl_jobs USING btree(owner); CREATE INDEX hitl_jobs_created_by_idx ON public.hitl_jobs USING btree(created_by); CREATE INDEX hitl_jobs_status_idx ON public.hitl_jobs USING btree(status); CREATE INDEX hitl_jobs_campaign_id_idx ON public.hitl_jobs USING btree(campaign_id); CREATE INDEX hitl_jobs_project_id_idx ON public.hitl_jobs USING btree(project_id); CREATE INDEX hitl_jobs_version_idx ON public.hitl_jobs USING btree(version);
-- Add a table for human-in-the-loop jobs CREATE TYPE public.hitl_job_status AS ENUM ( 'NOTRUN', 'TORUN', 'RUNNING', 'RAN', 'FAILED' ); CREATE TABLE public.hitl_jobs ( id UUID PRIMARY KEY, created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL, created_by CHARACTER VARYING(255) NOT NULL REFERENCES public.users(id) ON DELETE CASCADE, modified_at TIMESTAMP WITHOUT TIME ZONE NOT NULL, owner CHARACTER VARYING(255) NOT NULL REFERENCES public.users(id) ON DELETE CASCADE, campaign_id UUID NOT NULL REFERENCES public.campaigns(id) ON DELETE CASCADE, project_id UUID NOT NULL REFERENCES public.annotation_projects(id) ON DELETE CASCADE, status public.hitl_job_status NOT NULL, version int default 0 NOT NULL ); CREATE INDEX hitl_jobs_owner_idx ON public.hitl_jobs USING btree(owner); CREATE INDEX hitl_jobs_created_by_idx ON public.hitl_jobs USING btree(created_by); CREATE INDEX hitl_jobs_status_idx ON public.hitl_jobs USING btree(status); CREATE INDEX hitl_jobs_campaign_id_idx ON public.hitl_jobs USING btree(campaign_id); CREATE INDEX hitl_jobs_project_id_idx ON public.hitl_jobs USING btree(project_id); CREATE INDEX hitl_jobs_version_idx ON public.hitl_jobs USING btree(version);
Fix changelog issues with mysql and existing state
# Create new sms_alerts table CREATE TABLE sms_alerts ( id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, citation_number VARCHAR(25), court_date DATETIME, phone_number VARCHAR(25), date_of_birth DATE NULL )ENGINE=InnoDB; # Add auto_increment to violations ALTER TABLE violations MODIFY COLUMN INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY; # Add auto_increment to citations ALTER TABLE citations MODIFY COLUMN INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY;
# Create new sms_alerts table CREATE TABLE sms_alerts ( id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY, citation_number VARCHAR(25), court_date DATETIME, phone_number VARCHAR(25), date_of_birth DATE NULL )ENGINE=InnoDB; # Add auto_increment to violations ALTER TABLE violations MODIFY id INTEGER NOT NULL AUTO_INCREMENT; # Add auto_increment to citations ALTER TABLE citations MODIFY id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY;
Implement COI event type of "IACUC Protocol"
INSERT INTO COI_DISCLOSURE_EVENT_TYPE ( EVENT_TYPE_CODE,DESCRIPTION,UPDATE_TIMESTAMP,UPDATE_USER,VER_NBR,OBJ_ID,EXCLUDE_FROM_MASTER_DISCL,ACTIVE_FLAG,USE_SLCT_BOX_1,REQ_SLCT_BOX_1,SLCT_BOX_1_LABEL,SLCT_BOX_1_VAL_FNDR,PROJECT_ID_LABEL,PROJECT_TITLE_LABEL) VALUES('16','Manual IACUC Protocol',SYSDATE,'admin',1,SYS_GUID(),'N','Y','Y','Y','Protocol Type','org.kuali.kra.iacuc.protocol.IacucProtocolTypeValuesFinder,'Protocol Number','Protocol Name') /
INSERT INTO COI_DISCLOSURE_EVENT_TYPE ( EVENT_TYPE_CODE,DESCRIPTION,UPDATE_TIMESTAMP,UPDATE_USER,VER_NBR,OBJ_ID,EXCLUDE_FROM_MASTER_DISCL,ACTIVE_FLAG,USE_SLCT_BOX_1,REQ_SLCT_BOX_1,SLCT_BOX_1_LABEL,SLCT_BOX_1_VAL_FNDR,PROJECT_ID_LABEL,PROJECT_TITLE_LABEL) VALUES('16','Manual IACUC Protocol',SYSDATE,'admin',1,SYS_GUID(),'N','Y','Y','Y','Protocol Type','org.kuali.kra.iacuc.protocol.IacucProtocolTypeValuesFinder','Protocol Number','Protocol Name') /
Add trivial templates aka server pages implementation.
# # This is simple(?) convertor for "Squirrel server pages" to Squirrel code. # Supported syntax: # <? code ?> - inline Squirrel code # <?= value ?> - output Squirrel value (equivalent to <? print(value) ?>) # The page received parameters from caller in dictionary named "d". # Simple example: # # Hello <?= d.name ?>! # <? for (i = 0; i < 5; i++) { ?> # i = <?= i ?> # <? } ?> # const TEXT = 1 const CODE = 2 const PRINT = 3 # Character to use after < and before > for inline code const MARKER = "?" function start_page() { print("function render(d) {\n"); } function stop_page() { print("}\n"); print("render({\"name\": \"test\"})\n"); } function start_text() { print("print(@\""); } function stop_text() { print("\")\n"); } function start_print() { print("print("); } function stop_print() { print(")\n"); } function process(f) { local c; local state = TEXT start_page() start_text() while (c = f.read(1)) { if (c == "<") { c2 = f.read(1) if (c2 == MARKER) { stop_text() state = CODE c = f.read(1) if (c == "=") { state = PRINT start_print() continue } print(c) continue } print(c) c = c2 } else if (c == MARKER) { c2 = f.read(1) if (c2 == ">") { local old_state = state; if (old_state == PRINT) stop_print() start_text() state = TEXT // Eat newline after closing code bracket, but not print bracket if (old_state == PRINT) continue c = f.read(1) if (c == "\n") continue } print(c) c = c2 } print(c); } if (state == TEXT) stop_text() stop_page() } f = file(vargv[1], "r") process(f) f.close()
Add a comment on CUST
(* This is the sum type containing all the operators in JonPRL's * programming language. *) structure OperatorData = struct type opid = string datatype 'i operator = S of 'i ScriptOperator.t | LVL_OP of 'i LevelOperator.t | VEC_LIT of Sort.t * int | OP_SOME of Sort.t | OP_NONE of Sort.t | CUST of opid * ('i * Sort.t) list * Arity.t end
(* This is the sum type containing all the operators in JonPRL's * programming language. *) structure OperatorData = struct type opid = string datatype 'i operator = S of 'i ScriptOperator.t | LVL_OP of 'i LevelOperator.t | VEC_LIT of Sort.t * int | OP_SOME of Sort.t | OP_NONE of Sort.t | CUST of opid * ('i * Sort.t) list * Arity.t (* it may make sense in the future to replace [opid] with ['i]; * then, we could seamlessly rename operators when importing one * development into another. *) end
Update the compiler version for testing.
(* Copyright (c) 2007-17 David C.J. Matthews This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License version 2.1 as published by the Free Software Foundation. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *) structure CompilerVersion = struct val compilerVersion = "5.7.1 Release" val versionNumber = 571 val versionSuffix = Int.toString versionNumber end;
(* Copyright (c) 2007-17 David C.J. Matthews This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License version 2.1 as published by the Free Software Foundation. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *) structure CompilerVersion = struct val compilerVersion = "5.7.2 Testing" val versionNumber = 572 val versionSuffix = Int.toString versionNumber end;
Add Int and Char structures to Basis
structure Basis = struct structure String = String structure List = List structure Os = OS structure Posix = Posix structure Socket = Socket end; fun cleanPath (path: string, right: bool) : string = if right andalso Basis.String.isSuffix "/" path then cleanPath (Basis.String.substring (path, 0, String.size path - 1), true) else if not right andalso Basis.String.isPrefix "/" path then cleanPath (Basis.String.extract (path, 1, NONE), false) else path; val PONYO_ROOT = case Basis.Os.Process.getEnv "PONYO_ROOT" of NONE => (print "PONYO_ROOT must be set. (Directory of source is a good default.)\n"; raise Fail "") | SOME root => case Basis.String.isPrefix "~/" root of false => root | true => case Basis.Os.Process.getEnv "HOME" of NONE => (print "Bad PONYO_ROOT: HOME undefined.\n"; raise Fail "") | SOME home => cleanPath(home, true) ^ "/" ^ (cleanPath (Basis.String.extract (root, 2, NONE), false)); val ponyoLib = PONYO_ROOT ^ "/src"; PolyML.make (ponyoLib)
structure Basis = struct structure String = String structure List = List structure Os = OS structure Posix = Posix structure Socket = Socket structure Int = Int structure Char = Char end; fun cleanPath (path: string, right: bool) : string = if right andalso Basis.String.isSuffix "/" path then cleanPath (Basis.String.substring (path, 0, String.size path - 1), true) else if not right andalso Basis.String.isPrefix "/" path then cleanPath (Basis.String.extract (path, 1, NONE), false) else path; val PONYO_ROOT = case Basis.Os.Process.getEnv "PONYO_ROOT" of NONE => (print "PONYO_ROOT must be set. (Directory of source is a good default.)\n"; raise Fail "") | SOME root => case Basis.String.isPrefix "~/" root of false => root | true => case Basis.Os.Process.getEnv "HOME" of NONE => (print "Bad PONYO_ROOT: HOME undefined.\n"; raise Fail "") | SOME home => cleanPath(home, true) ^ "/" ^ (cleanPath (Basis.String.extract (root, 2, NONE), false)); val ponyoLib = PONYO_ROOT ^ "/src"; PolyML.make (ponyoLib)
Change mail from gmail to protonmail
[user] email = jakub.bortlik@gmail.com name = Jakub Bortlik [color] ui = true [core] editor = nvim [diff "praat"] lextconv = base64 [alias] b = branch c = commit d = diff ca = commit -a cb = copy-branch-name co = checkout cm = checkout master d = diff l = log --graph --pretty=format:'%Cred%h%Creset %an: %s - %Creset %C(yellow)%d%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative ll = log --oneline --graph --all --decorate p = push origin HEAD pl = pull --prune s = status sb = status -sb [credential] helper = store ctags = !.git/hooks/ctags [init] templatedir = ~/.git_template [push] default = simple
[user] email = jakub.bortlik@protonmail.com name = Jakub Bortlik [color] ui = true [core] editor = nvim [diff "praat"] lextconv = base64 [alias] b = branch c = commit d = diff ca = commit -a cb = copy-branch-name co = checkout cm = checkout master d = diff l = log --graph --pretty=format:'%Cred%h%Creset %an: %s - %Creset %C(yellow)%d%Creset %Cgreen(%cr)%Creset' --abbrev-commit --date=relative ll = log --oneline --graph --all --decorate p = push origin HEAD pl = pull --prune s = status sb = status -sb [credential] helper = store ctags = !.git/hooks/ctags [init] templatedir = ~/.git_template [push] default = simple
Make a habit out of defining module-specific local attributes as Local.__<Module name>_<property name>__
import Local let Mouse = { x: Local.__mouseX__, y: Local.__mouseY__, isDown: Local.__mouseIsDown__ }
import Local let Mouse = { x: Local.__Mouse_x__, y: Local.__Mouse_y__, isDown: Local.__Mouse_isDown__ }
Structure Bool and value Int.toString needed to replace makestring calls
(* Title: Pure/NJ ID: $Id$ Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1993 University of Cambridge Basis Library emulation Needed for Poly/ML and Standard ML of New Jersey version 0.93 Full compatibility cannot be obtained using a file: what about char constants? *) structure Int = struct fun max (x, y) = if x < y then y else x : int; fun min (x, y) = if x < y then x else y : int; end; structure TextIO = struct type instream = instream and outstream = outstream exception Io of {name: string, function: string, cause: exn} val stdIn = std_in val stdOut = std_out val openIn = open_in val openAppend = open_append val openOut = open_out val closeIn = close_in val closeOut = close_out val inputN = input val inputAll = fn is => inputN (is, 999999) val inputLine = input_line val endOfStream = end_of_stream val output = output val flushOut = flush_out end;
(* Title: Pure/NJ ID: $Id$ Author: Lawrence C Paulson, Cambridge University Computer Laboratory Copyright 1993 University of Cambridge Basis Library emulation Needed for Poly/ML and Standard ML of New Jersey version 0.93 Full compatibility cannot be obtained using a file: what about char constants? *) structure Bool = struct fun toString true = "true" | toString false = "false" end; structure Int = struct fun toString (i: int) = makestring i; fun max (x, y) = if x < y then y else x : int; fun min (x, y) = if x < y then x else y : int; end; structure TextIO = struct type instream = instream and outstream = outstream exception Io of {name: string, function: string, cause: exn} val stdIn = std_in val stdOut = std_out val openIn = open_in val openAppend = open_append val openOut = open_out val closeIn = close_in val closeOut = close_out val inputN = input val inputAll = fn is => inputN (is, 999999) val inputLine = input_line val endOfStream = end_of_stream val output = output val flushOut = flush_out end;
Add some more allowed characters
structure JonprlLanguageDef :> LANGUAGE_DEF = struct open ParserCombinators CharParser infixr 1 || type scanner = char charParser val commentStart = SOME "(*" val commentEnd = SOME "*)" val commentLine = SOME "|||" val nestedComments = false val identLetter = letter || oneOf (String.explode "-'_ΑαΒβΓγΔδΕεΖζΗηΘθΙιΚκΛλΜμΝνΞξΟοΠπΡρΣσΤτΥυΦφΧχΨψΩω") || digit val identStart = identLetter val opStart = fail "Operators not supported" : scanner val opLetter = opStart val reservedNames = [] val reservedOpNames = [] val caseSensitive = true end structure JonprlTokenParser :> TOKEN_PARSER = TokenParser (JonprlLanguageDef)
structure JonprlLanguageDef :> LANGUAGE_DEF = struct open ParserCombinators CharParser infixr 1 || type scanner = char charParser val commentStart = SOME "(*" val commentEnd = SOME "*)" val commentLine = SOME "|||" val nestedComments = false val fancyChars = "-'_ΑαΒβΓγΔδΕεΖζΗηΘθΙιΚκΛλΜμΝνΞξΟοΠπΡρΣσΤτΥυΦφΧχΨψΩω¬⊗⊕∫+-!@#$%^&*⇓↓↼⇀↽⇁↿↾⇃⇂" val identLetter = letter || oneOf (String.explode fancyChars) || digit val identStart = identLetter val opStart = fail "Operators not supported" : scanner val opLetter = opStart val reservedNames = [] val reservedOpNames = [] val caseSensitive = true end structure JonprlTokenParser :> TOKEN_PARSER = TokenParser (JonprlLanguageDef)
Add BV AST and show routines.
(* BV language definition *) datatype id = Ident of string datatype unop = Not | Shl1 | Shr1 | Shr4 | Shr16 datatype binop = And | Or | Xor | Plus datatype expr = Zero | One | Id of id (* if e0 == 0 then e1 else e2 *) | Ifz of expr * expr * expr (* fold e_vec e_init (\x_element x_accum -> e_step) *) | Fold of expr * expr * id * id * expr | Unop of unop * expr | Binop of binop * expr datatype program = Lambda of id * expr fun show_id (Ident s) = s fun show_unop Not = "not" | show_unop Shl1 = "shl1" | show_unop Shr1 = "shr1" | show_unop Shr4 = "shr4" | show_unop Shr16 = "shr16" fun show_binop And = "and" | show_binop Or = "or" | show_binop Xor = "xor" | show_binop Plus = "plus" fun show_expr Zero = "0" | show_expr One = "1" | show_expr (Id x) = show_id x | show_expr (Ifz (e0,e1,e2)) = "(if0 " ^ show_expr e0 ^ " " ^ show_expr e1 ^ " " ^ show_expr e2 ^ ")" | show_expr (Fold (ev,e0,x,y,e)) = "(fold " ^ show_expr ev ^ " " ^ show_expr e0 ^ " (lambda (" ^ show_id x ^ " " ^ show_id y ^ ") " ^ show_expr e ^ ")" | show_expr (Unop (oper,e)) = "(" ^ show_unop oper ^ " " ^ show_expr e ^ ")" | show_expr (Binop (oper,e)) = "(" ^ show_binop oper ^ " " ^ show_expr e ^ ")" fun show_program (Lambda (x,e)) = "(lambda (" ^ show_id x ^ ") " ^ show_expr e ^ ")"
Add regression test for X87 caching bug.
(* Regression test for bug with caching X87 values. *) fun f x = let val res = x+ 1.0 in if Real.isFinite res then res else raise Fail "bad" end; f 3.0;
Add some near-future examples of fun code
<div class="compose"> <input data=Local.compose /> <button onClick=sendMessage>Send</button> </div> let sendMessage = handler() { Global.messages.push({ user: Session.User, text: Local.compose }) } <div class="messages"> for (message in Global.messages) { <div class="message"> <img src=message.user.pictureURL /> message.user.name " says:" <span class="text"> message.text </span> </div> } </div>
Fix path quoting in the clean target
# define all project wide variables export ROOTDIR=$(pwd) export SRCDIR=$ROOTDIR/src export OUTDIR=$ROOTDIR/out export VERSION="pre-0.01" DESTDIR=${DESTDIR-/usr/local/bin} if [ "$1" = "all" ]; then redo-ifchange "$OUTDIR/redo" elif [ "$1" = "clean" ]; then rm -rf "$OUTDIR/*.tmp" "$OUTDIR/*.o" "$OUTDIR/redo" "$OUTDIR/CC" # autoconf stuff rm -rf autom4te.cache config.h.in configure config.status config.log config.h elif [ "$1" = "install" ]; then redo-ifchange all mkdir -p "$DESTDIR" install "$OUTDIR/redo" "$DESTDIR" ln -sf "$DESTDIR/redo" "$DESTDIR/redo-ifchange" ln -sf "$DESTDIR/redo" "$DESTDIR/redo-ifcreate" ln -sf "$DESTDIR/redo" "$DESTDIR/redo-always" echo "Finished installing." fi
# define all project wide variables export ROOTDIR=$(pwd) export SRCDIR=$ROOTDIR/src export OUTDIR=$ROOTDIR/out export VERSION="pre-0.01" DESTDIR=${DESTDIR-/usr/local/bin} if [ "$1" = "all" ]; then redo-ifchange "$OUTDIR/redo" elif [ "$1" = "clean" ]; then rm -rf "$OUTDIR"/*.tmp "$OUTDIR"/*.o "$OUTDIR"/redo "$OUTDIR"/CC # autoconf stuff rm -rf autom4te.cache config.h.in configure config.status config.log config.h elif [ "$1" = "install" ]; then redo-ifchange all mkdir -p "$DESTDIR" install "$OUTDIR/redo" "$DESTDIR" ln -sf "$DESTDIR/redo" "$DESTDIR/redo-ifchange" ln -sf "$DESTDIR/redo" "$DESTDIR/redo-ifcreate" ln -sf "$DESTDIR/redo" "$DESTDIR/redo-always" echo "Finished installing." fi
Fix Stylus `absolute` function - `'auto'` is an invalid value; use `auto` instead - eliminate `width` & `height` as they were never used (and aren't involved in absolute positioning)
border-radius() -webkit-border-radius arguments -moz-border-radius arguments border-radius arguments box-shadow() -webkit-box-shadow arguments -moz-box-shadow arguments box-shadow arguments enable-flex(direction=row, wp=nowrap, justify=space-between, alignContent=flex-end, alignItems=flex-end) display flex flex-direction direction flex-wrap wp justify-content justify align-content alignContent align-items alignItems display -webkit-flex -webkit-flex-direction direction -webkit-flex-wrap wp -webkit-justify-content justify -webkit-align-content alignContent -webkit-align-items alignItems flex-content(flex-grow=1, flex-shrink=0, flex-basis=10px) flex-grow flex-grow flex-shrink flex-shrink -webkit-flex-grow flex-grow -webkit-flex-shrink flex-shrink absolute(top = 'auto', left = 'auto', right = 'auto', bottom = 'auto', height = 'auto', width = 'auto') position absolute top top left left right right bottom bottom height height width width no-select() -webkit-touch-callout none -webkit-user-select none -khtml-user-select none -moz-user-select none -ms-user-select none user-select none
border-radius() -webkit-border-radius arguments -moz-border-radius arguments border-radius arguments box-shadow() -webkit-box-shadow arguments -moz-box-shadow arguments box-shadow arguments enable-flex(direction=row, wp=nowrap, justify=space-between, alignContent=flex-end, alignItems=flex-end) display flex flex-direction direction flex-wrap wp justify-content justify align-content alignContent align-items alignItems display -webkit-flex -webkit-flex-direction direction -webkit-flex-wrap wp -webkit-justify-content justify -webkit-align-content alignContent -webkit-align-items alignItems flex-content(flex-grow=1, flex-shrink=0, flex-basis=10px) flex-grow flex-grow flex-shrink flex-shrink -webkit-flex-grow flex-grow -webkit-flex-shrink flex-shrink absolute(top = auto, left = auto, right = auto, bottom = auto) position absolute top top left left right right bottom bottom no-select() -webkit-touch-callout none -webkit-user-select none -khtml-user-select none -moz-user-select none -ms-user-select none user-select none
Fix a problem that does not read the text of the menu in Windows
@import "common/Color.styl" @import "common/Size.styl" @import "common/MixIn.styl" .selectbox { position relative height 28px background-color color_indigo_dark border solid 1px color_indigo_dark border-radius 2px &__dropdown { width size_full height size_full cursor pointer &:after { icon_style() position absolute box-sizing border-box padding-top 8px padding-left 4px right 4px height size_full content icon_chevron_down color color_white font-size 14px } &__select { unselectable() appearance_none() z-index 1 position absolute width size_full padding .3em 24px .3em .5em outline none border none background-color transparent font-family sans-serif font-size 14px color color_white cursor pointer } } }
@import "common/Color.styl" @import "common/Size.styl" @import "common/MixIn.styl" .selectbox { position relative height 28px background-color color_indigo_dark border solid 1px color_indigo_dark border-radius 2px &__dropdown { width size_full height size_full cursor pointer &:after { icon_style() position absolute box-sizing border-box padding-top 8px padding-left 4px right 4px height size_full content icon_chevron_down color color_white font-size 14px } &__select { unselectable() appearance_none() z-index 1 position absolute width size_full padding .3em 24px .3em .5em outline none border none background-color color_indigo_dark font-family sans-serif font-size 14px color color_white cursor pointer } } }
Remove now-expired slot styl import
// libs @require 'nib' @require 'normalize.styl' // global @import 'stylus/vars' @import 'stylus/mixins' @import 'stylus/styles' // templates @import 'stylus/common/*' @import 'directive/*' @import 'site/*' @import 'party/*' @import 'volume/*' @import 'slot/*' @import 'asset/*' // shame @import 'stylus/shame'
// libs @require 'nib' @require 'normalize.styl' // global @import 'stylus/vars' @import 'stylus/mixins' @import 'stylus/styles' // templates @import 'stylus/common/*' @import 'directive/*' @import 'site/*' @import 'party/*' @import 'volume/*' @import 'asset/*' // shame @import 'stylus/shame'
Support theme on admin table
/* * Copyright (C) 2012-2017 Online-Go.com * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ .audit-container { flex-grow: 1; flex-shrink:0; flex-basis:auto; display: flex; flex-direction: column; .audit-actions { flex-grow: 1; display: flex; justify-content: flex-end; } .position-link { themed color link-color; } .position-link:hover { cursor:pointer; text-decoration:underline; } }
/* * Copyright (C) 2012-2017 Online-Go.com * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ .audit-container { flex-grow: 1; flex-shrink:0; flex-basis:auto; display: flex; flex-direction: column; .audit-actions { flex-grow: 1; display: flex; justify-content: flex-end; } .position-link { themed color link-color; } .position-link:hover { cursor:pointer; text-decoration:underline; } .ReactTable, .ReactTable * { themed border-color shade5 !important; } .pagination-bottom .-pagination { .-btn { themed color fg; themed background card-background-color; } input, select { themed background input-bg; } } }
Fix toolips in user skill edit in Chrome
// Browser vendor prefixes library @import 'nib' @import '_config' h1, h2, h3, h4 title-font() .tooltip::after title-font() shadow-elevation-16dp() round-corners() z-index: 1 content: attr(data-tooltip) position: absolute top: 100% left: -2em white-space: nowrap padding: .5em color: white background-color: rgb(92, 97, 108) font-size: .8em transform: translate3d(0, -20%, 0); opacity: 0 pointer-events: none transition: opacity transition-timing transition-function, transform transition-timing transition-function .tooltip:hover::after transform: translate3d(0, 0, 0); opacity: 1 main > .search width: 20em position: relative margin: 1.5em auto 0 auto main > .search label content: "🔎" position: absolute left: .3em top: .3em font-size: inherit main > .search input width: 100% padding: .3em padding-left: 2em display: block
// Browser vendor prefixes library @import 'nib' @import '_config' h1, h2, h3, h4 title-font() .tooltip position: relative .tooltip::after title-font() shadow-elevation-16dp() round-corners() z-index: 1 content: attr(data-tooltip) position: absolute top: 100% left: -2em white-space: nowrap padding: .5em color: white background-color: rgb(92, 97, 108) font-size: .8em transform: translate3d(0, -20%, 0); opacity: 0 pointer-events: none transition: opacity transition-timing transition-function, transform transition-timing transition-function .tooltip:hover::after transform: translate3d(0, 0, 0); opacity: 1 main > .search width: 20em position: relative margin: 1.5em auto 0 auto main > .search label content: "🔎" position: absolute left: .3em top: .3em font-size: inherit main > .search input width: 100% padding: .3em padding-left: 2em display: block
Remove expandingArea from survey css
.survey-page #main input[type=text], input[type='email'], input[type='password'], textarea, select, .expandingArea > pre background-color $bg_light_gray color $text_dk font-size 17px &:focus border-color $contextual
.survey-page #main input[type=text], input[type='email'], input[type='password'], textarea, select background-color $bg_light_gray color $text_dk font-size 17px &:focus border-color $contextual
Add background color on checked checbox to avoid white offset easily spotted with a microscope :microscope:
@require 'cozy-ui' :local .pho-section left-spaced(32px) .pho-photo position relative display inline-block background-color grey-10 margin 0 1em 1em 0 a vertical-align middle .pho-photo-item height em(240px) transition transform .2s ease .pho-photo-select position absolute z-index 2 opacity 0 height 2em width 2em padding 1em cursor pointer &[data-input=checkbox] label::before box-shadow 0 0 0 3px rgba(0,0,0,.3), inset 0 0 0 1.5em rgba(255,255,255,.8) &[data-input=checkbox] input[type=checkbox]:checked + label::after background-size 1.5em background-position center center &[data-input=checkbox] input[type=checkbox]:checked + label::before box-shadow 0 0 0 3px dodger-blue, inset 0 0 0 1.5em dodger-blue .pho-photo--selected .pho-photo-item transform:scale(0.9); .pho-section--has-selection .pho-photo-select width 100% height 100% .pho-photo:hover .pho-photo-select opacity 1 .pho-list-selection .pho-photo-select opacity 1
@require 'cozy-ui' :local .pho-section left-spaced(32px) .pho-photo position relative display inline-block background-color grey-10 margin 0 1em 1em 0 a vertical-align middle .pho-photo-item height em(240px) transition transform .2s ease .pho-photo-select position absolute z-index 2 opacity 0 height 2em width 2em padding 1em cursor pointer &[data-input=checkbox] label::before box-shadow 0 0 0 3px rgba(0,0,0,.3), inset 0 0 0 1.5em rgba(255,255,255,.8) &[data-input=checkbox] input[type=checkbox]:checked + label::after background-size 1.5em background-position center center &[data-input=checkbox] input[type=checkbox]:checked + label::before background-color dodger-blue box-shadow 0 0 0 3px dodger-blue, inset 0 0 0 1.5em dodger-blue .pho-photo--selected .pho-photo-item transform:scale(0.9); .pho-section--has-selection .pho-photo-select width 100% height 100% .pho-photo:hover .pho-photo-select opacity 1 .pho-list-selection .pho-photo-select opacity 1
Add more margin above headers
.LandingAbout__section { padding: grid(4); } @media screen and (min-width: 700px) { .LandingAbout__section { display: flex; } .LandingAbout__section:nth-child(even) .LandingAbout__imageContainer { order: 10; } .LandingAbout__imageContainer, .LandingAbout__text { padding: grid(5); flex:1; } } .LandingAbout__image { width: 100%; } .LandingAbout__text { flex: 1; } .LandingAbout__headline { Text__headerScale(3); margin-top: grid(2); margin-bottom: grid(4); font-family: $Text__monospacedFont; font-weight: 800; color: #792359; line-height: 1.4em; } @media screen and (max-width: 900px) { .LandingAbout__headline { Text__headerScale(2); } }
.LandingAbout__section { padding: grid(4); } @media screen and (min-width: 700px) { .LandingAbout__section { display: flex; } .LandingAbout__section:nth-child(even) .LandingAbout__imageContainer { order: 10; } .LandingAbout__imageContainer, .LandingAbout__text { padding: grid(5); flex:1; } } .LandingAbout__image { width: 100%; } .LandingAbout__text { flex: 1; } .LandingAbout__headline { Text__headerScale(3); margin-top: grid(3); margin-bottom: grid(4); font-family: $Text__monospacedFont; font-weight: 800; color: #792359; line-height: 1.4em; } @media screen and (max-width: 900px) { .LandingAbout__headline { Text__headerScale(2); } }
Use fixed header bar position for convenience on scrolly pages
body color #333 font-family sans-serif margin 0 padding 0 i -webkit-user-select none -moz-user-select none -user-select none a cursor pointer .dropdown-menu>li>a padding-left 6px .c-app-header-container background-color #333 height 40px .c-app-body-container position absolute left 0 right 0 top 40px bottom 0 z-index 0
body color #333 font-family sans-serif margin 0 padding 0 i -webkit-user-select none -moz-user-select none -user-select none a cursor pointer .dropdown-menu>li>a padding-left 6px .c-app-header-container background-color #333 height 40px width 100% position fixed top 0 z-index 1000 .c-app-body-container position absolute left 0 right 0 top 40px bottom 0 z-index 0
Add padding to menu to line text up with logo
.LoggedOutMenu position absolute top 2em left 1em font-size font-size-base line-height line-height-tall user-select none -webkit-touch-callout none &__toggle display none position relative cursor pointer width 1.5em height @width // Patties > span width 1.5em height 2px top 50% left 50% transform translate(-50%, -50%) & &:before &:after display block position absolute background-color colors-gray-text &:before &:after content '' width 100% height 100% &:before top 300% &:after bottom 300% &__options margin-top 1em > a display block text-decoration none &[data-state='active'] font-weight bold color colors-gray-hover &:first-child display none &--active .LoggedOutMenu &__options display block &__toggle > span & &:before &:after background-color colors-gray-lighter +below(logged-out-nav-breakpoint) .LoggedOutMenu position relative top 0 left 0 background-color white &__toggle display block &__options display none > a:first-child display block
.LoggedOutMenu position absolute top 2em left 3.25em font-size font-size-base line-height line-height-tall user-select none -webkit-touch-callout none &__toggle display none position relative cursor pointer width 1.5em height @width // Patties > span width 1.5em height 2px top 50% left 50% transform translate(-50%, -50%) & &:before &:after display block position absolute background-color colors-gray-text &:before &:after content '' width 100% height 100% &:before top 300% &:after bottom 300% &__options margin-top 1em > a display block text-decoration none &[data-state='active'] font-weight bold color colors-gray-hover &:first-child display none &--active .LoggedOutMenu &__options display block &__toggle > span & &:before &:after background-color colors-gray-lighter +below(logged-out-nav-breakpoint) .LoggedOutMenu position relative top 0 left 0 background-color white &__toggle display block &__options display none > a:first-child display block
Increase font size on secondary pages
.secondary-page font-size: 10px line-height: 20px .secondary-page-side-bar flex: 0 1 190px nav align-items: flex-start display: inline-flex flex-direction: column flex-wrap: wrap margin-top: 0.75em @media screen and (max-width: 400px) flex-direction: row .side-bar-button color: #5a5a5a font-size: 14px margin: 6.75px 6.75px 20.25px outline: none &.active font-weight: 700 .markdown, .secondary-page-copy h1, h2, h3 color: #5a5a5a h2 font-size: 2.4em line-height: 26px h3 font-size: 1.4em p, li color: #989898 margin: 1em 0 max-width: 700px ul > li font-size: 14px img max-width: 100% pre background-color: #E8E8E8 border-radius: 4px padding: 1em .centered-grid margin: 0 auto max-width: 960px padding: 40px 3vw
.secondary-page font-size: 10px line-height: 20px .secondary-page-side-bar flex: 0 1 190px nav align-items: flex-start display: inline-flex flex-direction: column flex-wrap: wrap margin-top: 0.75em @media screen and (max-width: 400px) flex-direction: row .side-bar-button color: #5a5a5a font-size: 14px margin: 6.75px 6.75px 20.25px outline: none &.active font-weight: 700 .markdown, .secondary-page-copy h1, h2, h3 color: #5a5a5a h2 font-size: 2.4em line-height: 26px h3 font-size: 1.4em p, li color: #989898 margin: 1em 0 max-width: 700px font-size: 14px ul > li font-size: 14px img max-width: 100% pre background-color: #E8E8E8 border-radius: 4px padding: 1em .centered-grid margin: 0 auto max-width: 960px padding: 40px 3vw
Add bottom space to current weather
.current-weather-container height: 100% display: none flex-direction: column-reverse align-items: center justify-content: space-around opacity: 0 .current-weather display: flex flex-direction: column align-items: center justify-content: center background: rgba(160, 158, 151, 0.5) box-shadow: 4px 0 25px 3px rgba(0, 0, 0, 0.1) border-radius: 50% width: 400px height: 400px font-size: 1.6rem font-family: cursive color: #ccc .city-name margin-bottom: 8px font-size: 1.8rem text-align: center .button-container margin: 30px 0 .btn text-align: center &.show opacity: 1 transition: opacity .3s display: flex
.current-weather-container height: 100% display: none flex-direction: column-reverse align-items: center justify-content: space-around opacity: 0 .current-weather display: flex flex-direction: column align-items: center justify-content: center background: rgba(160, 158, 151, 0.5) box-shadow: 4px 0 25px 3px rgba(0, 0, 0, 0.1) border-radius: 50% width: 400px height: 400px font-size: 1.6rem font-family: cursive color: #ccc margin-bottom: 20px; .city-name margin-bottom: 8px font-size: 1.8rem text-align: center .button-container margin: 30px 0 .btn text-align: center &.show opacity: 1 transition: opacity .3s display: flex
Clean up styles at section management page.
.section-list list-style: none .section-control margin: 3px 0px padding: 5px background-color: #EFEFEF border: solid 1px #EAEAEA border-radius: 5px &:hover background-color: #E2E2E2 .section-drag-mark margin: 5px font-size: 18px color: #808080 user-select: none cursor: move .section-moderators-list width: 200px text-align: right .section-delete-button width: 100px text-align: center .section-title margin: 5px 0px 5px 10px font-size: 12pt .section-placeholder margin-top: 3px margin-bottom: 3px height: 25px background-color: #FFFB91 border: solid 2px #FFF947 border-radius: 5px .section-children margin-left: 50px .section-children-placeholder:empty @extend .section-placeholder background-color: #FAFAFA border-style: dashed border-color: #E8E8E8
.section-list list-style: none .section-control margin: 3px 0px padding: 5px background-color: #EFEFEF border: solid 1px #EAEAEA border-radius: 5px &:hover background-color: #E2E2E2 .section-drag-mark color: #808080 user-select: none cursor: move .section-moderators-list width: 200px text-align: right .section-delete-button width: 100px text-align: center .section-placeholder margin-top: 3px margin-bottom: 3px height: 25px background-color: #FFFB91 border: solid 2px #FFF947 border-radius: 5px .section-children margin-left: 50px .section-children-placeholder:empty @extend .section-placeholder background-color: #FAFAFA border-style: dashed border-color: #E8E8E8